From a07d3c4cd485d99a180161f849317acac2a899e1 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Fri, 11 Sep 2015 13:09:51 -0700
Subject: [PATCH 001/196] Display plots are now preserved, instead of
 constantly being thrown out

---
 Packages/vcs/Lib/Canvas.py   | 29 +++++++++++++++++++++--------
 Packages/vcs/Lib/VTKPlots.py |  8 ++++----
 2 files changed, 25 insertions(+), 12 deletions(-)

diff --git a/Packages/vcs/Lib/Canvas.py b/Packages/vcs/Lib/Canvas.py
index 7f066aec8..cf4edd2d4 100644
--- a/Packages/vcs/Lib/Canvas.py
+++ b/Packages/vcs/Lib/Canvas.py
@@ -2374,7 +2374,7 @@ Options:::
                        'xbounds', 'ybounds', 'xname', 'yname', 'xunits', 'yunits', 'xweights', 'yweights',
                        'comment1', 'comment2', 'comment3', 'comment4', 'hms', 'long_name', 'zaxis',
                        'zarray', 'zname', 'zunits', 'taxis', 'tarray', 'tname', 'tunits', 'waxis', 'warray',
-                       'wname', 'wunits', 'bg', 'ratio', 'donotstoredisplay', 'render']
+                       'wname', 'wunits', 'bg', 'ratio', 'donotstoredisplay', 'render', "display_name"]
 
     # def replot(self):
     #    """ Clears and plots with last used plot arguments
@@ -3584,14 +3584,18 @@ Options:::
                     "unknown taylordiagram graphic method: %s" %
                     arglist[4])
             t.plot(arglist[0], canvas=self, template=arglist[2], **keyargs)
-            nm, src = self.check_name_source(None, "default", "display")
-            dn = displayplot.Dp(nm)
+
+            dname = keyargs.get("display_name")
+            if dname is not None:
+                dn = vcs.elements["display"][dname]
+            else:
+                nm, src = self.check_name_source(None, "default", "display")
+                dn = displayplot.Dp(nm)
             dn.template = arglist[2]
             dn.g_type = arglist[3]
             dn.g_name = arglist[4]
             dn.array = arglist[:2]
             dn.extradisplays = t.displays
-# dn.array=arglist[0]
             for p in slab_changed_attributes.keys():
                 tmp = slab_changed_attributes[p]
                 if tmp == (None, None):
@@ -3838,9 +3842,13 @@ Options:::
             else:
                 returned_kargs = self.backend.plot(*arglist, **keyargs)
                 if not keyargs.get("donotstoredisplay", False):
-                    nm, src = self.check_name_source(
-                        None, "default", "display")
-                    dn = displayplot.Dp(nm)
+                    dname = keyargs.get("display_name")
+                    if dname is not None:
+                        dn = vcs.elements['display'][dname]
+                    else:
+                        nm, src = self.check_name_source(
+                            None, "default", "display")
+                        dn = displayplot.Dp(nm)
                     dn.template = arglist[2]
                     dn.g_type = arglist[3]
                     dn.g_name = arglist[4]
@@ -3990,6 +3998,10 @@ Options:::
             self.configurator.stop_animating()
         self.animate_info = []
         self.animate.update_animate_display_list()
+
+        preserve_display = kargs.get("preserve_display", False)
+        if "preserve_display" in kargs:
+            del kargs["preserve_display"]
         self.backend.clear(*args, **kargs)
         for nm in self.display_names:
             # Lets look at elements created by dispaly production
@@ -4004,7 +4016,8 @@ Options:::
                     for k in new_elts[e]:
                         if k in vcs.elements[e].keys():
                             del(vcs.elements[e][k])
-            del(vcs.elements["display"][nm])
+            if not preserve_display:
+                del(vcs.elements["display"][nm])
         self.display_names = []
         return
 
diff --git a/Packages/vcs/Lib/VTKPlots.py b/Packages/vcs/Lib/VTKPlots.py
index eb923f0c4..db6f45d97 100644
--- a/Packages/vcs/Lib/VTKPlots.py
+++ b/Packages/vcs/Lib/VTKPlots.py
@@ -232,15 +232,15 @@ class VTKVCSBackend(object):
             parg.append(d.g_type)
             parg.append(d.g_name)
             plots_args.append(parg)
+            key = {"display_name": dnm}
             if d.ratio is not None:
-                key_args.append({"ratio": d.ratio})
-            else:
-                key_args.append({})
+                key["ratio"] = d.ratio
+            key_args.append(key)
 
         # Have to pull out the UI layer so it doesn't get borked by the clear
         self.hideGUI()
 
-        self.canvas.clear(render=False)
+        self.canvas.clear(render=False, preserve_display=True)
 
         for i, pargs in enumerate(plots_args):
             self.canvas.plot(*pargs, render=False, **key_args[i])
-- 
GitLab


From 10e92c09f824d91719a15e1bf2093e21904f605b Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Tue, 26 Jan 2016 14:52:54 -0800
Subject: [PATCH 002/196] Made VCSAddon be a new-style object

---
 Packages/vcsaddons/Lib/core.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py
index 38932c486..b12bf627d 100644
--- a/Packages/vcsaddons/Lib/core.py
+++ b/Packages/vcsaddons/Lib/core.py
@@ -1,7 +1,7 @@
 import vcsaddons,vcs
 import numpy
 
-class VCSaddon:
+class VCSaddon(object):
     def __init__(self,name=None,source='default',x=None,template=None):
         self._saves={}
         self.g_nslabs=1
@@ -139,14 +139,14 @@ class VCSaddon:
             self._saves={}
 
 
-    def getgm(self,name):
+    def getgm(self,source="default"):
         gm = None
         for nm in vcsaddons.gms[self.g_name].keys():
-            if name == nm:
+            if source == nm:
                 return vcsaddons.gms[self.g_name][nm]
 
         if gm is None:
-            raise "Could not find graphic method %s named: %s" % (self.g_type, name)
+            raise "Could not find graphic method %s named: %s" % (self.g_type, source)
 
     def creategm(self,name,source='default'):
         return self.__init__(name,source=source,x=self.x,template=self.template)
-- 
GitLab


From ffe9cff7445b9d94f5c35021aec22f7d67b9b6d2 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Tue, 26 Jan 2016 14:53:32 -0800
Subject: [PATCH 003/196] Flake8'd

---
 Packages/vcsaddons/Lib/histograms.py | 95 ++++++++++++++--------------
 1 file changed, 49 insertions(+), 46 deletions(-)

diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py
index 6e974f2a6..f51a1500b 100644
--- a/Packages/vcsaddons/Lib/histograms.py
+++ b/Packages/vcsaddons/Lib/histograms.py
@@ -1,31 +1,35 @@
 from core import VCSaddon
-import cdms2,MV2,vcs,vcsaddons
+import cdms2
+import MV2
+import vcs
+import vcsaddons
+
 
 class Ghg(VCSaddon):
-    def __init__(self,name=None,source='default',x=None,template = None):
-        self.g_name='Ghg'
-        self.g_type='histogram'
-        VCSaddon.__init__(self,name,source,x,template)
+
+    def __init__(self, name=None, source='default', x=None, template=None):
+        self.g_name = 'Ghg'
+        self.g_type = 'histogram'
+        VCSaddon.__init__(self, name, source, x, template)
         if source == 'default':
-            self.fillareastyles = ['solid',]
-            self.fillareaindices = [1,]
-            self.fillareacolors = [252,]
-            self.line = ['solid',]
-            self.linewidth=[1.0,]
-            self.linecolors=[241,]
+            self.fillareastyles = ['solid']
+            self.fillareaindices = [1]
+            self.fillareacolors = [252]
+            self.line = ['solid']
+            self.linewidth = [1.0]
+            self.linecolors = [241]
         else:
             gm = vcsaddons.gms[self.g_name][source]
-            self.fillareastyle= gm.fillareastyles
+            self.fillareastyle = gm.fillareastyles
             self.fillareaindices = gm.fillareaindices
             self.fillareacolors = gm.fillareacolors
             self.line = gm.line
             self.linewidth = gm.linewidth
             self.linecolors = gm.linecolors
-            
 
     def list(self):
         print '---------- Histogram (Ghg) member (attribute) listings ----------'
-        print 'Canvas Mode = ',self.x.mode
+        print 'Canvas Mode = ', self.x.mode
         VCSaddon.list(self)
         print 'fillareastyles = ', self.fillareastyles
         print 'fillareaindices = ', self.fillareaindices
@@ -33,26 +37,25 @@ class Ghg(VCSaddon):
         print 'line = ', self.line
         print 'linewidth = ', self.linewidth
         print 'linecolors = ', self.linecolors
-        
-    
-    def plot(self,data,template = None, bg=0, x=None):
+
+    def plot(self, data, template=None, bg=0, x=None):
         if x is None:
             x = self.x
         if template is None:
             template = self.template
-        elif isinstance(template,str):
+        elif isinstance(template, str):
             template = x.gettemplate(template)
         elif not vcs.istemplate(template):
             raise "Error did not know what to do with template: %s" % template
-        
-        if not isinstance(data,cdms2.tvariable.TransientVariable):
-            mode= cdms2.getAutoBounds()
+
+        if not isinstance(data, cdms2.tvariable.TransientVariable):
+            mode = cdms2.getAutoBounds()
             cdms2.setAutoBounds("on")
             data = MV2.array(data)
             data.getAxis(-1).getBounds()
             cdms2.setAutoBounds(mode)
 
-        while data.rank()>1:
+        while data.rank() > 1:
             data = data[0]
 
         # ok now we have a good x and a good data
@@ -61,26 +64,27 @@ class Ghg(VCSaddon):
         # create the primitive
         fill = x.createfillarea()
         line = x.createline()
-        fill.viewport = [template.data.x1,template.data.x2,template.data.y1,template.data.y2]
-        line.viewport = [template.data.x1,template.data.x2,template.data.y1,template.data.y2]
+        fill.viewport = [
+            template.data.x1, template.data.x2, template.data.y1, template.data.y2]
+        line.viewport = [
+            template.data.x1, template.data.x2, template.data.y1, template.data.y2]
         axb = data.getAxis(0).getBounds()
-        xmn,xmx = vcs.minmax(axb)
-        ymn,ymx = vcs.minmax(data)
-        
-        xmn,xmx,ymn,ymx = self.prep_plot(xmn,xmx,ymn,ymx)
-        
-        fill.worldcoordinate=[xmn,xmx,ymn,ymx]
-        line.worldcoordinate=[xmn,xmx,ymn,ymx]
-        
-        styles =[]
+        xmn, xmx = vcs.minmax(axb)
+        ymn, ymx = vcs.minmax(data)
+
+        xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx)
+
+        fill.worldcoordinate = [xmn, xmx, ymn, ymx]
+        line.worldcoordinate = [xmn, xmx, ymn, ymx]
+
+        styles = []
         cols = []
         indices = []
         lt = []
-        lw =[]
+        lw = []
         lc = []
         xs = []
         ys = []
-        
 
         for i in range(nbars):
             if i < len(self.fillareastyles):
@@ -96,21 +100,20 @@ class Ghg(VCSaddon):
             else:
                 indices.append(self.fillareaindices[-1])
             if i < len(self.line):
-                lt.append( self.line[i])
+                lt.append(self.line[i])
             else:
                 lt.append(self.line[-1])
             if i < len(self.linewidth):
-                lw.append( self.linewidth[i])
+                lw.append(self.linewidth[i])
             else:
                 lw.append(self.linewidth[-1])
             if i < len(self.line):
-                lc.append( self.linecolors[i])
+                lc.append(self.linecolors[i])
             else:
                 lc.append(self.linecolors[-1])
-            
-            xs.append( [axb[i][0],axb[i][1],axb[i][1],axb[i][0],axb[i][0]])
-            ys.append( [0,0,data[i],data[i],0])
 
+            xs.append([axb[i][0], axb[i][1], axb[i][1], axb[i][0], axb[i][0]])
+            ys.append([0, 0, data[i], data[i], 0])
 
         fill.style = styles
         fill.x = xs
@@ -123,13 +126,13 @@ class Ghg(VCSaddon):
         line.type = lt
         line.width = lw
         line.color = lc
-
+        fill.list()
         displays = []
-        displays.append(x.plot(fill,bg=bg))
-        displays.append(x.plot(line,bg=bg))
+        displays.append(x.plot(fill, bg=bg))
+        displays.append(x.plot(line, bg=bg))
 
-        x.worldcoordinate = fill.worldcoordinate 
-        dsp = template.plot(data,self,bg=bg)
+        x.worldcoordinate = fill.worldcoordinate
+        dsp = template.plot(data, self, bg=bg)
         for d in dsp:
             displays.append(d)
 
-- 
GitLab


From 26031e3a24b17a54f5dc6f62d2112f00cbf10148 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Fri, 5 Feb 2016 08:47:54 -0800
Subject: [PATCH 004/196] Made histogram work and display variance; needs to be
 cleaned up

---
 Packages/vcsaddons/Lib/core.py       |   6 +
 Packages/vcsaddons/Lib/histograms.py | 161 ++++++++++++++++++---------
 2 files changed, 115 insertions(+), 52 deletions(-)

diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py
index b12bf627d..c39667bb8 100644
--- a/Packages/vcsaddons/Lib/core.py
+++ b/Packages/vcsaddons/Lib/core.py
@@ -38,6 +38,7 @@ class VCSaddon(object):
             self.datawc_x2=1.e20
             self.datawc_y1=1.e20
             self.datawc_y2=1.e20
+            self.colormap="default"
             self.xmtics1='*'
             self.xmtics2='*'
             self.ymtics1='*'
@@ -48,6 +49,8 @@ class VCSaddon(object):
             self.yticlabels2='*'
             self.xaxisconvert= 'linear'
             self.yaxisconvert= 'linear'
+            self.color_1 = 16
+            self.color_2 = 239
             self.legend = None
             self.projection='linear'
         else:
@@ -58,6 +61,7 @@ class VCSaddon(object):
             self.datawc_x2=gm.datawc_x2
             self.datawc_y1=gm.datawc_y1
             self.datawc_y2=gm.datawc_x2
+            self.colormap=gm.colormap
             self.xmtics1=gm.xmtics1
             self.xmtics2=gm.xmtics2
             self.ymtics1=gm.ymtics1
@@ -68,6 +72,8 @@ class VCSaddon(object):
             self.yticlabels2=gm.yticlabels2
             self.xaxisconvert=gm.xaxisconvert
             self.yaxisconvert= gm.yaxisconvert
+            self.color_1 = gm.color_1
+            self.color_2 = gm.color_2
             self.legend = gm.legend
             self.projection=gm.projection
         self.name = name
diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py
index f51a1500b..69c575615 100644
--- a/Packages/vcsaddons/Lib/histograms.py
+++ b/Packages/vcsaddons/Lib/histograms.py
@@ -1,6 +1,7 @@
 from core import VCSaddon
 import cdms2
 import MV2
+import numpy
 import vcs
 import vcsaddons
 
@@ -12,20 +13,22 @@ class Ghg(VCSaddon):
         self.g_type = 'histogram'
         VCSaddon.__init__(self, name, source, x, template)
         if source == 'default':
-            self.fillareastyles = ['solid']
-            self.fillareaindices = [1]
-            self.fillareacolors = [252]
-            self.line = ['solid']
-            self.linewidth = [1.0]
-            self.linecolors = [241]
+            self.line = []
+            self.linewidth = []
+            self.linecolors = []
+            self.fillareastyles = []
+            self.fillareaindices = []
+            self.fillareacolors = []
+            self.bins = []
         else:
             gm = vcsaddons.gms[self.g_name][source]
-            self.fillareastyle = gm.fillareastyles
-            self.fillareaindices = gm.fillareaindices
-            self.fillareacolors = gm.fillareacolors
             self.line = gm.line
             self.linewidth = gm.linewidth
             self.linecolors = gm.linecolors
+            self.fillareastyles = gm.fillareastyles
+            self.fillareaindices = gm.fillareaindices
+            self.fillareacolors = gm.fillareacolors
+            self.bins = gm.bins
 
     def list(self):
         print '---------- Histogram (Ghg) member (attribute) listings ----------'
@@ -37,6 +40,7 @@ class Ghg(VCSaddon):
         print 'line = ', self.line
         print 'linewidth = ', self.linewidth
         print 'linecolors = ', self.linecolors
+        print 'bins = ', self.bins
 
     def plot(self, data, template=None, bg=0, x=None):
         if x is None:
@@ -46,20 +50,34 @@ class Ghg(VCSaddon):
         elif isinstance(template, str):
             template = x.gettemplate(template)
         elif not vcs.istemplate(template):
-            raise "Error did not know what to do with template: %s" % template
-
-        if not isinstance(data, cdms2.tvariable.TransientVariable):
-            mode = cdms2.getAutoBounds()
-            cdms2.setAutoBounds("on")
-            data = MV2.array(data)
-            data.getAxis(-1).getBounds()
-            cdms2.setAutoBounds(mode)
+            raise ValueError("Error did not know what to do with template: %s" % template)
 
-        while data.rank() > 1:
-            data = data[0]
+        # We'll just flatten the data... if they want to be more precise, should pass in more precise data
+        data = data.flatten().asma()
 
         # ok now we have a good x and a good data
-        nbars = len(data)
+        if not self.bins:
+            self.bins = vcs.utils.mkscale(*vcs.minmax(data))
+
+        data_bins = numpy.digitize(data, self.bins) - 1
+        binned = [data[data_bins==i] for i in range(len(self.bins))]
+
+        means = []
+        stds = []
+
+        max_possible_deviance = 0
+
+        for ind, databin in enumerate(binned):
+            means.append(databin.mean())
+            stds.append(databin.std())
+            if len(self.bins) > ind + 1:
+                max_possible_deviance = max(means[ind] - self.bins[ind], self.bins[ind + 1] - means[ind], max_possible_deviance)
+            else:
+                max_possible_deviance = max(means[ind] - self.bins[ind], max_possible_deviance)
+
+        color_values = [std / max_possible_deviance for std in stds]
+        y_values, _ = numpy.histogram(data, self.bins)
+        nbars = len(self.bins) - 1
 
         # create the primitive
         fill = x.createfillarea()
@@ -68,9 +86,9 @@ class Ghg(VCSaddon):
             template.data.x1, template.data.x2, template.data.y1, template.data.y2]
         line.viewport = [
             template.data.x1, template.data.x2, template.data.y1, template.data.y2]
-        axb = data.getAxis(0).getBounds()
-        xmn, xmx = vcs.minmax(axb)
-        ymn, ymx = vcs.minmax(data)
+
+        xmn, xmx = vcs.minmax(self.bins)
+        ymn, ymx = 0, len(data)
 
         xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx)
 
@@ -86,34 +104,67 @@ class Ghg(VCSaddon):
         xs = []
         ys = []
 
-        for i in range(nbars):
-            if i < len(self.fillareastyles):
-                styles.append(self.fillareastyles[i])
-            else:
-                styles.append(self.fillareastyles[-1])
-            if i < len(self.fillareacolors):
-                cols.append(self.fillareacolors[i])
-            else:
-                cols.append(self.fillareacolors[-1])
-            if i < len(self.fillareaindices):
-                indices.append(self.fillareaindices[i])
-            else:
-                indices.append(self.fillareaindices[-1])
-            if i < len(self.line):
-                lt.append(self.line[i])
-            else:
-                lt.append(self.line[-1])
-            if i < len(self.linewidth):
-                lw.append(self.linewidth[i])
-            else:
-                lw.append(self.linewidth[-1])
-            if i < len(self.line):
-                lc.append(self.linecolors[i])
-            else:
-                lc.append(self.linecolors[-1])
+        levels = [.1 * i for i in range(11)]
 
-            xs.append([axb[i][0], axb[i][1], axb[i][1], axb[i][0], axb[i][0]])
-            ys.append([0, 0, data[i], data[i], 0])
+        # Extend fillarea and line attrs to levels
+        if self.fillareastyles:
+            while len(self.fillareastyles) < len(levels):
+                self.fillareastyles.append(self.fillareastyles[-1])
+        else:
+            self.fillareastyles = ["solid"] * len(levels)
+
+        if self.fillareacolors:
+            while len(self.fillareacolors) < len(levels):
+                self.fillareacolors.append(self.fillareacolors[-1])
+        else:
+            for lev in levels:
+                self.fillareacolors.append(int((self.color_2 - self.color_1) * lev) + self.color_1)
+
+        if self.fillareaindices:
+            while len(self.fillareaindices) < len(levels):
+                self.fillareaindices.append(self.fillareaindices[-1])
+        else:
+            self.fillareaindices = [1] * len(levels)
+
+        if self.line:
+            while len(self.line) < len(levels):
+                self.line.append(self.line[-1])
+        else:
+            self.line = ["solid"] * len(levels)
+
+        if self.linewidth:
+            while len(self.linewidth) < len(levels):
+                self.linewidth.append(self.linewidth[-1])
+        else:
+            self.linewidth = [1] * len(levels)
+
+        if self.linecolors:
+            while len(self.linecolors) < len(levels):
+                self.linecolors.append(self.linecolors[-1])
+        else:
+            self.linecolors = ["black"] * len(levels)
+
+        for i in range(nbars):
+            # Calculate level for bar
+            value = color_values[i]
+            for lev_ind in range(len(levels)):
+                if levels[lev_ind] > value:
+                    if lev_ind > 0:
+                        lev_ind -= 1
+                        break
+                    else:
+                        # Shouldn't ever get here since level 0 is 0
+                        assert False
+
+            styles.append(self.fillareastyles[lev_ind])
+            cols.append(self.fillareacolors[lev_ind])
+            indices.append(self.fillareaindices[lev_ind])
+            lt.append(self.line[lev_ind])
+            lw.append(self.linewidth[lev_ind])
+            lc.append(self.linecolors[lev_ind])
+
+            xs.append([self.bins[i], self.bins[i], self.bins[i + 1], self.bins[i + 1]])
+            ys.append([0, y_values[i], y_values[i], 0])
 
         fill.style = styles
         fill.x = xs
@@ -121,20 +172,26 @@ class Ghg(VCSaddon):
         fill.style
         fill.index = indices
         fill.color = cols
+        fill.colormap = self.colormap
         line.x = xs
         line.y = ys
         line.type = lt
         line.width = lw
         line.color = lc
-        fill.list()
         displays = []
         displays.append(x.plot(fill, bg=bg))
         displays.append(x.plot(line, bg=bg))
 
         x.worldcoordinate = fill.worldcoordinate
-        dsp = template.plot(data, self, bg=bg)
+
+        x_axis = cdms2.createAxis(self.bins, id="x")
+        y_axis = cdms2.createAxis(vcs.mkscale(0, len(data)), id="y")
+
+        dsp = template.plot(x, MV2.masked_array(data), self, bg=bg, X=x_axis, Y=y_axis)
         for d in dsp:
             displays.append(d)
 
         self.restore()
+        # Ugh, hack
+        x.backend.renWin.Render()
         return displays
-- 
GitLab


From 53f54dff490bd99b526ae96e49fc3ecc8362a9e5 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Mon, 8 Feb 2016 10:12:14 -0800
Subject: [PATCH 005/196] Adjusted y scale

---
 Packages/vcsaddons/Lib/histograms.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py
index 69c575615..0b7d7de86 100644
--- a/Packages/vcsaddons/Lib/histograms.py
+++ b/Packages/vcsaddons/Lib/histograms.py
@@ -88,7 +88,8 @@ class Ghg(VCSaddon):
             template.data.x1, template.data.x2, template.data.y1, template.data.y2]
 
         xmn, xmx = vcs.minmax(self.bins)
-        ymn, ymx = 0, len(data)
+        # Make the y scale be slightly larger than the largest bar
+        ymn, ymx = 0, max(y_values) * 1.25
 
         xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx)
 
-- 
GitLab


From 510230038dc751a8e6803964b6ab09a0e1d24e0e Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Tue, 9 Feb 2016 12:09:18 -0800
Subject: [PATCH 006/196] Fixed vcsaddons to use display plots correctly

---
 Packages/vcs/Lib/Canvas.py           | 13 ++++++--
 Packages/vcs/Lib/displayplot.py      |  5 +++-
 Packages/vcsaddons/Lib/core.py       | 44 +++++++++++++++++++++-------
 Packages/vcsaddons/Lib/histograms.py |  6 ++--
 4 files changed, 52 insertions(+), 16 deletions(-)

diff --git a/Packages/vcs/Lib/Canvas.py b/Packages/vcs/Lib/Canvas.py
index 48327f0c0..cbe877c8e 100644
--- a/Packages/vcs/Lib/Canvas.py
+++ b/Packages/vcs/Lib/Canvas.py
@@ -3518,9 +3518,14 @@ Options:::
                     tp = "boxfill"
                 elif tp in ("xvsy", "xyvsy", "yxvsx", "scatter"):
                     tp = "1d"
-                gm = vcs.elements[tp][arglist[4]]
+                if tp in vcsaddons.gms:
+                    gm = vcsaddons.gms[tp][arglist[4]]
+                    arglist[3] = gm
+                else:
+                    gm = vcs.elements[tp][arglist[4]]
                 if hasattr(gm, "priority") and gm.priority == 0:
                     return
+
             p = self.getprojection(gm.projection)
             if p.type in round_projections and (
                     doratio == "0" or doratio[:4] == "auto"):
@@ -3729,20 +3734,22 @@ Options:::
                 del(keyargs["bg"])
             if isinstance(arglist[3], vcsaddons.core.VCSaddon):
                 if arglist[1] is None:
-                    dn = arglist[3].plot(
+                    dn = arglist[3].plot_internal(
                         arglist[0],
                         template=arglist[2],
                         bg=bg,
                         x=self,
                         **keyargs)
                 else:
-                    dn = arglist[3].plot(
+                    dn = arglist[3].plot_internal(
                         arglist[0],
                         arglist[1],
                         template=arglist[2],
                         bg=bg,
                         x=self,
                         **keyargs)
+                self.display_names.append(dn.name)
+                return dn
             else:
                 returned_kargs = self.backend.plot(*arglist, **keyargs)
                 if not keyargs.get("donotstoredisplay", False):
diff --git a/Packages/vcs/Lib/displayplot.py b/Packages/vcs/Lib/displayplot.py
index 1f00450ba..dd66fac1d 100755
--- a/Packages/vcs/Lib/displayplot.py
+++ b/Packages/vcs/Lib/displayplot.py
@@ -25,6 +25,7 @@
 #
 import VCS_validation_functions
 import vcs
+import vcsaddons
 
 
 class Dp(object):
@@ -211,7 +212,7 @@ class Dp(object):
     def _setg_type(self, value):
         value = VCS_validation_functions.checkString(self, 'g_type', value)
         value = value.lower()
-        if value not in vcs.elements and value != "text":
+        if value not in vcs.elements and value != "text" and value not in vcsaddons.gms:
             raise ValueError(
                 "invalid g_type '%s' must be one of: %s " %
                 (value, vcs.elements.keys()))
@@ -259,6 +260,7 @@ class Dp(object):
             self._g_name = "default"
             self._array = []
             self._continents = 1
+            self._continents_line = "default"
             self.ratio = None
         else:
             src = vcs.elements["display"][Dp_name_src]
@@ -269,6 +271,7 @@ class Dp(object):
             self.g_type = src.g_type
             self.g_name = src.g_name
             self.continents = src.continents
+            self.continents_line = src.continents_line
             self.priority = src.priority
             self.ratio = src.ratio
 
diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py
index c39667bb8..8421e247c 100644
--- a/Packages/vcsaddons/Lib/core.py
+++ b/Packages/vcsaddons/Lib/core.py
@@ -5,14 +5,14 @@ class VCSaddon(object):
     def __init__(self,name=None,source='default',x=None,template=None):
         self._saves={}
         self.g_nslabs=1
-        if not self.g_name in vcsaddons.gms.keys():
-            vcsaddons.gms[self.g_name]={}
+        if not self.g_type in vcsaddons.gms.keys():
+            vcsaddons.gms[self.g_type]={}
         if name is None:
             cont = True
             while cont:
                 num= numpy.random.randint(1000)
-                nm = 'Ghg_'+str(num)
-                if not nm in vcsaddons.gms[self.g_name].keys():
+                nm = self.g_type + '_'+str(num)
+                if not nm in vcsaddons.gms[self.g_type].keys():
                     name = nm
                     cont = False
 
@@ -30,7 +30,7 @@ class VCSaddon(object):
         else:
             raise "Error did not know what to do with template: %s" % template
 
-        if name in vcsaddons.gms[self.g_name].keys():
+        if name in vcsaddons.gms[self.g_type].keys():
             raise "Error graphic method %s already exists" % name
 
         if source=='default':
@@ -54,9 +54,9 @@ class VCSaddon(object):
             self.legend = None
             self.projection='linear'
         else:
-            gm =  vcsaddons.gms[self.g_name].get(source,None)
+            gm =  vcsaddons.gms[self.g_type].get(source,None)
             if gm is None:
-                raise "error could not find graphic method %s (of type %s)" % (source, self.g_name)
+                raise "error could not find graphic method %s (of type %s)" % (source, self.g_type)
             self.datawc_x1=gm.datawc_x1
             self.datawc_x2=gm.datawc_x2
             self.datawc_y1=gm.datawc_y1
@@ -77,7 +77,7 @@ class VCSaddon(object):
             self.legend = gm.legend
             self.projection=gm.projection
         self.name = name
-        vcsaddons.gms[self.g_name][name]=self
+        vcsaddons.gms[self.g_type][name]=self
         
 
     def list(self):
@@ -128,6 +128,30 @@ class VCSaddon(object):
                         setattr(self,axes+sec+n,vcs.mklabels(sc))
         return xmn,xmx,ymn,ymx
 
+    def plot_internal(self, slab=None, slab2=None, template=None, bg=0, x=None, **kwargs):
+        """
+        Used by vcs to properly build a display plot for this graphics method.
+        """
+        if x is None:
+            x = self.x
+
+        if slab2 is not None:
+            displays = self.plot(slab, slab2, template, bg, x, **kwargs)
+        else:
+            displays = self.plot(slab, template, bg, x, **kwargs)
+
+        for display in displays:
+            # Remove the display from the canvas
+            if display.name in x.display_names:
+                x.display_names.remove(display.name)
+        nm, src = x.check_name_source(None, "default", "display")
+        display = vcs.displayplot.Dp(nm)
+        display.g_name = self.name
+        display.g_type = self.g_type
+        display.array = [slab, slab2]
+        return display
+
+
     def save(self,attribute = None):
         if attribute is not None:
             self._saves[attribute] = getattr(self,attribute)
@@ -147,9 +171,9 @@ class VCSaddon(object):
 
     def getgm(self,source="default"):
         gm = None
-        for nm in vcsaddons.gms[self.g_name].keys():
+        for nm in vcsaddons.gms[self.g_type].keys():
             if source == nm:
-                return vcsaddons.gms[self.g_name][nm]
+                return vcsaddons.gms[self.g_type][nm]
 
         if gm is None:
             raise "Could not find graphic method %s named: %s" % (self.g_type, source)
diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py
index 0b7d7de86..d0e41c4ed 100644
--- a/Packages/vcsaddons/Lib/histograms.py
+++ b/Packages/vcsaddons/Lib/histograms.py
@@ -42,7 +42,7 @@ class Ghg(VCSaddon):
         print 'linecolors = ', self.linecolors
         print 'bins = ', self.bins
 
-    def plot(self, data, template=None, bg=0, x=None):
+    def plot(self, data, template=None, bg=0, x=None, **kwargs):
         if x is None:
             x = self.x
         if template is None:
@@ -190,9 +190,11 @@ class Ghg(VCSaddon):
 
         dsp = template.plot(x, MV2.masked_array(data), self, bg=bg, X=x_axis, Y=y_axis)
         for d in dsp:
-            displays.append(d)
+            if d is not None:
+                displays.append(d)
 
         self.restore()
         # Ugh, hack
         x.backend.renWin.Render()
+        print displays
         return displays
-- 
GitLab


From 577499883890dc9bd70c1924a3e53b011641e613 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Wed, 10 Feb 2016 09:25:41 -0800
Subject: [PATCH 007/196] Added legend, removed print, fixed the attr lengths

---
 Packages/vcsaddons/Lib/histograms.py | 30 ++++++++++++++++------------
 1 file changed, 17 insertions(+), 13 deletions(-)

diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py
index d0e41c4ed..bf314f246 100644
--- a/Packages/vcsaddons/Lib/histograms.py
+++ b/Packages/vcsaddons/Lib/histograms.py
@@ -109,41 +109,41 @@ class Ghg(VCSaddon):
 
         # Extend fillarea and line attrs to levels
         if self.fillareastyles:
-            while len(self.fillareastyles) < len(levels):
+            while len(self.fillareastyles) < (len(levels) - 1):
                 self.fillareastyles.append(self.fillareastyles[-1])
         else:
-            self.fillareastyles = ["solid"] * len(levels)
+            self.fillareastyles = ["solid"] * (len(levels) - 1)
 
         if self.fillareacolors:
-            while len(self.fillareacolors) < len(levels):
+            while len(self.fillareacolors) < (len(levels) - 1):
                 self.fillareacolors.append(self.fillareacolors[-1])
         else:
-            for lev in levels:
+            for lev in levels[:-1]:
                 self.fillareacolors.append(int((self.color_2 - self.color_1) * lev) + self.color_1)
 
         if self.fillareaindices:
-            while len(self.fillareaindices) < len(levels):
+            while len(self.fillareaindices) < (len(levels) - 1):
                 self.fillareaindices.append(self.fillareaindices[-1])
         else:
-            self.fillareaindices = [1] * len(levels)
+            self.fillareaindices = [1] * (len(levels) - 1)
 
         if self.line:
-            while len(self.line) < len(levels):
+            while len(self.line) < (len(levels) - 1):
                 self.line.append(self.line[-1])
         else:
-            self.line = ["solid"] * len(levels)
+            self.line = ["solid"] * (len(levels) - 1)
 
         if self.linewidth:
-            while len(self.linewidth) < len(levels):
+            while len(self.linewidth) < (len(levels) - 1):
                 self.linewidth.append(self.linewidth[-1])
         else:
-            self.linewidth = [1] * len(levels)
+            self.linewidth = [1] * (len(levels) - 1)
 
         if self.linecolors:
-            while len(self.linecolors) < len(levels):
+            while len(self.linecolors) < (len(levels) - 1):
                 self.linecolors.append(self.linecolors[-1])
         else:
-            self.linecolors = ["black"] * len(levels)
+            self.linecolors = ["black"] * (len(levels) - 1)
 
         for i in range(nbars):
             # Calculate level for bar
@@ -193,8 +193,12 @@ class Ghg(VCSaddon):
             if d is not None:
                 displays.append(d)
 
+        dsp = template.drawColorBar(self.fillareacolors, levels, legend={0: "No Variance", .1:"", .2: "", .3:"", .4:"", .5:"", .6:"", .7:"", .8:"", .9:"", 1: "High Variance"}, x=x)
+        for d in dsp:
+            if d is not None:
+                displays.append(d)
+
         self.restore()
         # Ugh, hack
         x.backend.renWin.Render()
-        print displays
         return displays
-- 
GitLab


From 3af99c72693cb37050c8b842206a8c9aaf308b8d Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Wed, 10 Feb 2016 12:37:42 -0800
Subject: [PATCH 008/196] Finishing touch-ups on histo

---
 Packages/vcs/Lib/vcs2vtk.py          | 46 ++++++++++++++++++++++++++--
 Packages/vcsaddons/Lib/histograms.py | 17 +++++-----
 2 files changed, 53 insertions(+), 10 deletions(-)

diff --git a/Packages/vcs/Lib/vcs2vtk.py b/Packages/vcs/Lib/vcs2vtk.py
index a7b178e89..a7ab029ab 100644
--- a/Packages/vcs/Lib/vcs2vtk.py
+++ b/Packages/vcs/Lib/vcs2vtk.py
@@ -1880,7 +1880,49 @@ def generateVectorArray(data1, data2, vtk_grid):
 
 def vtkIterate(iterator):
     iterator.InitTraversal()
-    obj = iterator.GetNextItem()
+    obj = iterator.GetNextItemAsObject()
     while obj is not None:
         yield obj
-        obj = iterator.GetNextItem()
+        obj = iterator.GetNextItemAsObject()
+
+
+# transforms [v1,v2] and returns it
+# such that it is in the same order
+# and has the same middle interval as [gm1, gm2]
+def switchAndTranslate(gm1, gm2, v1, v2, wrapModulo):
+    assert(v1 < v2)
+    # keep the same middle of the interval
+    if (wrapModulo):
+        gmMiddle = float(gm1 + gm2) / 2.0
+        half = float(v2 - v1) / 2.0
+        v1 = gmMiddle - half
+        v2 = gmMiddle + half
+    # if gm margins are increasing and dataset bounds are decreasing
+    # or the other way around switch them
+    if ((gm1 - gm2) * (v1 - v2) < 0):
+        v1, v2 = v2, v1
+    return [v1, v2]
+
+
+# TODO: Get rid of this function and pass instead: flip and central meridian
+# This function can fail for gmbounds -89, -2 where databounds are 89, 0
+# (the cells in the margins have different sizes: 2 and 4)
+#
+# returns bounds with the same interval size as databounds
+# but in the same order and with the same middle interval
+# as gmbounds. The middle and the order are used for
+# plotting. wrapModulo has YWrap, XWrap in degrees, 0 means no wrap
+def getBoundsForPlotting(gmbounds, databounds, wrapModulo):
+    """ Returns the same interval as databounds but it
+    matches the order and also it keeps the same center interval as gmbounds
+    So for instance if databounds is -40, 320 and gmbounds is -180, 180
+    this function returns [-180, 180] (when wrapModulo is set).
+    """
+    x1gm, x2gm, y1gm, y2gm = gmbounds[:4]
+    x1, x2, y1, y2 = databounds[:4]
+    assert (x1 < x2 and y1 < y2)
+    if not numpy.allclose([x1gm, x2gm], 1.e20):
+        x1, x2 = switchAndTranslate(x1gm, x2gm, x1, x2, wrapModulo[1] if wrapModulo else None)
+    if (isinstance(y1gm, numbers.Number) and not numpy.allclose([y1gm, y2gm], 1.e20)):
+        y1, y2 = switchAndTranslate(y1gm, y2gm, y1, y2, wrapModulo[0] if wrapModulo else None)
+    return [x1, x2, y1, y2]
diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py
index bf314f246..3c2b4a033 100644
--- a/Packages/vcsaddons/Lib/histograms.py
+++ b/Packages/vcsaddons/Lib/histograms.py
@@ -76,7 +76,7 @@ class Ghg(VCSaddon):
                 max_possible_deviance = max(means[ind] - self.bins[ind], max_possible_deviance)
 
         color_values = [std / max_possible_deviance for std in stds]
-        y_values, _ = numpy.histogram(data, self.bins)
+        y_values = [len(databin) for databin in binned]
         nbars = len(self.bins) - 1
 
         # create the primitive
@@ -91,7 +91,7 @@ class Ghg(VCSaddon):
         # Make the y scale be slightly larger than the largest bar
         ymn, ymx = 0, max(y_values) * 1.25
 
-        xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx)
+        #xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx)
 
         fill.worldcoordinate = [xmn, xmx, ymn, ymx]
         line.worldcoordinate = [xmn, xmx, ymn, ymx]
@@ -180,14 +180,12 @@ class Ghg(VCSaddon):
         line.width = lw
         line.color = lc
         displays = []
-        displays.append(x.plot(fill, bg=bg))
-        displays.append(x.plot(line, bg=bg))
-
-        x.worldcoordinate = fill.worldcoordinate
 
         x_axis = cdms2.createAxis(self.bins, id="x")
         y_axis = cdms2.createAxis(vcs.mkscale(0, len(data)), id="y")
 
+        displays.append(x.plot(fill, bg=bg, render=False))
+
         dsp = template.plot(x, MV2.masked_array(data), self, bg=bg, X=x_axis, Y=y_axis)
         for d in dsp:
             if d is not None:
@@ -198,7 +196,10 @@ class Ghg(VCSaddon):
             if d is not None:
                 displays.append(d)
 
+
+        displays.append(x.plot(line, bg=bg))
+
+        x.worldcoordinate = fill.worldcoordinate
+
         self.restore()
-        # Ugh, hack
-        x.backend.renWin.Render()
         return displays
-- 
GitLab


From 4c0e5d7862e5a8755a6188e0a49b0cccb1486220 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Fri, 12 Feb 2016 08:11:11 -0800
Subject: [PATCH 009/196] Made the axis be labeled correctly

---
 Packages/vcsaddons/Lib/histograms.py | 20 +++++++++++++++-----
 1 file changed, 15 insertions(+), 5 deletions(-)

diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py
index 3c2b4a033..60f866492 100644
--- a/Packages/vcsaddons/Lib/histograms.py
+++ b/Packages/vcsaddons/Lib/histograms.py
@@ -51,6 +51,16 @@ class Ghg(VCSaddon):
             template = x.gettemplate(template)
         elif not vcs.istemplate(template):
             raise ValueError("Error did not know what to do with template: %s" % template)
+        try:
+            data_name = data.title
+        except AttributeError:
+            try:
+                data_name = data.long_name
+            except AttributeError:
+                try:
+                    data_name = data.id + data.units
+                except AttributeError:
+                    data_name = data.id
 
         # We'll just flatten the data... if they want to be more precise, should pass in more precise data
         data = data.flatten().asma()
@@ -181,12 +191,13 @@ class Ghg(VCSaddon):
         line.color = lc
         displays = []
 
-        x_axis = cdms2.createAxis(self.bins, id="x")
-        y_axis = cdms2.createAxis(vcs.mkscale(0, len(data)), id="y")
+        x_axis = cdms2.createAxis(self.bins, id=data_name)
+        y_axis = cdms2.createAxis(vcs.mkscale(0, ymx), id="bin_size")
 
         displays.append(x.plot(fill, bg=bg, render=False))
-
-        dsp = template.plot(x, MV2.masked_array(data), self, bg=bg, X=x_axis, Y=y_axis)
+        arr = MV2.masked_array(y_values)
+        arr.setAxis(0, x_axis)
+        dsp = template.plot(x, arr, self, bg=bg, X=x_axis, Y=y_axis)
         for d in dsp:
             if d is not None:
                 displays.append(d)
@@ -196,7 +207,6 @@ class Ghg(VCSaddon):
             if d is not None:
                 displays.append(d)
 
-
         displays.append(x.plot(line, bg=bg))
 
         x.worldcoordinate = fill.worldcoordinate
-- 
GitLab


From 1bb3806f83652635c3f8e44be937721ae1c22463 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Fri, 25 Mar 2016 15:26:31 -0700
Subject: [PATCH 010/196] Fixes #1894

---
 Packages/vcs/Lib/vcsvtk/boxfillpipeline.py |  4 ++--
 Packages/vcs/Lib/vcsvtk/isofillpipeline.py | 10 +++++-----
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py b/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py
index 64ad4f00c..54d9b8a60 100644
--- a/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py
+++ b/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py
@@ -52,7 +52,7 @@ class BoxfillPipeline(Pipeline2D):
         if numpy.allclose(self._gm.level_1, 1.e20) or \
            numpy.allclose(self._gm.level_2, 1.e20):
             self._contourLevels = vcs.mkscale(self._scalarRange[0],
-                                              self._scalarRange[1])
+                                              self._scalarRange[1], nc=min(nlev, 12))
             if len(self._contourLevels) == 1:  # constant value ?
                 self._contourLevels = [self._contourLevels[0],
                                        self._contourLevels[0] + .00001]
@@ -282,7 +282,7 @@ class BoxfillPipeline(Pipeline2D):
 
         # Colortable bit
         # make sure length match
-        numLevels = len(self._contourLevels)
+        numLevels = len(self._contourLevels) - 1
         while len(self._contourColors) < numLevels:
             self._contourColors.append(self._contourColors[-1])
 
diff --git a/Packages/vcs/Lib/vcsvtk/isofillpipeline.py b/Packages/vcs/Lib/vcsvtk/isofillpipeline.py
index 59b31272a..28c7ee469 100644
--- a/Packages/vcs/Lib/vcsvtk/isofillpipeline.py
+++ b/Packages/vcs/Lib/vcsvtk/isofillpipeline.py
@@ -46,11 +46,11 @@ class IsofillPipeline(Pipeline2D):
     def _plotInternal(self):
         """Overrides baseclass implementation."""
 
-        prepedContours = self._prepContours()
-        tmpLevels = prepedContours["tmpLevels"]
-        tmpIndices = prepedContours["tmpIndices"]
-        tmpColors = prepedContours["tmpColors"]
-        tmpOpacities = prepedContours["tmpOpacities"]
+        preppedCountours = self._prepContours()
+        tmpLevels = preppedCountours["tmpLevels"]
+        tmpIndices = preppedCountours["tmpIndices"]
+        tmpColors = preppedCountours["tmpColors"]
+        tmpOpacities = preppedCountours["tmpOpacities"]
         style = self._gm.fillareastyle
 
         luts = []
-- 
GitLab


From eb791bcfb6cfb01bb44f83a916ecd0da659fa862 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 5 Apr 2016 09:37:12 -0700
Subject: [PATCH 011/196] fix #1907

---
 CMake/cdat_modules/docutils_external.cmake               | 2 +-
 CMake/cdat_modules/gdal_external.cmake                   | 4 +++-
 CMake/cdat_modules/spyder_external.cmake                 | 2 +-
 CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in | 1 +
 CMake/cmake_modules/pipinstaller.cmake                   | 2 ++
 5 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/CMake/cdat_modules/docutils_external.cmake b/CMake/cdat_modules/docutils_external.cmake
index ae62d3b59..8e419bd00 100644
--- a/CMake/cdat_modules/docutils_external.cmake
+++ b/CMake/cdat_modules/docutils_external.cmake
@@ -2,5 +2,5 @@
 # and configure and build it
 
 set(nm docutils)
-
+set(OLD "")
 include(pipinstaller)
diff --git a/CMake/cdat_modules/gdal_external.cmake b/CMake/cdat_modules/gdal_external.cmake
index 6efa6ae11..33e4c8e60 100644
--- a/CMake/cdat_modules/gdal_external.cmake
+++ b/CMake/cdat_modules/gdal_external.cmake
@@ -17,7 +17,9 @@ ExternalProject_Add(gdal
   URL_MD5 ${GDAL_MD5}
   BUILD_IN_SOURCE 1
   PATCH_COMMAND ""
-  CONFIGURE_COMMAND sh configure --prefix=${cdat_EXTERNALS} --with-hdf5=${cdat_EXTERNALS} --with-netcdf=${cdat_EXTERNALS} --with-curl=${cdat_EXTERNALS} --with-geos=${cdat_EXTERNALS}/bin/geos-config --with-python=${PYTHON_EXECUTABLE} --with-jpeg=no --with-libtiff=internal --without-jpeg12 --with-geotiff=internal
+  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${gdal_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file}
+  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
+  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
   DEPENDS "${gdal_deps}"
   ${ep_log_options}
 )
diff --git a/CMake/cdat_modules/spyder_external.cmake b/CMake/cdat_modules/spyder_external.cmake
index 2f1b6631b..ef88a8338 100644
--- a/CMake/cdat_modules/spyder_external.cmake
+++ b/CMake/cdat_modules/spyder_external.cmake
@@ -2,6 +2,6 @@
 # and configure and build it
 
 set (nm spyder)
-
+set(OLD "")
 include(pipinstaller)
 
diff --git a/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in b/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in
index 826b0e209..b8dd0172d 100644
--- a/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in
+++ b/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in
@@ -4,6 +4,7 @@ set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
 include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
 
 set(ENV{CC} mpicc)
+set(ENV{CXX} mpicxx)
 
 message("CONFIGURE_ARGS IS ${CONFIGURE_ARGS}")
 message("LD_ARGS IS $ENV{@LIBRARY_PATH@}")
diff --git a/CMake/cmake_modules/pipinstaller.cmake b/CMake/cmake_modules/pipinstaller.cmake
index 09b797908..1060a0ca1 100644
--- a/CMake/cmake_modules/pipinstaller.cmake
+++ b/CMake/cmake_modules/pipinstaller.cmake
@@ -49,3 +49,5 @@ unset(GIT_CMD_STR )
 unset(USR_ENVS)
 unset(USER_BUILD_EXT_OPTS)
 unset(USER_INSTALL_OPTIONS)
+unset(OLDSTR)
+unset(OLD)
-- 
GitLab


From 89a2ec9489a6f70ad5c9ef5ad6ddd0c0cede44ed Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Wed, 6 Apr 2016 10:59:41 -0700
Subject: [PATCH 012/196] Made change to make labels better match levels, added
 precision-based rounding to labels

---
 Packages/vcs/Lib/utils.py                    | 51 ++++++++++++++------
 Packages/vcs/Lib/vcsvtk/boxfillpipeline.py   | 12 +++--
 testing/vcs/CMakeLists.txt                   |  6 ++-
 testing/vcs/test_fewer_colors_than_levels.py | 29 +++++++++++
 4 files changed, 76 insertions(+), 22 deletions(-)
 create mode 100644 testing/vcs/test_fewer_colors_than_levels.py

diff --git a/Packages/vcs/Lib/utils.py b/Packages/vcs/Lib/utils.py
index 2323200ae..91c2acc2d 100644
--- a/Packages/vcs/Lib/utils.py
+++ b/Packages/vcs/Lib/utils.py
@@ -937,7 +937,24 @@ def __split2contiguous(levels):
     return tmplevs
 
 
-def mklabels(vals, output='dict'):
+def guess_precision(num):
+    """
+    Rough guesstimate of the precision of a number. Don't use this for actual science.
+    """
+    if num == 0:
+        return 1
+    idigleft = int(float(numpy.ma.floor(numpy.ma.log10(num)))) + 1
+    aa = numpy.ma.power(10., -idigleft)
+
+    while abs(round(aa * num) - aa * num) > .000001:
+        aa = aa * 10.
+
+    total_digits = numpy.ma.floor(numpy.ma.log10(aa * numpy.ma.power(10., idigleft)))
+
+    return max(total_digits, idigleft)
+
+
+def mklabels(vals, output='dict', precision=None):
     '''
     Function : mklabels
 
@@ -957,6 +974,11 @@ def mklabels(vals, output='dict'):
     {2.0000000000000002e-05: '2E-5', 5.0000000000000002e-05: '5E-5'}
     >>> vcs.mklabels ( [.00002,.00005],output='list')
     ['2E-5', '5E-5']
+    >>> a = vcs.mkevenlevels(0, 100, nlev=9)
+    >>> vcs.mklabels(a)
+    {0.0: '  0.000000000000000', 11.11111111111111: ' 11.111111111111111', 22.22222222222222: ' 22.222222222222221', 33.33333333333333: ' 33.333333333333329', 44.44444444444444: ' 44.444444444444443', 55.55555555555556: ' 55.555555555555557', 66.66666666666666: ' 66.666666666666657', 77.77777777777777: ' 77.777777777777771', 88.88888888888889: ' 88.888888888888886', 100.0: '100.000000000000000'}
+    >>> vcs.mklabels(a, precision=4)
+    {0.0: '  0.0', 11.11111111111111: ' 11.1', 22.22222222222222: ' 22.2', 33.33333333333333: ' 33.3', 44.44444444444444: ' 44.4', 55.55555555555556: ' 55.6', 66.66666666666666: ' 66.7', 77.77777777777777: ' 77.8', 88.88888888888889: ' 88.9', 100.0: '100.0'}
     '''
     import string
     import numpy.ma
@@ -995,21 +1017,18 @@ def mklabels(vals, output='dict'):
         amax = float(numpy.ma.maximum(vals))
     #  Number of digit on the left of decimal point
     idigleft = int(numpy.ma.floor(numpy.ma.log10(amax))) + 1
-    # Now determine the number of significant figures
-    idig = 0
-    for i in range(nvals):
-        aa = numpy.ma.power(10., -idigleft)
-        while abs(round(aa * vals[i]) - aa * vals[i]) > .000001:
-            aa = aa * 10.
-        idig = numpy.ma.maximum(
-            idig,
-            numpy.ma.floor(
-                numpy.ma.log10(
-                    aa *
-                    numpy.ma.power(
-                        10.,
-                        idigleft))))
-    idig = int(idig)
+
+    if precision is None:
+        # Now determine the number of significant figures
+        idig = 0
+        for i in range(nvals):
+            aa = numpy.ma.power(10., -idigleft)
+            while abs(round(aa * vals[i]) - aa * vals[i]) > .000001:
+                aa = aa * 10.
+            idig = numpy.ma.maximum(idig, numpy.ma.floor(numpy.ma.log10(aa * numpy.ma.power(10., idigleft))))
+        idig = int(idig)
+    else:
+        idig = int(precision)
     # Now does the writing part
     lbls = []
     # First if we need an E format
diff --git a/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py b/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py
index 54d9b8a60..98f61794e 100644
--- a/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py
+++ b/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py
@@ -51,12 +51,13 @@ class BoxfillPipeline(Pipeline2D):
         nlev = (self._gm.color_2 - self._gm.color_1) + 1
         if numpy.allclose(self._gm.level_1, 1.e20) or \
            numpy.allclose(self._gm.level_2, 1.e20):
-            self._contourLevels = vcs.mkscale(self._scalarRange[0],
-                                              self._scalarRange[1], nc=min(nlev, 12))
+            self._contourLevels = vcs.mkevenlevels(self._scalarRange[0],
+                                              self._scalarRange[1], nlev=min(nlev, 12))
             if len(self._contourLevels) == 1:  # constant value ?
-                self._contourLevels = [self._contourLevels[0],
-                                       self._contourLevels[0] + .00001]
-            self._contourLabels = vcs.mklabels(self._contourLevels)
+                self._contourLevels = [self._contourLevels[0], self._contourLevels[0] + .00001]
+
+            max_precision = max(vcs.guess_precision(self._contourLevels[0]), vcs.guess_precision(self._contourLevels[-1]))
+            self._contourLabels = vcs.mklabels(self._contourLevels, precision=max_precision)
             dx = (self._contourLevels[-1] - self._contourLevels[0]) / nlev
             self._contourLevels = numpy.arange(self._contourLevels[0],
                                                self._contourLevels[-1] + dx,
@@ -282,6 +283,7 @@ class BoxfillPipeline(Pipeline2D):
 
         # Colortable bit
         # make sure length match
+        print self._contourLevels
         numLevels = len(self._contourLevels) - 1
         while len(self._contourColors) < numLevels:
             self._contourColors.append(self._contourColors[-1])
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index 69df44d3e..2bb2cde28 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -928,7 +928,11 @@ cdat_add_test(vcs_test_configurator_resize
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_configurator_resize.py
   ${BASELINE_DIR}/test_vcs_configurator_resize.png
 )
-
+cdat_add_test(vcs_test_fewer_colors_than_levels
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_fewer_colors_than_levels.py
+  ${BASELINE_DIR}/test_fewer_colors_than_levels.png
+)
 cdat_add_test(vcs_test_colorpicker_appearance
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_colorpicker_appearance.py
diff --git a/testing/vcs/test_fewer_colors_than_levels.py b/testing/vcs/test_fewer_colors_than_levels.py
new file mode 100644
index 000000000..2779d4b6d
--- /dev/null
+++ b/testing/vcs/test_fewer_colors_than_levels.py
@@ -0,0 +1,29 @@
+import vcs,cdms2,sys,os
+
+baselineImage = sys.argv[1]
+
+pth = os.path.join(os.path.dirname(__file__),"..")
+sys.path.append(pth)
+import checkimage
+
+dataset = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+data = dataset("clt")
+
+canvas = vcs.init()
+canvas.setantialiasing(0)
+canvas.setbgoutputdimensions(1200, 1091, units="pixels")
+canvas.drawlogooff()
+
+boxfill = canvas.createboxfill()
+
+boxfill.color_1 = 242
+boxfill.color_2 = 250
+
+canvas.plot(data, boxfill, bg=1)
+
+testImage = os.path.abspath("test_fewer_colors_than_levels.png")
+canvas.png(testImage)
+
+ret = checkimage.check_result_image(testImage, baselineImage, checkimage.defaultThreshold)
+
+sys.exit(ret)
-- 
GitLab


From a75310d17ac964828703dcf63635b9812c5bb75d Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 6 Apr 2016 11:34:34 -0700
Subject: [PATCH 013/196] Update spyder_external.cmake

---
 CMake/cdat_modules/spyder_external.cmake | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CMake/cdat_modules/spyder_external.cmake b/CMake/cdat_modules/spyder_external.cmake
index ef88a8338..dede73c00 100644
--- a/CMake/cdat_modules/spyder_external.cmake
+++ b/CMake/cdat_modules/spyder_external.cmake
@@ -2,6 +2,6 @@
 # and configure and build it
 
 set (nm spyder)
-set(OLD "")
+set(OLD "OFF")
 include(pipinstaller)
 
-- 
GitLab


From ad2fe28dafab39e92273010beeeb0766c73337d2 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 6 Apr 2016 11:34:58 -0700
Subject: [PATCH 014/196] Update docutils_external.cmake

---
 CMake/cdat_modules/docutils_external.cmake | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CMake/cdat_modules/docutils_external.cmake b/CMake/cdat_modules/docutils_external.cmake
index 8e419bd00..36bdaedb1 100644
--- a/CMake/cdat_modules/docutils_external.cmake
+++ b/CMake/cdat_modules/docutils_external.cmake
@@ -2,5 +2,5 @@
 # and configure and build it
 
 set(nm docutils)
-set(OLD "")
+set(OLD "OFF")
 include(pipinstaller)
-- 
GitLab


From 6c7b637eff5f87bba2544f332642c4de30cdafd9 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Tue, 12 Apr 2016 15:13:32 -0700
Subject: [PATCH 015/196] Added documentation of plot args, deprecated some old
 ones, removed broken ones

---
 Packages/vcs/vcs/Canvas.py | 51 ++++++++++++++++----------------------
 1 file changed, 21 insertions(+), 30 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 1032dba3c..25f8e731d 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -2264,13 +2264,13 @@ Options:::
     # Set alias for the secondary drawtextcombined.
     drawtext = drawtextcombined
 
-    _plot_keywords_ = ['variable', 'grid', 'xaxis', 'yaxis', 'xrev', 'yrev', 'continents', 'xarray', 'yarray',
-                       'name', 'time', 'units', 'ymd', 'file_comment',
-                       'xbounds', 'ybounds', 'xname', 'yname', 'xunits', 'yunits', 'xweights', 'yweights',
-                       'comment1', 'comment2', 'comment3', 'comment4', 'hms', 'long_name', 'zaxis',
-                       'zarray', 'zname', 'zunits', 'taxis', 'tarray', 'tname', 'tunits', 'waxis', 'warray',
-                       'wname', 'wunits', 'bg', 'ratio', 'donotstoredisplay', 'render', 'continents_line', "display_name"]
+    _plot_keywords_ = ['variable', 'grid', 'xaxis', 'xarray',  'xrev', 'yaxis', 'yarray', 'yrev', 'continents',
+                       'xbounds', 'ybounds', 'zaxis', 'zarray', 'taxis', 'tarray', 'waxis', 'warray', 'bg', 'ratio',
+                       'donotstoredisplay', 'render', 'continents_line', "display_name"]
 
+    _deprecated_plot_keywords_ = ["time", "units", "file_comment", "xname", "yname", "zname", "tname", "wname",
+                                  "xunits", "yunits", "zunits", "tunits", "wunits", "comment1", "comment2", "comment3",
+                                  "comment4", "long_name"]
     # def replot(self):
     #    """ Clears and plots with last used plot arguments
     #    """
@@ -2317,28 +2317,10 @@ Options:::
             '3',3: y dim will be 3 times bigger than x dim (restricted to original tempalte.data area
             Adding a 't' at the end of the ratio, makes the tickmarks and boxes move along.
 
-    Variable attribute keys:
-       comment1         = string   #Comment plotted above file_comment
-       comment2         = string   #Comment plotted above comment1
-       comment3         = string   #Comment plotted above comment2
-       comment4         = string   #Comment plotted above comment4
-       file_comment     = string   #Comment (defaults to file.comment)
-       hms              = string (hh:mm:ss) #Hour, minute, second
-       long_name        = string   #Descriptive variable name
-       name             = string   #Variable name (defaults to var.id)
-       time             = cdtime   #instance (relative or absolute),
-                                    cdtime, reltime or abstime value
-       units            = string   #Variable units
-       ymd              = string (yy/mm/dd) #Year, month, day
-
     Dimension attribute keys (dimension length=n):
        [x|y|z|t|w]array = NumPy array of length n    # x or y Dimension values
        [x|y|z|t|w]array = NumPy array of length n    # x or y Dimension values
        [x|y]bounds       = NumPy array of shape (n,2) # x or y Dimension boundaries
-       [x|y|z|t|w]name   = string                     # x or y Dimension name
-       [x|y|z|t|w]units  = string                     # x or y Dimension units
-       [x|y]weights      = NumPy array of length n    # x or y Dimension weights (used to
-                                                        calculate area-weighted mean)
 
     CDMS object:
        [x|y|z|t|w]axis   = CDMS axis object           # x or y Axis
@@ -2365,7 +2347,13 @@ Options:::
                                                       # Values 6 through 11 signify the line type
                                                       # defined by the files data_continent_other7
                                                       # through data_continent_other12.
-
+        continents_line = vcs.getline("default")    # VCS line object to define continent appearance
+        donotstoredisplay = True|False              # Whether the displayplot object generated by this plot are stored
+        render = True|False                         # Whether to actually render the plot or not (useful for doing a
+                                                    # bunch of plots in a row)
+        display_name = "__display_123"                # VCS Display plot name (used to prevent duplicate display plots)
+        ratio = 1.5|"autot"|"auto"                   # Ratio of height/width for the plot; autot and auto will choose a
+                                                    # "good" ratio for you.
     Graphics Output in Background Mode:
        bg                 = 0|1   # if ==1, create images in the background
                                                              (Don't display the VCS Canvas)
@@ -3526,11 +3514,14 @@ Options:::
                     doratio == "0" or doratio[:4] == "auto"):
                 doratio = "1t"
             for keyarg in keyargs.keys():
-                if keyarg not in self.__class__._plot_keywords_ + \
-                        self.backend._plot_keywords:
-                    warnings.warn(
-                        'Unrecognized vcs plot keyword: %s, assuming backend (%s) keyword' %
-                        (keyarg, self.backend.type))
+                if keyarg not in self.__class__._plot_keywords_ + self.backend._plot_keywords:
+                    if keyarg in self.__class__._deprecated_plot_keywords_:
+                        warnings.warn("Deprecation Warning: Keyword '%s' will be removed in the next version"
+                                      "of UV-CDAT." % keyarg)
+                    else:
+                        warnings.warn(
+                            'Unrecognized vcs plot keyword: %s, assuming backend (%s) keyword' %
+                            (keyarg, self.backend.type))
 
             if arglist[0] is not None or 'variable' in keyargs:
                 arglist[0] = self._reconstruct_tv(arglist, keyargs)
-- 
GitLab


From a82437bfa57add72c02fce63ad3c3224c67d7e3a Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Tue, 12 Apr 2016 15:15:40 -0700
Subject: [PATCH 016/196] Added documentation to clear's preserve_display arg

---
 Packages/vcs/vcs/Canvas.py   | 3 +++
 Packages/vcs/vcs/VTKPlots.py | 2 +-
 2 files changed, 4 insertions(+), 1 deletion(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 25f8e731d..bd8c82310 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -3891,6 +3891,9 @@ Options:::
     a.plot(array,'default','isofill','quick')
     a.clear()
 
+  Internally, update() calls clear() to assist in removing plots. The preserve_display argument is used to
+  make sure that the display plots that are associated with the current canvas are not eliminated, and
+  are still able to be used to regenerate the plots.
 """
         if self.animate.created():
             self.animate.close()
diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index d4a8241fa..4d870cd15 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -306,7 +306,7 @@ class VTKVCSBackend(object):
             key["continents_line"] = d.continents_line
             key_args.append(key)
 
-        # Have to pull out the UI layer so it doesn't get borked by the clear
+        # Have to pull out the UI layer so it doesn't get borked by the clear
         self.hideGUI()
 
         if self.canvas.configurator is not None:
-- 
GitLab


From 88d77458579cf71437f4808b26547d1aa6dbda03 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Fri, 15 Apr 2016 11:37:57 -0700
Subject: [PATCH 017/196] Fix bug in template's drawColorBox function that
 could prevent some labels from being drawn

---
 Packages/vcs/Lib/template.py | 35 ++++++++++++++++++-----------------
 1 file changed, 18 insertions(+), 17 deletions(-)

diff --git a/Packages/vcs/Lib/template.py b/Packages/vcs/Lib/template.py
index 7d5869eee..2fd255525 100644
--- a/Packages/vcs/Lib/template.py
+++ b/Packages/vcs/Lib/template.py
@@ -1844,7 +1844,7 @@ class P(object):
                    startlong])
         # Now make sure we have a legend
         if isinstance(levels[0], list):
-            # Ok these are nono contiguous levels, we will use legend only if
+            # Ok these are non-contiguous levels, we will use legend only if
             # it's a perfect match
             for i, l in enumerate(levels):
                 lt = l[0]
@@ -1869,29 +1869,30 @@ class P(object):
         else:
             if legend is None:
                 legend = vcs.mklabels(levels)
+            float_epsilon = numpy.finfo(float).eps
             if levels[0] < levels[1]:
-                ecompfunc = numpy.less_equal
-                compfunc = numpy.less
+                ecompfunc = lambda x, y: float_epsilon > x - y
+                compfunc = lambda x, y: -float_epsilon > x - y
             else:
-                ecompfunc = numpy.greater_equal
-                compfunc = numpy.greater
+                ecompfunc = lambda x, y: -float_epsilon < x - y
+                compfunc = lambda x, y: float_epsilon < x - y
             dlong = dD / (len(levels) - 1)
+
             for l in legend.keys():
+                # if legend key is between levels[0] and levels[-1]
                 if not compfunc(l, levels[0]) and not compfunc(levels[-1], l):
                     for i in range(len(levels) - 1):
-                        if ecompfunc(levels[i], l) and ecompfunc(
-                                l, levels[i + 1]):
-                            # Ok we're between 2 levels, let's add the legend
-                            # first let's figure out where to put it
-                            loc = i * dlong  # position at beginnig of level
-                            # Adds the distance from beginnig of level box
-                            loc += (l - levels[i]) / \
-                                (levels[i + 1] - levels[i]) * dlong
-                            loc += startlong  # Figures out the begining
-    # loc=((l-levels[0])/(levels[-1]-levels[0]))*dD+startlong
-                            Ll.append([loc, loc])
+                        # if legend key is (inclusive) between levels[i] and levels[i+1]
+                        if ecompfunc(levels[i], l) and ecompfunc(l, levels[i + 1]):
+                            # first let's figure out where to put the legend label
+                            location = i * dlong  # position at beginning of level
+                            # Adds the distance from beginning of level box
+                            location += (l - levels[i]) / (levels[i + 1] - levels[i]) * dlong
+                            location += startlong  # Figures out the beginning
+
+                            Ll.append([location, location])
                             Sl.append([startshrt, startshrt + dshrt])
-                            Lt.append(loc)
+                            Lt.append(location)
                             St.append(startshrt + dshrt * 1.4)
                             Tt.append(legend[l])
                             break
-- 
GitLab


From 5ab070a1b4b68eb7ccd94808638b1bfd7ed50784 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Fri, 15 Apr 2016 11:38:36 -0700
Subject: [PATCH 018/196] Refactored legend/label logic to be in boxfill, fixed
 label generation, eliminated "guess_precision"

---
 Packages/vcs/Lib/boxfill.py                | 44 ++++++++++++++++++++
 Packages/vcs/Lib/utils.py                  | 44 +++++---------------
 Packages/vcs/Lib/vcsvtk/boxfillpipeline.py | 47 +---------------------
 3 files changed, 56 insertions(+), 79 deletions(-)

diff --git a/Packages/vcs/Lib/boxfill.py b/Packages/vcs/Lib/boxfill.py
index 231b22839..76de13e04 100755
--- a/Packages/vcs/Lib/boxfill.py
+++ b/Packages/vcs/Lib/boxfill.py
@@ -24,6 +24,7 @@ import vcs
 import cdtime
 import VCS_validation_functions
 import xmldocs
+import numpy
 import warnings
 
 
@@ -734,6 +735,49 @@ class Gfb(object):
         self.yaxisconvert = yat
     xyscale.__doc__ = xmldocs.xyscaledoc
 
+    def getlevels(self, varmin, varmax):
+        if self.boxfill_type == "custom":
+            return self.levels
+
+        if numpy.allclose(self.level_1, 1.e20) or \
+           numpy.allclose(self.level_2, 1.e20):
+            low_end = varmin
+            high_end = varmax
+        else:
+            low_end = self.level_1
+            high_end = self.level_2
+
+        if self.boxfill_type == "log10":
+            low_end = numpy.ma.log10(low_end)
+            high_end = numpy.ma.log10(high_end)
+
+        nlev = float(self.color_2 - self.color_1 + 1)
+        scale = vcs.mkscale(low_end, high_end)
+
+        low_end = scale[0]
+        high_end = scale[-1]
+
+        dx = (high_end - low_end) / nlev
+
+        contourLevels = numpy.arange(low_end, high_end + dx / 2., dx)
+        return contourLevels
+
+    def getlegendlabels(self, levels):
+        if self.legend:
+            return self.legend
+        nlev = self.color_2 - self.color_1 + 1
+        if nlev >= 12:
+            label_scale = vcs.mkscale(levels[0], levels[-1])
+            return vcs.mklabels(label_scale)
+        else:
+            # Need to line up the levels and the labels, so we'll massage the label positions
+            dx = levels[1] - levels[0]
+            # Determine what decimal place we should round the label to to "see a difference"
+            round_pos = -1 * int(numpy.ma.log10(dx))
+            round_values = [numpy.round(l, round_pos) for l in levels]
+            round_labels = vcs.mklabels(round_values, "list")
+            return {lev: label for lev, label in zip(levels, round_labels)}
+
     ###########################################################################
     #                                                                         #
     # List out boxfill graphics method members (attributes).                  #
diff --git a/Packages/vcs/Lib/utils.py b/Packages/vcs/Lib/utils.py
index 91c2acc2d..5550ae771 100644
--- a/Packages/vcs/Lib/utils.py
+++ b/Packages/vcs/Lib/utils.py
@@ -937,24 +937,7 @@ def __split2contiguous(levels):
     return tmplevs
 
 
-def guess_precision(num):
-    """
-    Rough guesstimate of the precision of a number. Don't use this for actual science.
-    """
-    if num == 0:
-        return 1
-    idigleft = int(float(numpy.ma.floor(numpy.ma.log10(num)))) + 1
-    aa = numpy.ma.power(10., -idigleft)
-
-    while abs(round(aa * num) - aa * num) > .000001:
-        aa = aa * 10.
-
-    total_digits = numpy.ma.floor(numpy.ma.log10(aa * numpy.ma.power(10., idigleft)))
-
-    return max(total_digits, idigleft)
-
-
-def mklabels(vals, output='dict', precision=None):
+def mklabels(vals, output='dict'):
     '''
     Function : mklabels
 
@@ -974,11 +957,6 @@ def mklabels(vals, output='dict', precision=None):
     {2.0000000000000002e-05: '2E-5', 5.0000000000000002e-05: '5E-5'}
     >>> vcs.mklabels ( [.00002,.00005],output='list')
     ['2E-5', '5E-5']
-    >>> a = vcs.mkevenlevels(0, 100, nlev=9)
-    >>> vcs.mklabels(a)
-    {0.0: '  0.000000000000000', 11.11111111111111: ' 11.111111111111111', 22.22222222222222: ' 22.222222222222221', 33.33333333333333: ' 33.333333333333329', 44.44444444444444: ' 44.444444444444443', 55.55555555555556: ' 55.555555555555557', 66.66666666666666: ' 66.666666666666657', 77.77777777777777: ' 77.777777777777771', 88.88888888888889: ' 88.888888888888886', 100.0: '100.000000000000000'}
-    >>> vcs.mklabels(a, precision=4)
-    {0.0: '  0.0', 11.11111111111111: ' 11.1', 22.22222222222222: ' 22.2', 33.33333333333333: ' 33.3', 44.44444444444444: ' 44.4', 55.55555555555556: ' 55.6', 66.66666666666666: ' 66.7', 77.77777777777777: ' 77.8', 88.88888888888889: ' 88.9', 100.0: '100.0'}
     '''
     import string
     import numpy.ma
@@ -1018,17 +996,15 @@ def mklabels(vals, output='dict', precision=None):
     #  Number of digit on the left of decimal point
     idigleft = int(numpy.ma.floor(numpy.ma.log10(amax))) + 1
 
-    if precision is None:
-        # Now determine the number of significant figures
-        idig = 0
-        for i in range(nvals):
-            aa = numpy.ma.power(10., -idigleft)
-            while abs(round(aa * vals[i]) - aa * vals[i]) > .000001:
-                aa = aa * 10.
-            idig = numpy.ma.maximum(idig, numpy.ma.floor(numpy.ma.log10(aa * numpy.ma.power(10., idigleft))))
-        idig = int(idig)
-    else:
-        idig = int(precision)
+    # Now determine the number of significant figures
+    idig = 0
+    for i in range(nvals):
+        aa = numpy.ma.power(10., -idigleft)
+        while abs(round(aa * vals[i]) - aa * vals[i]) > .000001:
+            aa = aa * 10.
+        idig = numpy.ma.maximum(idig, numpy.ma.floor(numpy.ma.log10(aa * numpy.ma.power(10., idigleft))))
+    idig = int(idig)
+
     # Now does the writing part
     lbls = []
     # First if we need an E format
diff --git a/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py b/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py
index 98f61794e..a27c724cc 100644
--- a/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py
+++ b/Packages/vcs/Lib/vcsvtk/boxfillpipeline.py
@@ -47,50 +47,8 @@ class BoxfillPipeline(Pipeline2D):
 
     def _updateContourLevelsAndColorsForBoxfill(self):
         """Set contour information for a standard boxfill."""
-        # Compute levels
-        nlev = (self._gm.color_2 - self._gm.color_1) + 1
-        if numpy.allclose(self._gm.level_1, 1.e20) or \
-           numpy.allclose(self._gm.level_2, 1.e20):
-            self._contourLevels = vcs.mkevenlevels(self._scalarRange[0],
-                                              self._scalarRange[1], nlev=min(nlev, 12))
-            if len(self._contourLevels) == 1:  # constant value ?
-                self._contourLevels = [self._contourLevels[0], self._contourLevels[0] + .00001]
-
-            max_precision = max(vcs.guess_precision(self._contourLevels[0]), vcs.guess_precision(self._contourLevels[-1]))
-            self._contourLabels = vcs.mklabels(self._contourLevels, precision=max_precision)
-            dx = (self._contourLevels[-1] - self._contourLevels[0]) / nlev
-            self._contourLevels = numpy.arange(self._contourLevels[0],
-                                               self._contourLevels[-1] + dx,
-                                               dx)
-        else:
-            if self._gm.boxfill_type == "log10":
-                levslbls = vcs.mkscale(numpy.ma.log10(self._gm.level_1),
-                                       numpy.ma.log10(self._gm.level_2))
-                self._contourLevels = vcs.mkevenlevels(
-                    numpy.ma.log10(self._gm.level_1),
-                    numpy.ma.log10(self._gm.level_2), nlev=nlev)
-            else:
-                levslbls = vcs.mkscale(self._gm.level_1, self._gm.level_2)
-                self._contourLevels = vcs.mkevenlevels(self._gm.level_1,
-                                                       self._gm.level_2,
-                                                       nlev=nlev)
-            if len(self._contourLevels) > 25:
-                # Too many colors/levels need to prettyfy this for legend
-                self._contourLabels = vcs.mklabels(levslbls)
-                # Make sure extremes are in
-                legd2 = vcs.mklabels([self._contourLevels[0],
-                                      self._contourLevels[-1]])
-                self._contourLabels.update(legd2)
-            else:
-                self._contourLabels = vcs.mklabels(self._contourLevels)
-            if self._gm.boxfill_type == "log10":
-                logLabels = {}
-                for key in self._contourLabels.keys():
-                    value = self._contourLabels[key]
-                    newKey = float(numpy.ma.log10(value))
-                    logLabels[newKey] = value
-                self._contourLabels = logLabels
-
+        self._contourLevels = self._gm.getlevels(self._scalarRange[0], self._scalarRange[1])
+        self._contourLabels = self._gm.getlegendlabels(self._contourLevels)
         # Use consecutive colors:
         self._contourColors = range(self._gm.color_1, self._gm.color_2 + 1)
 
@@ -283,7 +241,6 @@ class BoxfillPipeline(Pipeline2D):
 
         # Colortable bit
         # make sure length match
-        print self._contourLevels
         numLevels = len(self._contourLevels) - 1
         while len(self._contourColors) < numLevels:
             self._contourColors.append(self._contourColors[-1])
-- 
GitLab


From 602bba871669b09b6e4622ea18d35baafa62ce42 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Mon, 18 Apr 2016 15:00:09 -0700
Subject: [PATCH 019/196] Tweaks to get tests to pass

---
 Packages/vcs/vcs/boxfill.py  | 61 ++++++++++++++++++++++++++----------
 Packages/vcs/vcs/template.py | 18 ++++++-----
 2 files changed, 55 insertions(+), 24 deletions(-)

diff --git a/Packages/vcs/vcs/boxfill.py b/Packages/vcs/vcs/boxfill.py
index 76de13e04..e17145437 100755
--- a/Packages/vcs/vcs/boxfill.py
+++ b/Packages/vcs/vcs/boxfill.py
@@ -739,8 +739,12 @@ class Gfb(object):
         if self.boxfill_type == "custom":
             return self.levels
 
+        nlev = float(self.color_2 - self.color_1 + 1)
+        autolevels = False
+
         if numpy.allclose(self.level_1, 1.e20) or \
            numpy.allclose(self.level_2, 1.e20):
+            autolevels = True
             low_end = varmin
             high_end = varmax
         else:
@@ -751,32 +755,57 @@ class Gfb(object):
             low_end = numpy.ma.log10(low_end)
             high_end = numpy.ma.log10(high_end)
 
-        nlev = float(self.color_2 - self.color_1 + 1)
-        scale = vcs.mkscale(low_end, high_end)
 
-        low_end = scale[0]
-        high_end = scale[-1]
+        if autolevels:
+            # Use nice values for the scale
+            scale = vcs.mkscale(low_end, high_end)
+            low_end = scale[0]
+            high_end = scale[-1]
 
         dx = (high_end - low_end) / nlev
 
-        contourLevels = numpy.arange(low_end, high_end + dx / 2., dx)
+        if dx == 0:
+            high_end += .00001
+            return [low_end, high_end]
+
+        contourLevels = numpy.arange(low_end, high_end + dx, dx)
+
         return contourLevels
 
     def getlegendlabels(self, levels):
         if self.legend:
             return self.legend
-        nlev = self.color_2 - self.color_1 + 1
-        if nlev >= 12:
-            label_scale = vcs.mkscale(levels[0], levels[-1])
-            return vcs.mklabels(label_scale)
+
+        if numpy.allclose(self.level_1, 1.e20) or \
+           numpy.allclose(self.level_2, 1.e20):
+            autolevels = True
+        else:
+            autolevels = False
+
+        if len(levels) > 12:
+            scale = vcs.mkscale(levels[0], levels[-1])
+            if autolevels:
+                return vcs.mklabels(scale)
+            else:
+                # Create our own scale
+                dx = (self.level_2 - self.level_1) / float(len(scale))
+                real_values = [self.level_1, self.level_2]
+                levels = numpy.arange(levels[0], levels[-1] + dx, dx)
         else:
-            # Need to line up the levels and the labels, so we'll massage the label positions
-            dx = levels[1] - levels[0]
-            # Determine what decimal place we should round the label to to "see a difference"
-            round_pos = -1 * int(numpy.ma.log10(dx))
-            round_values = [numpy.round(l, round_pos) for l in levels]
-            round_labels = vcs.mklabels(round_values, "list")
-            return {lev: label for lev, label in zip(levels, round_labels)}
+            real_values = levels
+
+        # Need to line up the levels and the labels, so we'll massage the label positions
+        max_round = 0
+        for l in real_values:
+            round_pos = 0
+            while numpy.round(l, round_pos) != l:
+                round_pos += 1
+            max_round = max(max_round, round_pos)
+
+        round_values = [numpy.round(l, max_round) for l in levels]
+        round_labels = vcs.mklabels(round_values, "list")
+
+        return {lev: label for lev, label in zip(levels, round_labels)}
 
     ###########################################################################
     #                                                                         #
diff --git a/Packages/vcs/vcs/template.py b/Packages/vcs/vcs/template.py
index cb1c0217f..3eaac046b 100644
--- a/Packages/vcs/vcs/template.py
+++ b/Packages/vcs/vcs/template.py
@@ -1873,21 +1873,23 @@ class P(object):
         else:
             if legend is None:
                 legend = vcs.mklabels(levels)
-            float_epsilon = numpy.finfo(float).eps
+            # We'll use the less precise float epsilon since this is just for labels
+            float_epsilon = numpy.finfo(numpy.float32).eps
             if levels[0] < levels[1]:
-                ecompfunc = lambda x, y: float_epsilon > x - y
-                compfunc = lambda x, y: -float_epsilon > x - y
+                # <=
+                comparison = lambda a, b: float_epsilon > a - b
             else:
-                ecompfunc = lambda x, y: -float_epsilon < x - y
-                compfunc = lambda x, y: float_epsilon < x - y
+                # >=
+                comparison = lambda a, b: -float_epsilon < a - b
+
+            in_bounds = lambda x: comparison(levels[0], x) and comparison(x, levels[-1])
             dlong = dD / (len(levels) - 1)
 
             for l in legend.keys():
-                # if legend key is between levels[0] and levels[-1]
-                if not compfunc(l, levels[0]) and not compfunc(levels[-1], l):
+                if in_bounds(l):
                     for i in range(len(levels) - 1):
                         # if legend key is (inclusive) between levels[i] and levels[i+1]
-                        if ecompfunc(levels[i], l) and ecompfunc(l, levels[i + 1]):
+                        if comparison(levels[i], l) and comparison(l, levels[i + 1]):
                             # first let's figure out where to put the legend label
                             location = i * dlong  # position at beginning of level
                             # Adds the distance from beginning of level box
-- 
GitLab


From de21dafb8092fb258f0053edd9b732e806ff213a Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 19 Apr 2016 13:20:50 -0700
Subject: [PATCH 020/196] readme edits

---
 README.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/README.md b/README.md
index f79ed08fb..f1747bb98 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,9 @@
 uvcdat
 ======
 [![build status](https://travis-ci.org/UV-CDAT/uvcdat.svg?branch=master)](https://travis-ci.org/UV-CDAT/uvcdat/builds)
-[![stable version](http://img.shields.io/badge/stable version-2.4.0-brightgreen.svg)](https://github.com/UV-CDAT/uvcdat/releases/tag/2.4.0)
+[![stable version](http://img.shields.io/badge/stable version-2.4.1-brightgreen.svg)](https://github.com/UV-CDAT/uvcdat/releases/tag/2.4.1)
 ![platforms](http://img.shields.io/badge/platforms-linux | osx-lightgrey.svg)
-[![DOI](https://zenodo.org/badge/doi/10.5281/zenodo.45136.svg)](http://dx.doi.org/10.5281/zenodo.45136)
+[![DOI](https://zenodo.org/badge/doi/10.5281/zenodo.50101.svg)](http://dx.doi.org/10.5281/zenodo.50101)
 
 Developed by partnering with ESGF and the community to create a larger problem-solving environment, UV-CDAT is an open source, easy-to-use application that links together disparate software subsystems and packages to form an integrated environment for analysis and visualization. This project seeks to advance climate science by fulfilling computational and diagnostic/visualization capabilities needed for DOE's climate research.
 
@@ -13,4 +13,4 @@ Developing and Contributing
 ------
 We'd love to get contributions from you! Please take a look at the
 [Contribution Documents](CONTRIBUTING.md) to see how to get your changes merged
-in.
\ No newline at end of file
+in.
-- 
GitLab


From 585e4385fa2f517a580244b9cb3755b2ec8537ac Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Wed, 20 Apr 2016 16:31:34 -0400
Subject: [PATCH 021/196] Allow unicode strings for subsetting.

This is how they come from the web.
---
 Packages/cdms2/Lib/axis.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index df2778854..703c0e938 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -2250,7 +2250,7 @@ def axisMatches(axis, specification):
 
        3. an axis object; will match if it is the same object as axis.
     """   
-    if isinstance(specification, types.StringType):
+    if isinstance(specification, basestring):
         s = string.lower(specification)
         s = s.strip()
         while s[0] == '(':
-- 
GitLab


From aa680fd6f24450f4157978745372cd266df839b0 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Thu, 21 Apr 2016 22:45:35 -0700
Subject: [PATCH 022/196] Update README.md

Adding badges for Conda
---
 README.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/README.md b/README.md
index f1747bb98..5968858fe 100644
--- a/README.md
+++ b/README.md
@@ -5,6 +5,9 @@ uvcdat
 ![platforms](http://img.shields.io/badge/platforms-linux | osx-lightgrey.svg)
 [![DOI](https://zenodo.org/badge/doi/10.5281/zenodo.50101.svg)](http://dx.doi.org/10.5281/zenodo.50101)
 
+[![Anaconda-Server Badge](https://anaconda.org/uvcdat/uvcdat/badges/installer/conda.svg)](https://conda.anaconda.org/uvcdat)
+[![Anaconda-Server Badge](https://anaconda.org/uvcdat/uvcdat/badges/downloads.svg)](https://anaconda.org/uvcdat/uvcdat)
+
 Developed by partnering with ESGF and the community to create a larger problem-solving environment, UV-CDAT is an open source, easy-to-use application that links together disparate software subsystems and packages to form an integrated environment for analysis and visualization. This project seeks to advance climate science by fulfilling computational and diagnostic/visualization capabilities needed for DOE's climate research.
 
 Led by Lawrence Livermore National Laboratory (LLNL), the consortium consists of four DOE laboratories (Los Alamos, Lawrence Berkeley, LLNL, and Oak Ridge), two universities (University of Utah and the Polytechnic Institute of New York University), NASA, and two private companies (Kitware and tech-X) that will develop reusable software and workflow analysis and visualization applications for large-scale DOE climate modeling and measurements archives. The UV-CDAT concept is simple and flexible enough to interchange parts to expand for future DOE activities.
-- 
GitLab


From 6e372a536414695f0ecf32c240374efb063d6d1e Mon Sep 17 00:00:00 2001
From: McEnerney <mcenerney1@llnl.gov>
Date: Mon, 25 Apr 2016 10:20:54 -0700
Subject: [PATCH 023/196] change diags test 01

---
 CMake/cdat_modules/uvcmetrics_pkg.cmake | 2 +-
 testing/metrics/diagtest01.py           | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/CMake/cdat_modules/uvcmetrics_pkg.cmake b/CMake/cdat_modules/uvcmetrics_pkg.cmake
index 7024a9580..2f8294042 100644
--- a/CMake/cdat_modules/uvcmetrics_pkg.cmake
+++ b/CMake/cdat_modules/uvcmetrics_pkg.cmake
@@ -6,7 +6,7 @@ set(UVCMETRICS_ZIP uvcmetrics-${UVCMETRICS_VERSION}.zip)
 #set(UVCMETRICS_SOURCE ${UVCMETRICS_URL}/${UVCMETRICS_ZIP})
 set(UVCMETRICS_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/uvcmetrics.git )
 set(UVCMETRICS_MD5)
-set(UVCMETRICS_BRANCH uvcdat-2.4.1)
+set(UVCMETRICS_BRANCH master)
 
 if (NOT CDAT_BUILD_LEAN)
   add_cdat_package(UVCMETRICS "" "" ON)
diff --git a/testing/metrics/diagtest01.py b/testing/metrics/diagtest01.py
index b8cf415f0..09197a1e3 100755
--- a/testing/metrics/diagtest01.py
+++ b/testing/metrics/diagtest01.py
@@ -58,7 +58,7 @@ varid = 'T'
 vard = package.all_variables( filetable1, filetable2, sname )
 var = vard[varid]
 
-plot = sclass( [filetable1], [filetable2], varid, seasonid )
+plot = sclass( [filetable1], [filetable2], varid, seasonid, plotparms = { 'model':{}, 'obs':{}, 'diff':{} } )
 res = plot.compute()
 if res is not None:
     if res.__class__.__name__ is 'uvc_composite_plotspec':
-- 
GitLab


From 99d489b10def142ad93aa1959c31d797cfa46896 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 26 Apr 2016 08:53:03 -0700
Subject: [PATCH 024/196] cdscan removed use of old modules string and types

---
 Packages/cdms2/Script/cdscan | 84 ++++++++++++++++++------------------
 1 file changed, 41 insertions(+), 43 deletions(-)

diff --git a/Packages/cdms2/Script/cdscan b/Packages/cdms2/Script/cdscan
index 59896b1b4..8914abbdd 100755
--- a/Packages/cdms2/Script/cdscan
+++ b/Packages/cdms2/Script/cdscan
@@ -7,11 +7,9 @@ from cdms2.grid import lookupArray
 from cdms2.axis import calendarToTag, tagToCalendar
 from cdms2.cdmsobj import CdFromObject,CdString,CdScalar,CdFloat,CdDouble,CdShort,CdInt,CdLong
 import numpy
-import string
 import cdtime
 import os.path
 import copy
-import types
 from cdms2 import cdmsNode
 import re
 
@@ -258,7 +256,7 @@ def timeindex(value, units, basetime, delta, calendar):
     where t = reltime(value, units)
     and delu is the time interval (delta, delunits) (e.g., 1 month).
     """
-    if string.find(units," as ")==-1:
+    if units.find(" as ")==-1:
         tval = cdtime.reltime(value, units)
     else:
         tval = cdtime.abstime(value, units)
@@ -426,7 +424,7 @@ def disambig(name, dict, num, comparator, value):
         uniqname = '%s_%d'%(name,num)
         if dict.has_key(uniqname) and comparator(value, dict[uniqname]):
             trial_name = uniqname
-            for letter in string.lowercase:
+            for letter in 'abcdefghijklmnopqrstuvwxyz':
                 uniqname = '%s_%s'%(trial_name,letter)
                 if not dict.has_key(uniqname) or not comparator(value, dict[uniqname]):
                     break
@@ -448,7 +446,7 @@ def comparedomains(domain1, domain2):
         item2 = domain2[i]
         if type(item1)!=type(item2):
             return 1
-        if type(item1)==types.StringType:
+        if isinstance(item1, basestring):
             return item1!=item2
         elif compareaxes(item1, item2):
             return 1
@@ -486,14 +484,14 @@ def validateAttrs(node):
             if reqtype==CdFromObject:
                 reqtype = parenttype
             if reqtype!=datatype and datatype==CdString and scaletype==CdScalar:
-                if reqtype in (CdFloat,CdDouble) and type(attval)!=types.FloatType:
+                if reqtype in (CdFloat,CdDouble) and not isinstance(attval, float):
                     try:
-                        attval = string.atof(attval)
+                        attval = float(attval)
                     except:
                         if verbose:
                             print >> sys.stderr,  "Warning: %s=%s should be a float, id=%s"%(attname,attval,node.id),
                         try:
-                            attval = string.atoi(attval)
+                            attval = int(attval)
                             attval = float(attval)
                             if verbose:
                                 print "(Recasting)"
@@ -507,14 +505,14 @@ def validateAttrs(node):
                             else:
                                 if verbose:
                                     print ""
-                elif reqtype in (CdShort,CdInt,CdLong) and type(attval)!=types.IntType:
+                elif reqtype in (CdShort,CdInt,CdLong) and not isinstance(attval, int):
                     try:
-                        attval = string.atoi(attval)
+                        attval = int(attval)
                     except:
                         if verbose:
                             print >> sys.stderr,  "Warning: %s=%s should be an integer, id=%s"%(attname,attval,node.id),
                         try:
-                            attval = string.atof(attval)
+                            attval = float(attval)
                             attval = int(attval)
                             if verbose:
                                 print "(Recasting)"
@@ -530,7 +528,7 @@ def cloneWithLatCheck(axis):
     global notrimlat
 
     axisvals = origvals = axis[:]
-    if axis.isLatitude() and hasattr(axis,"units") and string.lower(axis.units[0:6])=="degree":
+    if axis.isLatitude() and hasattr(axis,"units") and axis.units[0:6].lower()=="degree":
         if notrimlat==0:
             axisvals = numpy.maximum(-90.0, numpy.minimum(90.0,axisvals))
         if not numpy.ma.allclose(axisvals, origvals) and verbose:
@@ -563,9 +561,9 @@ def addAttrs(fobj, eattrs):
 def setNodeDict(node, dict):
     for key in dict.keys():
         value = dict[key]
-        if (isinstance(value, numpy.integer) or isinstance(value, types.IntType)):
+        if (isinstance(value, numpy.integer) or isinstance(value, int)):
             datatype = CdLong
-        elif (isinstance(value, numpy.floating) or isinstance(value, types.FloatType)):
+        elif (isinstance(value, numpy.floating) or isinstance(value, float)):
             datatype = CdDouble
         else:
             datatype = CdString
@@ -696,7 +694,7 @@ def main(argv):
         if flag=='-a':
             aliasMapFile = arg
         elif flag=='-c':
-            calenkey = string.lower(arg)
+            calenkey = arg.lower()
             calendar = calendarMap[calenkey]
             overrideCalendar = 1
         elif flag=='-d':
@@ -717,7 +715,7 @@ def main(argv):
         elif flag=='--exclude':
             if arg[0]=='-':
                 raise RuntimeError, "--exclude option requires an argument"
-            excludeList = string.split(arg,',')
+            excludeList = arg.split(',')
         elif flag=='--exclude-file':
             excludePattern = arg
         elif flag=='-f':
@@ -732,7 +730,7 @@ def main(argv):
             sys.exit(0)
         elif flag=='-i':
             splitOnTime = 1
-            referenceDelta = string.atof(arg)
+            referenceDelta = float(arg)
             timeIsLinear = 1
             timeIsVector = None
         elif flag=='--ignore-open-error':
@@ -740,7 +738,7 @@ def main(argv):
         elif flag=='--include':
             if arg[0]=='-':
                 raise RuntimeError, "--include option requires an argument"
-            includeList = string.split(arg,',')
+            includeList = arg.split(',')
         elif flag=='--include-file':
             includePattern = arg
         elif flag=='-j':
@@ -748,8 +746,8 @@ def main(argv):
             timeIsLinear = None
         elif flag=='-l':
             splitOnLevel = 1
-            levelstr = string.split(arg,',')
-            levellist = map(string.atof, levelstr)
+            levelstr = arg.split(',')
+            levellist = map(float, levelstr)
             levels = numpy.array(levellist)
             levels = numpy.sort(levels)
         elif flag=='-m':
@@ -771,19 +769,19 @@ def main(argv):
             timeid = arg
             args.append(('-e','%s.axis=T'%timeid)) # Add axis=T attribute
         elif flag=='--time-linear':
-            targlist = string.split(arg,',')
-            ttzero = string.atof(targlist[0])
-            tdelta = string.atof(targlist[1])
-            tunits = string.strip(targlist[2])
+            targlist = arg.split(',')
+            ttzero = float(targlist[0])
+            tdelta = float(targlist[1])
+            tunits = targlist[2].strip()
             if len(targlist)==4:
-                tcalendar = string.strip(targlist[3])
+                tcalendar = targlist[3].strip()
             else:
                 tcalendar = None
             overrideTimeLinear = [ttzero,tdelta,tunits,tcalendar]
         elif flag=='--var-locate':
             if varLocate is None:
                 varLocate = {}
-            vname, pattern = string.split(arg,',')
+            vname, pattern = arg.split(',')
             varLocate[vname]=pattern
         elif flag=='-x':
             writeToStdout = 0
@@ -806,7 +804,7 @@ def main(argv):
     # Ignore blank paths
     realargs = []
     for arg in lastargs:
-        sarg = string.strip(arg)
+        sarg = arg.strip()
         if len(sarg)>0:
             realargs.append(sarg)
     lastargs = realargs
@@ -816,7 +814,7 @@ def main(argv):
     dsetargs = []
     for arg in lastargs:
         base, suffix = os.path.splitext(arg)
-        if string.lower(suffix) in ['.xml','.cdml']:
+        if suffix.lower() in ['.xml','.cdml']:
             dsetargs.append(arg)
         else:
             fileargs.append(arg)
@@ -872,7 +870,7 @@ def main(argv):
         modelMap = {}
         modelDirs = []
         for line in mfile.readlines():
-            mdirec, model = string.split(line)
+            mdirec, model = line.split()
             modelMap[mdirec] = model
             modelDirs.append(mdirec)
         mfile.close()
@@ -882,10 +880,10 @@ def main(argv):
         aliasMap = {}
         for line in afile.readlines():
             if line[0] not in ["'",'"']: #"
-                varid, alias = string.split(line)
+                varid, alias = line.split()
             else:
-                dummy, varid, alias = string.split(line,line[0])
-                alias = string.strip(alias)
+                dummy, varid, alias = line.split(line[0])
+                alias = alias.strip()
             aliasMap[varid] = alias
         afile.close()
 
@@ -983,7 +981,7 @@ def main(argv):
     boundsmap = {}                      # boundsmap : varid => timebounds_id
     boundsdict = {}                     # Same as vardict for time bounds
     for path in fileargs:
-        path = string.strip(path)
+        path = path.strip()
 
         # Check if the path is included
         if includePattern is not None:
@@ -1021,7 +1019,7 @@ def main(argv):
                 if mo is not None:
                     suffixPattern = modelMap[direc]
                     def gensuffix(m, mo=mo):
-                        i = string.atoi(m.group(1))
+                        i = int(m.group(1))
                         return mo.group(i)
                     varsuffix = re.sub(r'\\g<(\d)>', gensuffix, suffixPattern)
                     break
@@ -1191,7 +1189,7 @@ def main(argv):
                     if verbose and not forecast:
                         print 'Setting reference time units to', referenceTime
                     if timeIsLinear is None and timeIsVector is None:
-                        timeIsLinear = (string.lower(string.split(referenceTime)[0]) in ['hour','hours','minute','minutes','second','seconds'])
+                        timeIsLinear = referenceTime.split()[0].lower() in ['hour','hours','minute','minutes','second','seconds']
                         if timeIsLinear and verbose:
                             print 'Setting time representation to "linear"' #'
                     if timeIsLinear and referenceDelta is None:
@@ -1301,7 +1299,7 @@ def main(argv):
             if prevcal is not None and calendar != prevcal:
                 sameCalendars = 0
             prevcal = calendar
-            if string.find(units," as ")==-1:
+            if units.find(" as ")==-1:
                 time0 = cdtime.reltime(values[0],units)
             else:
                 time0 = cdtime.abstime(values[0],units)
@@ -1319,7 +1317,7 @@ def main(argv):
             if prevcal is not None and calendar != prevcal:
                 sameCalendars = 0
             prevcal = calendar
-            if string.find(units," as ")==-1:
+            if units.find(" as ")==-1:
                 time0 = cdtime.reltime(values[0],units)
             else:
                 time0 = cdtime.abstime(values[0],units)
@@ -1460,7 +1458,7 @@ def main(argv):
             domain, attributes, tcode = vardict[varname]
             for i in range(len(domain)):
                 item = domain[i]
-                if type(item)==types.StringType and item==name:
+                if isinstance(item, basestring) and item==name:
                     domain[i] = axisobj
 
         # Add bounds variables to vardict, varindex
@@ -1479,7 +1477,7 @@ def main(argv):
                         if reprVar in varids:
                             varids.append(boundsname)
                     tmpdom = boundsinfo[0]
-                    if type(tmpdom[1])==types.StringType:
+                    if isinstance(tmpdom[1], basestring):
                         bndsobj = tmpdom[0]
                         boundsdomain = (bndsobj, axisobj)
                     else:
@@ -1547,10 +1545,10 @@ def main(argv):
 
     # Generate the cdms_filemap attribute
     cdms_filemap = str(cdms_filemap_list)
-    cdms_filemap = string.replace(cdms_filemap, ' ', '')
-    cdms_filemap = string.replace(cdms_filemap, 'None', '-')
-    cdms_filemap = string.replace(cdms_filemap, '"', '') #"
-    cdms_filemap = string.replace(cdms_filemap, "'", '')
+    cdms_filemap = cdms_filemap.replace(' ', '')
+    cdms_filemap = cdms_filemap.replace('None', '-')
+    cdms_filemap = cdms_filemap.replace('"', '')
+    cdms_filemap = cdms_filemap.replace("'", '')
 
     # Dump to XML
     datasetnode = cdmsNode.DatasetNode(datasetid)
-- 
GitLab


From c9da8fbba9ae83414658ee040f63c41c0eef6f6c Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 26 Apr 2016 09:07:43 -0700
Subject: [PATCH 025/196] cleaned up avariable from using types or string
 module

---
 Packages/cdms2/Lib/avariable.py | 55 ++++++++++++++-------------------
 1 file changed, 23 insertions(+), 32 deletions(-)

diff --git a/Packages/cdms2/Lib/avariable.py b/Packages/cdms2/Lib/avariable.py
index dd2a2792d..bc20b8be5 100644
--- a/Packages/cdms2/Lib/avariable.py
+++ b/Packages/cdms2/Lib/avariable.py
@@ -3,8 +3,6 @@
 
 "CDMS Variable objects, abstract interface"
 import numpy
-import types
-import string
 import re
 import warnings
 import cdmsNode
@@ -404,7 +402,7 @@ class AbstractVariable(CdmsObj, Slab):
             
         if asarray==0 and isinstance(mv, numpy.ndarray):
             mv = mv[0]
-        if type(mv) is types.StringType and self.dtype.char not in ['?','c','O','S']:
+        if isinstance(mv, basestring) and self.dtype.char not in ['?','c','O','S']:
             mv = float(mv)
         return mv
 
@@ -423,17 +421,16 @@ class AbstractVariable(CdmsObj, Slab):
             return
             
         selftype = self.typecode()
-        valuetype = type(value)
-        if valuetype is numpy.ndarray:
+        if isinstance(value, numpy.ndarray):
             value = value.astype(selftype).item()
-        elif isinstance(value, numpy.floating) or isinstance(value, numpy.integer):
+        elif isinstance(value, (numpy.floating, numpy.integer)):
             value = numpy.array([value], selftype)
-        elif valuetype in [types.FloatType, types.IntType, types.LongType, types.ComplexType]:
+        elif isinstance(value, (float, int, long, complex)):
             try:
                 value = numpy.array([value], selftype)
             except:                     # Set fill value when ar[i:j] returns a masked value
                 value = numpy.array([numpy.ma.default_fill_value(self)], selftype)
-        elif isinstance(value,(str,numpy.string_,numpy.str,numpy.string0,numpy.str_)) and selftype in ['?','c','O','S']: # '?' for Boolean and object
+        elif isinstance(value,(basestring,numpy.string_,numpy.str,numpy.string0,numpy.str_)) and selftype in ['?','c','O','S']: # '?' for Boolean and object
             pass
         else:
             raise CDMSError, 'Invalid missing value %s'%`value`
@@ -1089,7 +1086,7 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
 
             if re.search('^regrid', regridTool, re.I):
                 if keywords.has_key('diag') and \
-                        type(keywords['diag']) == types.DictType:
+                        isinstance(keywords['diag'], dict):
                     keywords['diag']['regridTool'] = 'regrid'
 
                 # the original cdms2 regridder
@@ -1206,7 +1203,7 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
             if specs[i] is Ellipsis:
                 j = myrank  - (nsupplied - (i+1)) 
             else:
-                if isinstance(specs[i], types.IntType):
+                if isinstance(specs[i], int):
                     singles.append(j)
                 j = j + 1
             i = i + 1
@@ -1227,15 +1224,15 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
         slicelist = []
         for i in range(self.rank()):
             key = speclist[i]
-            if isinstance(key, types.IntType):  # x[i]
+            if isinstance(key, int):  # x[i]
                 slicelist.append (slice(key,key+1))
-            elif type(key) is types.SliceType: # x[i:j:k]
+            elif isinstance(key, slice): # x[i:j:k]
                 slicelist.append(key)
             elif key is unspecified or key is None or key == ':':
                 slicelist.append (slice(0, len(self.getAxis(i))))
             elif key is Ellipsis:
                 raise CDMSError, "Misuse of ellipsis in specification."
-            elif type(key) is types.TupleType:
+            elif isinstance(key, tuple):
                 slicelist.append(slice(*key))
             else:
                 raise CDMSError, 'invalid index: %s'% str(key)
@@ -1287,13 +1284,13 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
 
         for i in range(self.rank()):
             item = speclist[i]
-            if isinstance(item, types.SliceType):
+            if isinstance(item, slice):
                 newitem = item
             elif item==':' or item is None or item is unspecified:
                 axis = self.getAxis(i)
                 newitem = slice(0,len(axis))
-            elif isinstance(item, types.ListType) or \
-                 isinstance(item, types.TupleType):
+            elif isinstance(item, list) or \
+                 isinstance(item, tuple):
                 axis = self.getAxis(i)
                 if len(item)==2:        # (start,end)
                     indexInterval = axis.mapIntervalExt(item)
@@ -1314,13 +1311,7 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
                 if indexInterval is None:
                     raise CDMSError, OutOfRange + str(item)
                 newitem = slice(indexInterval[0],indexInterval[1],indexInterval[2])
-            elif isinstance(item, numpy.floating) or \
-                 isinstance(item, types.FloatType) or \
-                 isinstance(item, numpy.integer) or \
-                 isinstance(item, types.IntType) or \
-                 isinstance(item, types.LongType) or \
-                 isinstance(item, types.StringType) or \
-                 type(item) in CdtimeTypes:
+            elif isinstance(item, (numpy.floating, float, numpy.integer, int, long, basestring)) or type(item) in CdtimeTypes:
                 axis = self.getAxis(i)
                 #
                 # default is 'ccn' in axis.mapIntervalExt
@@ -1397,10 +1388,10 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
     # numpy.ma overrides
 
     def __getitem__(self, key):
-        if type(key) is types.TupleType:
+        if isinstance(key, tuple):
             speclist = self._process_specs(key, {})
         else:
-            if isinstance(key, types.IntType) and key>=len(self):
+            if isinstance(key, int) and key>=len(self):
                 raise IndexError, "Index too large: %d"%key
             speclist = self._process_specs([key], {})
 
@@ -1509,7 +1500,7 @@ def orderparse (order):
             remaining axes.
           (name) meaning an axis whose id is name
     """
-    if not isinstance(order, types.StringType):
+    if not isinstance(order, basestring):
         raise CDMSError, 'order arguments must be strings.'
     pos = 0
     result=[]
@@ -1523,8 +1514,8 @@ def orderparse (order):
         elif r == '...':
             r = Ellipsis
         elif len(r) == 1:
-            if r in string.digits:
-                r = string.atoi(r)
+            if r in '0123456789':
+                r = int(r)
         result.append(r)
         pos = m.end(0)
 
@@ -1544,9 +1535,9 @@ def order2index (axes, order):
             remaining axes.
           (name) meaning an axis whose id is name
     """
-    if isinstance(order, types.StringType):
+    if isinstance(order, basestring):
         result = orderparse(order)
-    elif isinstance(order, types.ListType):
+    elif isinstance(order, list):
         result = order
     else:
         raise CDMSError, 'order2index, order specified of bad type:' + str(type(order))
@@ -1557,7 +1548,7 @@ def order2index (axes, order):
     pos = 0
     while j < len(result):
         item = result[j]
-        if isinstance(item, types.StringType):
+        if isinstance(item, basestring):
             if item == 't': 
                 spec = 'time'
             elif item == 'x': 
@@ -1581,7 +1572,7 @@ def order2index (axes, order):
                     break
             else:
                 raise CDMSError, 'No axis matching order spec %s' %str(item)
-        elif isinstance(item, types.IntType):
+        elif isinstance(item, int):
             if item in permutation:
                 raise CDMSError, 'Duplicate item in order %s' % order
             if item >= n:
-- 
GitLab


From c9956674eb09da35755504d376765dd4f47630bb Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 26 Apr 2016 09:16:39 -0700
Subject: [PATCH 026/196] cleaned up cdmsobj from using types or string module

---
 Packages/cdms2/Lib/cdmsobj.py | 59 +++++++++++++++++------------------
 1 file changed, 28 insertions(+), 31 deletions(-)

diff --git a/Packages/cdms2/Lib/cdmsobj.py b/Packages/cdms2/Lib/cdmsobj.py
index 14c2d9bf0..b66cf7307 100644
--- a/Packages/cdms2/Lib/cdmsobj.py
+++ b/Packages/cdms2/Lib/cdmsobj.py
@@ -7,10 +7,7 @@ import cdtime
 import glob
 import os
 import re
-import string
 import sys
-import types
-#import internattr
 
 # Data types
 
@@ -204,7 +201,7 @@ def generateTime(matchobj,timespecs):
     if yrspec:
         pat,name,dimtype,pos = _specifierMap[yrspec]
         yrstr = matchobj.group(name)
-        iyr = string.atoi(yrstr)
+        iyr = int(yrstr)
 
         # Map 2-digit year to [1950,2049)
         if yrspec in ('%y','%ey'):
@@ -220,23 +217,23 @@ def generateTime(matchobj,timespecs):
         elif mospec in ('%g','%eg'):
             imo = _monthMapLower[mostr]
         elif mospec in ('%m','%em','%n','%en'):
-            imo = string.atoi(mostr)
+            imo = int(mostr)
     if dyspec:
         pat,name,dimtype,pos = _specifierMap[dyspec]
         dystr = matchobj.group(name)
-        idy = string.atoi(dystr)
+        idy = int(dystr)
     if hrspec:
         pat,name,dimtype,pos = _specifierMap[hrspec]
         hrstr = matchobj.group(name)
-        ihr = string.atoi(hrstr)
+        ihr = int(hrstr)
     if mispec:
         pat,name,dimtype,pos = _specifierMap[mispec]
         mistr = matchobj.group(name)
-        imi = string.atoi(mistr)
+        imi = int(mistr)
     if sespec:
         pat,name,dimtype,pos = _specifierMap[sespec]
         sestr = matchobj.group(name)
-        ise = string.atoi(sestr)
+        ise = int(sestr)
     return cdtime.comptime(iyr,imo,idy,ihr,imi,ise)   
 
 # Find all files in 'direc' which match 'template'.
@@ -280,9 +277,9 @@ def matchingFiles(direc,template):
         if dimtypes['etime'] != [None,None,None,None,None,None]:
             matchnames[_etime] = generateTime(matchobj,dimtypes['etime'])
         if dimtypes['level'] is not None:
-            matchnames[_level] = string.atoi(matchobj.group('level'))
+            matchnames[_level] = int(matchobj.group('level'))
         if dimtypes['elevel'] is not None:
-            matchnames[_elevel] = string.atoi(matchobj.group('elevel'))
+            matchnames[_elevel] = int(matchobj.group('elevel'))
         matchfiles.append((candfile,matchnames))
 
     return matchfiles
@@ -300,22 +297,22 @@ def getTimeAsString(spec,time):
     elif spec in ('%S','%eS'):
         specstr = str(int(time.second))
     elif spec in ('%Y','%eY'):
-        specstr = string.zfill(str(time.year),4)
+        specstr = str(time.year).zfill(4)
     elif spec in ('%d','%ed'):
         specstr = str(time.day)
     elif spec in ('%f','%ef'):
-        specstr = string.zfill(str(time.day),2)
+        specstr = str(time.day).zfill(2)
     elif spec in ('%g','%eg'):
         imo = time.month
         specstr = _monthListLower[imo-1]
     elif spec in ('%h','%eh'):
-        specstr = string.zfill(str(time.hour),2)
+        specstr = str(time.hour).zfill(2)
     elif spec in ('%m','%em'):
         specstr = str(time.month)
     elif spec in ('%n','%en'):
-        specstr = string.zfill(str(time.month),2)
+        specstr = str(time.month).zfill(2)
     elif spec in ('%y','%ey'):
-        specstr = string.zfill(str(time.year%100),2)
+        specstr = str(time.year%100).zfill(2)
     elif spec in ('%z','%ez'):
         specstr = getTimeAsString('%H',time)+'Z'+getTimeAsString('%Y',time)+getTimeAsString('%n',time)+getTimeAsString('%d',time)
     return specstr
@@ -374,9 +371,9 @@ def getPathFromTemplate(template,matchnames):
 
 def searchPattern(objlist, pattern, attribute=None, tag=None):
     if tag is not None:
-        tag = string.lower(tag)
+        tag = tag.lower()
     regexp = re.compile(pattern)
-    if type(objlist) is not types.ListType:
+    if not isinstance(objlist, list):
         objlist = [objlist]
 
     returnlist = []
@@ -395,9 +392,9 @@ def searchPattern(objlist, pattern, attribute=None, tag=None):
 
 def matchPattern(objlist, pattern, attribute=None, tag=None):
     if tag is not None:
-        tag = string.lower(tag)
+        tag = tag.lower()
     regexp = re.compile(pattern)
-    if type(objlist) is not types.ListType:
+    if not isinstance(objlist, list):
         objlist = [objlist]
 
     returnlist = []
@@ -425,8 +422,8 @@ def matchPattern(objlist, pattern, attribute=None, tag=None):
 
 def searchPredicate(objlist, predicate, tag=None):
     if tag is not None:
-        tag = string.lower(tag)
-    if type(objlist) is not types.ListType:
+        tag = tag.lower()
+    if not isinstance(objlist, list):
         objlist = [objlist]
 
     returnlist = []
@@ -490,12 +487,12 @@ class CdmsObj (object):
                     if reqtype!=datatype and datatype==CdString and scaletype==CdScalar:
                         if reqtype in (CdFloat,CdDouble):
                             try:
-                                attval = string.atof(attval)
+                                attval = float(attval)
                             except:
                                 raise RuntimeError,"%s=%s must be a float"%(attname,attval)
                         elif reqtype in (CdShort,CdInt,CdLong,CdInt64):
                             try:
-                                attval = string.atoi(attval)
+                                attval = int(attval)
                             except:
                                 raise RuntimeError,"%s=%s must be an integer"%(attname,attval)
                 adict[attname] = attval
@@ -519,12 +516,12 @@ class CdmsObj (object):
         """
         if attname is None:
             for attval in self.attributes.values():
-                if type(attval) is types.StringType and pattern.search(attval) is not None:
+                if isinstance(attval,basestring) and pattern.search(attval) is not None:
                     return 1
             return 0
         elif self.attributes.has_key(attname):
             attval = self.attributes[attname]
-            return (type(attval) is types.StringType and pattern.search(attval) is not None)
+            return isinstance(attval,basestring) and pattern.search(attval) is not None
         else:
             return 0
 
@@ -549,12 +546,12 @@ class CdmsObj (object):
         """
         if attname is None:
             for attval in self.attributes.values():
-                if type(attval) is types.StringType and pattern.match(attval) is not None:
+                if isinstance(attval,basestring) and pattern.match(attval) is not None:
                     return 1
             return 0
         elif self.attributes.has_key(attname):
             attval = self.attributes[attname]
-            return (type(attval) is types.StringType and pattern.match(attval) is not None)
+            return isinstance(attval,basestring) and pattern.match(attval) is not None
         else:
             return 0
 
@@ -573,7 +570,7 @@ class CdmsObj (object):
         result :: (list) (0) 
         :::
         """
-        if tag is None or string.lower(tag)==self._node_.tag:
+        if tag is None or tag.lower()==self._node_.tag:
             if self.searchone(pattern,attribute):
                 return [self]
             else:
@@ -596,7 +593,7 @@ class CdmsObj (object):
         result :: (list) (0) 
         :::
         """
-        if tag is None or string.lower(tag)==self._node_.tag:
+        if tag is None or tag.lower()==self._node_.tag:
             if self.matchone(pattern,attribute):
                 return [self]
             else:
@@ -619,7 +616,7 @@ class CdmsObj (object):
         result :: (list) (0) 
         :::
         """
-        if tag is None or string.lower(tag)==self._node_.tag:
+        if tag is None or tag.lower()==self._node_.tag:
             try:
                 if apply(predicate,(self,))==1:
                     result = [self]
-- 
GitLab


From fb948093cd0f4d5eea14203c56fad2c707f1a019 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 26 Apr 2016 09:40:16 -0700
Subject: [PATCH 027/196] cleaned up axis.py

---
 Packages/cdms2/Lib/axis.py       |  92 +++++++++---------
 Packages/cdms2/Lib/internattr.py | 157 -------------------------------
 2 files changed, 46 insertions(+), 203 deletions(-)
 delete mode 100644 Packages/cdms2/Lib/internattr.py

diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index 703c0e938..458096c22 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -6,7 +6,7 @@ CDMS Axis objects
 """
 _debug = 0
 std_axis_attributes = ['name', 'units', 'length', 'values', 'bounds']
-import string, sys, types, copy
+import sys, copy
 import numpy
 # import regrid2._regrid
 import cdmsNode
@@ -22,13 +22,13 @@ class AliasList (UserList):
     def __init__(self, alist):
         UserList.__init__(self,alist)
     def __setitem__ (self, i, value):
-        self.data[i] = string.lower(value)
+        self.data[i] = value.lower()
     def __setslice(self, i, j, values):
-        self.data[i:j] = map(lambda x: string.lower(x), values)
+        self.data[i:j] = map(lambda x: x.lower(), values)
     def append(self, value):
-        self.data.append(string.lower(value))
+        self.data.append(value.lower())
     def extend(self, values):
-        self.data.extend(map(string.lower, values))
+        self.data.extend(map(str.lower, values))
 
 level_aliases = AliasList(['plev'])
 longitude_aliases = AliasList([])
@@ -228,7 +228,7 @@ def mapLinearExt(axis, bounds, interval, indicator ='ccn', epsilon=None, stride=
     intersection is empty.
     """
     
-    indicator = string.lower(indicator)
+    indicator = indicator.lower()
     length = len(axis)
 
     # Make the interval and search array non-decreasing
@@ -615,7 +615,7 @@ class AbstractAxis(CdmsObj):
         self._doubledata_ = None
         
     def __str__ (self):
-        return string.join(self.listall(), "\n") + "\n"
+        return "\n".join(self.listall()) + "\n"
 
     __repr__ = __str__
 
@@ -812,14 +812,14 @@ class AbstractAxis(CdmsObj):
         for val in self[:]:
             comptime=cdtime.reltime(val, self.units).tocomp(calendar)
             s=repr(comptime)
-            tt=string.split(s,' ')
+            tt=s.split(' ')
         
-            ttt=string.split(tt[0],'-')
+            ttt=tt[0].split('-')
             yr=int(ttt[0])
             mo=int(ttt[1])
             da=int(ttt[2])
         
-            ttt=string.split(tt[1],':')
+            ttt=tt[1].split(':')
             hr=int(ttt[0])
             dtg="%04d%02d%02d%02d"%(yr,mo,da,hr)
             result.append(dtg)
@@ -974,7 +974,7 @@ class AbstractAxis(CdmsObj):
     # calendar.
     def getCalendar(self):
         if hasattr(self,'calendar'):
-            calendar = string.lower(self.calendar)
+            calendar = self.calendar.lower()
         else:
             calendar = None
 
@@ -1009,7 +1009,7 @@ class AbstractAxis(CdmsObj):
         if self.isTime():
             if type(value) in CdtimeTypes:
                 value = value.torel(self.units, self.getCalendar()).value
-            elif type(value) is types.StringType and value not in [':',unspecified]:
+            elif isinstance(value, basestring) and value not in [':',unspecified]:
                 cal = self.getCalendar()
                 value = cdtime.s2c(value, cal).torel(self.units, cal).value
         return value
@@ -1022,7 +1022,7 @@ class AbstractAxis(CdmsObj):
             #
             # mf 20010419 test if attribute is a string (non CF), set to 360.0
             #
-            if(type(cycle) == types.StringType):
+            if isinstance(cycle, basestring):
                 cycle = 360.0
         else:
             cycle = 360.0
@@ -1106,7 +1106,7 @@ class AbstractAxis(CdmsObj):
         # check length of indicator if overridden by user
         #
 
-        indicator = string.lower(indicator)
+        indicator = indicator.lower()
         if len(indicator)==2: indicator += 'n'
 
         if( ( len(indicator) != 3 ) or
@@ -1530,24 +1530,24 @@ class Axis(AbstractAxis):
         length = len(node)
 
         # Allow key of form (slice(i,j),) etc.
-        if type(key) is types.TupleType and len(key)==1:
+        if isinstance(key, tuple) and len(key)==1:
             key = key[0]
 
-        if isinstance(key, (types.IntType, numpy.int,numpy.int32)):  # x[i]
+        if isinstance(key, (int, numpy.int,numpy.int32)):  # x[i]
             if key>=length:
                 raise IndexError, 'index out of bounds'
             else:
                 # Don't generate the entire array (if linear) just for one value
                 return node.data[key%length]
-        elif type(key) is types.SliceType: # x[i:j:k]
+        elif isinstance(key, slice): # x[i:j:k]
             if self._data_ is None:
                 self._data_ = node.getData()
             return self._data_[key.start:key.stop:key.step]
-        elif type(key) is types.EllipsisType: # x[...]
+        elif isinstance(key, Ellipsis.__class__): # x[...]
             if self._data_ is None:
                 self._data_ = node.getData()
             return self._data_
-        elif type(key) is types.TupleType:
+        elif isinstance(key, tuple):
             raise IndexError,'axis is one-dimensional'
         else:
             raise IndexError,'index must be an integer: %s'%`key`
@@ -1597,9 +1597,9 @@ class Axis(AbstractAxis):
 
     def getCalendar(self):
         if hasattr(self,'calendar'):
-            calendar = string.lower(self.calendar)
+            calendar = self.calendar.lower()
         elif self.parent is not None and hasattr(self.parent, 'calendar'):
-            calendar = string.lower(self.parent.calendar)
+            calendar = self.parent.calendar.lower()
         else:
             calendar = None
 
@@ -1781,7 +1781,7 @@ class FileAxis(AbstractAxis):
         if obj is not None:
             for attname in self._obj_.__dict__.keys():
                 attval = getattr(self._obj_,attname)
-                if type(attval)!=types.BuiltinFunctionType:
+                if not callable(attval):
                     self.__dict__[attname]  = attval
                     att = self.attributes
                     att[attname]=attval
@@ -1879,29 +1879,29 @@ class FileAxis(AbstractAxis):
         if (self._obj_ is not None) and (self.parent._mode_!='r') and not (hasattr(self.parent,'format') and self.parent.format=="DRS"):
             # For negative strides, get the equivalent slice with positive stride,
             # then reverse the result.
-            if (type(key) is types.SliceType) and (key.step is not None) and key.step<0:
+            if isinstance(key, slice) and (key.step is not None) and key.step<0:
                 posslice = reverseSlice(key,len(self))
                 result = apply(self._obj_.getitem, (posslice,))
                 return result[::-1]
             else:
-                if isinstance(key, types.IntType) and key>=len(self):
+                if isinstance(key, int) and key>=len(self):
                     raise IndexError, 'Index out of bounds: %d'%key
-                if type(key) is not types.TupleType:
+                if not isinstance(key, tuple):
                     key = (key,)
                 return apply(self._obj_.getitem, key)
         if self._data_ is None:
             self._data_ = self.getData()
         length = len(self._data_)
-        if isinstance(key, types.IntType):  # x[i]
+        if isinstance(key, int):  # x[i]
             if key>=length:
                 raise IndexError, 'index out of bounds'
             else:
                 return self._data_[key%length]
-        elif type(key) is types.SliceType: # x[i:j:k]
+        elif isinstance(key, slice): # x[i:j:k]
             return self._data_[key.start:key.stop:key.step]
-        elif type(key) is types.EllipsisType: # x[...]
+        elif isinstance(key, Ellipsis.__class__): # x[...]
             return self._data_
-        elif type(key) is types.TupleType:
+        elif isinstance(key, tuple):
             raise IndexError,'axis is one-dimensional'
         else:
             raise IndexError,'index must be an integer or slice: %s'%`key`
@@ -2027,9 +2027,9 @@ class FileAxis(AbstractAxis):
 
     def getCalendar(self):
         if hasattr(self,'calendar'):
-            calendar = string.lower(self.calendar)
+            calendar = self.calendar.lower()
         elif self.parent is not None and hasattr(self.parent, 'calendar'):
-            calendar = string.lower(self.parent.calendar)
+            calendar = self.parent.calendar.lower()
         else:
             calendar = None
 
@@ -2126,15 +2126,15 @@ def axisMatchIndex (axes, specifications=None, omit=None, order=None):
     """
     if specifications is None:
         speclist = axes
-    elif isinstance(specifications, types.StringType):
+    elif isinstance(specifications, basestring):
         speclist = [specifications]
-    elif isinstance(specifications, types.ListType):
+    elif isinstance(specifications, list):
         speclist = specifications
-    elif isinstance(specifications, types.TupleType):
+    elif isinstance(specifications, tuple):
         speclist=list(specifications)
-    elif isinstance(specifications, types.IntType):
+    elif isinstance(specifications, int):
         speclist = [specifications]
-    elif isinstance(specifications, types.FunctionType):
+    elif callable(specifications):
         speclist = [specifications]
     else: # to allow arange, etc.
         speclist = list(numpy.ma.filled(specifications))
@@ -2142,7 +2142,7 @@ def axisMatchIndex (axes, specifications=None, omit=None, order=None):
     candidates = []
     for i in range(len(axes)):
         for s in speclist:
-            if isinstance(s, types.IntType):
+            if isinstance(s, int):
                 r = (s == i)
             else:
                 r = axisMatches(axes[i], s)
@@ -2155,15 +2155,15 @@ def axisMatchIndex (axes, specifications=None, omit=None, order=None):
 
     if omit is None:
         omitlist = []
-    elif isinstance(omit, types.StringType):
+    elif isinstance(omit, basestring):
         omitlist = [omit]
-    elif isinstance(omit, types.ListType):
+    elif isinstance(omit, list):
         omitlist = omit
-    elif isinstance(omit, types.TupleType):
+    elif isinstance(omit, tuple):
         omitlist=list(omit)
-    elif isinstance(omit, types.IntType):
+    elif isinstance(omit, int):
         omitlist = [omit]
-    elif isinstance(omit, types.FunctionType):
+    elif callable(omit):
         omitlist = [omit]
     elif isinstance(omit, AbstractAxis):
         omitlist = [omit]
@@ -2171,7 +2171,7 @@ def axisMatchIndex (axes, specifications=None, omit=None, order=None):
         raise CDMSError, 'Unknown type of omit specifier.'
 
     for s in omitlist:
-        if isinstance(s, types.IntType):
+        if isinstance(s, int):
             for i in range(len(candidates)):
                 if axes[candidates[i]] is axes[s]:
                     del candidates[i]
@@ -2251,13 +2251,13 @@ def axisMatches(axis, specification):
        3. an axis object; will match if it is the same object as axis.
     """   
     if isinstance(specification, basestring):
-        s = string.lower(specification)
+        s = specification.lower()
         s = s.strip()
         while s[0] == '(':
             if s[-1] != ')':
                 raise CDMSError, 'Malformed axis spec, ' + specification
             s = s[1:-1].strip()
-        if string.lower(axis.id) == s:
+        if axis.id.lower() == s:
             return 1
         elif (s == 'time') or (s in time_aliases):
             return axis.isTime() 
@@ -2272,7 +2272,7 @@ def axisMatches(axis, specification):
         else:
             return 0
 
-    elif isinstance(specification, types.FunctionType):
+    elif callable(specification):
         r = specification(axis)
         if r: 
             return 1
diff --git a/Packages/cdms2/Lib/internattr.py b/Packages/cdms2/Lib/internattr.py
deleted file mode 100644
index ff6ebeb0c..000000000
--- a/Packages/cdms2/Lib/internattr.py
+++ /dev/null
@@ -1,157 +0,0 @@
-"InternalAttributes (implmentation class for CDMS)"
-import types
-import PropertiedClasses
-_PCLASS = PropertiedClasses.PropertiedClass
-class AttributeDict:
-    """An attribute dictionary."""
-    def __init__ (self, owner):
-        self._owner = owner
-    
-    def __getitem__ (self, name):
-        if self.has_key(name):
-            return self._owner.__dict__[name]
-        else:
-            raise KeyError, "%s instance has no external attribute %s" % \
-                   (self._owner.__class__.__name__, name)
-
-    def __setitem__ (self, name, value):
-        if self._owner.is_internal_attribute(name):
-            raise RuntimeError, 'Cannot set internal name in external attribute dictionary.'
-        self._owner.__dict__[name] = value
-
-    def clear (self):
-        self._owner.__dict__.clear()
-
-    def get (self, name, default=None):
-        if self.has_key(name):
-            return self._owner.__dict__[name]
-        else:
-            return default
-
-    def has_key(self, name):
-        d = self._owner.__dict__
-        if d.has_key(name) and not self._owner.is_internal_attribute(name):
-            return 1
-        else:
-            return 0
-
-    def items (self):
-        result = []
-        for name, value in self._owner.__dict__.items():
-            if self._owner.is_internal_attribute(name): continue
-            result.append((name, value))
-        return result
-    
-    def keys (self):
-        result = []
-        for name in self._owner.__dict__.keys():
-            if self._owner.is_internal_attribute(name): continue
-            result.append(name)
-        return result
-
-    def update(self, d):
-        for name, value in d.items():
-            if self._owner.is_internal_attribute(name):
-                raise RuntimeError, "Cannot update attribute dict with internal name"
-        self._owner.__dict__[name] = value
-
-    def values (self):
-        result = []
-        for name, value in self._owner.__dict__.items():
-            if self._owner.is_internal_attribute(name): continue
-            result.append(value)
-        return result
-
-    def __repr__(self):
-        return 'AttributeDict (' + \
-        repr(self._owner.__dict__) + \
-        ')'
-
-    def __str__(self):
-        return str(self._owner.__dict__)
-    
-class InternalAttributesClass (_PCLASS):
-    def _getattributes (self, name):
-        """Return a dictionary-like object of the non-internal attributes."""
-        return AttributeDict(self)
-
-    def is_internal_attribute (self, name):
-        """is_internal_attribute(name) is true if name is internal."""
-        if name[0] == '_' or name in self.__class__._internal:
-            return 1
-        return 0
-
-    def replace_external_attributes(self, newAttributes):
-        """replace_external_attributes(newAttributes)
-           Replace the external attributes with dictionary newAttributes.
-        """
-        if not isinstance(newAttributes, types.DictType) and \
-           not isinstance(newAttributes, AttributeDict):
-            raise ValueError, "Argument must be a dictionary"
-        for n in self.__dict__.keys():
-            if not self.is_internal_attribute(n):
-                del self.__dict__[n]
-        for n, v in newAttributes.items():
-            self.__dict__[n] = v
-
-def initialize_internal_attributes (C):
-    "Prepare a class for life as a child of InternalAttributesClass."
-    if C.__dict__.has_key('_internal'): return
-    if not issubclass(C, InternalAttributesClass):
-        raise ValueError, 'Must be subclass of InternalAttributesClass'
-    PropertiedClasses.initialize_property_class (C)
-    C._internal = []
-    for CP in C.__bases__:
-        if issubclass(CP, InternalAttributesClass):
-            initialize_internal_attributes(CP)
-            for name in CP._internal:
-                C._internal.append(name)
-    
-def add_internal_attribute (C, *aname):
-    """add_internal_attribute (C, name, ...)
-       Make attributes name, ... internal in class C.
-    """
-    initialize_internal_attributes(C)
-    for name in aname:
-        if not name in C._internal:
-            C._internal.append(name)
-
-PropertiedClasses.set_property(InternalAttributesClass, 'attributes', 
-                               InternalAttributesClass._getattributes, 
-                               nowrite=1, nodelete=1)
-
-if __name__ == '__main__':
-    class Test(InternalAttributesClass):
-        def __init__ (self):
-            self.node = None
-            self.parent = None
-            self.__dict__['ro'] = 1
-            self.__hide = 3
-            self._p = 4
-            self.value = 1
-
-    PropertiedClasses.set_property(Test, 'ro', nowrite=1, nodelete=1)
-    add_internal_attribute(Test, 'node', 'parent', 'ro')
-
-    t1 = Test()
-    assert t1.value == 1
-    assert not t1.attributes.has_key('__hide')
-    assert not t1.attributes.has_key('_p')
-    assert t1._p == 4
-    t1.value = 2
-    assert t1.value == 2
-    assert 'value' in t1.attributes.keys()
-    t1.b = t1.value + 1
-    assert t1.b == 3
-    assert t1.b == t1.attributes['b']
-    t1.node = 'me'
-    t1.parent = 'dad'
-    assert t1.node == 'me'
-    assert 'node' not in t1.attributes.keys()
-    assert t1.ro == 1
-    try:
-        t1.ro == 2
-    except AttributeError:
-        pass
-    assert t1.ro == 1
-    print "Test passed."
-- 
GitLab


From d40cb17afc176ecc96b35b563a36b8c27966812e Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 26 Apr 2016 09:51:59 -0700
Subject: [PATCH 028/196] cleaned up cdmsNode

---
 Packages/cdms2/Lib/cdmsNode.py | 48 +++++++++++++++-------------------
 1 file changed, 21 insertions(+), 27 deletions(-)

diff --git a/Packages/cdms2/Lib/cdmsNode.py b/Packages/cdms2/Lib/cdmsNode.py
index 794e4ed3b..5d82a9526 100644
--- a/Packages/cdms2/Lib/cdmsNode.py
+++ b/Packages/cdms2/Lib/cdmsNode.py
@@ -9,9 +9,7 @@ from numpy import get_printoptions, set_printoptions, inf
 import CDML
 import cdtime
 import re
-import string
 import sys
-from types import *
 from error import CDMSError
 
 # Regular expressions
@@ -309,9 +307,9 @@ class CdmsNode:
             if (validAttrs and (attname in validAttrs)) or (not validAttrs):
                 (attval,datatype)=self.attribute[attname]
                 # attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  # Map illegal chars to entities
-                if type(attval)!=StringType:
+                if not isinstance(attval, basestring):
                     attval = `attval`
-                attvalstr = string.strip(attval)
+                attvalstr = attval.strip()
                 attvalstr = re.sub('\n','\n ',attvalstr) # Make sure continuation lines are preceded with a space
                 if attvalstr=='': attvalstr = "none"
                 fd.write("%s: %s\n"%(attname,attvalstr))
@@ -320,7 +318,7 @@ class CdmsNode:
         for attname in self.attribute.keys():
             if validAttrs and (attname not in validAttrs):
                 (attval,datatype)=self.attribute[attname]
-                if type(attval)!=StringType:
+                if not isinstance(attval, basestring):
                     attval = `attval`
                 attval = re.sub('\n','\n ',attval) # Make sure continuation lines are preceded with a space
                 fd.write("attr: %s=%s\n"%(attname,attval))
@@ -332,7 +330,7 @@ class CdmsNode:
         #     fd.write("value: %s"%(content,))
 
         # Write user attributes
-        if type(userAttrs)==StringType:
+        if isinstance(userAttrs, basestring):
             newAttrs = [userAttrs]
         else:
             newAttrs = userAttrs
@@ -356,7 +354,7 @@ class CdmsNode:
         for attname in self.attribute.keys():
             if attname in validKeys:
                 (atttype,default)=self.dtd[attname]
-                if type(atttype) is TupleType:
+                if isinstance(atttype,tuple):
                     attval=self.getExternalAttr(attname)
                     assert attval in atttype, 'Invalid attribute %s=%s must be in %s'%(attname,attval,`atttype`)
                 elif atttype==CDML.Idref:
@@ -426,7 +424,7 @@ class VariableNode(CdmsNode):
     # Create a variable.
     # If validate is true, validate immediately
     def __init__(self, id, datatype, domain):
-        assert type(datatype) is StringType, 'Invalid datatype: '+`datatype`
+        assert isinstance(datatype,basestring), 'Invalid datatype: '+`datatype`
         assert datatype in CdDatatypes, 'Invalid datatype: '+`datatype`
         CdmsNode.__init__(self,"variable",id)
         self.datatype = datatype
@@ -456,8 +454,8 @@ class AxisNode(CdmsNode):
     # If datatype is None, assume values [0,1,..,length-1]
     # data is a numpy array, if specified
     def __init__(self, id, length, datatype=CdLong,data=None):
-        assert isinstance(length, IntType), 'Invalid length: '+`length`
-        assert type(datatype) is StringType, 'Invalid datatype: '+`datatype`
+        assert isinstance(length, int), 'Invalid length: '+`length`
+        assert isinstance(datatype, basestring), 'Invalid datatype: '+`datatype`
         assert datatype in CdDatatypes, 'Invalid datatype: '+`datatype`
         if data is not None: assert isinstance(data, numpy.ndarray), 'data must be a 1-D Numeric array'
         CdmsNode.__init__(self,"axis",id)
@@ -492,7 +490,7 @@ class AxisNode(CdmsNode):
         numlist = []
         for numstring in stringlist:
             if numstring=='': continue
-            numlist.append(string.atof(numstring))
+            numlist.append(float(numstring))
         if len(numlist)>0:
             # NB! len(zero-length array) causes IndexError on Linux!
             dataArray = numpy.array(numlist,numericType)
@@ -506,7 +504,7 @@ class AxisNode(CdmsNode):
         numlist = []
         for numstring in stringlist:
             if numstring=='': continue
-            numlist.append(string.atoi(numstring))
+            numlist.append(int(numstring))
         dataArray = numpy.array(numlist,numpy.int)
         if len(dataArray)>0:
             self.partition = dataArray
@@ -722,13 +720,13 @@ class AxisNode(CdmsNode):
 # Linear data element
 class LinearDataNode(CdmsNode):
 
-    validStartTypes = [IntType,FloatType,type(cdtime.comptime(0)),type(cdtime.reltime(0,"hours"))]
-    validDeltaTypes = [IntType,FloatType,ListType]
+    validStartTypes = [int, float, type(cdtime.comptime(0)), type(cdtime.reltime(0,"hours"))]
+    validDeltaTypes = [int, float, list]
 
     def __init__(self, start, delta, length):
         assert isinstance(start, numpy.floating) or isinstance(start, numpy.integer) or (type(start) in self.validStartTypes), 'Invalid start argument: '+`start`
         assert isinstance(start, numpy.floating) or isinstance(start, numpy.integer) or (type(delta) in self.validDeltaTypes), 'Invalid delta argument: '+`delta`
-        assert isinstance(length, IntType), 'Invalid length argument: '+`length`
+        assert isinstance(length, int), 'Invalid length argument: '+`length`
         CdmsNode.__init__(self,"linear")
         self.delta = delta
         self.length = length
@@ -932,12 +930,8 @@ class AttrNode(CdmsNode):
 
     def __init__(self, name, value=None):
         CdmsNode.__init__(self,"attr")
-        if not (isinstance(value,IntType)
-                or isinstance(value,numpy.integer)
-                or isinstance(value,FloatType)
-                or isinstance(value,numpy.floating)
-                or isinstance(value,StringType)
-                or isinstance(value,NoneType)):
+        if not (isinstance(value, (int, numpy.integer, float, numpy.floating, basestring))
+                or value is None):
             raise CDMSError, 'Invalid attribute type: '+`value`
         self.name = name
         self.value = value
@@ -954,11 +948,11 @@ class AttrNode(CdmsNode):
     def getDatatype(self):
         if self.datatype:
             return self.datatype
-        elif type(self.value) is StringType:
+        elif isinstance(self.value, basestring):
             return CdString
-        elif isinstance(self.value, FloatType) or isinstance(self.value,numpy.floating):
+        elif isinstance(self.value, (float,numpy.floating)):
             return CdDouble
-        elif isinstance(self.value, IntType) or isinstance(self.value,numpy.integer):
+        elif isinstance(self.value, (int, numpy.integer)):
             return CdLong
         else:
             raise CDMSError, 'Invalid attribute type: '+`self.value`
@@ -970,18 +964,18 @@ class AttrNode(CdmsNode):
     #   Returns ValueError if the conversion fails
     def setValueFromString(self,valString,datatype):
         val = None
-        if type(valString) is not StringType:
+        if not isinstance(valString, basestring):
             raise CDMSError, 'input value is not a string'
         if datatype == CdString:
             val=valString
         elif datatype in (CdShort,CdInt,CdLong):
             try:
-                val=string.atoi(valString)
+                val=int(valString)
             except ValueError:
                 raise CDMSError, 'value is not an integer: '+valString
         elif datatype in (CdFloat,CdDouble):
             try:
-                val=string.atof(valString)
+                val=float(valString)
             except ValueError:
                 raise CDMSError, 'value is not floating-point: '+valString
         self.value=val
-- 
GitLab


From bf68e04105ca4367cbd048732fa6ecd26b531732 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 26 Apr 2016 09:54:00 -0700
Subject: [PATCH 029/196] cleaned up coord

---
 Packages/cdms2/Lib/coord.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/Packages/cdms2/Lib/coord.py b/Packages/cdms2/Lib/coord.py
index 6f7773fd1..8460485e6 100644
--- a/Packages/cdms2/Lib/coord.py
+++ b/Packages/cdms2/Lib/coord.py
@@ -10,8 +10,6 @@ import cdtime
 import copy
 import numpy
 #import internattr
-import types
-import string
 from cdmsobj import CdmsObj
 from axis import createAxis, TransientVirtualAxis
 from error import CDMSError
@@ -118,7 +116,7 @@ class AbstractCoordinateAxis(CdmsObj):
     # calendar.
     def getCalendar(self):
         if hasattr(self,'calendar'):
-            calendar = string.lower(self.calendar)
+            calendar = self.calendar.lower()
         else:
             calendar = None
 
@@ -147,7 +145,7 @@ class AbstractCoordinateAxis(CdmsObj):
 
     # Return true iff the axis is a level axis
     def isLevel(self):
-        id = string.lower(self.id)
+        id = self.id.lower()
         if (hasattr(self,'axis') and self.axis=='Z'): return 1
         return ((id[0:3] == 'lev') or (id[0:5] == 'depth') or (id in level_aliases))
 
@@ -158,13 +156,13 @@ class AbstractCoordinateAxis(CdmsObj):
 
     # Return true iff the axis is a time axis
     def isTime(self):
-        id = string.lower(self.id)
+        id = self.id.lower()
         if (hasattr(self,'axis') and self.axis=='T'): return 1
         return (id[0:4] == 'time') or (id in time_aliases)
 
     # Return true iff the axis is a forecast axis
     def isForecast(self):
-        id = string.lower(self.id)
+        id = self.id.lower()
         if (hasattr(self,'axis') and self.axis=='F'): return 1
         return (id[0:6] == 'fctau0') or (id in forecast_aliases)
 
-- 
GitLab


From f570c2cf6112133f777fd6073769814c99b2b5e1 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 26 Apr 2016 11:23:01 -0700
Subject: [PATCH 030/196] cleaned up all (most?) files

---
 Packages/cdms2/Lib/cudsinterface.py    |  6 +++---
 Packages/cdms2/Lib/database.py         |  8 +++-----
 Packages/cdms2/Lib/dataset.py          | 19 +++++++++----------
 Packages/cdms2/Lib/fvariable.py        |  1 -
 Packages/cdms2/Lib/grid.py             |  5 ++---
 Packages/cdms2/Lib/gsStaticVariable.py |  1 -
 Packages/cdms2/Lib/mvCdmsRegrid.py     |  3 +--
 Packages/cdms2/Lib/mvSphereMesh.py     | 11 +++++------
 Packages/cdms2/Lib/selectors.py        |  4 ++--
 Packages/cdms2/Lib/slabinterface.py    |  6 +++---
 Packages/cdms2/Lib/tvariable.py        |  9 ++++-----
 Packages/cdms2/Lib/variable.py         |  1 -
 12 files changed, 32 insertions(+), 42 deletions(-)

diff --git a/Packages/cdms2/Lib/cudsinterface.py b/Packages/cdms2/Lib/cudsinterface.py
index c97f5fdef..4232bdf44 100644
--- a/Packages/cdms2/Lib/cudsinterface.py
+++ b/Packages/cdms2/Lib/cudsinterface.py
@@ -2,7 +2,7 @@
 ## Further modified to be pure new numpy June 24th 2008
 
 "Emulation of old cu package"
-import string, types, sys
+import sys
 from error import CDMSError
 from dataset import openDataset, createDataset
 from tvariable import createVariable
@@ -215,7 +215,7 @@ class cuDataset():
         if device is None: device=sys.stdout
         if vname is None: vname = self.default_variable_name
         alist = self.listall(vname, all=all)
-        device.write(string.join(alist, "\n"))
+        device.write("\n".join(alist))
         device.write("\n")
 
     def dimensionobject (self, dname, vname=None):
@@ -357,7 +357,7 @@ class cuDataset():
                 idim = ndims - (nargs - i - 1)
                 i = i + 1
                 ne = 1
-            elif type(x) == types.TupleType:
+            elif isinstance(x,tuple):
                 cdms_args[idim] = x
                 idim = idim + 1
                 i = i + 1
diff --git a/Packages/cdms2/Lib/database.py b/Packages/cdms2/Lib/database.py
index 80f749fb4..ac8b0dc7a 100644
--- a/Packages/cdms2/Lib/database.py
+++ b/Packages/cdms2/Lib/database.py
@@ -7,9 +7,7 @@ import cdurlparse
 import copy
 import os
 import re
-import string
 import sys
-import types
 from CDMLParser import CDMLParser
 from cdmsobj import CdmsObj
 from dataset import Dataset
@@ -205,7 +203,7 @@ class LDAPDatabase(AbstractDatabase):
     
     def normalizedn(self, dn):
         explodeddn = ldap.explode_dn(dn)
-        return string.join(explodeddn,',')
+        return ','.join(explodeddn)
 
     def cachecdml(self, name, cdml, datapath):
         normaldn = self.normalizedn(name)
@@ -239,7 +237,7 @@ class LDAPDatabase(AbstractDatabase):
 
         # Get the parent dataset
         explodeddn = ldap.explode_dn(dn)
-        dsetdn = string.join(explodeddn[1:],',') # Dataset node is parent of variable
+        dsetdn = ','.join(explodeddn[1:]) # Dataset node is parent of variable
         dset = self.getDataset(dsetdn)
         rdn = explodeddn[0]
         matchobj = _Att.match(rdn)
@@ -468,7 +466,7 @@ class LDAPSearchResult(AbstractSearchResult):
 
         """
         if tag is not None:
-            tag = string.lower(tag)
+            tag = tag.lower()
 
         resultlist = []
         for entry in self:
diff --git a/Packages/cdms2/Lib/dataset.py b/Packages/cdms2/Lib/dataset.py
index 977b1d8bb..6074615de 100644
--- a/Packages/cdms2/Lib/dataset.py
+++ b/Packages/cdms2/Lib/dataset.py
@@ -7,7 +7,6 @@ import Cdunif
 import numpy
 import cdmsNode
 import os, sys
-import string
 import urllib
 import cdmsURLopener                    # Import after urllib, to handle errors
 import urlparse
@@ -280,7 +279,7 @@ Output:::
 file :: (cdms2.dataset.CdmsFile) (0) file to read from
 :::
     """
-    uri = string.strip(uri)
+    uri = uri.strip()
     (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(uri)
     if scheme in ('','file'):
         if netloc:
@@ -417,7 +416,7 @@ def parseIndexList(text):
     for i in range(nindices):
         s = m.group(i+1)
         if s!='-':
-            result[i] = string.atoi(s)
+            result[i] = int(s)
     result[nindices] = m.group(nindices+1)
     return result, m.end()
 
@@ -744,7 +743,7 @@ class Dataset(CdmsObj, cuDataset):
     def searchPattern(self,pattern,attribute,tag):
         resultlist = []
         if tag is not None:
-            tag = string.lower(tag)
+            tag = tag.lower()
         if tag in ('dataset',None):
             if self.searchone(pattern,attribute)==1:
                 resultlist = [self]
@@ -769,7 +768,7 @@ class Dataset(CdmsObj, cuDataset):
     def matchPattern(self,pattern,attribute,tag):
         resultlist = []
         if tag is not None:
-            tag = string.lower(tag)
+            tag = tag.lower()
         if tag in ('dataset',None):
             if self.matchone(pattern,attribute)==1:
                 resultlist = [self]
@@ -797,7 +796,7 @@ class Dataset(CdmsObj, cuDataset):
     def searchPredicate(self,predicate,tag):
         resultlist = []
         if tag is not None:
-            tag = string.lower(tag)
+            tag = tag.lower()
         if tag in ('dataset',None):
             try:
                 if apply(predicate,(self,))==1:
@@ -1548,7 +1547,7 @@ class CdmsFile(CdmsObj, cuDataset):
         """
         resultlist = []
         if tag is not None:
-            tag = string.lower(tag)
+            tag = tag.lower()
         if tag in ('cdmsFile',None,'dataset'):
             if self.searchone(pattern,attribute)==1:
                 resultlist = [self]
@@ -1585,7 +1584,7 @@ class CdmsFile(CdmsObj, cuDataset):
         """
         resultlist = []
         if tag is not None:
-            tag = string.lower(tag)
+            tag = tag.lower()
         if tag in ('cdmsFile',None,'dataset'):
             if self.matchone(pattern,attribute)==1:
                 resultlist = [self]
@@ -1624,7 +1623,7 @@ class CdmsFile(CdmsObj, cuDataset):
         """
         resultlist = []
         if tag is not None:
-            tag = string.lower(tag)
+            tag = tag.lower()
         if tag in ('cdmsFile',None,'dataset'):
             try:
                 if apply(predicate,(self,))==1:
@@ -2017,7 +2016,7 @@ class CdmsFile(CdmsObj, cuDataset):
 
     def __repr__(self):
         filerep = `self._file_`
-        loc = string.find(filerep,"file")
+        loc = filerep.find("file")
         if loc==-1: loc=0
         return "<CDMS "+filerep[loc:-1]+", status: %s>"%self._status_
 
diff --git a/Packages/cdms2/Lib/fvariable.py b/Packages/cdms2/Lib/fvariable.py
index 4271e12c6..6ca32df15 100644
--- a/Packages/cdms2/Lib/fvariable.py
+++ b/Packages/cdms2/Lib/fvariable.py
@@ -4,7 +4,6 @@
 "CDMS File-based variables."
 import numpy
 import typeconv
-import types
 import re
 
 from cdmsobj import Max32int
diff --git a/Packages/cdms2/Lib/grid.py b/Packages/cdms2/Lib/grid.py
index f11ca1764..05fdf9b7c 100644
--- a/Packages/cdms2/Lib/grid.py
+++ b/Packages/cdms2/Lib/grid.py
@@ -2,7 +2,6 @@
 ## Further modified to be pure new numpy June 24th 2008
 
 """CDMS Grid objects"""
-import types
 import re
 from error import CDMSError
 import numpy #, PropertiedClasses, internattr
@@ -129,14 +128,14 @@ def setRegionSpecs(grid, coordSpec, coordType, resultSpec):
     
     if (coordSpec is None) or (coordSpec==':'):
         canonSpec = None
-    elif type(coordSpec) is types.TupleType:
+    elif isinstance(coordSpec, tuple):
         if len(coordSpec)==2:
             canonSpec = (coordSpec[0],coordSpec[1],'cc',None)
         elif len(coordSpec)==3:
             canonSpec = (coordSpec[0],coordSpec[1],coordSpec[2],None)
         elif len(coordSpec)!=4:
             raise CDMSError, 'Invalid coordinate specification: %s'%`coordSpec`
-    elif type(coordSpec) in [types.IntType, types.FloatType]:
+    elif isinstance(coordSpec, (int, float)):
         canonSpec = (coordSpec, coordSpec, 'cc', None)
     else:
         raise CDMSError, 'Invalid coordinate specification: %s'%`coordSpec`
diff --git a/Packages/cdms2/Lib/gsStaticVariable.py b/Packages/cdms2/Lib/gsStaticVariable.py
index 0332e1415..fdd740c86 100644
--- a/Packages/cdms2/Lib/gsStaticVariable.py
+++ b/Packages/cdms2/Lib/gsStaticVariable.py
@@ -9,7 +9,6 @@ No guarantee is provided whatsoever. Use at your own risk.
 
 import operator
 import cdms2
-import types
 from cdms2.error import CDMSError
 from cdms2.hgrid import AbstractCurveGrid, TransientCurveGrid, FileCurveGrid
 from cdms2.coord import TransientAxis2D, TransientVirtualAxis
diff --git a/Packages/cdms2/Lib/mvCdmsRegrid.py b/Packages/cdms2/Lib/mvCdmsRegrid.py
index 6cc0adf8a..5a1f34361 100644
--- a/Packages/cdms2/Lib/mvCdmsRegrid.py
+++ b/Packages/cdms2/Lib/mvCdmsRegrid.py
@@ -5,7 +5,6 @@ David Kindig and Alex Pletzer, Tech-X Corp. (2012)
 This code is provided with the hope that it will be useful.
 No guarantee is provided whatsoever. Use at your own risk.
 """
-import types
 import operator
 import re
 import numpy
@@ -450,7 +449,7 @@ coordMin = %7.2f, boundMin = %7.2f, coordMax = %7.2f, boundMax = %7.2f
         attrs = {}
         for a in srcVar.attributes:
             v = srcVar.attributes[a]
-            if type(v) is types.StringType:
+            if isinstance(v, basestring):
                 attrs[a] = v
 
         # if the missing value is present in the destination data, set
diff --git a/Packages/cdms2/Lib/mvSphereMesh.py b/Packages/cdms2/Lib/mvSphereMesh.py
index 2d4b7abad..7b95922e2 100644
--- a/Packages/cdms2/Lib/mvSphereMesh.py
+++ b/Packages/cdms2/Lib/mvSphereMesh.py
@@ -8,7 +8,6 @@ No guarantee is provided whatsoever. Use at your own risk.
 """
 
 import numpy
-from types import NoneType
 
 class SphereMesh:
     
@@ -34,7 +33,7 @@ class SphereMesh:
 
         # compute the min/max of elevation, needed
         # for normalization
-        if type(elvs) != NoneType:
+        if elvs is not None:
             self.minElv = min(elvs[:])
             self.maxElv = max(elvs[:])
             if hasattr(elvs, 'positive'):
@@ -44,7 +43,7 @@ class SphereMesh:
         # determine the dimensionality and 
         # whether the grid is rectilinear
         for axis in lons, lats, elvs:
-            if type(axis) != NoneType:
+            if axis is not None:
                 self.ndims += 1
                 if len(axis.shape) != 1:
                     self.isRectilinear = False
@@ -53,7 +52,7 @@ class SphereMesh:
         if self.isRectilinear:
             self.shape = []
             for axis in lons, lats, elvs:
-                if type(axis) != NoneType:
+                if axis is not None:
                     self.shape.append( len(axis) )
             self.shape.reverse()
 
@@ -63,7 +62,7 @@ class SphereMesh:
         # store lon, lat, elv as a curvilinear grid
         if self.isRectilinear:
             # apply tensore product of axes to generat curvilinear coordinates
-            if type(elvs) != NoneType:
+            if elvs is not None:
                 self.elvs = numpy.outer(numpy.outer( numpy.ones(self.shape[:0], numpy.float32), elvs),
                                         numpy.ones(self.shape[0+1:], numpy.float32)).reshape(self.shape)
             else:
@@ -77,7 +76,7 @@ class SphereMesh:
             # already in curvilinear form
             self.lons = lons[:]
             self.lats = lats[:]
-            if type(elvs) != NoneType:
+            if elvs is not None:
                 self.elvs = elvs[:]
             else:
                 self.elvs = numpy.zeros( self.shape, numpy.float32 )
diff --git a/Packages/cdms2/Lib/selectors.py b/Packages/cdms2/Lib/selectors.py
index b2ee04bc6..f22443976 100644
--- a/Packages/cdms2/Lib/selectors.py
+++ b/Packages/cdms2/Lib/selectors.py
@@ -1,6 +1,6 @@
 
 """Classes to support easy selection of climate data"""
-import string, types, cdtime
+import cdtime
 from axis import axisMatches
 from error import CDMSError
 from grid import AbstractRectGrid, defaultRegion, setRegionSpecs, LongitudeType, LatitudeType, TimeType, VerticalType
@@ -393,7 +393,7 @@ def required(values):
     """Creates a selector that requires a certain axis to be present."""
     if values is None:
         return all
-    if isinstance(values, types.StringType):
+    if isinstance(values, basestring):
         values = (values,)
     return Selector(requiredComponent(values))
 
diff --git a/Packages/cdms2/Lib/slabinterface.py b/Packages/cdms2/Lib/slabinterface.py
index e08b49ea5..642aea131 100644
--- a/Packages/cdms2/Lib/slabinterface.py
+++ b/Packages/cdms2/Lib/slabinterface.py
@@ -3,7 +3,7 @@
 
 "Read part of the old cu slab interface implemented over CDMS"
 import numpy
-import string, types, sys
+import sys
 from error import CDMSError
 from axis import std_axis_attributes
 import cdms2 as cdms
@@ -143,7 +143,7 @@ class Slab:
         for nd in range(self.rank()):
             result.append('** Dimension ' + str(nd+1) + ' **')
             result = result + self.getAxis(nd).listall(1)
-        print string.join(result, '\n')
+        print '\n'.join(result)
 
     def listdimnames(self):
         "Return a list of the names of the dimensions."
@@ -179,7 +179,7 @@ class Slab:
     def info(self, flag=None, device=None):
         "Write info about slab; include dimension values and weights if flag"
         if device is None: device = sys.stdout
-        device.write(string.join(self.listall(all=flag), "\n"))
+        device.write('\n'.join(self.listall(all=flag)))
         device.write("\n")
 
 def cdms_bounds2cu_bounds (b):
diff --git a/Packages/cdms2/Lib/tvariable.py b/Packages/cdms2/Lib/tvariable.py
index 27cab8156..7d9db8b9d 100644
--- a/Packages/cdms2/Lib/tvariable.py
+++ b/Packages/cdms2/Lib/tvariable.py
@@ -8,7 +8,6 @@ Contains also the write part of the old cu interface.
 """
 import json
 import re
-import types
 import typeconv
 import numpy
 from numpy import sctype2char
@@ -166,7 +165,7 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         if dtype is None and typecode is not None:
             dtype = typeconv.convtypecode2(typecode)
         typecode = sctype2char(dtype)
-        if type(data) is types.TupleType:
+        if isinstance(data, tuple):
             data = list(data)
         
         AbstractVariable.__init__ (self)
@@ -252,7 +251,7 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         if dtype is None and typecode is not None:
             dtype = typeconv.convtypecode2(typecode)
         typecode = sctype2char(dtype)
-        if type(data) is types.TupleType:
+        if isinstance(data, tuple):
             data = list(data)
         if isinstance(data, AbstractVariable):
             if not isinstance(data, TransientVariable):
@@ -479,7 +478,7 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
             raise CDMSError, "setdimattribute, dim out of bounds."
         d = self.getAxis(dim)
         if field == "name":
-            if not type(value) == types.StringType:
+            if not isinstance(value, basestring):
                raise CDMSError, "setdimattribute: name not a string"
             d.id = value
             
@@ -492,7 +491,7 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
             self.setAxis(dim, a)
 
         elif field == "units":
-            if not type(value) == types.StringType:
+            if not isinstance(value, basestring):
                raise CDMSError, "setdimattribute: units not a string"
             d.units = value
 
diff --git a/Packages/cdms2/Lib/variable.py b/Packages/cdms2/Lib/variable.py
index a1f3f7f7d..396dbdf06 100644
--- a/Packages/cdms2/Lib/variable.py
+++ b/Packages/cdms2/Lib/variable.py
@@ -11,7 +11,6 @@ import copy
 import os
 import string
 import sys
-import types
 import cdmsobj
 from cdmsobj import CdmsObj, getPathFromTemplate, Max32int
 from avariable import AbstractVariable
-- 
GitLab


From 05e2675ee1e395a89b18dd6afa713e5d08535aa4 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 26 Apr 2016 11:39:47 -0700
Subject: [PATCH 031/196] removed string module when possible

---
 Packages/cdms2/Lib/CDMLParser.py | 15 +++++++--------
 Packages/cdms2/Lib/cdxmllib.py   |  1 -
 Packages/cdms2/Lib/convention.py | 27 +++++++++++++--------------
 Packages/cdms2/Lib/gengrid.py    |  9 ++++-----
 Packages/cdms2/Lib/hgrid.py      |  9 ++++-----
 Packages/cdms2/Lib/variable.py   |  5 ++---
 6 files changed, 30 insertions(+), 36 deletions(-)

diff --git a/Packages/cdms2/Lib/CDMLParser.py b/Packages/cdms2/Lib/CDMLParser.py
index 1ff9abc56..e741e44d2 100644
--- a/Packages/cdms2/Lib/CDMLParser.py
+++ b/Packages/cdms2/Lib/CDMLParser.py
@@ -6,7 +6,6 @@ from cdxmllib import XMLParser
 import CDML
 import re
 import cdmsNode
-import string
 
 # Error constants
 InvalidAttribute = "Invalid attribute"
@@ -52,7 +51,7 @@ class CDMLParser(XMLParser):
         if not matchObj:
             if self.verbose: print 'data:',data
             if self.root:
-                self.getCurrentNode().setContentFromString(string.strip(data))
+                self.getCurrentNode().setContentFromString(data.strip())
 
     def handle_cdata(self, data):
         if self.verbose: print 'cdata:', `data`
@@ -125,7 +124,7 @@ class CDMLParser(XMLParser):
         datatype = attrs.get('datatype')
         if _Integer.match(length_s) is None:
             raise InvalidAttribute, 'length='+length_s
-        length = string.atoi(length_s)
+        length = int(length_s)
         axis = cdmsNode.AxisNode(id,length,datatype)
         partstring = attrs.get('partition')
         if partstring is not None:
@@ -202,11 +201,11 @@ class CDMLParser(XMLParser):
         start_s = attrs.get('start')
         length_s = attrs.get('length')
         if start_s is not None:
-            start = string.atoi(start_s)
+            start = int(start_s)
         else:
             start = None
         if length_s is not None:
-            length = string.atoi(length_s)
+            length = int(length_s)
         else:
             length = None
         domElem = cdmsNode.DomElemNode(name,start,length)
@@ -250,15 +249,15 @@ class CDMLParser(XMLParser):
         delta_s = attrs['delta']
         length_s = attrs['length']
         try:
-            start=string.atof(start_s)
+            start=float(start_s)
         except ValueError:
             raise InvalidAttribute, 'start='+start_s
         try:
-            delta=string.atof(delta_s)
+            delta=float(delta_s)
         except ValueError:
             raise InvalidAttribute, 'delta='+delta_s
         try:
-            length=string.atoi(length_s)
+            length=int(length_s)
         except ValueError:
             raise InvalidAttribute, 'length='+length_s
         linear = cdmsNode.LinearDataNode(start,delta,length)
diff --git a/Packages/cdms2/Lib/cdxmllib.py b/Packages/cdms2/Lib/cdxmllib.py
index baa0f6dfb..7c1aff308 100644
--- a/Packages/cdms2/Lib/cdxmllib.py
+++ b/Packages/cdms2/Lib/cdxmllib.py
@@ -3,7 +3,6 @@
 # Author: Sjoerd Mullender.
 
 import re
-import string
 
 # import warnings
 # warnings.warn("The xmllib module is obsolete.  Use xml.sax instead.",
diff --git a/Packages/cdms2/Lib/convention.py b/Packages/cdms2/Lib/convention.py
index 4e8c9053e..0ed475877 100644
--- a/Packages/cdms2/Lib/convention.py
+++ b/Packages/cdms2/Lib/convention.py
@@ -1,6 +1,5 @@
 """ metadata conventions """
 
-import string
 from error import CDMSError
 from UserList import UserList
 
@@ -13,11 +12,11 @@ class AliasList (UserList):
     def __init__(self, alist):
         UserList.__init__(self,alist)
     def __setitem__ (self, i, value):
-        self.data[i] = string.lower(value)
+        self.data[i] = value.lower()
     def __setslice(self, i, j, values):
-        self.data[i:j] = map(lambda x: string.lower(x), values)
+        self.data[i:j] = map(lambda x: x.lower(), values)
     def append(self, value):
-        self.data.append(string.lower(value))
+        self.data.append(value.lower())
 
 level_aliases = AliasList(['plev'])
 longitude_aliases = AliasList([])
@@ -37,11 +36,11 @@ class AbstractConvention:
         raise CDMSError, MethodNotImplemented
 
     def axisIsLatitude(self, axis):
-        id = string.lower(axis.id)
+        id = axis.id.lower()
         return (id[0:3] == 'lat') or (id in latitude_aliases)
 
     def axisIsLongitude(self, axis):
-        id = string.lower(axis.id)
+        id = axis.id.lower()
         return (id[0:3] == 'lon') or (id in longitude_aliases)
 
     def getVarLatId(self, var, vardict=None):
@@ -99,7 +98,7 @@ class CFConvention(COARDSConvention):
         coorddict = {}
         for var in vardict.values():
             if hasattr(var, 'coordinates'):
-                coordnames = string.split(var.coordinates)
+                coordnames = var.coordinates.split()
                 for item in coordnames:
                     # Don't include if already a 1D coordinate axis.
                     if item in axiskeys:
@@ -127,7 +126,7 @@ class CFConvention(COARDSConvention):
         for node in dsetdict.values():
             coordnames = node.getExternalAttr('coordinates')
             if coordnames is not None:
-                coordnames = string.split(coordnames)
+                coordnames = coordnames.split()
                 for item in coordnames:
                     # Don't include if already a 1D coordinate axis.
                     if dsetdict.has_key(item) and dsetdict[item].tag=='axis':
@@ -149,7 +148,7 @@ class CFConvention(COARDSConvention):
             return (lat, nlat)
 
         if hasattr(var, 'coordinates'):
-            coordnames = string.split(var.coordinates)
+            coordnames = var.coordinates.split()
             for name in coordnames:
                 coord = vardict.get(name)
 
@@ -173,7 +172,7 @@ class CFConvention(COARDSConvention):
             return (lon, nlon)
 
         if hasattr(var, 'coordinates'):
-            coordnames = string.split(var.coordinates)
+            coordnames = var.coordinates.split()
             for name in coordnames:
                 coord = vardict.get(name)
 
@@ -191,9 +190,9 @@ class CFConvention(COARDSConvention):
     def axisIsLatitude(self, axis):
         if (hasattr(axis,'axis') and axis.axis=='Y'):
             return 1
-        elif (hasattr(axis, 'units') and string.lower(axis.units) in ['degrees_north', 'degree_north', 'degree_n', 'degrees_n', 'degreen', 'degreesn']):
+        elif (hasattr(axis, 'units') and axis.units.lower() in ['degrees_north', 'degree_north', 'degree_n', 'degrees_n', 'degreen', 'degreesn']):
             return 1
-        elif (hasattr(axis, 'standard_name') and string.lower(axis.standard_name)=='latitude'):
+        elif (hasattr(axis, 'standard_name') and axis.standard_name.lower()=='latitude'):
             return 1
         else:
             return AbstractConvention.axisIsLatitude(self, axis)
@@ -201,9 +200,9 @@ class CFConvention(COARDSConvention):
     def axisIsLongitude(self, axis):
         if (hasattr(axis,'axis') and axis.axis=='X'):
             return 1
-        elif (hasattr(axis, 'units') and string.lower(axis.units) in ['degrees_east', 'degree_east', 'degree_e', 'degrees_e', 'degreee', 'degreese']):
+        elif (hasattr(axis, 'units') and axis.units.lower() in ['degrees_east', 'degree_east', 'degree_e', 'degrees_e', 'degreee', 'degreese']):
             return 1
-        elif (hasattr(axis, 'standard_name') and string.lower(axis.standard_name)=='longitude'):
+        elif (hasattr(axis, 'standard_name') and axis.standard_name.lower()=='longitude'):
             return 1
         else:
             return AbstractConvention.axisIsLongitude(self, axis)
diff --git a/Packages/cdms2/Lib/gengrid.py b/Packages/cdms2/Lib/gengrid.py
index 1204733bc..95e6d1871 100644
--- a/Packages/cdms2/Lib/gengrid.py
+++ b/Packages/cdms2/Lib/gengrid.py
@@ -338,7 +338,6 @@ def readScripGenericGrid(fileobj, dims, whichType, whichGrid):
     whichType is the type of file, either "grid" or "mapping"
     if whichType is "mapping", whichGrid is the choice of grid, either "source" or "destination"
     """
-    import string
     from auxcoord import TransientAuxAxis1D
     from coord import TransientVirtualAxis
 
@@ -400,7 +399,7 @@ def readScripGenericGrid(fileobj, dims, whichType, whichGrid):
         ni = dims[0]
 
     boundsshape = (ni, ncorners)
-    if hasattr(cornerLat, 'units') and string.lower(cornerLat.units)[0:6]=='radian':
+    if hasattr(cornerLat, 'units') and cornerLat.units.lower()[0:6]=='radian':
         cornerLat = (cornerLat*(180.0/numpy.pi)).reshape(boundsshape)
         cornerLon = (cornerLon*(180.0/numpy.pi)).reshape(boundsshape)
 
@@ -415,21 +414,21 @@ def readScripGenericGrid(fileobj, dims, whichType, whichGrid):
         
     if vardict.has_key(gridCenterLatName):
         centerLat = fileobj(gridCenterLatName)
-        if hasattr(centerLat, "units") and string.lower(centerLat.units)=='radians':
+        if hasattr(centerLat, "units") and centerLat.units.lower()=='radians':
             centerLat *= (180.0/numpy.pi)
     else:
         centerLat = cornerLat[:,:,0]
 
     if vardict.has_key(gridCenterLonName):
         centerLon = fileobj(gridCenterLonName)
-        if hasattr(centerLon, "units") and string.lower(centerLon.units)=='radians':
+        if hasattr(centerLon, "units") and centerLon.units.lower()=='radians':
             centerLon *= (180.0/numpy.pi)
     else:
         centerLon = cornerLon[:,:,0]
 
     if hasattr(fileobj,titleName):
         gridid = getattr(fileobj, titleName)
-        gridid = string.replace(string.strip(gridid), ' ','_')
+        gridid = gridid.strip().replace(' ','_')
     else:
         gridid="<None>"
 
diff --git a/Packages/cdms2/Lib/hgrid.py b/Packages/cdms2/Lib/hgrid.py
index 56758ad72..1f53d3702 100644
--- a/Packages/cdms2/Lib/hgrid.py
+++ b/Packages/cdms2/Lib/hgrid.py
@@ -758,7 +758,6 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
     whichType is the type of file, either "grid" or "mapping"
     if whichType is "mapping", whichGrid is the choice of grid, either "source" or "destination"
     """
-    import string
     from coord import TransientAxis2D
 
     if 'S' in fileobj.variables.keys():
@@ -814,7 +813,7 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
     nj = dims[0]
     gridshape = (ni, nj)
     boundsshape = (ni, nj, ncorners)
-    if hasattr(cornerLat, 'units') and string.lower(cornerLat.units)[0:6]=='radian':
+    if hasattr(cornerLat, 'units') and cornerLat.units.lower()[0:6]=='radian':
         cornerLat = (cornerLat*(180.0/numpy.pi)).reshape(boundsshape)
         cornerLon = (cornerLon*(180.0/numpy.pi)).reshape(boundsshape)
     else:
@@ -835,7 +834,7 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
     if vardict.has_key(gridCenterLatName):
         centerLat = fileobj(gridCenterLatName).reshape(gridshape)
         gclat = fileobj[gridCenterLatName]
-        if hasattr(gclat, "units") and string.lower(gclat.units)=='radians':
+        if hasattr(gclat, "units") and gclat.units.lower()=='radians':
             centerLat *= (180.0/numpy.pi)
     else:
         centerLat = cornerLat[:,:,0]
@@ -843,14 +842,14 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
     if vardict.has_key(gridCenterLonName):
         centerLon = fileobj(gridCenterLonName).reshape(gridshape)
         gclon = fileobj[gridCenterLonName]
-        if hasattr(gclon, "units") and string.lower(gclon.units)=='radians':
+        if hasattr(gclon, "units") and gclon.units.lower()=='radians':
             centerLon *= (180.0/numpy.pi)
     else:
         centerLon = cornerLon[:,:,0]
 
     if hasattr(fileobj,titleName):
         gridid = getattr(fileobj, titleName)
-        gridid = string.replace(string.strip(gridid), ' ','_')
+        gridid = gridid.strip().replace(' ','_')
     else:
         gridid="<None>"
 
diff --git a/Packages/cdms2/Lib/variable.py b/Packages/cdms2/Lib/variable.py
index 396dbdf06..23650e3d8 100644
--- a/Packages/cdms2/Lib/variable.py
+++ b/Packages/cdms2/Lib/variable.py
@@ -9,7 +9,6 @@ import cdmsNode
 import cdtime
 import copy
 import os
-import string
 import sys
 import cdmsobj
 from cdmsobj import CdmsObj, getPathFromTemplate, Max32int
@@ -139,7 +138,7 @@ class DatasetVariable(AbstractVariable):
                     raise CDMSError, InvalidGridElement + dename
             partlenstr = denode.getExternalAttr('partition_length')
             if partlenstr is not None:
-                truelen = string.atoi(partlenstr)
+                truelen = int(partlenstr)
             else:
                 truelen = denode.length
             self.domain.append((domelem, denode.start, denode.length, truelen))
@@ -224,7 +223,7 @@ class DatasetVariable(AbstractVariable):
             else:                       # Use template method
                 time0 = axis[interval[0]]
                 time1 = axis[interval[1]-1]
-                isabs = (string.find(axis.units," as ")!=-1)
+                isabs = (axis.units.find(" as ")!=-1)
                 if isabs:
                     start = cdtime.abstime(time0,axis.units)
                     end = cdtime.abstime(time1,axis.units)
-- 
GitLab


From 0bc3f4d47b9c21175d4cfa651640f92a4c6215d2 Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Thu, 21 Apr 2016 11:52:28 -0400
Subject: [PATCH 032/196] ENH #1854: Tests will check for alternate baselines
 using basename_[1-9].ext

Previously the pattern used was baseline.*\.ext
---
 testing/checkimage.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/testing/checkimage.py b/testing/checkimage.py
index e0ad9db82..9871f3557 100644
--- a/testing/checkimage.py
+++ b/testing/checkimage.py
@@ -9,6 +9,7 @@ import numpy
 import vtk
 import os
 import os.path
+import re
 import sys
 import logging
 
@@ -40,13 +41,15 @@ def image_from_file(fname):
         print "Problem opening file '%s': %s"%(fname,err)
         return None
 
+# find alternate baselines for fname of the form basename_d.ext
+# where fname = basename.ext and d is a digit between 1 and 9
 def find_alternates(fname):
     dirname = os.path.dirname(fname)
     prefix, ext = os.path.splitext(os.path.split(fname)[1])
     files = os.listdir(dirname)
     results = [fname]
     for i in files:
-        if i.startswith(prefix) and i.endswith(ext) and i != prefix+ext:
+        if (re.match(re.escape(prefix) + '_[1-9]' + re.escape(ext) + '$', i)):
             results.append(os.path.join(dirname, i))
     return results
 
-- 
GitLab


From 2bc8a27233063bbca4c0c5ec23d8e3a4894cfd19 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 26 Apr 2016 14:34:37 -0700
Subject: [PATCH 033/196] moved cdscan to lib dir so that we can import it

---
 Packages/cdms2/{Script/cdscan => Lib/cdscan.py} | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename Packages/cdms2/{Script/cdscan => Lib/cdscan.py} (100%)

diff --git a/Packages/cdms2/Script/cdscan b/Packages/cdms2/Lib/cdscan.py
similarity index 100%
rename from Packages/cdms2/Script/cdscan
rename to Packages/cdms2/Lib/cdscan.py
-- 
GitLab


From 50a55c122e0509915a91a7b525359dffc9475ba5 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 26 Apr 2016 14:37:29 -0700
Subject: [PATCH 034/196] symlinked cdscan so that script is still installed

---
 Packages/cdms2/Script/cdscan | 1 +
 1 file changed, 1 insertion(+)
 create mode 120000 Packages/cdms2/Script/cdscan

diff --git a/Packages/cdms2/Script/cdscan b/Packages/cdms2/Script/cdscan
new file mode 120000
index 000000000..11d1e0bf9
--- /dev/null
+++ b/Packages/cdms2/Script/cdscan
@@ -0,0 +1 @@
+../Lib/cdscan.py
\ No newline at end of file
-- 
GitLab


From d5a9a226ca5fafb65e95fa793cf8144a349adbe3 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 27 Apr 2016 08:46:49 -0700
Subject: [PATCH 035/196] Address @danlipsa comments

---
 Packages/cdms2/Lib/avariable.py | 4 ++--
 Packages/cdms2/Lib/cdmsobj.py   | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/Packages/cdms2/Lib/avariable.py b/Packages/cdms2/Lib/avariable.py
index bc20b8be5..db174105e 100644
--- a/Packages/cdms2/Lib/avariable.py
+++ b/Packages/cdms2/Lib/avariable.py
@@ -1232,7 +1232,7 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
                 slicelist.append (slice(0, len(self.getAxis(i))))
             elif key is Ellipsis:
                 raise CDMSError, "Misuse of ellipsis in specification."
-            elif sinstance(key, tuple):
+            elif isinstance(key, tuple):
                 slicelist.append(slice(*key))
             else:
                 raise CDMSError, 'invalid index: %s'% str(key)
@@ -1514,7 +1514,7 @@ def orderparse (order):
         elif r == '...':
             r = Ellipsis
         elif len(r) == 1:
-            if r in '0123456789':
+            if r.isdigit():
                 r = int(r)
         result.append(r)
         pos = m.end(0)
diff --git a/Packages/cdms2/Lib/cdmsobj.py b/Packages/cdms2/Lib/cdmsobj.py
index b66cf7307..8fad6b3ab 100644
--- a/Packages/cdms2/Lib/cdmsobj.py
+++ b/Packages/cdms2/Lib/cdmsobj.py
@@ -546,12 +546,12 @@ class CdmsObj (object):
         """
         if attname is None:
             for attval in self.attributes.values():
-                if isinstance(attval,basestring) and pattern.search(attval) is not None:
+                if isinstance(attval,basestring) and pattern.match(attval) is not None:
                     return 1
             return 0
         elif self.attributes.has_key(attname):
             attval = self.attributes[attname]
-            return isinstance(attval,basestring) and pattern.search(attval) is not None
+            return isinstance(attval,basestring) and pattern.match(attval) is not None
         else:
             return 0
 
-- 
GitLab


From 6fc54c9a9537d0e4f47ab84a04bdad2c2ea486fa Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 27 Apr 2016 09:08:08 -0700
Subject: [PATCH 036/196] autopep8ed

---
 Packages/cdms2/Lib/CDML.py             |  425 +++---
 Packages/cdms2/Lib/CDMLParser.py       |  180 ++-
 Packages/cdms2/Lib/MV2.py              |  524 ++++---
 Packages/cdms2/Lib/__init__.py         |   62 +-
 Packages/cdms2/Lib/auxcoord.py         |   80 +-
 Packages/cdms2/Lib/avariable.py        |  962 +++++++------
 Packages/cdms2/Lib/axis.py             | 1827 +++++++++++++-----------
 Packages/cdms2/Lib/bindex.py           |   41 +-
 Packages/cdms2/Lib/cache.py            |  167 ++-
 Packages/cdms2/Lib/cdmsNode.py         |  763 +++++-----
 Packages/cdms2/Lib/cdmsURLopener.py    |   17 +-
 Packages/cdms2/Lib/cdmsobj.py          |  438 +++---
 Packages/cdms2/Lib/cdurllib.py         |   72 +-
 Packages/cdms2/Lib/cdurlparse.py       |  339 ++---
 Packages/cdms2/Lib/cdxmllib.py         |  249 ++--
 Packages/cdms2/Lib/convention.py       |   62 +-
 Packages/cdms2/Lib/coord.py            |  201 +--
 Packages/cdms2/Lib/cudsinterface.py    |  182 +--
 Packages/cdms2/Lib/database.py         |  220 +--
 Packages/cdms2/Lib/dataset.py          | 1245 +++++++++-------
 Packages/cdms2/Lib/error.py            |    4 +-
 Packages/cdms2/Lib/forecast.py         |  226 +--
 Packages/cdms2/Lib/fvariable.py        |  137 +-
 Packages/cdms2/Lib/gengrid.py          |  106 +-
 Packages/cdms2/Lib/grid.py             |  385 ++---
 Packages/cdms2/Lib/gsHost.py           |  212 +--
 Packages/cdms2/Lib/gsMosaic.py         |  251 ++--
 Packages/cdms2/Lib/gsStaticVariable.py |  107 +-
 Packages/cdms2/Lib/gsTimeVariable.py   |  160 ++-
 Packages/cdms2/Lib/gui.py              |   31 +-
 Packages/cdms2/Lib/hgrid.py            |  247 ++--
 Packages/cdms2/Lib/mvBaseWriter.py     |   19 +-
 Packages/cdms2/Lib/mvCdmsRegrid.py     |   26 +-
 Packages/cdms2/Lib/mvSphereMesh.py     |  170 ++-
 Packages/cdms2/Lib/mvVTKSGWriter.py    |   29 +-
 Packages/cdms2/Lib/mvVTKUGWriter.py    |   47 +-
 Packages/cdms2/Lib/mvVsWriter.py       |   36 +-
 Packages/cdms2/Lib/restApi.py          |  965 +++++++------
 Packages/cdms2/Lib/selectors.py        |  286 ++--
 Packages/cdms2/Lib/slabinterface.py    |  130 +-
 Packages/cdms2/Lib/sliceut.py          |  214 +--
 Packages/cdms2/Lib/tvariable.py        |  609 ++++----
 Packages/cdms2/Lib/typeconv.py         |   30 +-
 Packages/cdms2/Lib/variable.py         |  345 ++---
 Packages/cdms2/Lib/xlink.py            |   10 +-
 45 files changed, 7050 insertions(+), 5788 deletions(-)

diff --git a/Packages/cdms2/Lib/CDML.py b/Packages/cdms2/Lib/CDML.py
index 7a6c013f3..963f70ac6 100644
--- a/Packages/cdms2/Lib/CDML.py
+++ b/Packages/cdms2/Lib/CDML.py
@@ -37,6 +37,7 @@ CdArray = "Array"
 # Note: at some point, this should be created dynamically
 # from the XML DTD file. For now, it is built statically.
 
+
 class CDML:
 
     cache = {}
@@ -47,246 +48,254 @@ class CDML:
         self.extra = self.extraCache.get(uri)
         if not self.dtd:
             self.dtd = self.buildDTD(uri)
-            self.cache[uri]=self.dtd
+            self.cache[uri] = self.dtd
             self.extra = self.buildExtra(uri)
-            self.extraCache[uri]=self.extra
+            self.extraCache[uri] = self.extra
 
-    def buildDTD(self,uri):
+    def buildDTD(self, uri):
         dtd = {}
         dtd['attr'] = {
-            'name': (Cdata,Required),
-            'datatype': (("Char","Byte","Short","Int","Long","Int64","Float","Double","String"),Required),
-            }
+            'name': (Cdata, Required),
+            'datatype':
+                (("Char", "Byte", "Short", "Int", "Long",
+                 "Int64", "Float", "Double", "String"), Required),
+        }
         dtd['axis'] = {
-            'id': (Id,Required),
-            'associate': (Idref,Implied),
-            'axis': (("X","Y","Z","T"),Implied),
-            'bounds': (Idref,Implied),
-            'calendar': (Cdata,Implied),
-            'comment': (Cdata,Implied),
-            'component': (Cdata,Implied),
-            'compress': (Cdata,Implied),
-            'datatype': (("Char","Byte","Short","Int","Long","Int64","Float","Double","String"),Required),
-            'expand': (Idref,Implied),
-            'interval': (Cdata,Implied),
-            'isvar': ( ("true","false"),"true"),
-            'length': (Cdata,Required),
-            'long_name': (Cdata,Implied),
-            'modulo': (Cdata,Implied),
-            'name_in_file': (Cdata,Implied),
-            'partition': (Cdata,Implied),
-            'partition_length': (Cdata,Implied),
-            'positive': (("up","down"),Implied),
-            'spacing': (("uniform","variable","disjoint"),Implied),
-            'topology': (("linear","circular"),Implied),
-            'weights': (Idref,Implied),
-            'units': (Cdata,Required),
-            }
+            'id': (Id, Required),
+            'associate': (Idref, Implied),
+            'axis': (("X", "Y", "Z", "T"), Implied),
+            'bounds': (Idref, Implied),
+            'calendar': (Cdata, Implied),
+            'comment': (Cdata, Implied),
+            'component': (Cdata, Implied),
+            'compress': (Cdata, Implied),
+            'datatype':
+                (("Char", "Byte", "Short", "Int", "Long",
+                 "Int64", "Float", "Double", "String"), Required),
+            'expand': (Idref, Implied),
+            'interval': (Cdata, Implied),
+            'isvar': (("true", "false"), "true"),
+            'length': (Cdata, Required),
+            'long_name': (Cdata, Implied),
+            'modulo': (Cdata, Implied),
+            'name_in_file': (Cdata, Implied),
+            'partition': (Cdata, Implied),
+            'partition_length': (Cdata, Implied),
+            'positive': (("up", "down"), Implied),
+            'spacing': (("uniform", "variable", "disjoint"), Implied),
+            'topology': (("linear", "circular"), Implied),
+            'weights': (Idref, Implied),
+            'units': (Cdata, Required),
+        }
         dtd['component'] = {
-            'name':(Idref,Required),
-            }
+            'name': (Idref, Required),
+        }
         dtd['dataset'] = {
-            'id': (Id,Required),
-            'Conventions': (Cdata,Required),
-            'appendices': (Cdata,Implied),
-            'calendar': (Cdata,Implied),
-            'cdms_filemap': (Cdata,Implied),
-            'comment': (Cdata,Implied),
-            'directory': (Cdata,Implied),
-            'frequency': (Cdata,Implied),
-            'history': (Cdata,Implied),
-            'institution': (Cdata,Implied),
-            'production': (Cdata,Implied),
-            'project': (Cdata,Implied),
-            'template': (Cdata,Implied),
-            }
+            'id': (Id, Required),
+            'Conventions': (Cdata, Required),
+            'appendices': (Cdata, Implied),
+            'calendar': (Cdata, Implied),
+            'cdms_filemap': (Cdata, Implied),
+            'comment': (Cdata, Implied),
+            'directory': (Cdata, Implied),
+            'frequency': (Cdata, Implied),
+            'history': (Cdata, Implied),
+            'institution': (Cdata, Implied),
+            'production': (Cdata, Implied),
+            'project': (Cdata, Implied),
+            'template': (Cdata, Implied),
+        }
         dtd['doclink'] = {
-            'id': (Id,Implied),
-            'xml:link': (Cdata,(Fixed,"simple")),
-            'href': (Cdata,Required),
-            'role':	(Cdata,Implied),
-            'title': (Cdata,Implied),
-            'show': (("embed","replace","new"),"replace"),
-            'actuate': (("auto","user"),Implied),
-            'behavior':(Cdata,Implied),
-            'content-role': (Cdata,Implied),
-            'content-title': (Cdata,Implied),
-            'inline':(("true","false"),"true"),
-            }
+            'id': (Id, Implied),
+            'xml:link': (Cdata, (Fixed, "simple")),
+            'href': (Cdata, Required),
+            'role': (Cdata, Implied),
+            'title': (Cdata, Implied),
+            'show': (("embed", "replace", "new"), "replace"),
+            'actuate': (("auto", "user"), Implied),
+            'behavior': (Cdata, Implied),
+            'content-role': (Cdata, Implied),
+            'content-title': (Cdata, Implied),
+            'inline': (("true", "false"), "true"),
+        }
         dtd['domain'] = {}
         dtd['domElem'] = {
-            'name':(Idref,Required),
-            'length':(Cdata,Implied),
-            'partition_length':(Cdata,Implied),
-            'start':(Cdata,Implied),
-            }
+            'name': (Idref, Required),
+            'length': (Cdata, Implied),
+            'partition_length': (Cdata, Implied),
+            'start': (Cdata, Implied),
+        }
         dtd['rectGrid'] = {
-            'id': (Id,Required),
-            'type':(("gaussian","uniform","equalarea","unknown"),"unknown"),
-            'latitude':(Idref,Required),
-            'longitude':(Idref,Required),
-            'mask':(Idref,Implied),
-            'order':(("xy","yx"),"yx"),
-            }
+            'id': (Id, Required),
+            'type':
+                (("gaussian", "uniform", "equalarea", "unknown"), "unknown"),
+            'latitude': (Idref, Required),
+            'longitude': (Idref, Required),
+            'mask': (Idref, Implied),
+            'order': (("xy", "yx"), "yx"),
+        }
         dtd['linear'] = {
-            'delta': (Cdata,Required),
-            'length': (Cdata,Required),
-            'start': (Cdata,Required),
-            }
+            'delta': (Cdata, Required),
+            'length': (Cdata, Required),
+            'start': (Cdata, Required),
+        }
         dtd['variable'] = {
-            'id': (Id,Required),
-            'add_offset': (Cdata,Implied),
-            'associate': (Cdata,Implied),
-            'axis': (Cdata,Implied),
-            'comments': (Cdata,Implied),
-            'datatype': (("Char","Byte","Short","Int","Long","Int64","Float","Double","String"),Required),
-            'grid_name': (Cdata,Implied),
-            'grid_type': (Cdata,Implied),
-            'long_name': (Cdata,Implied),
+            'id': (Id, Required),
+            'add_offset': (Cdata, Implied),
+            'associate': (Cdata, Implied),
+            'axis': (Cdata, Implied),
+            'comments': (Cdata, Implied),
+            'datatype':
+                (("Char", "Byte", "Short", "Int", "Long",
+                 "Int64", "Float", "Double", "String"), Required),
+            'grid_name': (Cdata, Implied),
+            'grid_type': (Cdata, Implied),
+            'long_name': (Cdata, Implied),
             'missing_value': (Cdata, Implied),
-            'name_in_file': (Cdata,Implied),
-            'scale_factor': (Cdata,Implied),
-            'subgrid': (Cdata,Implied),
-            'template': (Cdata,Implied),
-            'units': (Cdata,Implied),
-            'valid_max': (Cdata,Implied),
-            'valid_min': (Cdata,Implied),
-            'valid_range': (Cdata,Implied),
-            }
+            'name_in_file': (Cdata, Implied),
+            'scale_factor': (Cdata, Implied),
+            'subgrid': (Cdata, Implied),
+            'template': (Cdata, Implied),
+            'units': (Cdata, Implied),
+            'valid_max': (Cdata, Implied),
+            'valid_min': (Cdata, Implied),
+            'valid_range': (Cdata, Implied),
+        }
         dtd['xlink'] = {
-            'id': (Id,Implied),
-            'xml:link': (Cdata,(Fixed,"simple")),
-            'href': (Cdata,Required),
-            'role':	(Cdata,Implied),
-            'title': (Cdata,Implied),
-            'show': (("embed","replace","new"),"embed"),
-            'actuate': (("auto","user"),Implied),
-            'behavior':(Cdata,Implied),
-            'content-role': (("dataset","axis","grid","variable","object"),"object"),
-            'content-title': (Cdata,Implied),
-            'inline':(("true","false"),"true"),
-            }
+            'id': (Id, Implied),
+            'xml:link': (Cdata, (Fixed, "simple")),
+            'href': (Cdata, Required),
+            'role': (Cdata, Implied),
+            'title': (Cdata, Implied),
+            'show': (("embed", "replace", "new"), "embed"),
+            'actuate': (("auto", "user"), Implied),
+            'behavior': (Cdata, Implied),
+            'content-role':
+                (("dataset", "axis", "grid", "variable", "object"), "object"),
+            'content-title': (Cdata, Implied),
+            'inline': (("true", "false"), "true"),
+        }
         return dtd
 
     # Extra datatype information not included in the formal DTD.
-    def buildExtra(self,uri):
+    def buildExtra(self, uri):
         extra = {}
         extra['attr'] = {
-            'name': (CdScalar,CdString),
-            'datatype': (CdScalar,CdString),
-            }
+            'name': (CdScalar, CdString),
+            'datatype': (CdScalar, CdString),
+        }
         extra['axis'] = {
-            'id': (CdScalar,CdString),
-            'associate': (CdScalar,CdString),
-            'axis': (CdScalar,CdString),
-            'bounds': (CdArray,CdFromObject),
-            'calendar': (CdScalar,CdString),
-            'comment': (CdScalar,CdString),
-            'component': (CdScalar,CdString),
-            'compress': (CdScalar,CdString),
-            'datatype': (CdScalar,CdString),
-            'expand': (CdScalar,CdString),
-            'interval': (CdScalar,CdFromObject),
-            'isvar': (CdScalar,CdString),
-            'length': (CdScalar,CdInt),
-            'long_name': (CdScalar,CdString),
-            'modulo': (CdScalar,CdAny),
-            'name_in_file': (CdScalar,CdString),
-            'partition': (CdArray,CdInt),
-            'partition_length': (CdScalar,CdInt),
-            'positive': (CdScalar,CdString),
-            'spacing': (CdScalar,CdFromObject),
-            'topology': (CdScalar,CdString),
-            'weights': (CdArray,CdDouble),
-            'units': (CdScalar,CdString),
-            }
+            'id': (CdScalar, CdString),
+            'associate': (CdScalar, CdString),
+            'axis': (CdScalar, CdString),
+            'bounds': (CdArray, CdFromObject),
+            'calendar': (CdScalar, CdString),
+            'comment': (CdScalar, CdString),
+            'component': (CdScalar, CdString),
+            'compress': (CdScalar, CdString),
+            'datatype': (CdScalar, CdString),
+            'expand': (CdScalar, CdString),
+            'interval': (CdScalar, CdFromObject),
+            'isvar': (CdScalar, CdString),
+            'length': (CdScalar, CdInt),
+            'long_name': (CdScalar, CdString),
+            'modulo': (CdScalar, CdAny),
+            'name_in_file': (CdScalar, CdString),
+            'partition': (CdArray, CdInt),
+            'partition_length': (CdScalar, CdInt),
+            'positive': (CdScalar, CdString),
+            'spacing': (CdScalar, CdFromObject),
+            'topology': (CdScalar, CdString),
+            'weights': (CdArray, CdDouble),
+            'units': (CdScalar, CdString),
+        }
         extra['component'] = {
-            'name': (CdScalar,CdString),
-            }
+            'name': (CdScalar, CdString),
+        }
         extra['dataset'] = {
-            'id': (CdScalar,CdString),
-            'Conventions': (CdScalar,CdString),
-            'appendices': (CdScalar,CdString),
-            'calendar': (CdScalar,CdString),
-            'cdms_filemap': (CdScalar,CdString),
-            'comment': (CdScalar,CdString),
-            'directory': (CdScalar,CdString),
-            'frequency': (CdScalar,CdString),
-            'history': (CdScalar,CdString),
-            'institution': (CdScalar,CdString),
-            'production': (CdScalar,CdString),
-            'project': (CdScalar,CdString),
-            'template': (CdScalar,CdString),
-            }
+            'id': (CdScalar, CdString),
+            'Conventions': (CdScalar, CdString),
+            'appendices': (CdScalar, CdString),
+            'calendar': (CdScalar, CdString),
+            'cdms_filemap': (CdScalar, CdString),
+            'comment': (CdScalar, CdString),
+            'directory': (CdScalar, CdString),
+            'frequency': (CdScalar, CdString),
+            'history': (CdScalar, CdString),
+            'institution': (CdScalar, CdString),
+            'production': (CdScalar, CdString),
+            'project': (CdScalar, CdString),
+            'template': (CdScalar, CdString),
+        }
         extra['doclink'] = {
-            'id': (CdScalar,CdString),
-            'xml:link': (CdScalar,CdString),
-            'href': (CdScalar,CdString),
-            'role': (CdScalar,CdString),
-            'title': (CdScalar,CdString),
-            'show': (CdScalar,CdString),
-            'actuate': (CdScalar,CdString),
-            'behavior': (CdScalar,CdString),
-            'content-role': (CdScalar,CdString),
-            'content-title': (CdScalar,CdString),
-            'inline': (CdScalar,CdString),
-            }
+            'id': (CdScalar, CdString),
+            'xml:link': (CdScalar, CdString),
+            'href': (CdScalar, CdString),
+            'role': (CdScalar, CdString),
+            'title': (CdScalar, CdString),
+            'show': (CdScalar, CdString),
+            'actuate': (CdScalar, CdString),
+            'behavior': (CdScalar, CdString),
+            'content-role': (CdScalar, CdString),
+            'content-title': (CdScalar, CdString),
+            'inline': (CdScalar, CdString),
+        }
         extra['domain'] = {}
         extra['domElem'] = {
-            'name': (CdScalar,CdString),
-            'length': (CdScalar,CdInt),
-            'partition_length': (CdScalar,CdInt),
-            'start': (CdScalar,CdInt),
-            }
+            'name': (CdScalar, CdString),
+            'length': (CdScalar, CdInt),
+            'partition_length': (CdScalar, CdInt),
+            'start': (CdScalar, CdInt),
+        }
         extra['rectGrid'] = {
-            'id': (CdScalar,CdString),
-            'type': (CdScalar,CdString),
-            'latitude': (CdScalar,CdString),
-            'longitude': (CdScalar,CdString),
-            'mask': (CdScalar,CdString),
-            'order': (CdScalar,CdString),
-            }
+            'id': (CdScalar, CdString),
+            'type': (CdScalar, CdString),
+            'latitude': (CdScalar, CdString),
+            'longitude': (CdScalar, CdString),
+            'mask': (CdScalar, CdString),
+            'order': (CdScalar, CdString),
+        }
         extra['linear'] = {
-            'delta': (CdScalar,CdFromObject),
-            'length': (CdScalar,CdInt),
-            'start': (CdScalar,CdInt),
-            }
+            'delta': (CdScalar, CdFromObject),
+            'length': (CdScalar, CdInt),
+            'start': (CdScalar, CdInt),
+        }
         extra['variable'] = {
-            'id': (CdScalar,CdString),
-            'add_offset': (CdScalar,CdDouble),
-            'associate': (CdScalar,CdString),
-            'axis': (CdScalar,CdString),
-            'comments': (CdScalar,CdString),
-            'datatype': (CdScalar,CdString),
-            'grid_name': (CdScalar,CdString),
-            'grid_type': (CdScalar,CdString),
-            'long_name': (CdScalar,CdString),
-            'missing_value': (CdScalar,CdFromObject),
-            'name_in_file': (CdScalar,CdString),
-            'scale_factor': (CdScalar,CdDouble),
-            'subgrid': (CdScalar,CdString),
-            'template': (CdScalar,CdString),
-            'units': (CdScalar,CdString),
-            'valid_max': (CdScalar,CdFromObject),
-            'valid_min': (CdScalar,CdFromObject),
-            'valid_range': (CdArray,CdFromObject),
-            }
+            'id': (CdScalar, CdString),
+            'add_offset': (CdScalar, CdDouble),
+            'associate': (CdScalar, CdString),
+            'axis': (CdScalar, CdString),
+            'comments': (CdScalar, CdString),
+            'datatype': (CdScalar, CdString),
+            'grid_name': (CdScalar, CdString),
+            'grid_type': (CdScalar, CdString),
+            'long_name': (CdScalar, CdString),
+            'missing_value': (CdScalar, CdFromObject),
+            'name_in_file': (CdScalar, CdString),
+            'scale_factor': (CdScalar, CdDouble),
+            'subgrid': (CdScalar, CdString),
+            'template': (CdScalar, CdString),
+            'units': (CdScalar, CdString),
+            'valid_max': (CdScalar, CdFromObject),
+            'valid_min': (CdScalar, CdFromObject),
+            'valid_range': (CdArray, CdFromObject),
+        }
         extra['xlink'] = {
-            'id': (CdScalar,CdString),
-            'xml:link': (CdScalar,CdString),
-            'href': (CdScalar,CdString),
-            'role': (CdScalar,CdString),
-            'title': (CdScalar,CdString),
-            'show': (CdScalar,CdString),
-            'actuate': (CdScalar,CdString),
-            'behavior': (CdScalar,CdString),
-            'content-role': (CdScalar,CdString),
-            'content-title': (CdScalar,CdString),
-            'inline': (CdScalar,CdString),
-            }
+            'id': (CdScalar, CdString),
+            'xml:link': (CdScalar, CdString),
+            'href': (CdScalar, CdString),
+            'role': (CdScalar, CdString),
+            'title': (CdScalar, CdString),
+            'show': (CdScalar, CdString),
+            'actuate': (CdScalar, CdString),
+            'behavior': (CdScalar, CdString),
+            'content-role': (CdScalar, CdString),
+            'content-title': (CdScalar, CdString),
+            'inline': (CdScalar, CdString),
+        }
         return extra
 
-if __name__=='__main__':
+if __name__ == '__main__':
     cdml = CDML()
     print cdml.extra
     cdml2 = CDML()
diff --git a/Packages/cdms2/Lib/CDMLParser.py b/Packages/cdms2/Lib/CDMLParser.py
index e741e44d2..6a88eda9c 100644
--- a/Packages/cdms2/Lib/CDMLParser.py
+++ b/Packages/cdms2/Lib/CDMLParser.py
@@ -2,10 +2,10 @@
 Parse a CDML/XML file
 """
 
-from cdxmllib import XMLParser
-import CDML
+from .cdxmllib import XMLParser
+from . import CDML
 import re
-import cdmsNode
+from . import cdmsNode
 
 # Error constants
 InvalidAttribute = "Invalid attribute"
@@ -13,21 +13,23 @@ InvalidAttribute = "Invalid attribute"
 # Regular expressions
 _S = re.compile('[ \t\r\n]+$')
 _opS = '[ \t\r\n]*'
-_Integer = re.compile(_opS+'[0-9]+$'+_opS)
+_Integer = re.compile(_opS + '[0-9]+$' + _opS)
+
 
 class CDMLParser(XMLParser):
 
     def __init__(self, verbose=0):
-	XMLParser.__init__(self)
+        XMLParser.__init__(self)
         self.root = None
         self.currentPath = []         # Current path, a stack
         self.dtd = CDML.CDML().dtd
         self.verbose = verbose
 
     # Push current node on the stack
-    def pushCurrentNode(self,node):
+    def pushCurrentNode(self, node):
         self.currentPath.append(node)
-        if not self.root: self.root = node
+        if not self.root:
+            self.root = node
 
     # Pop the current node off the stack
     def popCurrentNode(self):
@@ -49,62 +51,67 @@ class CDMLParser(XMLParser):
     def handle_data(self, data):
         matchObj = _S.match(data)
         if not matchObj:
-            if self.verbose: print 'data:',data
+            if self.verbose:
+                print 'data:', data
             if self.root:
                 self.getCurrentNode().setContentFromString(data.strip())
 
     def handle_cdata(self, data):
-        if self.verbose: print 'cdata:', `data`
+        if self.verbose:
+            print 'cdata:', repr(data)
 
     def handle_proc(self, name, data):
-        if self.verbose: print 'processing:',name,`data`
+        if self.verbose:
+            print 'processing:', name, repr(data)
 
     def handle_special(self, data):
-        if self.verbose: print 'special:',`data`
+        if self.verbose:
+            print 'special:', repr(data)
 
     def handle_starttag(self, tag, method, attrs):
 
-        if self.dtd.has_key(tag):
+        if tag in self.dtd:
 
             # Check that attributes are valid
-	    validDict = self.dtd[tag]
+            validDict = self.dtd[tag]
             validAttrs = validDict.keys()
             attrnames = attrs.keys()
             for attrname in attrnames:
                 if attrname not in validAttrs:
                     self.cdml_syntax_error(self.lineno,
-                                      'unknown attribute %s of element %s' %
-                                      (attrname, tag))
+                                           'unknown attribute %s of element %s' %
+                                           (attrname, tag))
                 else:
-                    (atttype,attdefault)=validDict[attrname]
-                    if type(atttype)==type((0,)):
+                    (atttype, attdefault) = validDict[attrname]
+                    if isinstance(atttype, tuple):
                         attrval = attrs[attrname]
                         if attrval not in atttype:
                             self.cdml_syntax_error(self.lineno,
-                                              'invalid attribute value %s=%s of element %s, must be one of %s' %
-                                              (attrname,attrval,tag,atttype))
+                                                   'invalid attribute value %s=%s of element %s, must be one of %s' %
+                                                   (attrname, attrval, tag, atttype))
 
             # Check that required attributes are present,
             # and add default values
             for attrname in validAttrs:
-                (atttype,attdefault)=validDict[attrname]
-                if attdefault==CDML.Required and attrname not in attrnames:
+                (atttype, attdefault) = validDict[attrname]
+                if attdefault == CDML.Required and attrname not in attrnames:
                     self.cdml_syntax_error(self.lineno,
-                                      'element %s requires an attribute %s' %
-                                      (tag,attrname))
-                if type(attdefault)==type("") and attrname not in attrnames:
-                    attrs[attrname]=attdefault
-            
-	method(attrs)
+                                           'element %s requires an attribute %s' %
+                                           (tag, attrname))
+                if isinstance(attdefault, basestring) and attrname not in attrnames:
+                    attrs[attrname] = attdefault
+
+        method(attrs)
 
     #------------------------------------------------------------------------
     # CDML tags
 
-    def start_attr(self,attrs):
-        if self.verbose: print 'attr:',attrs
+    def start_attr(self, attrs):
+        if self.verbose:
+            print 'attr:', attrs
         name = attrs['name']
         datatype = attrs['datatype']
-        attr = cdmsNode.AttrNode(name,None)
+        attr = cdmsNode.AttrNode(name, None)
         attr.datatype = datatype
         self.pushCurrentNode(attr)
 
@@ -112,33 +119,35 @@ class CDMLParser(XMLParser):
     def end_attr(self):
         attr = self.popCurrentNode()
         var = self.getCurrentNode()
-        attr.setValueFromString(attr.getContent(),attr.datatype)
+        attr.setValueFromString(attr.getContent(), attr.datatype)
         var.setExternalAttrFromAttr(attr)
 
     #------------------------------------------------------------------------
 
-    def start_axis(self,attrs):
-        if self.verbose: print 'axis:',attrs
+    def start_axis(self, attrs):
+        if self.verbose:
+            print 'axis:', attrs
         id = attrs['id']
         length_s = attrs['length']
         datatype = attrs.get('datatype')
         if _Integer.match(length_s) is None:
-            raise InvalidAttribute, 'length='+length_s
+            raise InvalidAttribute('length=' + length_s)
         length = int(length_s)
-        axis = cdmsNode.AxisNode(id,length,datatype)
+        axis = cdmsNode.AxisNode(id, length, datatype)
         partstring = attrs.get('partition')
         if partstring is not None:
             axis.setPartitionFromString(partstring)
         axis.setExternalDict(attrs)
-        self.getCurrentNode().addId(id,axis)
+        self.getCurrentNode().addId(id, axis)
         self.pushCurrentNode(axis)
-                
+
     def end_axis(self):
         self.popCurrentNode()
 
     #------------------------------------------------------------------------
     def start_cdml(self, attrs):
-        if self.verbose: print 'cdml:',attrs
+        if self.verbose:
+            print 'cdml:', attrs
 
     def end_cdml(self):
         pass
@@ -146,36 +155,40 @@ class CDMLParser(XMLParser):
     #------------------------------------------------------------------------
 
     def start_component(self, attrs):
-        if self.verbose: print 'component:',attrs
+        if self.verbose:
+            print 'component:', attrs
 
     def end_component(self):
         pass
 
     #------------------------------------------------------------------------
     def start_compoundAxis(self, attrs):
-        if self.verbose: print 'compoundAxis:',attrs
+        if self.verbose:
+            print 'compoundAxis:', attrs
 
     def end_compoundAxis(self):
         pass
 
     #------------------------------------------------------------------------
     def start_data(self, attrs):
-        if self.verbose: print 'data:',attrs
+        if self.verbose:
+            print 'data:', attrs
 
     def end_data(self):
         pass
 
     #------------------------------------------------------------------------
 
-    def start_dataset(self,attrs):
-        if self.verbose: print 'dataset:',attrs
+    def start_dataset(self, attrs):
+        if self.verbose:
+            print 'dataset:', attrs
         id = attrs['id']
         dataset = cdmsNode.DatasetNode(id)
         dataset.setExternalDict(attrs)
         if self.root:
-            self.getCurrentNode().addId(id,dataset)
+            self.getCurrentNode().addId(id, dataset)
         self.pushCurrentNode(dataset)
-                
+
     def end_dataset(self):
         dataset = self.popCurrentNode()
         dataset.validate()
@@ -183,7 +196,8 @@ class CDMLParser(XMLParser):
     #------------------------------------------------------------------------
 
     def start_doclink(self, attrs):
-        if self.verbose: print 'docLink:',attrs
+        if self.verbose:
+            print 'docLink:', attrs
         uri = attrs['href']
         doclink = cdmsNode.DocLinkNode(uri)
         doclink.setExternalDict(attrs)
@@ -196,7 +210,8 @@ class CDMLParser(XMLParser):
     #------------------------------------------------------------------------
 
     def start_domElem(self, attrs):
-        if self.verbose: print 'domElem:',attrs
+        if self.verbose:
+            print 'domElem:', attrs
         name = attrs['name']
         start_s = attrs.get('start')
         length_s = attrs.get('length')
@@ -208,7 +223,7 @@ class CDMLParser(XMLParser):
             length = int(length_s)
         else:
             length = None
-        domElem = cdmsNode.DomElemNode(name,start,length)
+        domElem = cdmsNode.DomElemNode(name, start, length)
         domElem.setExternalDict(attrs)
         self.getCurrentNode().add(domElem)
 
@@ -217,7 +232,8 @@ class CDMLParser(XMLParser):
 
     #------------------------------------------------------------------------
     def start_domain(self, attrs):
-        if self.verbose: print 'domain:',attrs
+        if self.verbose:
+            print 'domain:', attrs
         domain = cdmsNode.DomainNode()
         self.getCurrentNode().setDomain(domain)
         self.pushCurrentNode(domain)
@@ -227,40 +243,42 @@ class CDMLParser(XMLParser):
 
     #------------------------------------------------------------------------
 
-    def start_rectGrid(self,attrs):
-        if self.verbose: print 'rectGrid:',attrs
+    def start_rectGrid(self, attrs):
+        if self.verbose:
+            print 'rectGrid:', attrs
         id = attrs['id']
         gridtype = attrs['type']
         latitude = attrs['latitude']
         longitude = attrs['longitude']
-        grid = cdmsNode.RectGridNode(id,latitude,longitude,gridtype)
+        grid = cdmsNode.RectGridNode(id, latitude, longitude, gridtype)
         grid.setExternalDict(attrs)
-        self.getCurrentNode().addId(id,grid)
+        self.getCurrentNode().addId(id, grid)
         self.pushCurrentNode(grid)
-                
+
     def end_rectGrid(self):
         self.popCurrentNode()
 
     #------------------------------------------------------------------------
 
     def start_linear(self, attrs):
-        if self.verbose: print 'linear:',attrs
+        if self.verbose:
+            print 'linear:', attrs
         start_s = attrs['start']
         delta_s = attrs['delta']
         length_s = attrs['length']
         try:
-            start=float(start_s)
+            start = float(start_s)
         except ValueError:
-            raise InvalidAttribute, 'start='+start_s
+            raise InvalidAttribute('start=' + start_s)
         try:
-            delta=float(delta_s)
+            delta = float(delta_s)
         except ValueError:
-            raise InvalidAttribute, 'delta='+delta_s
+            raise InvalidAttribute('delta=' + delta_s)
         try:
-            length=int(length_s)
+            length = int(length_s)
         except ValueError:
-            raise InvalidAttribute, 'length='+length_s
-        linear = cdmsNode.LinearDataNode(start,delta,length)
+            raise InvalidAttribute('length=' + length_s)
+        linear = cdmsNode.LinearDataNode(start, delta, length)
         self.getCurrentNode().setLinearData(linear)
 
     def end_linear(self):
@@ -268,28 +286,30 @@ class CDMLParser(XMLParser):
 
     #------------------------------------------------------------------------
 
-    def start_variable(self,attrs):
-        if self.verbose: print 'variable:',attrs
+    def start_variable(self, attrs):
+        if self.verbose:
+            print 'variable:', attrs
         id = attrs['id']
         datatype = attrs['datatype']
-        variable = cdmsNode.VariableNode(id,datatype,None)
+        variable = cdmsNode.VariableNode(id, datatype, None)
         variable.setExternalDict(attrs)
-        self.getCurrentNode().addId(id,variable)
+        self.getCurrentNode().addId(id, variable)
         self.pushCurrentNode(variable)
-                
+
     def end_variable(self):
         self.popCurrentNode()
 
     #------------------------------------------------------------------------
 
     def start_xlink(self, attrs):
-        if self.verbose: print 'xlink:',attrs
+        if self.verbose:
+            print 'xlink:', attrs
         id = attrs['id']
         uri = attrs['href']
         contentRole = attrs['content-role']
-        xlink = cdmsNode.XLinkNode(id,uri,contentRole)
+        xlink = cdmsNode.XLinkNode(id, uri, contentRole)
         xlink.setExternalDict(attrs)
-        self.getCurrentNode().addId(id,xlink)
+        self.getCurrentNode().addId(id, xlink)
         self.pushCurrentNode(xlink)
 
     def end_xlink(self):
@@ -301,30 +321,33 @@ class CDMLParser(XMLParser):
         print 'error near line %d:' % lineno, message
 
     def unknown_starttag(self, tag, attrs):
-        if self.verbose: print '**'+tag+'**:',attrs
+        if self.verbose:
+            print '**' + tag + '**:', attrs
 
     def unknown_endtag(self, tag):
         pass
 
     def unknown_entityref(self, ref):
-	self.flush()
-        if self.verbose: print '*** unknown entity ref: &' + ref + ';'
+        self.flush()
+        if self.verbose:
+            print '*** unknown entity ref: &' + ref + ';'
 
     def unknown_charref(self, ref):
-	self.flush()
-        if self.verbose: print '*** unknown char ref: &#' + ref + ';'
+        self.flush()
+        if self.verbose:
+            print '*** unknown char ref: &#' + ref + ';'
 
     def close(self):
-	XMLParser.close(self)
+        XMLParser.close(self)
 
 if __name__ == '__main__':
     import sys
-    
+
     sampfile = open(sys.argv[1])
     text = sampfile.read()
     sampfile.close()
 
-    if len(sys.argv)==2:
+    if len(sys.argv) == 2:
         verbose = 0
     else:
         verbose = 1
@@ -332,4 +355,3 @@ if __name__ == '__main__':
     p.feed(text)
     p.close()
     p.root.dump()
-    
diff --git a/Packages/cdms2/Lib/MV2.py b/Packages/cdms2/Lib/MV2.py
index b203bc6af..297cd7362 100644
--- a/Packages/cdms2/Lib/MV2.py
+++ b/Packages/cdms2/Lib/MV2.py
@@ -1,31 +1,31 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 "CDMS Variable objects, MaskedArray interface"
 import numpy
-import typeconv
+from . import typeconv
 from numpy import character, float, float32, float64, int, int8, int16, int32
 from numpy.ma import allclose, allequal, common_fill_value, compress, make_mask_none, dot, filled, \
-     getmask, getmaskarray, identity, indices, innerproduct, masked, put, putmask, rank, ravel, \
+    getmask, getmaskarray, identity, indices, innerproduct, masked, put, putmask, rank, ravel, \
      set_fill_value, shape, size, isMA, isMaskedArray, is_mask, isarray, \
      make_mask, make_mask_none, mask_or, nomask
 from numpy import sctype2char, get_printoptions, set_printoptions
-from avariable import AbstractVariable, getNumericCompatibility
-from tvariable import TransientVariable, asVariable
-from grid import AbstractRectGrid
-from error import CDMSError
-#from numpy.ma import *
-from axis import allclose as axisAllclose, TransientAxis, concatenate as axisConcatenate, take as axisTake
-
+from .avariable import AbstractVariable, getNumericCompatibility
+from .tvariable import TransientVariable, asVariable
+from .grid import AbstractRectGrid
+from .error import CDMSError
+# from numpy.ma import *
+from .axis import allclose as axisAllclose, TransientAxis, concatenate as axisConcatenate, take as axisTake
 
 
 create_mask = make_mask_none
 e = numpy.e
 pi = numpy.pi
-#NewAxis = numpy.oldnumeric.NewAxis
+# NewAxis = numpy.oldnumeric.NewAxis
 newaxis = numpy.newaxis
 counter = 0
 
+
 def fill_value(ar):
     return ar.fill_value
 
@@ -34,13 +34,14 @@ def _makeMaskedArg(x):
     """If x is a variable, turn it into a TransientVariable."""
     if isinstance(x, AbstractVariable) and not isinstance(x, TransientVariable):
         return x.subSlice()
-    elif isinstance(x,TransientVariable):
+    elif isinstance(x, TransientVariable):
         return x
     else:
         return array(x)
 
 
-def _extractMetadata(a, axes=None, attributes=None, id=None, omit=None, omitall=False):
+def _extractMetadata(
+        a, axes=None, attributes=None, id=None, omit=None, omitall=False):
     """Extract axes, attributes, id from 'a', if arg is None."""
     resultgrid = None
     if isinstance(a, AbstractVariable):
@@ -52,7 +53,7 @@ def _extractMetadata(a, axes=None, attributes=None, id=None, omit=None, omitall=
             attributes = a.attributes
         if id is None:
             id = "variable_%i" % TransientVariable.variable_count
-            TransientVariable.variable_count+=1
+            TransientVariable.variable_count += 1
 
         # If the grid is rectilinear, don't return an explicit grid: it's implicitly defined
         # by the axes.
@@ -60,13 +61,16 @@ def _extractMetadata(a, axes=None, attributes=None, id=None, omit=None, omitall=
         if (resultgrid is None) or (isinstance(resultgrid, AbstractRectGrid)) or (axes is None):
             resultgrid = None
 
-        # If the omitted axis was associated with the grid, the result will not be gridded.
+        # If the omitted axis was associated with the grid, the result will not
+        # be gridded.
         elif (omit is not None) and (resultgrid is not None) and (a.getAxis(omit) in resultgrid.getAxisList()):
             resultgrid = None
 
     return axes, attributes, id, resultgrid
 
+
 class var_unary_operation:
+
     def __init__(self, mafunc):
         """ var_unary_operation(mafunc)
         mafunc is an numpy.ma masked_unary_function.
@@ -74,26 +78,31 @@ class var_unary_operation:
         self.mafunc = mafunc
         self.__doc__ = mafunc.__doc__
 
-    def __call__ (self, a):
+    def __call__(self, a):
         axes, attributes, id, grid = _extractMetadata(a)
         maresult = self.mafunc(_makeMaskedArg(a))
         return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid)
 
+
 class var_unary_operation_with_axis:
+
     def __init__(self, mafunc):
         """ var_unary_operation(mafunc)
         mafunc is an numpy.ma masked_unary_function.
         """
         self.mafunc = mafunc
         self.__doc__ = mafunc.__doc__
-    def __call__ (self, a, axis=0):
+
+    def __call__(self, a, axis=0):
         axis = _conv_axis_arg(axis)
         ta = _makeMaskedArg(a)
         maresult = self.mafunc(ta, axis=axis)
-        axes, attributes, id, grid = _extractMetadata(a, omit=axis, omitall=(axis is None))
+        axes, attributes, id, grid = _extractMetadata(
+            a, omit=axis, omitall=(axis is None))
         return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid)
 
-def commonDomain(a,b,omit=None):
+
+def commonDomain(a, b, omit=None):
     """commonDomain(a,b) tests that the domains of variables/arrays a and b are equal,
     and returns the common domain if equal, or None if not equal. The domains may
     differ in that one domain may have leading axes not common
@@ -101,14 +110,15 @@ def commonDomain(a,b,omit=None):
     If <omit> is specified, as an integer i, skip comparison of the ith dimension
     and return None for the ith (common) dimension.
     """
-    
+
     if isinstance(b, AbstractVariable):
         bdom = b.getAxisList()
     else:
         bdom = None
-    return commonAxes(a,bdom,omit=omit)
+    return commonAxes(a, bdom, omit=omit)
 
-def commonAxes(a,bdom,omit=None):
+
+def commonAxes(a, bdom, omit=None):
     """Helper function for commonDomain. 'a' is a variable or array,
     'b' is an axislist or None.
     """
@@ -116,40 +126,40 @@ def commonAxes(a,bdom,omit=None):
         adom = a.getAxisList()
         arank = len(adom)
         brank = len(bdom)
-        if arank>brank:
+        if arank > brank:
             maxrank = arank
             minrank = brank
         else:
             maxrank = brank
             minrank = arank
-        diffrank = maxrank-minrank
-        if maxrank==arank:
+        diffrank = maxrank - minrank
+        if maxrank == arank:
             maxdom = adom
         else:
             maxdom = bdom
-        common = [None]*maxrank
+        common = [None] * maxrank
         if omit is None:
             iomit = None
         else:
-            iomit = omit-minrank
+            iomit = omit - minrank
 
         # Check shared dimensions, last to first
         for i in range(minrank):
-            j = -i-1
-            if j==iomit:
+            j = -i - 1
+            if j == iomit:
                 continue
             aj = adom[j]
             bj = bdom[j]
-            if len(aj)!=len(bj):
+            if len(aj) != len(bj):
                 return None
-            elif axisAllclose(aj,bj):
+            elif axisAllclose(aj, bj):
                 common[j] = aj
             else:
                 common[j] = TransientAxis(numpy.arange(len(aj)))
 
         # Copy leading (non-shared) axes
         for i in range(diffrank):
-            common[i]=maxdom[i]
+            common[i] = maxdom[i]
 
         return common
     elif isinstance(a, AbstractVariable):
@@ -162,6 +172,7 @@ def commonAxes(a,bdom,omit=None):
             bdom[omit] = None
         return bdom
 
+
 def commonGrid(a, b, axes):
     """commonGrid(a,b,axes) tests if the grids associated with variables a, b are equal,
     and consistent with the list of axes. If so, the common grid is returned, else None
@@ -182,6 +193,7 @@ def commonGrid(a, b, axes):
 
     return commonGrid1(a, gb, axes)
 
+
 def commonGrid1(a, gb, axes):
     """Helper function for commonGrid."""
     if isinstance(a, AbstractVariable):
@@ -210,7 +222,9 @@ def commonGrid1(a, gb, axes):
 
     return result
 
+
 class var_binary_operation:
+
     def __init__(self, mafunc):
         """ var_binary_operation(mafunc)
         mafunc is an numpy.ma masked_binary_function.
@@ -218,29 +232,29 @@ class var_binary_operation:
         self.mafunc = mafunc
         self.__doc__ = mafunc.__doc__
 
-    def __call__ (self, a, b):
+    def __call__(self, a, b):
         id = "variable_%i" % TransientVariable.variable_count
-        TransientVariable.variable_count+=1
-        axes = commonDomain(a,b)
-        grid = commonGrid(a,b,axes)
+        TransientVariable.variable_count += 1
+        axes = commonDomain(a, b)
+        grid = commonGrid(a, b, axes)
         ta = _makeMaskedArg(a)
         tb = _makeMaskedArg(b)
-        maresult = self.mafunc(ta,tb)
-        return TransientVariable(maresult, axes=axes, grid=grid,no_update_from=True,id=id)
+        maresult = self.mafunc(ta, tb)
+        return TransientVariable(maresult, axes=axes, grid=grid, no_update_from=True, id=id)
 
-    def reduce (self, target, axis=0):
+    def reduce(self, target, axis=0):
         ttarget = _makeMaskedArg(target)
         maresult = self.mafunc.reduce(ttarget, axis=axis)
         axes, attributes, id, grid = _extractMetadata(target, omit=axis)
         return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid)
 
-    def accumulate (self, target, axis=0):
+    def accumulate(self, target, axis=0):
         ttarget = _makeMaskedArg(target)
         maresult = self.mafunc.accumulate(ttarget, axis=axis)
         axes, attributes, id, grid = _extractMetadata(target, omit=axis)
         return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid)
-        
-    def outer (self, a, b):
+
+    def outer(self, a, b):
         """Return the function applied to the outer product of a and b"""
         a1 = _makeMaskedArg(a)
         b1 = _makeMaskedArg(b)
@@ -248,11 +262,13 @@ class var_binary_operation:
         return TransientVariable(maresult)
 
 
-def compress(a,b):
-   __doc__=numpy.ma.__doc__
-   import warnings
-   warnings.warn("arguments order for compress function has changed\nit is now: MV2.copmress(array,condition), if your code seems to not react or act wrong to a call to compress, please check this", Warning)
-   return TransientVariable(numpy.ma.compress(a,b),copy=1)
+def compress(a, b):
+    __doc__ = numpy.ma.__doc__
+    import warnings
+    warnings.warn(
+        "arguments order for compress function has changed\nit is now: MV2.copmress(array,condition), if your code seems to not react or act wrong to a call to compress, please check this",
+        Warning)
+    return TransientVariable(numpy.ma.compress(a, b), copy=1)
 
 
 sqrt = var_unary_operation(numpy.ma.sqrt)
@@ -269,27 +285,31 @@ less_equal = var_binary_operation(numpy.ma.less_equal)
 greater_equal = var_binary_operation(numpy.ma.greater_equal)
 less = var_binary_operation(numpy.ma.less)
 greater = var_binary_operation(numpy.ma.greater)
-def power (a, b, third=None):
+
+
+def power(a, b, third=None):
     "a**b"
     ta = _makeMaskedArg(a)
     tb = _makeMaskedArg(b)
-    maresult = numpy.ma.power(ta,tb,third)
+    maresult = numpy.ma.power(ta, tb, third)
     axes, attributes, id, grid = _extractMetadata(a)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id)
 
-def left_shift (a, n):
+
+def left_shift(a, n):
     "Left shift n bits"
     ta = _makeMaskedArg(a)
     tb = _makeMaskedArg(n)
-    maresult = numpy.ma.left_shift(ta,numpy.ma.filled(tb))
+    maresult = numpy.ma.left_shift(ta, numpy.ma.filled(tb))
     axes, attributes, id, grid = _extractMetadata(a)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id)
 
-def right_shift (a, n):
+
+def right_shift(a, n):
     "Right shift n bits"
     ta = _makeMaskedArg(a)
     tb = _makeMaskedArg(n)
-    maresult = numpy.ma.right_shift(ta,numpy.ma.filled(tb))
+    maresult = numpy.ma.right_shift(ta, numpy.ma.filled(tb))
     axes, attributes, id, grid = _extractMetadata(a)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id)
 
@@ -300,27 +320,33 @@ def _convdtype(dtype, typecode):
         dtype = typeconv.convtypecode2(typecode)
     return dtype
 
+
 def _conv_axis_arg(axis):
     "Handle backward compatibility with numpy for axis arg"
     if getNumericCompatibility() and axis is None:
-        axis=0
+        axis = 0
     return axis
 
+
 def is_masked(x):
     "Is x a 0-D masked value?"
-    return isMaskedArray(x) and x.size==1 and x.ndim==0 and x.mask.item()
+    return isMaskedArray(x) and x.size == 1 and x.ndim == 0 and x.mask.item()
+
 
 def is_floating(x):
     "Is x a scalar float, either python or numpy?"
     return (isinstance(x, numpy.floating) or isinstance(x, float))
 
+
 def is_integer(x):
     "Is x a scalar integer, either python or numpy?"
     return (isinstance(x, numpy.integer) or isinstance(x, int) or isinstance(x, long))
 
+
 def get_print_limit():
     return get_printoptions()['threshold']
 
+
 def set_print_limit(limit=numpy.inf):
     set_printoptions(threshold=limit)
 
@@ -369,75 +395,113 @@ bitwise_or = var_binary_operation(numpy.ma.bitwise_or)
 bitwise_xor = var_binary_operation(numpy.ma.bitwise_xor)
 
 
-def count (a, axis = None):
-    "Count of the non-masked elements in a, or along a certain axis."   
+def count(a, axis=None):
+    "Count of the non-masked elements in a, or along a certain axis."
     if axis is None:
-        return numpy.ma.count(a,axis)
+        return numpy.ma.count(a, axis)
     else:
         ta = _makeMaskedArg(a)
-        maresult = numpy.ma.count(ta,axis)
-        axes, attributes, id, grid = _extractMetadata(a,omit=axis)
-        F=getattr(a,"fill_value",1.e20)
+        maresult = numpy.ma.count(ta, axis)
+        axes, attributes, id, grid = _extractMetadata(a, omit=axis)
+        F = getattr(a, "fill_value", 1.e20)
         return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id, fill_value=F)
 
-def sum (a, axis = None, fill_value=0, dtype=None):
+
+def sum(a, axis=None, fill_value=0, dtype=None):
     "Sum of elements along a certain axis."
     axis = _conv_axis_arg(axis)
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.sum(ta, axis, dtype=dtype)
-    axes, attributes, id, grid = _extractMetadata(a, omit=axis, omitall=(axis is None))
-    F=getattr(a,"fill_value",1.e20)
+    axes, attributes, id, grid = _extractMetadata(
+        a, omit=axis, omitall=(axis is None))
+    F = getattr(a, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id, fill_value=F)
 
-def product (a, axis = 0, dtype=None):
+
+def product(a, axis=0, dtype=None):
     "Product of elements along axis."
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.product(ta, axis, dtype=dtype)
     axes, attributes, id, grid = _extractMetadata(a, omit=axis)
-    F=getattr(a,"fill_value",1.e20)
+    F = getattr(a, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id, fill_value=F)
 
-def average (a, axis=None, weights=None, returned=False):
+
+def average(a, axis=None, weights=None, returned=False):
     axis = _conv_axis_arg(axis)
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.average(ta, axis, weights, returned)
-    axes, attributes, id, grid = _extractMetadata(a, omit=axis, omitall=(axis is None))
+    axes, attributes, id, grid = _extractMetadata(
+        a, omit=axis, omitall=(axis is None))
     if returned:
-      if isinstance(maresult,tuple):
-        maresult, wresult = maresult
-      else:
-        #ok it's masked constant need to return both things by hand
-        wresult = numpy.ma.masked
-    F=getattr(a,"fill_value",1.e20)
-    r1 = TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id,no_update_from=True, fill_value=F)
+        if isinstance(maresult, tuple):
+            maresult, wresult = maresult
+        else:
+            # ok it's masked constant need to return both things by hand
+            wresult = numpy.ma.masked
+    F = getattr(a, "fill_value", 1.e20)
+    r1 = TransientVariable(
+        maresult,
+        axes=axes,
+     attributes=attributes,
+     grid=grid,
+     id=id,
+     no_update_from=True,
+     fill_value=F)
     if returned:
-        F=getattr(a,"fill_value",1.e20)
-        w1 = TransientVariable(wresult, axes=axes, grid=grid, id=id,no_update_from=True, fill_value=F)
+        F = getattr(a, "fill_value", 1.e20)
+        w1 = TransientVariable(
+            wresult,
+            axes=axes,
+            grid=grid,
+            id=id,
+            no_update_from=True,
+            fill_value=F)
         return r1, w1
     else:
         return r1
 average.__doc__ = numpy.ma.average.__doc__
 
-def max (a, axis=None):
+
+def max(a, axis=None):
     axis = _conv_axis_arg(axis)
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.max(ta, axis)
-    axes, attributes, id, grid = _extractMetadata(a, omit=axis, omitall=(axis is None))
-    F=getattr(a,"fill_value",1.e20)
-    r1 = TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id,no_update_from=True, fill_value=F)
+    axes, attributes, id, grid = _extractMetadata(
+        a, omit=axis, omitall=(axis is None))
+    F = getattr(a, "fill_value", 1.e20)
+    r1 = TransientVariable(
+        maresult,
+        axes=axes,
+     attributes=attributes,
+     grid=grid,
+     id=id,
+     no_update_from=True,
+     fill_value=F)
     return r1
 max.__doc__ = numpy.ma.max.__doc__
-def min (a, axis=None):
+
+
+def min(a, axis=None):
     axis = _conv_axis_arg(axis)
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.min(ta, axis)
-    axes, attributes, id, grid = _extractMetadata(a, omit=axis, omitall=(axis is None))
-    F=getattr(a,"fill_value",1.e20)
-    r1 = TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id,no_update_from=True, fill_value=F)
+    axes, attributes, id, grid = _extractMetadata(
+        a, omit=axis, omitall=(axis is None))
+    F = getattr(a, "fill_value", 1.e20)
+    r1 = TransientVariable(
+        maresult,
+        axes=axes,
+     attributes=attributes,
+     grid=grid,
+     id=id,
+     no_update_from=True,
+     fill_value=F)
     return r1
 min.__doc__ = numpy.ma.min.__doc__
 
-def sort (a, axis=-1):
+
+def sort(a, axis=-1):
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.sort(a.asma(), axis)
     axes, attributes, id, grid = _extractMetadata(a)
@@ -445,11 +509,13 @@ def sort (a, axis=-1):
     if (grid is not None) and (sortaxis in grid.getAxisList()):
         grid = None
     axes[axis] = TransientAxis(numpy.arange(len(sortaxis)))
-    F=getattr(a,"fill_value",1.e20)
+    F = getattr(a, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id, fill_value=F)
-sort.__doc__ = numpy.ma.sort.__doc__ + "The sort axis is replaced with a dummy axis."
+sort.__doc__ = numpy.ma.sort.__doc__ + \
+    "The sort axis is replaced with a dummy axis."
 
-def choose (indices, t):
+
+def choose(indices, t):
     """Returns an array shaped like indices containing elements chosen
       from t.
       If an element of t is the special element masked, any element
@@ -458,35 +524,38 @@ def choose (indices, t):
       The result has only the default axes.
     """
     maresult = numpy.ma.choose(indices, map(_makeMaskedArg, t))
-    F=getattr(t,"fill_value",1.e20)
+    F = getattr(t, "fill_value", 1.e20)
     return TransientVariable(maresult, fill_value=F)
 
-def where (condition, x, y):
-    "where(condition, x, y) is x where condition is true, y otherwise" 
-##    axes = commonDomain(x,y)
-##    grid = commonGrid(x,y,axes)
+
+def where(condition, x, y):
+    "where(condition, x, y) is x where condition is true, y otherwise"
+# axes = commonDomain(x,y)
+# grid = commonGrid(x,y,axes)
     maresult = numpy.ma.where(condition, _makeMaskedArg(x), _makeMaskedArg(y))
     axes, attributes, id, grid = _extractMetadata(condition)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id, fill_value=F)
 
+
 def masked_where(condition, x, copy=1):
-    """Return x as an array masked where condition is true. 
+    """Return x as an array masked where condition is true.
        Also masked where x or condition masked.
     """
     tx = _makeMaskedArg(x)
     tcondition = _makeMaskedArg(condition)
     maresult = numpy.ma.masked_where(tcondition, tx, copy)
     axes, attributes, id, grid = _extractMetadata(x)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
+
 def masked_greater(x, value):
     "masked_greater(x, value) = x masked where x > value"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_greater(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
 
@@ -495,33 +564,37 @@ def masked_greater_equal(x, value):
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_greater_equal(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
+
 def masked_less(x, value):
     "masked_less(x, value) = x masked where x < value"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_less(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
+
 def masked_less_equal(x, value):
     "masked_less_equal(x, value) = x masked where x <= value"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_less_equal(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
+
 def masked_not_equal(x, value):
     "masked_not_equal(x, value) = x masked where x != value"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_not_equal(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
+
 def masked_equal(x, value):
     """masked_equal(x, value) = x masked where x == value
        For floating point consider masked_values(x, value) instead.
@@ -529,40 +602,43 @@ def masked_equal(x, value):
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_equal(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
+
 def masked_outside(x, v1, v2):
     "x with mask of all values of x that are outside [v1,v2]"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_outside(tx, v1, v2)
     axes, attributes, id, grid = _extractMetadata(x)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
+
 def masked_inside(x, v1, v2):
     "x with mask of all values of x that are inside [v1,v2]"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_inside(tx, v1, v2)
     axes, attributes, id, grid = _extractMetadata(x)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-def concatenate (arrays, axis=0, axisid=None, axisattributes=None):
+
+def concatenate(arrays, axis=0, axisid=None, axisattributes=None):
     """Concatenate the arrays along the given axis. Give the extended axis the id and
     attributes provided - by default, those of the first array."""
 
     tarrays = [_makeMaskedArg(a) for a in arrays]
     maresult = numpy.ma.concatenate(arrays, axis=axis)
-    if len(arrays)>1:
+    if len(arrays) > 1:
         varattributes = None
         varid = None
-        axes = commonDomain(tarrays[0],tarrays[1],omit=axis)
+        axes = commonDomain(tarrays[0], tarrays[1], omit=axis)
         grid = commonGrid(tarrays[0], tarrays[1], axes)
-        for i in range(len(arrays)-2):
+        for i in range(len(arrays) - 2):
             if axes is None:
                 break
-            axes = commonAxes(tarrays[i+2],axes,omit=axis)
+            axes = commonAxes(tarrays[i + 2], axes, omit=axis)
             grid = commonGrid1(a, grid, axes)
     else:
         axes = tarrays[0].getAxisList()
@@ -575,32 +651,37 @@ def concatenate (arrays, axis=0, axisid=None, axisattributes=None):
     if axes is not None:
         if axisid is None:
             axisid = tarrays[0].getAxis(axis).id
-        allunitsequal=True
+        allunitsequal = True
         try:
-            allunits=tarrays[0].getAxis(axis).units
+            allunits = tarrays[0].getAxis(axis).units
         except:
-            allunits=None
+            allunits = None
         for t in tarrays[1:]:
             try:
-                tunits=t.getAxis(axis).units
+                tunits = t.getAxis(axis).units
             except:
-                tunits=None
-            if tunits!=allunits:
-                allunitsequal=False
+                tunits = None
+            if tunits != allunits:
+                allunitsequal = False
         if allunitsequal:
             if axisattributes is None:
                 axisattributes = tarrays[0].getAxis(axis).attributes
-            axes[axis] = axisConcatenate([t.getAxis(axis) for t in tarrays], axisid, axisattributes)
+            axes[axis] = axisConcatenate(
+                [t.getAxis(axis) for t in tarrays],
+                axisid,
+                axisattributes)
 
-    # If the grid doesn't match the axislist (e.g., catenation was on latitude) then omit it.
+    # If the grid doesn't match the axislist (e.g., catenation was on
+    # latitude) then omit it.
     if grid is not None:
         for item in grid.getAxisList():
             if item not in axes:
                 grid = None
-    F=getattr(arrays[0],"fill_value",1.e20)
-    return TransientVariable(maresult, axes=axes, attributes=varattributes,id=varid,grid=grid, fill_value=F)
+    F = getattr(arrays[0], "fill_value", 1.e20)
+    return TransientVariable(maresult, axes=axes, attributes=varattributes, id=varid, grid=grid, fill_value=F)
+
 
-def take (a, indices, axis=None):
+def take(a, indices, axis=None):
     "take(a, indices, axis=None) returns selection of items from a."
     axis = _conv_axis_arg(axis)
     ta = _makeMaskedArg(a)
@@ -608,15 +689,16 @@ def take (a, indices, axis=None):
     # ma compatibility interface has a bug
     maresult = numpy.ma.take(ta, indices, axis=axis)
     axes, attributes, id, grid = _extractMetadata(a, omitall=(axis is None))
-    
+
     # If the take is on a grid axis, omit the grid.
     if (grid is not None) and (axes[axis] in grid.getAxisList()):
         grid = None
     if axes is not None:
         axes[axis] = axisTake(axes[axis], indices)
-    F=getattr(a,"fill_value",1.e20)
+    F = getattr(a, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
+
 def transpose(a, axes=None):
     "transpose(a, axes=None) reorder dimensions per tuple axes"
     ta = _makeMaskedArg(a)
@@ -628,49 +710,53 @@ def transpose(a, axes=None):
     newaxes = None
     if oldaxes is not None:
         newaxes = [oldaxes[i] for i in axes]
-    F=getattr(a,"fill_value",1.e20)
+    F = getattr(a, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=newaxes, attributes=attributes, id=id, grid=grid, copy=1, fill_value=F)
 
+
 class _minimum_operation:
+
     "Object to calculate minima"
-    def __init__ (self):
+
+    def __init__(self):
         """minimum(a, b) or minimum(a)
            In one argument case returns the scalar minimum.
         """
         pass
 
-    def __call__ (self, a, b=None):
+    def __call__(self, a, b=None):
         "Execute the call behavior."
         a = _makeMaskedArg(a)
         if b is None:
             m = getmask(a)
-            if m is nomask: 
+            if m is nomask:
                 d = numpy.min(filled(a).ravel())
                 return d
-##             ac = a.compressed()
-##             if len(ac) == 0:
-##                 return masked
+# ac = a.compressed()
+# if len(ac) == 0:
+# return masked
             else:
                 return numpy.ma.min(a)
         else:
             return where(less(a, b), a, b)[...]
-       
-    def reduce (self, target, axis=0):
+
+    def reduce(self, target, axis=0):
         """Reduce target along the given axis."""
         a = _makeMaskedArg(target)
         axes, attributes, id, grid = _extractMetadata(a, omit=axis)
         m = getmask(a)
         if m is nomask:
             t = filled(a)
-            result = masked_array (numpy.minimum.reduce (t, axis))
+            result = masked_array(numpy.minimum.reduce(t, axis))
         else:
-            t = numpy.minimum.reduce(filled(a, numpy.ma.minimum_fill_value(a)), axis)
+            t = numpy.minimum.reduce(
+                filled(a, numpy.ma.minimum_fill_value(a)), axis)
             m = numpy.logical_and.reduce(m, axis)
             result = masked_array(t, m, fill_value(a))
         return TransientVariable(result, axes=axes, copy=0,
-                     fill_value=fill_value(a), grid=grid, id=id)
+                                 fill_value=fill_value(a), grid=grid, id=id)
 
-    def outer (self, a, b):
+    def outer(self, a, b):
         "Return the function applied to the outer product of a and b."
         a = _makeMaskedArg(a)
         b = _makeMaskedArg(b)
@@ -685,47 +771,51 @@ class _minimum_operation:
         d = numpy.minimum.outer(filled(a), filled(b))
         return TransientVariable(d, mask=m, copy=0)
 
-minimum = _minimum_operation () 
-    
+minimum = _minimum_operation()
+
+
 class _maximum_operation:
+
     "Object to calculate maxima"
-    def __init__ (self):
+
+    def __init__(self):
         """maximum(a, b) or maximum(a)
            In one argument case returns the scalar maximum.
         """
         pass
 
-    def __call__ (self, a, b=None):
+    def __call__(self, a, b=None):
         "Execute the call behavior."
         a = _makeMaskedArg(a)
         if b is None:
             m = getmask(a)
-            if m is nomask: 
+            if m is nomask:
                 d = numpy.max(filled(a).ravel())
                 return d
-##             ac = a.compressed()
-##             if len(ac) == 0:
-##                 return masked
+# ac = a.compressed()
+# if len(ac) == 0:
+# return masked
             else:
                 return numpy.ma.max(a)
         else:
             return where(greater(a, b), a, b)[...]
-       
-    def reduce (self, target, axis=0):
+
+    def reduce(self, target, axis=0):
         """Reduce target along the given axis."""
         axes, attributes, id, grid = _extractMetadata(target, omit=axis)
         a = _makeMaskedArg(target)
         m = getmask(a)
         if m is nomask:
             t = filled(a)
-            return masked_array (numpy.maximum.reduce (t, axis))
+            return masked_array(numpy.maximum.reduce(t, axis))
         else:
-            t = numpy.maximum.reduce(filled(a, numpy.ma.maximum_fill_value(a)), axis)
+            t = numpy.maximum.reduce(
+                filled(a, numpy.ma.maximum_fill_value(a)), axis)
             m = numpy.logical_and.reduce(m, axis)
             return TransientVariable(t, mask=m, fill_value=fill_value(a),
-                        axes = axes, grid=grid, id=id)
+                                     axes=axes, grid=grid, id=id)
 
-    def outer (self, a, b):
+    def outer(self, a, b):
         "Return the function applied to the outer product of a and b."
         a = _makeMaskedArg(a)
         b = _makeMaskedArg(b)
@@ -740,8 +830,9 @@ class _maximum_operation:
         d = numpy.maximum.outer(filled(a), filled(b))
         return TransientVariable(d, mask=m)
 
-maximum = _maximum_operation () 
-    
+maximum = _maximum_operation()
+
+
 def asarray(data, typecode=None, dtype=None):
     """asarray(data, typecode=None, dtype=None) is equivalent to array(data, dtype=None, copy=0)
        Returns data if dtype is None or data is a MaskedArray of the same dtype.
@@ -751,10 +842,12 @@ def asarray(data, typecode=None, dtype=None):
     if isinstance(data, AbstractVariable) and (dtype is None or sctype2char(dtype) == data.dtype.char):
         return data
     else:
-        F=getattr(data,"fill_value",1.e20)
+        F = getattr(data, "fill_value", 1.e20)
         return TransientVariable(data, dtype=dtype, copy=0, fill_value=F)
 
-def arrayrange(start, stop=None, step=1, typecode=None, axis=None, attributes=None, id=None, dtype=None):
+
+def arrayrange(start, stop=None, step=1, typecode=None,
+               axis=None, attributes=None, id=None, dtype=None):
     """Just like range() except it returns a variable whose type can be specfied
     by the keyword argument typecode. The axis of the result variable may be specified.
     """
@@ -767,15 +860,19 @@ def arrayrange(start, stop=None, step=1, typecode=None, axis=None, attributes=No
 
 arange = arrayrange
 
-def zeros (shape, typecode=float, savespace=0, axes=None, attributes=None, id=None, grid=None, dtype=None):
-    """zeros(n, typecode=float, savespace=0, axes=None, attributes=None, id=None) = 
+
+def zeros(shape, typecode=float, savespace=0, axes=None,
+          attributes=None, id=None, grid=None, dtype=None):
+    """zeros(n, typecode=float, savespace=0, axes=None, attributes=None, id=None) =
      an array of all zeros of the given length or shape."""
     dtype = _convdtype(dtype, typecode)
     maresult = numpy.ma.zeros(shape, dtype=dtype)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid)
-    
-def ones (shape, typecode=float, savespace=0, axes=None, attributes=None, id=None, grid=None, dtype=None):
-    """ones(n, typecode=float, savespace=0, axes=None, attributes=None, id=None) = 
+
+
+def ones(shape, typecode=float, savespace=0, axes=None,
+         attributes=None, id=None, grid=None, dtype=None):
+    """ones(n, typecode=float, savespace=0, axes=None, attributes=None, id=None) =
      an array of all ones of the given length or shape."""
     dtype = _convdtype(dtype, typecode)
     maresult = numpy.ma.ones(shape, dtype=dtype)
@@ -783,28 +880,31 @@ def ones (shape, typecode=float, savespace=0, axes=None, attributes=None, id=Non
 
 as_masked = numpy.ma.array
 
+
 def outerproduct(a, b):
     """outerproduct(a,b) = {a[i]*b[j]}, has shape (len(a),len(b))"""
-    ta = asVariable(a,writeable=1)
-    tb = asVariable(b,writeable=1)
-    maresult = numpy.ma.outerproduct(ta,tb)
-    axes = (ta.getAxis(0),tb.getAxis(0))
-    F=getattr(a,"fill_value",1.e20)
+    ta = asVariable(a, writeable=1)
+    tb = asVariable(b, writeable=1)
+    maresult = numpy.ma.outerproduct(ta, tb)
+    axes = (ta.getAxis(0), tb.getAxis(0))
+    F = getattr(a, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, fill_value=F)
 
-def argsort (x, axis = -1, fill_value=None):
+
+def argsort(x, axis=-1, fill_value=None):
     """Treating masked values as if they have the value fill_value,
        return sort indices for sorting along given axis.
        if fill_value is None, use fill_value(x)
-    """        
+    """
     tx = _makeMaskedArg(x)
-    maresult = numpy.ma.argsort(tx,axis=axis,fill_value=fill_value)
+    maresult = numpy.ma.argsort(tx, axis=axis, fill_value=fill_value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F=getattr(x,"fill_value",1.e20)
+    F = getattr(x, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
 array = TransientVariable
 
+
 def repeat(a, repeats, axis=None):
     """repeat elements of a repeats times along axis
        repeats is a sequence of length a.shape[axis]
@@ -818,68 +918,85 @@ def repeat(a, repeats, axis=None):
         grid = None
     if axes is not None:
         axes[axis] = None
-    F=getattr(a,"fill_value",1.e20)
+    F = getattr(a, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, no_update_from=True, fill_value=F)
 
-def reshape (a, newshape, axes=None, attributes=None, id=None, grid=None):
+
+def reshape(a, newshape, axes=None, attributes=None, id=None, grid=None):
     ignore, attributes, id, ignore = _extractMetadata(a, axes, attributes, id)
     if axes is not None:
         axesshape = [len(item) for item in axes]
-        if axesshape!=list(newshape):
-            raise CDMSError, 'axes must be shaped %s'%`newshape`
+        if axesshape != list(newshape):
+            raise CDMSError('axes must be shaped %s' % repr(newshape))
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.reshape(ta, newshape)
-    F=getattr(a,"fill_value",1.e20)
+    F = getattr(a, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, no_update_from=True, fill_value=F)
-reshape.__doc__="numpy doc: %s\naxes/attributes/grid are applied onto the new variable" % numpy.reshape.__doc__
+reshape.__doc__ = "numpy doc: %s\naxes/attributes/grid are applied onto the new variable" % numpy.reshape.__doc__
+
 
-def resize (a, new_shape, axes=None, attributes=None, id=None, grid=None):
+def resize(a, new_shape, axes=None, attributes=None, id=None, grid=None):
     """resize(a, new_shape) returns a new array with the specified shape.
     The original array's total size can be any size."""
     ignore, attributes, id, ignore = _extractMetadata(a, axes, attributes, id)
     if axes is not None:
         axesshape = [len(item) for item in axes]
-        if axesshape!=list(new_shape):
-            raise CDMSError, 'axes must be shaped %s'%`newshape`
+        if axesshape != list(new_shape):
+            raise CDMSError('axes must be shaped %s' % repr(new_shape))
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.resize(ta, new_shape)
-    F=getattr(a,"fill_value",1.e20)
+    F = getattr(a, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-def masked_array (a, mask=None, fill_value=None, axes=None, attributes=None, id=None):
-    """masked_array(a, mask=None) = 
+
+def masked_array(a, mask=None, fill_value=None,
+                 axes=None, attributes=None, id=None):
+    """masked_array(a, mask=None) =
        array(a, mask=mask, copy=0, fill_value=fill_value)
        Use fill_value(a) if None.
     """
-    maresult = numpy.ma.masked_array(_makeMaskedArg(a), mask=mask, fill_value=fill_value)
+    maresult = numpy.ma.masked_array(
+        _makeMaskedArg(a),
+        mask=mask,
+        fill_value=fill_value)
     axes, attributes, id, grid = _extractMetadata(a, axes, attributes, id)
-    F=getattr(a,"fill_value",1.e20)
+    F = getattr(a, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-def masked_values (data, value, rtol=1.e-5, atol=1.e-8, copy=1,
-    savespace=0, axes=None, attributes=None, id=None): 
+
+def masked_values(data, value, rtol=1.e-5, atol=1.e-8, copy=1,
+                  savespace=0, axes=None, attributes=None, id=None):
     """
        masked_values(data, value, rtol=1.e-5, atol=1.e-8)
        Create a masked array; mask is None if possible.
        May share data values with original array, but not recommended.
        Masked where abs(data-value)<= atol + rtol * abs(value)
     """
-    maresult = numpy.ma.masked_values(_makeMaskedArg(data), value, rtol=rtol, atol=atol, copy=copy)
+    maresult = numpy.ma.masked_values(
+        _makeMaskedArg(data),
+        value,
+        rtol=rtol,
+        atol=atol,
+        copy=copy)
     axes, attributes, id, grid = _extractMetadata(data, axes, attributes, id)
-    F=getattr(data,"fill_value",1.e20)
+    F = getattr(data, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-def masked_object (data, value, copy=1, savespace=0, axes=None, attributes=None, id=None):
+
+def masked_object(data, value, copy=1,
+                  savespace=0, axes=None, attributes=None, id=None):
     "Create array masked where exactly data equal to value"
     maresult = numpy.ma.masked_object(_makeMaskedArg(data), value, copy=copy)
     axes, attributes, id, grid = _extractMetadata(data, axes, attributes, id)
-    F=getattr(data,"fill_value",1.e20)
+    F = getattr(data, "fill_value", 1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
-    
-def isMaskedVariable (x):
+
+
+def isMaskedVariable(x):
     "Is x a masked variable, that is, an instance of AbstractVariable?"
     return isinstance(x, AbstractVariable)
 
+
 def set_default_fill_value(value_type, value):
     """Set the default fill value for value_type to value.
     value_type is a string: 'real','complex','character','integer',or 'object'.
@@ -896,24 +1013,23 @@ def set_default_fill_value(value_type, value):
     elif value_type == 'object':
         numpy.ma.default_object_fill_value = value
 
-def fromfunction (f, dimensions):
+
+def fromfunction(f, dimensions):
     """Apply f to s to create an array as in numpy."""
     return TransientVariable(numpy.ma.fromfunction(f, dimensions))
 
-def diagonal (a, offset = 0, axis1=0, axis2 = 1):
-    """diagonal(a, offset=0, axis1=0, axis2 = 1) returns the given 
+
+def diagonal(a, offset=0, axis1=0, axis2=1):
+    """diagonal(a, offset=0, axis1=0, axis2 = 1) returns the given
        diagonals defined by the two dimensions of the array.
     """
-    F=getattr(a,"fill_value",1.e20)
-    return TransientVariable(numpy.ma.diagonal(_makeMaskedArg(a), 
-            offset, axis1, axis2), fill_value=F)
+    F = getattr(a, "fill_value", 1.e20)
+    return TransientVariable(numpy.ma.diagonal(_makeMaskedArg(a),
+                                               offset, axis1, axis2), fill_value=F)
 
-def fromstring (s, t):
+
+def fromstring(s, t):
     """Construct a masked array from a string. Result will have no mask.
        t is a typecode.
     """
-    return TransientArray(numpy.ma.fromstring(s,t))
-
-
-
-
+    return TransientArray(numpy.ma.fromstring(s, t))
diff --git a/Packages/cdms2/Lib/__init__.py b/Packages/cdms2/Lib/__init__.py
index 874601632..e3e53ff09 100644
--- a/Packages/cdms2/Lib/__init__.py
+++ b/Packages/cdms2/Lib/__init__.py
@@ -2,35 +2,35 @@
 CDMS module-level API
 """
 import cdat_info
-cdat_info.pingPCMDIdb("cdat","cdms2")
+cdat_info.pingPCMDIdb("cdat", "cdms2")
 
-__all__ = ["cdmsobj", "axis", "coord", "grid", "hgrid", "avariable", \
-"sliceut", "error", "variable", "fvariable", "tvariable", "dataset", \
-"database", "cache", "selectors", "MV2", "convention", "bindex", \
-"auxcoord", "gengrid", "gsHost", "gsStaticVariable", "gsTimeVariable", \
-"mvBaseWriter", "mvSphereMesh", "mvVsWriter", "mvCdmsRegrid"]
+__all__ = ["cdmsobj", "axis", "coord", "grid", "hgrid", "avariable",
+           "sliceut", "error", "variable", "fvariable", "tvariable", "dataset",
+           "database", "cache", "selectors", "MV2", "convention", "bindex",
+           "auxcoord", "gengrid", "gsHost", "gsStaticVariable", "gsTimeVariable",
+           "mvBaseWriter", "mvSphereMesh", "mvVsWriter", "mvCdmsRegrid"]
 
 # Errors
-from error import CDMSError
+from .error import CDMSError
 
 # CDMS datatypes
-from cdmsobj import CdArray, CdChar, CdByte, CdDouble, CdFloat, CdFromObject, CdInt, CdLong, CdScalar, CdShort, CdString
+from .cdmsobj import CdArray, CdChar, CdByte, CdDouble, CdFloat, CdFromObject, CdInt, CdLong, CdScalar, CdShort, CdString
 
 # Functions which operate on all objects or groups of objects
-from cdmsobj import Unlimited, getPathFromTemplate, matchPattern, matchingFiles, searchPattern, searchPredicate, setDebugMode
+from .cdmsobj import Unlimited, getPathFromTemplate, matchPattern, matchingFiles, searchPattern, searchPredicate, setDebugMode
 
 # Axis functions and classes
-from axis import AbstractAxis, axisMatches, axisMatchAxis, axisMatchIndex
-from axis import createAxis, createEqualAreaAxis, createGaussianAxis, createUniformLatitudeAxis, createUniformLongitudeAxis, setAutoBounds, getAutoBounds
+from .axis import AbstractAxis, axisMatches, axisMatchAxis, axisMatchIndex
+from .axis import createAxis, createEqualAreaAxis, createGaussianAxis, createUniformLatitudeAxis, createUniformLongitudeAxis, setAutoBounds, getAutoBounds
 
 # Grid functions
-from grid import createGenericGrid, createGlobalMeanGrid, createRectGrid, createUniformGrid, createZonalGrid, setClassifyGrids, createGaussianGrid, writeScripGrid, isGrid
+from .grid import createGenericGrid, createGlobalMeanGrid, createRectGrid, createUniformGrid, createZonalGrid, setClassifyGrids, createGaussianGrid, writeScripGrid, isGrid
 
 # Dataset functions
-from dataset import createDataset, openDataset, useNetcdf3, \
-        getNetcdfClassicFlag, getNetcdfShuffleFlag, getNetcdfDeflateFlag, getNetcdfDeflateLevelFlag,\
+from .dataset import createDataset, openDataset, useNetcdf3, \
+    getNetcdfClassicFlag, getNetcdfShuffleFlag, getNetcdfDeflateFlag, getNetcdfDeflateLevelFlag,\
         setNetcdfClassicFlag, setNetcdfShuffleFlag, setNetcdfDeflateFlag, setNetcdfDeflateLevelFlag,\
-        setNetcdfUseNCSwitchModeFlag,getNetcdfUseNCSwitchModeFlag,\
+        setNetcdfUseNCSwitchModeFlag, getNetcdfUseNCSwitchModeFlag,\
         setCompressionWarnings,\
         setNetcdf4Flag, getNetcdf4Flag,\
         setNetcdfUseParallelFlag, getNetcdfUseParallelFlag, \
@@ -39,31 +39,31 @@ from dataset import createDataset, openDataset, useNetcdf3, \
 open = openDataset
 
 # Database functions
-from database import connect, Base, Onelevel, Subtree
+from .database import connect, Base, Onelevel, Subtree
 
-#Selectors
-import selectors
-from selectors import longitude, latitude, time, level, required, \
-                      longitudeslice, latitudeslice, levelslice, timeslice
+# Selectors
+from . import selectors
+from .selectors import longitude, latitude, time, level, required, \
+    longitudeslice, latitudeslice, levelslice, timeslice
 
-from avariable import order2index, orderparse, setNumericCompatibility, getNumericCompatibility
+from .avariable import order2index, orderparse, setNumericCompatibility, getNumericCompatibility
 # TV
-from tvariable import asVariable, createVariable, isVariable
+from .tvariable import asVariable, createVariable, isVariable
 
-from mvSphereMesh import SphereMesh
-from mvBaseWriter import BaseWriter
-from mvVsWriter import VsWriter
-from mvVTKSGWriter import VTKSGWriter
-from mvVTKUGWriter import VTKUGWriter
-from mvCdmsRegrid import CdmsRegrid
+from .mvSphereMesh import SphereMesh
+from .mvBaseWriter import BaseWriter
+from .mvVsWriter import VsWriter
+from .mvVTKSGWriter import VTKSGWriter
+from .mvVTKUGWriter import VTKUGWriter
+from .mvCdmsRegrid import CdmsRegrid
 
 # Gridspec is not installed by default so just pass on if it isn't installed
 try:
-    from gsStaticVariable import StaticFileVariable
-    from gsTimeVariable import TimeFileVariable
+    from .gsStaticVariable import StaticFileVariable
+    from .gsTimeVariable import TimeFileVariable
 except:
     pass
 
-from restApi import esgfConnection,esgfDataset,FacetConnection
+from .restApi import esgfConnection, esgfDataset, FacetConnection
 
 MV = MV2
diff --git a/Packages/cdms2/Lib/auxcoord.py b/Packages/cdms2/Lib/auxcoord.py
index 24a97e8c3..334831dce 100644
--- a/Packages/cdms2/Lib/auxcoord.py
+++ b/Packages/cdms2/Lib/auxcoord.py
@@ -4,50 +4,66 @@ CDMS 1-D auxiliary coordinates.
 Note: In contrast to Axis objects (concrete classes subclassed from AbstractAxis), auxiliary coordinate variables are not monotonic in value, and do not share a name with the dimension.
 """
 
-## import internattr
-from error import CDMSError
-from coord import AbstractCoordinateAxis
-from fvariable import FileVariable
-from variable import DatasetVariable
-from tvariable import TransientVariable
-from avariable import AbstractVariable
+# import internattr
+from .error import CDMSError
+from .coord import AbstractCoordinateAxis
+from .fvariable import FileVariable
+from .variable import DatasetVariable
+from .tvariable import TransientVariable
+from .avariable import AbstractVariable
+
 
 class AbstractAuxAxis1D(AbstractCoordinateAxis):
 
-    def __init__ (self, parent=None, variableNode=None, bounds=None):
-        AbstractCoordinateAxis.__init__(self, parent, variableNode, bounds=bounds)
+    def __init__(self, parent=None, variableNode=None, bounds=None):
+        AbstractCoordinateAxis.__init__(
+            self,
+            parent,
+            variableNode,
+            bounds=bounds)
 
-    def clone (self, copyData=1):
+    def clone(self, copyData=1):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
-        result = TransientAuxAxis1D(self[:], copy=copyData, axes=self.getAxisList(), attributes=self.attributes, bounds=self.getBounds())
+        result = TransientAuxAxis1D(
+            self[:],
+            copy=copyData,
+            axes=self.getAxisList(),
+            attributes=self.attributes,
+            bounds=self.getBounds())
         return result
 
     def setBounds(self, bounds):
         if bounds is not None:
-            if len(bounds.shape)!=2:
-                raise CDMSError, 'Bounds must have rank=2'
-            if bounds.shape[0:1]!=self.shape:
-                raise CDMSError, 'Bounds shape %s is inconsistent with axis shape %s'%(`bounds.shape`,`self.shape`)
+            if len(bounds.shape) != 2:
+                raise CDMSError('Bounds must have rank=2')
+            if bounds.shape[0:1] != self.shape:
+                raise CDMSError(
+                    'Bounds shape %s is inconsistent with axis shape %s' %
+                    (repr(bounds.shape), repr(self.shape)))
         AbstractCoordinateAxis.setBounds(self, bounds)
 
-    def subSlice (self, *specs, **keys):
+    def subSlice(self, *specs, **keys):
         # Take a subslice, returning a TransientAuxAxis1D
         avar = AbstractVariable.subSlice(self, *specs, **keys)
         bounds = self.getBounds()
         if bounds is None:
             newbounds = None
         else:
-            newbounds = bounds[specs]   # bounds can be a numarray or DatasetVariable
+            newbounds = bounds[
+                specs]   # bounds can be a numarray or DatasetVariable
 
-        # Note: disable axis copy to preserve identity of grid and variable domains
-        result = TransientAuxAxis1D(avar, bounds=newbounds, copyaxes=0)    
+        # Note: disable axis copy to preserve identity of grid and variable
+        # domains
+        result = TransientAuxAxis1D(avar, bounds=newbounds, copyaxes=0)
         return result
 
+
 class DatasetAuxAxis1D(AbstractAuxAxis1D, DatasetVariable):
 
     # Note: node is a VariableNode
+
     def __init__(self, parent, id=None, variableNode=None, bounds=None):
         AbstractAuxAxis1D.__init__(self, parent, variableNode, bounds=bounds)
         DatasetVariable.__init__(self, parent, id, variableNode)
@@ -55,11 +71,13 @@ class DatasetAuxAxis1D(AbstractAuxAxis1D, DatasetVariable):
 
     def __repr__(self):
         if self.parent is not None:
-            return "<DatasetAuxAxis1D: %s, file: %s, shape: %s>"%(self.id, self.parent.id, `self.shape`)
+            return "<DatasetAuxAxis1D: %s, file: %s, shape: %s>" % (self.id, self.parent.id, repr(self.shape))
         else:
-            return "<DatasetAuxAxis1D: %s, file: **CLOSED**>"%self.id
+            return "<DatasetAuxAxis1D: %s, file: **CLOSED**>" % self.id
+
+# internattr.initialize_internal_attributes(DatasetAuxAxis1D) # Copy
+# internal attrs from parents
 
-## internattr.initialize_internal_attributes(DatasetAuxAxis1D) # Copy internal attrs from parents
 
 class FileAuxAxis1D(AbstractAuxAxis1D, FileVariable):
 
@@ -70,15 +88,18 @@ class FileAuxAxis1D(AbstractAuxAxis1D, FileVariable):
 
     def __repr__(self):
         if self.parent is not None:
-            return "<FileAuxAxis1D: %s, file: %s, shape: %s>"%(self.id, self.parent.id, `self.shape`)
+            return "<FileAuxAxis1D: %s, file: %s, shape: %s>" % (self.id, self.parent.id, repr(self.shape))
         else:
-            return "<FileAuxAxis1D: %s, file: **CLOSED**>"%self.id
+            return "<FileAuxAxis1D: %s, file: **CLOSED**>" % self.id
+
+# internattr.initialize_internal_attributes(FileAuxAxis1D) # Copy internal
+# attrs from parents
 
-## internattr.initialize_internal_attributes(FileAuxAxis1D) # Copy internal attrs from parents
 
 class TransientAuxAxis1D(AbstractAuxAxis1D, TransientVariable):
 
-    def __init__(self, data, typecode=None, copy=0, savespace=0, mask=None, fill_value=None,
+    def __init__(
+        self, data, typecode=None, copy=0, savespace=0, mask=None, fill_value=None,
                  axes=None, attributes=None, id=None, copyaxes=1, bounds=None):
         """Create a transient, auxiliary 1-D axis.
         All arguments are as for TransientVariable.
@@ -86,11 +107,12 @@ class TransientAuxAxis1D(AbstractAuxAxis1D, TransientVariable):
           nvert is the max number of vertices per cell.
         """
         AbstractAuxAxis1D.__init__(self, None, None, bounds=bounds)
-        TransientVariable.__init__(self, data, typecode=typecode, copy=copy, savespace=savespace,
+        TransientVariable.__init__(
+            self, data, typecode=typecode, copy=copy, savespace=savespace,
                                    mask=mask, fill_value=fill_value, axes=axes, attributes=attributes,
                                    id=id, copyaxes=copyaxes)
         if axes is not None:
             self.setBounds(bounds)
 
-## internattr.initialize_internal_attributes(TransientAuxAxis1D) # Copy internal attrs from parents
-
+# internattr.initialize_internal_attributes(TransientAuxAxis1D) # Copy
+# internal attrs from parents
diff --git a/Packages/cdms2/Lib/avariable.py b/Packages/cdms2/Lib/avariable.py
index db174105e..ae31c1171 100644
--- a/Packages/cdms2/Lib/avariable.py
+++ b/Packages/cdms2/Lib/avariable.py
@@ -1,26 +1,26 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 "CDMS Variable objects, abstract interface"
 import numpy
 import re
 import warnings
-import cdmsNode
-from cdmsobj import CdmsObj
+from . import cdmsNode
+from .cdmsobj import CdmsObj
 import cdms2
-from slabinterface import Slab
-from sliceut import *
-from error import CDMSError
-from axis import axisMatchIndex, axisMatchAxis, axisMatches, unspecified, CdtimeTypes, AbstractAxis
-import selectors
+from .slabinterface import Slab
+from .sliceut import *
+from .error import CDMSError
+from .axis import axisMatchIndex, axisMatchAxis, axisMatches, unspecified, CdtimeTypes, AbstractAxis
+from . import selectors
 import copy
 # from regrid2 import Regridder, PressureRegridder, CrossSectionRegridder
-from mvCdmsRegrid import CdmsRegrid
+from .mvCdmsRegrid import CdmsRegrid
 from regrid2.mvGenericRegrid import guessPeriodicity
-#import PropertiedClasses
-from convention import CF1
-from grid import AbstractRectGrid
-#import internattr
+# import PropertiedClasses
+from .convention import CF1
+from .grid import AbstractRectGrid
+# import internattr
 
 InvalidRegion = "Invalid region: "
 OutOfRange = "Coordinate interval is out of range or intersection has no data: "
@@ -32,9 +32,10 @@ _numeric_compatibility = False          # Backward compatibility with numpy beha
                                         # True:  return 0-D arrays
                                         #        MV axis=1 by default
 
+
 def getMinHorizontalMask(var):
     """
-    Get the minimum mask associated with 'x' and 'y' (i.e. with the 
+    Get the minimum mask associated with 'x' and 'y' (i.e. with the
     min number of ones) across all axes
     @param var CDMS variable with a mask
     @return mask array or None if order 'x' and 'y' were not found
@@ -46,7 +47,7 @@ def getMinHorizontalMask(var):
 
     shp = var.shape
     ndims = len(shp)
-    order = var.getOrder() # e.g. 'zxty-', ndims = 5
+    order = var.getOrder()  # e.g. 'zxty-', ndims = 5
 
     # run a few checks
     numX = order.count('x')
@@ -57,43 +58,43 @@ def getMinHorizontalMask(var):
         msg = """
 Not able to locate the horizontal (y, x) axes for order = %s in getMinHorizontalMask
         """ % str(order)
-        raise CDMSError, msg
-        
-    
-    ps = [] # index position of x/y, e.g. [1,3]
-    es = [] # end indices, sizes of x/y axes
+        raise CDMSError(msg)
+
+    ps = []  # index position of x/y, e.g. [1,3]
+    es = []  # end indices, sizes of x/y axes
     nonHorizShape = []
     found = False
-    for i in range(ndims-1, -1, -1):
-        # iterate backwards because the horizontal 
+    for i in range(ndims - 1, -1, -1):
+        # iterate backwards because the horizontal
         # axes are more likely to be last
         o = order[i]
         # curvilinear coordinates have '-' in place of
-        # x or y, also check for '-' but exit if we think 
+        # x or y, also check for '-' but exit if we think
         # we found the x and y coords
         if not found and (o in 'xy') or (not hasXY and o == '-'):
-            ps = [i,] + ps
-            es = [shp[i],] + es
+            ps = [i, ] + ps
+            es = [shp[i], ] + es
             if len(ps) == 2:
                 found = True
         else:
-            nonHorizShape = [shp[i],] + nonHorizShape
+            nonHorizShape = [shp[i], ] + nonHorizShape
 
     if len(ps) == 2:
-        # found all the horizontal axes, start with mask 
+        # found all the horizontal axes, start with mask
         # set to invalid everywhere
         mask = numpy.ones(es, numpy.bool8)
         # iterate over all non-horizontal axes, there can be as
         # many as you want...
         for it in MultiArrayIter(nonHorizShape):
-            inds = it.getIndices() # (i0, i1, i2)
+            inds = it.getIndices()  # (i0, i1, i2)
             # build the slice operator, there are three parts
-            # (head, middle, and tail), some parts may be 
+            # (head, middle, and tail), some parts may be
             # missing
             # slce = 'i0,' + ':,'   +   'i1,'  +   ':,' + 'i2,'
-            slce = ('%d,'*ps[0]) % tuple(inds[:ps[0]]) + ':,'            \
-                + ('%d,'*(ps[1]-ps[0]-1)) % tuple(inds[ps[0]:ps[1]-1])   \
-                + ':,' + ('%d,'*(ndims-ps[1]-1)) % tuple(inds[ps[1]-1:])
+            slce = ('%d,' * ps[0]) % tuple(inds[:ps[0]]) + ':,'            \
+                + ('%d,' * (ps[1] - ps[0] - 1)) % tuple(inds[ps[0]:ps[1] - 1])   \
+                + ':,' + \
+                ('%d,' * (ndims - ps[1] - 1)) % tuple(inds[ps[1] - 1:])
             # evaluate the slice for this time, level....
             mask &= eval('var.mask[' + slce + ']')
         return mask
@@ -101,45 +102,49 @@ Not able to locate the horizontal (y, x) axes for order = %s in getMinHorizontal
         msg = """
 Could not find all the horizontal axes for order = %s in getMinHorizontalMask
         """ % str(order)
-        raise CDMSError, msg
-    return None  
+        raise CDMSError(msg)
+    return None
+
 
 def setNumericCompatibility(mode):
     global _numeric_compatibility
-    if mode==True or mode=='on':
+    if mode is True or mode == 'on':
         _numeric_compatibility = True
-    elif mode==False or mode=='off':
+    elif mode is False or mode == 'off':
         _numeric_compatibility = False
 
+
 def getNumericCompatibility():
     return _numeric_compatibility
 
+
 class AbstractVariable(CdmsObj, Slab):
-    def __init__ (self, parent=None, variableNode=None):
+
+    def __init__(self, parent=None, variableNode=None):
         """Not to be called by users.
            variableNode is the variable tree node, if any.
            parent is the containing dataset instance.
         """
-        if variableNode is not None and variableNode.tag !='variable':
-            raise CDMSError, 'Node is not a variable node'
+        if variableNode is not None and variableNode.tag != 'variable':
+            raise CDMSError('Node is not a variable node')
         CdmsObj.__init__(self, variableNode)
-        val = self.__cdms_internals__ + ['id','domain']
-        self.___cdms_internals__ = val 
+        val = self.__cdms_internals__ + ['id', 'domain']
+        self.___cdms_internals__ = val
         Slab.__init__(self)
         self.id = None                  # Transient variables key on this to create a default ID
         self.parent = parent
         self._grid_ = None      # Variable grid, if any
-        if not hasattr(self,'missing_value'):
+        if not hasattr(self, 'missing_value'):
             self.missing_value = None
         elif numpy.isnan(self.missing_value):
-          self.missing_value = None
+            self.missing_value = None
 
         # Reminder: children to define self.shape and set self.id
 
-    def __array__ (self, t=None, context=None):  #Numeric, ufuncs call this
+    def __array__(self, t=None, context=None):  # Numeric, ufuncs call this
         return numpy.ma.filled(self.getValue(squeeze=0))
 
-    def __call__ (self, *args,  **kwargs):
+    def __call__(self, *args, **kwargs):
         "Selection of a subregion using selectors"
         # separate options from selector specs
         d = kwargs.copy()
@@ -152,13 +157,13 @@ class AbstractVariable(CdmsObj, Slab):
         s = selectors.Selector(*args, **d)
         # get the selection
         return s.unmodified_select(self, raw=raw,
-                                         squeeze=squeeze, 
-                                         order=order, 
-                                         grid=grid)
+                                   squeeze=squeeze,
+                                   order=order,
+                                   grid=grid)
 
     select = __call__
 
-    def rank (self):
+    def rank(self):
         return len(self.shape)
 
     def _returnArray(self, ar, squeeze, singles=None):
@@ -166,85 +171,90 @@ class AbstractVariable(CdmsObj, Slab):
         # job is to make sure we return an numpy.ma or a scalar.
         # If singles is not None, squeeze dimension indices in singles
         inf = 1.8e308
-        if isinstance(ar,cdms2.tvariable.TransientVariable):
-            result = numpy.ma.array(ar._data,mask=ar.mask)
-        elif numpy.ma.isMaskedArray(ar):   #already numpy.ma, only need squeeze.
+        if isinstance(ar, cdms2.tvariable.TransientVariable):
+            result = numpy.ma.array(ar._data, mask=ar.mask)
+        elif numpy.ma.isMaskedArray(ar):  # already numpy.ma, only need squeeze.
             result = ar
         elif isinstance(ar, numpy.ndarray):
             missing = self.getMissing()
             if missing is None:
                 result = numpy.ma.masked_array(ar)
-            elif missing==inf or missing!=missing: # (x!=x) ==> x is NaN
+            elif missing == inf or missing != missing:  # (x!=x) ==> x is NaN
                 result = numpy.ma.masked_object(ar, missing, copy=0)
-            elif ar.dtype.char=='c':
+            elif ar.dtype.char == 'c':
                 # umath.equal is not implemented
-                resultmask = (ar==missing)
+                resultmask = (ar == missing)
                 if not resultmask.any():
                     resultmask = numpy.ma.nomask
-                result = numpy.ma.masked_array(ar, mask=resultmask, fill_value=missing)
+                result = numpy.ma.masked_array(
+                    ar,
+                    mask=resultmask,
+                    fill_value=missing)
             else:
                 result = numpy.ma.masked_values(ar, missing, copy=0)
         elif ar is numpy.ma.masked:
-            return ar  
-        else: # scalar, but it might be the missing value
+            return ar
+        else:  # scalar, but it might be the missing value
             missing = self.getMissing()
             if missing is None:
-                return ar #scalar
+                return ar  # scalar
             else:
                 result = numpy.ma.masked_values(ar, missing, copy=0)
 
         squoze = 0
         if squeeze:
             n = 1
-            newshape=[]
+            newshape = []
             for s in result.shape:
-               if s == 1: 
-                   squoze = 1
-                   continue
-               else:
-                   n = n * s
-                   newshape.append(s)
+                if s == 1:
+                    squoze = 1
+                    continue
+                else:
+                    n = n * s
+                    newshape.append(s)
         elif singles is not None:
             n = 1
-            newshape=[]
+            newshape = []
             oldshape = result.shape
             for i in range(len(oldshape)):
-               if i in singles: 
-                   squoze = 1
-                   continue
-               else:
-                   s = oldshape[i]
-                   n = n * s
-                   newshape.append(s)
-            
+                if i in singles:
+                    squoze = 1
+                    continue
+                else:
+                    s = oldshape[i]
+                    n = n * s
+                    newshape.append(s)
+
         else:
             n = numpy.ma.size(result)
         if n == 1 and squeeze:
-            return numpy.ma.ravel(result)[0] # scalar or masked
+            return numpy.ma.ravel(result)[0]  # scalar or masked
         if squoze:
             result.shape = newshape
         return result
 
     def generateGridkey(self, convention, vardict):
-        """ generateGridkey(): Determine if the variable is gridded, 
+        """ generateGridkey(): Determine if the variable is gridded,
             and generate ((latname, lonname, order, maskname, class), lat, lon) if gridded,
             or (None, None, None) if not gridded. vardict is the variable dictionary of the parent"""
 
         lat, nlat = convention.getVarLatId(self, vardict)
         lon, nlon = convention.getVarLonId(self, vardict)
         if (lat is not None) and (lat is lon):
-            raise CDMSError, "Axis %s is both a latitude and longitude axis! Check standard_name and/or axis attributes."%lat.id
+            raise CDMSError(
+                "Axis %s is both a latitude and longitude axis! Check standard_name and/or axis attributes." %
+                lat.id)
 
         # Check for 2D grid
         if (lat is None) or (lon is None):
             return None, lat, lon
 
         # Check for a rectilinear grid
-        if isinstance(lat, AbstractAxis) and isinstance(lon, AbstractAxis) and (lat.rank()==lon.rank()==1):
+        if isinstance(lat, AbstractAxis) and isinstance(lon, AbstractAxis) and (lat.rank() == lon.rank() == 1):
             return self.generateRectGridkey(lat, lon), lat, lon
 
         # Check for a curvilinear grid:
-        if lat.rank()==lon.rank()==2:
+        if lat.rank() == lon.rank() == 2:
 
             # check that they are defined on the same indices as self
             vardomain = self.getAxisIds()
@@ -258,7 +268,7 @@ class AbstractVariable(CdmsObj, Slab):
                     if axisid not in vardomain:
                         allok = 0
                         break
-            
+
             # It's a curvilinear grid
             if allok:
                 if hasattr(lat, 'maskid'):
@@ -268,7 +278,7 @@ class AbstractVariable(CdmsObj, Slab):
                 return (lat.id, lon.id, 'yx', maskid, 'curveGrid'), lat, lon
 
         # Check for a generic grid:
-        if lat.rank()==lon.rank()==1:
+        if lat.rank() == lon.rank() == 1:
 
             # check that they are defined on the same indices as self
             vardomain = self.getAxisIds()
@@ -282,7 +292,7 @@ class AbstractVariable(CdmsObj, Slab):
                     if axisid not in vardomain:
                         allok = 0
                         break
-            
+
             # It's a generic grid
             if allok:
                 if hasattr(lat, 'maskid'):
@@ -307,12 +317,14 @@ class AbstractVariable(CdmsObj, Slab):
                 ilat = k
             k += 1
 
-        if ilat==-1:
-            raise CDMSError, "Cannot find latitude axis; check standard_name and/or axis attributes"
-        if ilon==-1:
-            raise CDMSError, "Cannot find longitude axis; check standard_name and/or axis attributes"
+        if ilat == -1:
+            raise CDMSError(
+                "Cannot find latitude axis; check standard_name and/or axis attributes")
+        if ilon == -1:
+            raise CDMSError(
+                "Cannot find longitude axis; check standard_name and/or axis attributes")
 
-        if ilat<ilon:
+        if ilat < ilon:
             order = "yx"
         else:
             order = "xy"
@@ -331,12 +343,14 @@ class AbstractVariable(CdmsObj, Slab):
             alist = [d[0] for d in self.getDomain()]
             gridok = grid.checkAxes(alist)
         if not gridok:
-            raise CDMSError, "grid does not match axes for variable %s"%self.id
+            raise CDMSError(
+                "grid does not match axes for variable %s" %
+                self.id)
         self._grid_ = grid
 
-    def getDomain (self):
+    def getDomain(self):
         "Get the list of axes"
-        raise CDMSError, "getDomain not overriden in child"
+        raise CDMSError("getDomain not overriden in child")
 
     def getConvention(self):
         "Get the metadata convention associated with this object."
@@ -345,14 +359,15 @@ class AbstractVariable(CdmsObj, Slab):
         else:
             result = CF1
         return result
-            
+
 # A child class may want to override this
     def getAxis(self, n):
         "Get the n-th axis"
-        if n < 0: n = n + self.rank()
+        if n < 0:
+            n = n + self.rank()
         return self.getDomain()[n][0]
 
-    def getAxisIndex (self, axis_spec):
+    def getAxisIndex(self, axis_spec):
         """Return the index of the axis specificed by axis_spec.
          Argument axis_spec and be as for axisMatches
          Return -1 if no match.
@@ -362,7 +377,7 @@ class AbstractVariable(CdmsObj, Slab):
                 return i
         return -1
 
-    def getAxisListIndex (self, axes=None, omit=None, order=None):
+    def getAxisListIndex(self, axes=None, omit=None, order=None):
         """Return a list of indices of axis objects;
            If axes is not None, include only certain axes.
            less the ones specified in omit. If axes is None,
@@ -371,15 +386,15 @@ class AbstractVariable(CdmsObj, Slab):
         """
         return axisMatchIndex(self.getAxisList(), axes, omit, order)
 
-    def getAxisList(self, axes = None, omit=None, order=None):
-        """Get the list of axis objects; 
+    def getAxisList(self, axes=None, omit=None, order=None):
+        """Get the list of axis objects;
            If axes is not None, include only certain axes.
            If omit is not None, omit those specified by omit.
            Arguments omit or axes  may be as specified in axisMatchAxis
            order is an optional string determining the output order
         """
         alist = [d[0] for d in self.getDomain()]
-        return axisMatchAxis (alist, axes, omit, order)
+        return axisMatchAxis(alist, axes, omit, order)
 
     def getAxisIds(self):
         "Get a list of axis identifiers"
@@ -397,12 +412,12 @@ class AbstractVariable(CdmsObj, Slab):
         except:
             mv = self.missing_value
 
-        if mv is None and hasattr(self,'_FillValue'):
+        if mv is None and hasattr(self, '_FillValue'):
             mv = self._FillValue
-            
-        if asarray==0 and isinstance(mv, numpy.ndarray):
+
+        if asarray == 0 and isinstance(mv, numpy.ndarray):
             mv = mv[0]
-        if isinstance(mv, basestring) and self.dtype.char not in ['?','c','O','S']:
+        if isinstance(mv, basestring) and self.dtype.char not in ['?', 'c', 'O', 'S']:
             mv = float(mv)
         return mv
 
@@ -419,7 +434,7 @@ class AbstractVariable(CdmsObj, Slab):
         if value is None:
             self._basic_set('missing_value', value)
             return
-            
+
         selftype = self.typecode()
-        if isnstance(value, numpy.ndarray):
+        if isinstance(value, numpy.ndarray):
             value = value.astype(selftype).item()
@@ -429,14 +444,15 @@ class AbstractVariable(CdmsObj, Slab):
             try:
                 value = numpy.array([value], selftype)
             except:                     # Set fill value when ar[i:j] returns a masked value
-                value = numpy.array([numpy.ma.default_fill_value(self)], selftype)
-        elif isinstance(value,(basestring,numpy.string_,numpy.str,numpy.string0,numpy.str_)) and selftype in ['?','c','O','S']: # '?' for Boolean and object
+                value = numpy.array(
+                    [numpy.ma.default_fill_value(self)],
+                    selftype)
+        elif isinstance(value, (basestring, numpy.string_, numpy.str, numpy.string0, numpy.str_)) and selftype in ['?', 'c', 'O', 'S']:  # '?' for Boolean and object
             pass
         else:
-            raise CDMSError, 'Invalid missing value %s'%`value`
-        
-        self.missing_value = value
+            raise CDMSError('Invalid missing value %s' % repr(value))
 
+        self.missing_value = value
 
     def getTime(self):
         "Get the first time dimension, or None if not found"
@@ -457,11 +473,12 @@ class AbstractVariable(CdmsObj, Slab):
                 break
         else:
             return None
+
     def getForecast(self):
         return self.getForecastTime()
 
     def getLevel(self):
-        """Get the first vertical level dimension in the domain, 
+        """Get the first vertical level dimension in the domain,
            or None if not found.
         """
         for k in range(self.rank()):
@@ -479,7 +496,7 @@ class AbstractVariable(CdmsObj, Slab):
             result = grid.getLatitude()
         else:
             result = None
-            
+
         if result is None:
             for k in range(self.rank()):
                 result = self.getAxis(k)
@@ -487,7 +504,7 @@ class AbstractVariable(CdmsObj, Slab):
                     break
             else:
                 result = None
-                
+
         return result
 
     def getLongitude(self):
@@ -497,7 +514,7 @@ class AbstractVariable(CdmsObj, Slab):
             result = grid.getLongitude()
         else:
             result = None
-            
+
         if result is None:
             for k in range(self.rank()):
                 result = self.getAxis(k)
@@ -505,9 +522,8 @@ class AbstractVariable(CdmsObj, Slab):
                     break
             else:
                 result = None
-                
-        return result
 
+        return result
 
     # Get an order string, such as "tzyx"
     def getOrder(self, ids=0):
@@ -515,12 +531,12 @@ class AbstractVariable(CdmsObj, Slab):
 
          if ids == 0 (the default) for an axis that is not t,z,x,y
          the order string will contain a '-' in that location.
-         The result string will be of the same length as the number 
+         The result string will be of the same length as the number
          of axes. This makes it easy to loop over the dimensions.
 
          if ids == 1 those axes will be represented in the order
          string as (id) where id is that axis' id. The result will
-         be suitable for passing to order2index to get the 
+         be suitable for passing to order2index to get the
          corresponding axes, and to orderparse for dividing up into
          components.
         """
@@ -528,50 +544,56 @@ class AbstractVariable(CdmsObj, Slab):
         for k in range(self.rank()):
             axis = self.getAxis(k)
             if axis.isLatitude():
-                order = order+"y"
+                order = order + "y"
             elif axis.isLongitude():
-                order = order+"x"
+                order = order + "x"
             elif axis.isLevel():
-                order = order+"z"
+                order = order + "z"
             elif axis.isTime():
-                order = order+"t"
+                order = order + "t"
             elif ids:
                 order = order + '(' + axis.id + ')'
             else:
                 order = order + "-"
         return order
 
-    def subSlice (self, *specs, **keys):
-        speclist = self._process_specs (specs, keys)
-        numericSqueeze = keys.get('numericSqueeze',0)
+    def subSlice(self, *specs, **keys):
+        speclist = self._process_specs(specs, keys)
+        numericSqueeze = keys.get('numericSqueeze', 0)
 
         # Get a list of single-index specs
         if numericSqueeze:
             singles = self._single_specs(specs)
         else:
             singles = None
-        slicelist = self.specs2slices(speclist,force=1)
-        d = self.expertSlice (slicelist)
-        squeeze = keys.get ('squeeze', 0)
-        raw = keys.get('raw',0)
+        slicelist = self.specs2slices(speclist, force=1)
+        d = self.expertSlice(slicelist)
+        squeeze = keys.get('squeeze', 0)
+        raw = keys.get('raw', 0)
         order = keys.get('order', None)
         grid = keys.get('grid', None)
-        forceaxes = keys.get('forceaxes', None) # Force result to have these axes
-        raweasy = raw==1 and order is None and grid is None
+        forceaxes = keys.get(
+            'forceaxes',
+            None)  # Force result to have these axes
+        raweasy = raw == 1 and order is None and grid is None
         if not raweasy:
             if forceaxes is None:
                 axes = []
-                allaxes = [None]*self.rank()
+                allaxes = [None] * self.rank()
                 for i in range(self.rank()):
-                   slice = slicelist[i]
-                   if squeeze and numpy.ma.size(d, i) == 1:
-                       continue
-                   elif numericSqueeze and i in singles:
-                       continue
-                   # Don't wrap square-bracket slices
-                   axis = self.getAxis(i).subaxis(slice.start, slice.stop, slice.step, wrap=(numericSqueeze==0))
-                   axes.append(axis)
-                   allaxes[i] = axis
+                    slice = slicelist[i]
+                    if squeeze and numpy.ma.size(d, i) == 1:
+                        continue
+                    elif numericSqueeze and i in singles:
+                        continue
+                    # Don't wrap square-bracket slices
+                    axis = self.getAxis(
+                        i).subaxis(slice.start,
+                                   slice.stop,
+                                   slice.step,
+                                   wrap=(numericSqueeze == 0))
+                    axes.append(axis)
+                    allaxes[i] = axis
             else:
                 axes = forceaxes
 
@@ -582,42 +604,46 @@ class AbstractVariable(CdmsObj, Slab):
                 resultgrid = None
             else:
                 alist = [item[0] for item in self.getDomain()]
-                gridslices, newaxes = selfgrid.getGridSlices(alist, allaxes, slicelist)
+                gridslices, newaxes = selfgrid.getGridSlices(
+                    alist, allaxes, slicelist)
 
                 # If one of the grid axes was squeezed, the result grid is None
                 if None in newaxes:
                     resultgrid = None
                 else:
-                    resultgrid = apply(selfgrid.subSlice, gridslices, {'forceaxes': newaxes})
+                    resultgrid = selfgrid.subSlice(
+                        *gridslices,
+                        **{'forceaxes': newaxes})
 
         resultArray = self._returnArray(d, squeeze, singles=singles)
         if self.isEncoded():
-            resultArray  = self.decode(resultArray)
+            resultArray = self.decode(resultArray)
             newmissing = resultArray.fill_value
         else:
             newmissing = self.getMissing()
 
         if raweasy:
             return resultArray
-        elif len(axes)>0:
+        elif len(axes) > 0:
 
             # If forcing use of input axes, make sure they are not copied.
-            # Same if the grid is not rectilinear - this is when forceaxes is set.
+            # Same if the grid is not rectilinear - this is when forceaxes is
+            # set.
             copyaxes = (forceaxes is None) and (resultgrid is None)
-            result = TransientVariable(resultArray, 
-                                     copy=0,
-                                     fill_value = newmissing,
-                                     axes=axes,
-                                     copyaxes = copyaxes,
-                                     grid = resultgrid,
-                                     attributes=self.attributes,
-                                     id = self.id)
+            result = TransientVariable(resultArray,
+                                       copy=0,
+                                       fill_value=newmissing,
+                                       axes=axes,
+                                       copyaxes=copyaxes,
+                                       grid=resultgrid,
+                                       attributes=self.attributes,
+                                       id=self.id)
             if grid is not None:
                 order2 = grid.getOrder()
                 if order is None:
                     order = order2
                 elif order != order2:
-                    raise CDMSError, 'grid, order options not compatible.'
+                    raise CDMSError('grid, order options not compatible.')
             result = result.reorder(order).regrid(grid)
             if raw == 0:
                 return result
@@ -627,7 +653,7 @@ class AbstractVariable(CdmsObj, Slab):
         else:               # Return numpy.ma for zero rank, so that __cmp__ works.
             return resultArray
 
-    def getSlice (self, *specs, **keys):
+    def getSlice(self, *specs, **keys):
         """x.getSlice takes arguments of the following forms and produces
            a return array. The keyword argument squeeze determines whether
            or not the shape of the returned array contains dimensions whose
@@ -652,21 +678,21 @@ class AbstractVariable(CdmsObj, Slab):
            of that dimension, as in normal Python indexing.
         """
         # Turn on squeeze and raw options by default.
-        keys['numericSqueeze'] = keys.get('numericSqueeze',0)
-        keys['squeeze'] = keys.get('squeeze',1-keys['numericSqueeze'])
-        keys['raw'] = keys.get('raw',1)
+        keys['numericSqueeze'] = keys.get('numericSqueeze', 0)
+        keys['squeeze'] = keys.get('squeeze', 1 - keys['numericSqueeze'])
+        keys['raw'] = keys.get('raw', 1)
         keys['order'] = keys.get('order', None)
         keys['grid'] = keys.get('grid', None)
         isitem = keys.get('isitem', 0)
         result = self.subSlice(*specs, **keys)
 
         # return a scalar for 0-D slices
-        if isitem and result.size==1 and (not _numeric_compatibility) and not result.mask.item():
+        if isitem and result.size == 1 and (not _numeric_compatibility) and not result.mask.item():
             result = result.item()
         return result
 
     def expertSlice(self, slicelist):
-        raise CDMSError, NotImplemented + 'expertSlice'
+        raise CDMSError(NotImplemented + 'expertSlice')
 
     def getRegion(self, *specs, **keys):
         """getRegion
@@ -702,65 +728,67 @@ class AbstractVariable(CdmsObj, Slab):
         """
 
         # By default, squeeze and raw options are on
-        keys['squeeze'] = keys.get ('squeeze', 1)
-        keys['raw'] = keys.get('raw',1)
+        keys['squeeze'] = keys.get('squeeze', 1)
+        keys['raw'] = keys.get('raw', 1)
         keys['order'] = keys.get('order', None)
         keys['grid'] = keys.get('grid', None)
         return self.subRegion(*specs, **keys)
 
-    def subRegion (self, *specs, **keys):
+    def subRegion(self, *specs, **keys):
 
-        speclist = self._process_specs (specs, keys)
-        slicelist = self.reg_specs2slices (speclist)
+        speclist = self._process_specs(specs, keys)
+        slicelist = self.reg_specs2slices(speclist)
 
-        squeeze = keys.get ('squeeze', 0)
-        raw = keys.get('raw',0)
+        squeeze = keys.get('squeeze', 0)
+        raw = keys.get('raw', 0)
         order = keys.get('order', None)
         grid = keys.get('grid', None)
-        raweasy = raw==1 and order is None and grid is None
+        raweasy = raw == 1 and order is None and grid is None
         if grid is not None and order is None:
             order = grid.getOrder()
 
-
         # Check if any slice wraps around.
 
         wrapdim = -1
-        
+
         axes = []
 
         circulardim = None
-        
+
         for idim in range(len(slicelist)):
             item = slicelist[idim]
             axis = self.getAxis(idim)
             axislen = len(axis)
 
-            if(axis.isCircular()): circulardim=idim
+            if(axis.isCircular()):
+                circulardim = idim
 
-            wraptest1 = ( axis.isCircular() and speclist[idim] != unspecified)
+            wraptest1 = (axis.isCircular() and speclist[idim] != unspecified)
             start, stop = item.start, item.stop
-            wraptest2 = not ((start is None or (0<=start<axislen)) and (stop is None or (0<=stop<=axislen)))
+            wraptest2 = not ((start is None or (0 <= start < axislen)) and (
+                stop is None or (0 <= stop <= axislen)))
 
-            if ( wraptest1 and wraptest2):
+            if (wraptest1 and wraptest2):
                 if wrapdim >= 0:
-                    raise CDMSError, "Too many dimensions wrap around."
+                    raise CDMSError("Too many dimensions wrap around.")
                 wrapdim = idim
                 break
-                    
+
         else:
 
             # No wraparound, just read the data
 
-            # redo the speclist -> slice if passed circular test but not wrapped test
+            # redo the speclist -> slice if passed circular test but not
+            # wrapped test
 
             if(circulardim is not None):
-                slicelist = self.reg_specs2slices (speclist,force=circulardim)
-                
-            d = {'raw':raw, 
-                 'squeeze':squeeze,
-                 'order':order,
-                 'grid':grid,
-                }
+                slicelist = self.reg_specs2slices(speclist, force=circulardim)
+
+            d = {'raw': raw,
+                 'squeeze': squeeze,
+                 'order': order,
+                 'grid': grid,
+                 }
             return self.subSlice(*slicelist, **d)
 
         #
@@ -775,11 +803,11 @@ class AbstractVariable(CdmsObj, Slab):
         # shift the wrap slice to the positive side and calc number of cycles shifted
         #
 
-        wb=wrapslice.start
-        we=wrapslice.stop
-        ws=wrapslice.step
-        size=length
-        cycle=self.getAxis(wrapdim).getModulo()
+        wb = wrapslice.start
+        we = wrapslice.stop
+        ws = wrapslice.step
+        size = length
+        cycle = self.getAxis(wrapdim).getModulo()
 
         #
         # ncycle:
@@ -789,92 +817,93 @@ class AbstractVariable(CdmsObj, Slab):
         # ncyclesrev:
         #    resetting the world coordinate for reversed direction
         #
-        
-        ncycles=0
-        ncyclesrev=0
-
-        if(ws>0):
-
-            if(wb>0):
-                ncycles=1
-                while(wb>=0):
-                    wb=wb-size
-                    we=we-size
-                    ncycles=ncycles-1
+
+        ncycles = 0
+        ncyclesrev = 0
+
+        if(ws > 0):
+
+            if(wb > 0):
+                ncycles = 1
+                while(wb >= 0):
+                    wb = wb - size
+                    we = we - size
+                    ncycles = ncycles - 1
             else:
-                ncycles=0
-                while(wb<0):
-                    wb=wb+size
-                    we=we+size
-                    ncycles=ncycles+1
-                    
+                ncycles = 0
+                while(wb < 0):
+                    wb = wb + size
+                    we = we + size
+                    ncycles = ncycles + 1
+
             if(wb < 0):
-                wb=wb+size
-                we=we+size
-                
+                wb = wb + size
+                we = we + size
+
         #  reversed direction
-        
+
         else:
 
             # do the ncycles for resetting world coordinate
-            wbrev=wb
-            werev=we
-            werevNoneTest=0
+            wbrev = wb
+            werev = we
+            werevNoneTest = 0
             if(werev is None):
-                werev=0
-                werevNoneTest=1
+                werev = 0
+                werevNoneTest = 1
 
-            ncycleRevStart=1
+            ncycleRevStart = 1
             if(wbrev > 0):
-                ncyclesrev=ncycleRevStart
-                while(wbrev>=0):
-                    wbrev=wbrev-size
-                    werev=werev-size
-                    ncyclesrev=ncyclesrev-1
+                ncyclesrev = ncycleRevStart
+                while(wbrev >= 0):
+                    wbrev = wbrev - size
+                    werev = werev - size
+                    ncyclesrev = ncyclesrev - 1
             else:
-                ncyclesrev=0
-                while(wbrev<0):
-                    wbrev=wbrev+size
-                    werev=werev+size
-                    ncyclesrev=ncyclesrev+1
+                ncyclesrev = 0
+                while(wbrev < 0):
+                    wbrev = wbrev + size
+                    werev = werev + size
+                    ncyclesrev = ncyclesrev + 1
 
             while(werev < 0):
-                wbrev=wbrev+size
-                werev=werev+size
+                wbrev = wbrev + size
+                werev = werev + size
 
             # number of cycles to make the slice positive
-            while( we<0 and we != None ):
-                wb=wb+size
-                we=we+size
-                ncycles=ncycles+1
-
-            wb=wbrev
-            we=werev
-            if(werevNoneTest): we=None
-            
-        wrapslice=slice(wb,we,ws)
-        
+            while(we is not None and we < 0):
+                wb = wb + size
+                we = we + size
+                ncycles = ncycles + 1
+
+            wb = wbrev
+            we = werev
+            if(werevNoneTest):
+                we = None
+
+        wrapslice = slice(wb, we, ws)
+
         #
         #  calc the actual positive slices and create data array
         #
 
-        donew=1
+        donew = 1
 
         if(donew):
 
             wraps = splitSliceExt(wrapslice, length)
 
-            for kk in range(0,len(wraps)):
-                sl=wraps[kk]
-                
+            for kk in range(0, len(wraps)):
+                sl = wraps[kk]
+
                 slicelist[wrapdim] = sl
 
                 if(kk == 0):
                     ar1 = self.getSlice(squeeze=0, *slicelist)
-                    result=ar1
+                    result = ar1
                 else:
                     ar2 = self.getSlice(squeeze=0, *slicelist)
-                    result = numpy.ma.concatenate((result,ar2),axis=wrapdim)
+                    result = numpy.ma.concatenate((result, ar2), axis=wrapdim)
 
         else:
 
@@ -883,8 +912,7 @@ class AbstractVariable(CdmsObj, Slab):
             ar1 = self.getSlice(squeeze=0, *slicelist)
             slicelist[wrapdim] = wrap2
             ar2 = self.getSlice(squeeze=0, *slicelist)
-            result = numpy.ma.concatenate((ar1,ar2),axis=wrapdim)
-
+            result = numpy.ma.concatenate((ar1, ar2), axis=wrapdim)
 
         if raweasy:
             return self._returnArray(result, squeeze)
@@ -895,26 +923,28 @@ class AbstractVariable(CdmsObj, Slab):
         #
         #----------------------------------------------------------------------
 
-        wrapspec=speclist[wrapdim]
-        
+        wrapspec = speclist[wrapdim]
+
         axes = []
         for i in range(self.rank()):
-            if squeeze and numpy.ma.size(result, i) == 1: continue
+            if squeeze and numpy.ma.size(result, i) == 1:
+                continue
 
             sl = slicelist[i]
 
             if i == wrapdim:
 
                 axis = self.getAxis(i).subAxis(wb, we, ws)
-                
+
                 if(ws > 0):
-                    delta_beg_wrap_dimvalue = ncycles*cycle
+                    delta_beg_wrap_dimvalue = ncycles * cycle
                 else:
-                    delta_beg_wrap_dimvalue = ncyclesrev*cycle
+                    delta_beg_wrap_dimvalue = ncyclesrev * cycle
 
                 axis.setBounds(axis.getBounds() - delta_beg_wrap_dimvalue)
-                
-                axis[:]= (axis[:] - delta_beg_wrap_dimvalue).astype(axis.typecode())
+
+                axis[:] = (axis[:] - delta_beg_wrap_dimvalue).astype(
+                    axis.typecode())
 
             else:
                 axis = self.getAxis(i).subaxis(sl.start, sl.stop, sl.step)
@@ -922,18 +952,18 @@ class AbstractVariable(CdmsObj, Slab):
 
         result = self._returnArray(result, squeeze)
         result = TransientVariable(result,
-                                 copy=0, 
-                                 fill_value = self.missing_value,
-                                 axes=axes,
-                                 attributes=self.attributes,
-                                 id = self.id)
+                                   copy=0,
+                                   fill_value=self.missing_value,
+                                   axes=axes,
+                                   attributes=self.attributes,
+                                   id=self.id)
         if grid is not None:
             order2 = grid.getOrder()
             if order is None:
                 order = order2
             elif order != order2:
-                raise CDMSError, 'grid, order options not compatible.'
-            
+                raise CDMSError('grid, order options not compatible.')
+
         result = result.reorder(order).regrid(grid)
         if raw == 0:
             return result
@@ -943,22 +973,22 @@ class AbstractVariable(CdmsObj, Slab):
     def getValue(self, squeeze=1):
         """Return the entire set of values."""
         return self.getSlice(Ellipsis, squeeze=squeeze)
-    
-    def assignValue(self,data):
-        raise CDMSError, NotImplemented + 'assignValue'
 
-    def reorder (self, order):
+    def assignValue(self, data):
+        raise CDMSError(NotImplemented + 'assignValue')
+
+    def reorder(self, order):
         """return self reordered per the specification order"""
-        if order is None: 
+        if order is None:
             return self
         axes = self.getAxisList()
         permutation = order2index(axes, order)
         if permutation == range(len(axes)):
             return self
-        return MV.transpose (self, permutation)
+        return MV.transpose(self, permutation)
 
-    def regrid (self, togrid, missing=None, order=None, mask=None, **keywords):
-        """return self regridded to the new grid.  
+    def regrid(self, togrid, missing=None, order=None, mask=None, **keywords):
+        """return self regridded to the new grid.
         One can use the regrid2.Regridder optional arguments as well.
 
         Example:
@@ -973,48 +1003,49 @@ class AbstractVariable(CdmsObj, Slab):
         @param keywords optional keyword arguments dependent on regridTool
         @return regridded variable
         """
-        # there is a circular dependency between cdms2 and regrid2. In 
+        # there is a circular dependency between cdms2 and regrid2. In
         # principle, cdms2 files should not import regrid2, we're bending
         # rules here...
         import regrid2
         from regrid2 import Horizontal
 
-        if togrid is None: 
+        if togrid is None:
             return self
         else:
 
-            fromgrid = self.getGrid() # this returns the horizontal grid only
+            fromgrid = self.getGrid()  # this returns the horizontal grid only
 
             # default w/o bounds
-            regridTool = 'libcf'   
+            regridTool = 'libcf'
             regridMethod = 'linear'
 
-            if self.getAxis(-1).attributes.has_key('topology'):
+            if 'topology' in self.getAxis(-1).attributes:
                 if self.getAxis(-1).attributes['topology'] == 'circular':
                     # for the ESMF regridders
-                    keywords['periodicity'] = guessPeriodicity(self.getAxis(-1).getBounds())
+                    keywords['periodicity'] = guessPeriodicity(
+                        self.getAxis(-1).getBounds())
                     keywords['mkCyclic'] = 1    # for LibCF regridder
 
             # check if there are bounds and we have esmf
-            if fromgrid.getBounds() is not None and hasattr(regrid2,"ESMFRegrid"):
+            if fromgrid.getBounds() is not None and hasattr(regrid2, "ESMFRegrid"):
                 regridTool = 'esmf'
                 regridMethod = 'linear'
                 # Hum ok if only 1 longitude regrid fails, let's check
-                if len(togrid.getLongitude())==1:
-                  # esmf can't deal with this
-                  regridTool   = "regrid2"
+                if len(togrid.getLongitude()) == 1:
+                    # esmf can't deal with this
+                    regridTool = "regrid2"
 
             # let user override
             userSpecifiesMethod = False
             for rm in 'rm', 'method', 'regridmethod', 'regrid_method', 'regridMethod':
-                if keywords.has_key(rm):
+                if rm in keywords:
                     regridMethod = keywords[rm]
                     del keywords[rm]
                     userSpecifiesMethod = True
 
             userSpecifiesTool = False
             for rt in 'rt', 'tool', 'regridtool', 'regrid_tool', 'regridTool':
-                if keywords.has_key(rt):
+                if rt in keywords:
                     regridTool = keywords[rt]
                     del keywords[rt]
                     userSpecifiesTool = True
@@ -1027,8 +1058,8 @@ class AbstractVariable(CdmsObj, Slab):
 
             # make sure the tool can do it
             if re.search('^regrid', regridTool, re.I) is not None and \
-                    (  len(fromgrid.getLatitude().shape) > 1 or \
-                         len(togrid.getLatitude().shape) > 1  ):
+                    (len(fromgrid.getLatitude().shape) > 1 or
+                     len(togrid.getLatitude().shape) > 1):
                 message = """
 avariable.regrid: regrid2 cannot do curvilinear, will switch to esmf..."
                 """
@@ -1038,7 +1069,7 @@ avariable.regrid: regrid2 cannot do curvilinear, will switch to esmf..."
             if re.search('esmf', regridTool, re.I):
                 # make sure source grids have bounds
                 haveBounds = True
-                for g in fromgrid,:
+                for g in fromgrid, :
                     for c in g.getLatitude(), g.getLongitude():
                         haveBounds &= (c.getBounds() is not None)
                 if not haveBounds:
@@ -1048,19 +1079,18 @@ avariable.regrid: regridTool = 'esmf' requires bounds for source grid, will swit
                     warnings.warn(message, Warning)
                     regridTool = 'libcf'
                     regridMethod = 'linear'
-                if not hasattr(regrid2,"ESMFRegrid"):
-                  message = """
+                if not hasattr(regrid2, "ESMFRegrid"):
+                    message = """
 avariable.regrid: regridTool = 'esmf' but your version does not seems to be built with esmf, will switch to regridTool = 'libcf'
                   """
-                  warnings.warn(message, Warning)
-                  regridTool = 'libcf'
-                  regridMethod = 'linear'
-
+                    warnings.warn(message, Warning)
+                    regridTool = 'libcf'
+                    regridMethod = 'linear'
 
             if re.search('conserv', regridMethod, re.I):
                 # make sure destination grid has bounds
                 haveBounds = True
-                for g in togrid,:
+                for g in togrid, :
                     for c in g.getLatitude(), g.getLongitude():
                         haveBounds &= (c.getBounds() is not None)
                 if not haveBounds:
@@ -1074,47 +1104,48 @@ avariable.regrid: regridMethod = 'conserve' requires bounds for destination grid
                 message = """
 avariable.regrid: We chose regridTool = %s for you among the following choices:
    Tools ->    'regrid2' (old behavior)
-               'esmf' (conserve, patch, linear) or 
+               'esmf' (conserve, patch, linear) or
                'libcf' (linear)""" % regridTool
                 warnings.warn(message, Warning)
 
             if not userSpecifiesMethod and re.search('^regrid', regridTool, re.I) is None:
                 message = """
-avariable.regrid: We chose regridMethod = %s for you among the following choices: 
+avariable.regrid: We chose regridMethod = %s for you among the following choices:
     'conserve' or 'linear' or 'patch'""" % regridMethod
                 warnings.warn(message, Warning)
 
             if re.search('^regrid', regridTool, re.I):
-                if keywords.has_key('diag') and \
+                if 'diag' in keywords and \
                         isinstance(keywords['diag'], dict):
                     keywords['diag']['regridTool'] = 'regrid'
 
                 # the original cdms2 regridder
                 regridf = Horizontal(fromgrid, togrid)
-                return regridf(self, missing=missing, order=order, 
+                return regridf(self, missing=missing, order=order,
                                mask=mask, **keywords)
 
             # emsf or libcf...
 
             srcGridMask = None
             # set the source mask if a mask is defined with the source data
-            if numpy.any(self.mask == True):
+            if numpy.any(self.mask):
                 srcGridMask = getMinHorizontalMask(self)
 
             # compute the interpolation weights
-            ro = CdmsRegrid(fromgrid, togrid, 
-                            dtype = self.dtype,
-                            regridMethod = regridMethod,
-                            regridTool = regridTool,
-                            srcGridMask = srcGridMask, 
-                            srcGridAreas = None,
-                            dstGridMask = None,
-                            dstGridAreas = None,
+            ro = CdmsRegrid(fromgrid, togrid,
+                            dtype=self.dtype,
+                            regridMethod=regridMethod,
+                            regridTool=regridTool,
+                            srcGridMask=srcGridMask,
+                            srcGridAreas=None,
+                            dstGridMask=None,
+                            dstGridAreas=None,
                             **keywords)
             # now interpolate
             return ro(self, **keywords)
 
-    def pressureRegrid (self, newLevel, missing=None, order=None, method="log"):
+    def pressureRegrid(
+            self, newLevel, missing=None, order=None, method="log"):
         """Return the variable regridded to new pressure levels.
         The variable should be a function of lat, lon, pressure, and (optionally) time.
         <newLevel> is an axis of the result pressure levels.
@@ -1126,12 +1157,13 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
 
         fromlevel = self.getLevel()
         if fromlevel is None:
-            raise CDMSError, 'No pressure level'
+            raise CDMSError('No pressure level')
         pregridf = PressureRegridder(fromlevel, newLevel)
         result = pregridf(self, missing=missing, order=order, method=method)
         return result
 
-    def crossSectionRegrid(self, newLevel, newLatitude, missing=None, order=None, method="log"):
+    def crossSectionRegrid(
+            self, newLevel, newLatitude, missing=None, order=None, method="log"):
         """Return the variable regridded to new pressure levels and latitudes.
         The variable should be a function of lat, level, and (optionally) time.
         <newLevel> is an axis of the result pressure levels.
@@ -1145,14 +1177,18 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
         fromlevel = self.getLevel()
         fromlat = self.getLatitude()
         if fromlevel is None:
-            raise CDMSError, 'No pressure level'
+            raise CDMSError('No pressure level')
         if fromlat is None:
-            raise CDMSError, 'No latitude level'
-        xregridf = CrossSectionRegridder(fromlat, newLatitude, fromlevel, newLevel)
+            raise CDMSError('No latitude level')
+        xregridf = CrossSectionRegridder(
+            fromlat,
+            newLatitude,
+            fromlevel,
+            newLevel)
         result = xregridf(self, missing=missing, order=order, method=method)
         return result
 
-    def _process_specs (self, specs, keys):
+    def _process_specs(self, specs, keys):
         """Process the arguments for a getSlice, getRegion, etc.
            Returns an array of specifications for all dimensions.
            Any Ellipsis has been eliminated.
@@ -1163,36 +1199,39 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
         if Ellipsis in specs:
             nellipses = 1
         elif numpy.newaxis in specs:
-            raise CDMSError, 'Sorry, you cannot use NewAxis in this context ' + str(specs)
+            raise CDMSError(
+                'Sorry, you cannot use NewAxis in this context ' + str(specs))
         else:
             nellipses = 0
-        if nsupplied-nellipses > myrank:
-            raise CDMSError, InvalidRegion + \
-              "too many dimensions: %d, for variable %s"%(len(specs),self.id)
+        if nsupplied - nellipses > myrank:
+            raise CDMSError(InvalidRegion +
+                            "too many dimensions: %d, for variable %s" % (len(specs), self.id))
 
-        speclist = [unspecified]*myrank
+        speclist = [unspecified] * myrank
         i = 0
         j = 0
         while i < nsupplied:
             if specs[i] is Ellipsis:
-               j = myrank  - (nsupplied - (i+1)) 
+                j = myrank - (nsupplied - (i + 1))
             else:
-               speclist[j] = specs[i]
-               j = j + 1
+                speclist[j] = specs[i]
+                j = j + 1
             i = i + 1
 
         for k, v in keys.items():
-            if k in ['squeeze','raw','grid','order']: 
+            if k in ['squeeze', 'raw', 'grid', 'order']:
                 continue
             i = self.getAxisIndex(k)
             if i >= 0:
                 if speclist[i] is not unspecified:
-                    raise CDMSError, 'Conflict between specifier %s and %s'%(`speclist[i]`,`keys`)
+                    raise CDMSError(
+                        'Conflict between specifier %s and %s' %
+                        (repr(speclist[i]), repr(keys)))
                 speclist[i] = v
 
         return speclist
 
-    def _single_specs (self, specs):
+    def _single_specs(self, specs):
         """Return a list of dimension indices where the spec is an index."""
         myrank = self.rank()
         nsupplied = len(specs)
@@ -1201,7 +1240,7 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
         singles = []
         while i < nsupplied:
             if specs[i] is Ellipsis:
-                j = myrank  - (nsupplied - (i+1)) 
+                j = myrank - (nsupplied - (i + 1))
             else:
                 if isinstance(specs[i], int):
                     singles.append(j)
@@ -1209,7 +1248,7 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
             i = i + 1
         return singles
 
-    def specs2slices (self, speclist, force=None):
+    def specs2slices(self, speclist, force=None):
         """Create an equivalent list of slices from an index specification
            An index specification is a list of acceptable items, which are
            -- an integer
@@ -1220,22 +1259,22 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
            The size of the speclist must be self.rank()
         """
         if len(speclist) != self.rank():
-            raise CDMSError, "Incorrect length of speclist in specs2slices."
+            raise CDMSError("Incorrect length of speclist in specs2slices.")
         slicelist = []
         for i in range(self.rank()):
             key = speclist[i]
             if isinstance(key, int):  # x[i]
-                slicelist.append (slice(key,key+1))
-            elif isinstance(key, slice): # x[i:j:k]
+                slicelist.append(slice(key, key + 1))
+            elif isinstance(key, slice):  # x[i:j:k]
                 slicelist.append(key)
             elif key is unspecified or key is None or key == ':':
-                slicelist.append (slice(0, len(self.getAxis(i))))
+                slicelist.append(slice(0, len(self.getAxis(i))))
             elif key is Ellipsis:
-                raise CDMSError, "Misuse of ellipsis in specification."
+                raise CDMSError("Misuse of ellipsis in specification.")
             elif isinstance(key, tuple):
                 slicelist.append(slice(*key))
             else:
-                raise CDMSError, 'invalid index: %s'% str(key)
+                raise CDMSError('invalid index: %s' % str(key))
         # Change default or negative start, stop to positive
         for i in range(self.rank()):
             axis = self.getAxis(i)
@@ -1246,86 +1285,106 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
             #
             # allow negative indices in a wrapped (isCircular() = 1) axis
             #
-            circular=(axis.isCircular() and force is None)
+            circular = (axis.isCircular() and force is None)
 
             altered = 0
-            if step is None: 
+            if step is None:
                 altered = 1
-                step=1
+                step = 1
 
-            if ( ( start is None or stop is None or start<0 or stop<0 ) and ( circular == 0 ) ):
+            if ((start is None or stop is None or start < 0 or stop < 0) and (circular == 0)):
                 altered = 1
                 adjustit = 1
-                if step>0:
-                    if start is None: 
-                        start=0
-                    if stop is None: 
-                        stop=length
-                    if start==-1 and stop==0:
-                        stop=length
+                if step > 0:
+                    if start is None:
+                        start = 0
+                    if stop is None:
+                        stop = length
+                    if start == -1 and stop == 0:
+                        stop = length
                 else:
-                    if start is None: 
-                        start=length-1
+                    if start is None:
+                        start = length - 1
                     if stop is None:
                         # stop=-1
                         adjustit = 0
-                if start<0: 
-                    start=start%length
-                if stop<0 and adjustit: 
-                    stop=stop%length
-            if altered: 
+                if start < 0:
+                    start = start % length
+                if stop < 0 and adjustit:
+                    stop = stop % length
+            if altered:
                 slicelist[i] = slice(start, stop, step)
         return slicelist
 
-    def reg_specs2slices(self, initspeclist,force=None):
+    def reg_specs2slices(self, initspeclist, force=None):
 
         # Don't use input to store return value
-        speclist=copy.copy(initspeclist)
+        speclist = copy.copy(initspeclist)
 
         for i in range(self.rank()):
             item = speclist[i]
             if isinstance(item, slice):
                 newitem = item
-            elif item==':' or item is None or item is unspecified:
+            elif item == ':' or item is None or item is unspecified:
                 axis = self.getAxis(i)
-                newitem = slice(0,len(axis))
+                newitem = slice(0, len(axis))
             elif isinstance(item, list) or \
-                 isinstance(item, tuple):
+                    isinstance(item, tuple):
                 axis = self.getAxis(i)
-                if len(item)==2:        # (start,end)
+                if len(item) == 2:        # (start,end)
                     indexInterval = axis.mapIntervalExt(item)
-                elif len(item)==3:      # (start,end,'xxx')
-                    coordInterval = (item[0],item[1])
-                    indexInterval = axis.mapIntervalExt(coordInterval,item[2])
-                elif len(item)==4:
-                    coordInterval = (item[0],item[1])
-                    indexInterval = axis.mapIntervalExt(coordInterval,item[2],item[3])
-                elif len(item)==5:
-                    coordInterval = (item[0],item[1])
-                    indexInterval = axis.mapIntervalExt(coordInterval,item[2],item[3],item[4])
-                elif len(item)==6:
-                    coordInterval = (item[0],item[1])
-                    indexInterval = axis.mapIntervalExt(coordInterval,item[2],item[3],item[4],item[5])
+                elif len(item) == 3:      # (start,end,'xxx')
+                    coordInterval = (item[0], item[1])
+                    indexInterval = axis.mapIntervalExt(coordInterval, item[2])
+                elif len(item) == 4:
+                    coordInterval = (item[0], item[1])
+                    indexInterval = axis.mapIntervalExt(
+                        coordInterval, item[2], item[3])
+                elif len(item) == 5:
+                    coordInterval = (item[0], item[1])
+                    indexInterval = axis.mapIntervalExt(
+                        coordInterval,
+                        item[2],
+                        item[3],
+                        item[4])
+                elif len(item) == 6:
+                    coordInterval = (item[0], item[1])
+                    indexInterval = axis.mapIntervalExt(
+                        coordInterval,
+                        item[2],
+                        item[3],
+                        item[4],
+                        item[5])
                 else:
-                    raise CDMSError, InvalidRegion + "invalid format for coordinate interval: %s"%str(item)
+                    raise CDMSError(
+                        InvalidRegion + "invalid format for coordinate interval: %s" %
+                        str(item))
                 if indexInterval is None:
-                    raise CDMSError, OutOfRange + str(item)
-                newitem = slice(indexInterval[0],indexInterval[1],indexInterval[2])
+                    raise CDMSError(OutOfRange + str(item))
+                newitem = slice(
+                    indexInterval[0],
+                    indexInterval[1],
+                    indexInterval[2])
             elif isinstance(item, (numpy.floating, float, numpy.integer, int, long, basestring)) or type(item) in CdtimeTypes:
                 axis = self.getAxis(i)
                 #
                 # default is 'ccn' in axis.mapIntervalExt
                 #
-                indexInterval = axis.mapIntervalExt((item,item))
+                indexInterval = axis.mapIntervalExt((item, item))
                 if indexInterval is None:
-                    raise CDMSError, OutOfRange + str(item)
-                newitem = slice(indexInterval[0],indexInterval[1],indexInterval[2])
+                    raise CDMSError(OutOfRange + str(item))
+                newitem = slice(
+                    indexInterval[0],
+                    indexInterval[1],
+                    indexInterval[2])
             else:
-                raise CDMSError, InvalidRegion + "invalid format for coordinate interval: %s"%str(item)
+                raise CDMSError(
+                    InvalidRegion + "invalid format for coordinate interval: %s" %
+                    str(item))
 
             speclist[i] = newitem
 
-        slicelist = self.specs2slices(speclist,force)
+        slicelist = self.specs2slices(speclist, force)
         return slicelist
 
     def _decodedType(self):
@@ -1340,7 +1399,7 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
 
     def isEncoded(self):
         "True iff self is represented as packed data."
-        return (hasattr(self,"scale_factor") or hasattr(self,"add_offset"))
+        return (hasattr(self, "scale_factor") or hasattr(self, "add_offset"))
 
     def decode(self, ar):
         "Decode compressed data. ar is a masked array, scalar, or numpy.ma.masked"
@@ -1348,18 +1407,20 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
         if hasattr(self, 'scale_factor'):
             scale_factor = self.scale_factor
         else:
-            scale_factor = numpy.array([1.0],resulttype)
-            
+            scale_factor = numpy.array([1.0], resulttype)
+
         if hasattr(self, 'add_offset'):
             add_offset = self.add_offset
         else:
-            add_offset = numpy.array([0.0],resulttype)
-            
+            add_offset = numpy.array([0.0], resulttype)
+
         if ar is not numpy.ma.masked:
-            result = scale_factor*ar + add_offset
-            if isinstance(result,numpy.ma.MaskedArray):
+            result = scale_factor * ar + add_offset
+            if isinstance(result, numpy.ma.MaskedArray):
                 result = result.astype(resulttype)
-                numpy.ma.set_fill_value(result, numpy.ma.default_fill_value(0.))
+                numpy.ma.set_fill_value(
+                    result,
+                    numpy.ma.default_fill_value(0.))
             else:
                 tmp = numpy.array(result)
                 result = tmp.astype(resulttype)[0]
@@ -1381,7 +1442,9 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
                         result.append(j)
                         break
                 else:
-                    raise CDMSError, 'Variable and grid do not share common dimensions: %s'%self.id
+                    raise CDMSError(
+                        'Variable and grid do not share common dimensions: %s' %
+                        self.id)
 
         return tuple(result)
 
@@ -1391,106 +1454,109 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
         if isinstance(key, tuple):
             speclist = self._process_specs(key, {})
         else:
-            if isinstance(key, int) and key>=len(self):
-                raise IndexError, "Index too large: %d"%key
+            if isinstance(key, int) and key >= len(self):
+                raise IndexError("Index too large: %d" % key)
             speclist = self._process_specs([key], {})
 
         # Note: raw=0 ensures that a TransientVariable is returned
         return self.getSlice(numericSqueeze=1, raw=0, isitem=1, *speclist)
-        
+
     def __getslice__(self, low, high):
 
         # Note: raw=0 ensures that a TransientVariable is returned
-        return self.getSlice (slice(low, high), numericSqueeze = 1, raw=0)
+        return self.getSlice(slice(low, high), numericSqueeze=1, raw=0)
 
     def typecode(self):
-        raise CDMSError, NotImplemented + 'typecode'
+        raise CDMSError(NotImplemented + 'typecode')
 
-    def __abs__(self): 
+    def __abs__(self):
         return MV.absolute(self)
 
-    def __neg__(self): 
+    def __neg__(self):
         return MV.negative(self)
 
     def __add__(self, other):
         return MV.add(self, other)
-                        
+
     __radd__ = __add__
 
-    def __lshift__ (self, n):
+    def __lshift__(self, n):
         return MV.left_shift(self, n)
 
-    def __rshift__ (self, n):
+    def __rshift__(self, n):
         return MV.right_shift(self, n)
-                        
-    def __sub__(self, other): 
+
+    def __sub__(self, other):
         return MV.subtract(self, other)
 
-    def __rsub__(self, other): 
+    def __rsub__(self, other):
         return MV.subtract(other, self)
 
     def __mul__(self, other):
         return MV.multiply(self, other)
-    
+
     __rmul__ = __mul__
 
-    def __div__(self, other): 
+    def __div__(self, other):
         return MV.divide(self, other)
 
-    def __rdiv__(self, other): 
+    def __rdiv__(self, other):
         return MV.divide(other, self)
 
-    def __pow__(self,other, third=None): 
+    def __pow__(self, other, third=None):
         return MV.power(self, other, third)
 
-    def __iadd__(self, other): 
+    def __iadd__(self, other):
         "Add other to self in place."
         return MV.add(self, other)   # See if we can improve these later.
 
-    def __isub__(self, other): 
+    def __isub__(self, other):
         "Subtract other from self in place."
         return MV.subtract(self, other)   # See if we can improve these later.
 
-    def __imul__(self, other): 
+    def __imul__(self, other):
         "Multiply self by other in place."
         return MV.multiply(self, other)   # See if we can improve these later.
 
-    def __idiv__(self, other): 
+    def __idiv__(self, other):
         "Divide self by other in place."
         return MV.divide(self, other)   # See if we can improve these later.
 
-    def __eq__(self,other): 
-        return MV.equal(self,other)
+    def __eq__(self, other):
+        return MV.equal(self, other)
 
-    def __ne__(self,other): 
-        return MV.not_equal(self,other)
+    def __ne__(self, other):
+        return MV.not_equal(self, other)
 
-    def __lt__(self,other): 
-        return MV.less(self,other)
+    def __lt__(self, other):
+        return MV.less(self, other)
 
-    def __le__(self,other): 
-        return MV.less_equal(self,other)
+    def __le__(self, other):
+        return MV.less_equal(self, other)
 
-    def __gt__(self,other): 
-        return MV.greater(self,other)
+    def __gt__(self, other):
+        return MV.greater(self, other)
 
-    def __ge__(self,other): 
-        return MV.greater_equal(self,other)
+    def __ge__(self, other):
+        return MV.greater_equal(self, other)
 
-    def __sqrt__(self): 
+    def __sqrt__(self):
         return MV.sqrt(self)
 
-    def astype (self, tc):
+    def astype(self, tc):
         "return self as array of given type."
         return self.subSlice().astype(tc)
-    
 
-## internattr.add_internal_attribute(AbstractVariable, 'id', 'parent')   
-#PropertiedClasses.set_property(AbstractVariable, 'missing_value', acts=AbstractVariable._setmissing, nodelete=1)
+
+# internattr.add_internal_attribute(AbstractVariable, 'id', 'parent')
+# PropertiedClasses.set_property(AbstractVariable, 'missing_value',
+# acts=AbstractVariable._setmissing, nodelete=1)
 
 __rp = r'\s*([-txyz0-9]{1,1}|\(\s*\w+\s*\)|[.]{3,3})\s*'
 __crp = re.compile(__rp)
-def orderparse (order):
+
+
+def orderparse(order):
     """Parse an order string. Returns a list of axes specifiers.
        Order elements can be:
           Letters t, x, y, z meaning time, longitude, latitude, level
@@ -1501,13 +1567,14 @@ def orderparse (order):
           (name) meaning an axis whose id is name
     """
     if not isinstance(order, basestring):
-        raise CDMSError, 'order arguments must be strings.'
+        raise CDMSError('order arguments must be strings.')
     pos = 0
-    result=[]
+    result = []
     order = order.strip()
     while pos < len(order):
         m = __crp.match(order, pos)
-        if m is None: break
+        if m is None:
+            break
         r = m.group(1)
         if r[0] == '(':
             pass
@@ -1520,11 +1587,12 @@ def orderparse (order):
         pos = m.end(0)
 
     if pos != len(order):
-        raise CDMSError, 'Order string "' + order + \
-                          '" malformed, index '+str(pos)
+        raise CDMSError('Order string "' + order +
+                        '" malformed, index ' + str(pos))
     return result
 
-def order2index (axes, order):
+
+def order2index(axes, order):
     """Find the index permutation of axes to match order.
        The argument order is a string.
        Order elements can be:
@@ -1540,22 +1608,23 @@ def order2index (axes, order):
     elif isinstance(order, list):
         result = order
     else:
-        raise CDMSError, 'order2index, order specified of bad type:' + str(type(order))
+        raise CDMSError(
+            'order2index, order specified of bad type:' + str(type(order)))
     n = len(axes)
     ie = n
-    permutation = [None]*n
+    permutation = [None] * n
     j = 0
     pos = 0
     while j < len(result):
         item = result[j]
         if isinstance(item, basestring):
-            if item == 't': 
+            if item == 't':
                 spec = 'time'
-            elif item == 'x': 
+            elif item == 'x':
                 spec = 'longitude'
-            elif item == 'y': 
+            elif item == 'y':
                 spec = 'latitude'
-            elif item == 'z': 
+            elif item == 'z':
                 spec = 'level'
             elif item == '-':
                 pos += 1
@@ -1566,25 +1635,26 @@ def order2index (axes, order):
             for k in range(n):
                 if axisMatches(axes[k], spec):
                     if k in permutation:
-                        raise CDMSError, 'Duplicate item in order %s' % order
+                        raise CDMSError('Duplicate item in order %s' % order)
                     permutation[pos] = k
                     pos += 1
                     break
             else:
-                raise CDMSError, 'No axis matching order spec %s' %str(item)
+                raise CDMSError('No axis matching order spec %s' % str(item))
         elif isinstance(item, int):
             if item in permutation:
-                raise CDMSError, 'Duplicate item in order %s' % order
+                raise CDMSError('Duplicate item in order %s' % order)
             if item >= n:
-                raise CDMSError, 'Index %d out of range in order %s' %\
-                                 (item,order)
+                raise CDMSError('Index %d out of range in order %s' %
+                                (item, order))
             permutation[pos] = item
             pos += 1
         elif item is Ellipsis:
             nleft = len(result) - j - 1
             pos = n - nleft
         else:
-            raise CDMSError, 'List specified for order contains bad item: ' + repr(item)
+            raise CDMSError(
+                'List specified for order contains bad item: ' + repr(item))
         j += 1
 
     for i in range(n):
@@ -1593,7 +1663,7 @@ def order2index (axes, order):
                 if permutation[j] is None:
                     permutation[j] = i
                     break
-    return permutation    
+    return permutation
 
-from tvariable import TransientVariable
-import MV2 as MV
+from .tvariable import TransientVariable
+from . import MV2 as MV
diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index 458096c22..1f9561313 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -1,12 +1,13 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 """
 CDMS Axis objects
 """
 _debug = 0
 std_axis_attributes = ['name', 'units', 'length', 'values', 'bounds']
-import sys, copy
+import sys
+import copy
 import numpy
 # import regrid2._regrid
 import cdmsNode
@@ -16,17 +17,24 @@ from cdmsobj import CdmsObj, Max32int
 from sliceut import reverseSlice, splitSlice, splitSliceExt
 from error import CDMSError
 import forecast
-#import internattr
+# import internattr
 from UserList import UserList
+
+
 class AliasList (UserList):
+
     def __init__(self, alist):
-        UserList.__init__(self,alist)
-    def __setitem__ (self, i, value):
+        UserList.__init__(self, alist)
+
+    def __setitem__(self, i, value):
         self.data[i] = value.lower()
+
     def __setslice(self, i, j, values):
         self.data[i:j] = map(lambda x: x.lower(), values)
+
     def append(self, value):
         self.data.append(value.lower())
+
     def extend(self, values):
         self.data.extend(map("".lower, values))
 
@@ -46,35 +54,35 @@ ReadOnlyAxis = "Axis is read-only: "
 InvalidNCycles = "Invalid number of cycles requested for wrapped dimension: "
 
 ComptimeType = type(cdtime.comptime(0))
-ReltimeType = type(cdtime.reltime(0,"days"))
+ReltimeType = type(cdtime.reltime(0, "days"))
 CdtimeTypes = (ComptimeType, ReltimeType)
 
 # Map between cdtime calendar and CF tags
 calendarToTag = {
-    cdtime.MixedCalendar : 'gregorian',
-    cdtime.NoLeapCalendar : 'noleap',
-    cdtime.GregorianCalendar : 'proleptic_gregorian',
-    cdtime.JulianCalendar : 'julian',
-    cdtime.Calendar360 : '360_day',
-    cdtime.ClimCalendar : 'clim_noncf',
-    cdtime.ClimLeapCalendar : 'climleap_noncf',
-    cdtime.DefaultCalendar : 'gregorian',
-    cdtime.StandardCalendar : 'proleptic_gregorian',
+    cdtime.MixedCalendar: 'gregorian',
+    cdtime.NoLeapCalendar: 'noleap',
+    cdtime.GregorianCalendar: 'proleptic_gregorian',
+    cdtime.JulianCalendar: 'julian',
+    cdtime.Calendar360: '360_day',
+    cdtime.ClimCalendar: 'clim_noncf',
+    cdtime.ClimLeapCalendar: 'climleap_noncf',
+    cdtime.DefaultCalendar: 'gregorian',
+    cdtime.StandardCalendar: 'proleptic_gregorian',
     }
 
 tagToCalendar = {
-    'gregorian' : cdtime.MixedCalendar,
-    'standard' : cdtime.GregorianCalendar,
-    'noleap' : cdtime.NoLeapCalendar,
-    'julian' : cdtime.JulianCalendar,
-    'proleptic_gregorian' : cdtime.GregorianCalendar,
-    '360_day' : cdtime.Calendar360,
-    '360' : cdtime.Calendar360,
-    '365_day' : cdtime.NoLeapCalendar,
-    'clim' : cdtime.ClimCalendar,
-    'clim_noncf' : cdtime.ClimCalendar,
-    'climleap_noncf' : cdtime.ClimLeapCalendar,
-    'climleap' : cdtime.ClimLeapCalendar,
+    'gregorian': cdtime.MixedCalendar,
+    'standard': cdtime.GregorianCalendar,
+    'noleap': cdtime.NoLeapCalendar,
+    'julian': cdtime.JulianCalendar,
+    'proleptic_gregorian': cdtime.GregorianCalendar,
+    '360_day': cdtime.Calendar360,
+    '360': cdtime.Calendar360,
+    '365_day': cdtime.NoLeapCalendar,
+    'clim': cdtime.ClimCalendar,
+    'clim_noncf': cdtime.ClimCalendar,
+    'climleap_noncf': cdtime.ClimLeapCalendar,
+    'climleap': cdtime.ClimLeapCalendar,
     }
 
 # This is not an error message, it is used to detect which things have
@@ -86,64 +94,77 @@ _autobounds = 2                         # Automatically generate axis and grid b
                                         # Modes:
                                         # 0 : off (not bounds generation)
                                         # 1 : on  (generate bounds)
-                                        # 2 : grid (generate bounds for lat/lon grids only)
+                                        # 2 : grid (generate bounds for lat/lon
+                                        # grids only)
 
 # Set autobounds mode to 'on' or 'off'. If on, getBounds will automatically
 # generate boundary information for an axis or grid, if not explicitly defined.
 # If 'off', and no boundary data is explicitly defined, the bounds will NOT
 # be generated; getBounds will return None for the boundaries.
+
+
 def setAutoBounds(mode):
     global _autobounds
-    if mode=='on' or mode==1:
-        _autobounds=1
-    elif mode=='off' or mode==0:
-        _autobounds=0
-    elif mode=='grid' or mode==2:
-        _autobounds=2
+    if mode == 'on' or mode == 1:
+        _autobounds = 1
+    elif mode == 'off' or mode == 0:
+        _autobounds = 0
+    elif mode == 'grid' or mode == 2:
+        _autobounds = 2
+
 
 def getAutoBounds():
     return _autobounds
 
 # Create a transient axis
+
+
 def createAxis(data, bounds=None, id=None, copy=0):
     return TransientAxis(data, bounds, id, copy=copy)
 
 # Generate a Gaussian latitude axis, north-to-south
+
+
 def createGaussianAxis(nlat):
     import regrid2._regrid
 
-    lats,wts,bnds = regrid2._regrid.gridattr(nlat,'gaussian')
-
-    # For odd number of latitudes, gridattr returns 0 in the second half of lats
-    if nlat%2:
-        mid = nlat/2
-        lats[mid+1:] = -lats[:mid][::-1]
-        
-    latBounds = numpy.zeros((nlat,2),numpy.float)
-    latBounds[:,0] = bnds[:-1]
-    latBounds[:,1] = bnds[1:]
-    lat = createAxis(lats,latBounds,id="latitude")
+    lats, wts, bnds = regrid2._regrid.gridattr(nlat, 'gaussian')
+
+    # For odd number of latitudes, gridattr returns 0 in the second half of
+    # lats
+    if nlat % 2:
+        mid = nlat / 2
+        lats[mid + 1:] = -lats[:mid][::-1]
+
+    latBounds = numpy.zeros((nlat, 2), numpy.float)
+    latBounds[:, 0] = bnds[:-1]
+    latBounds[:, 1] = bnds[1:]
+    lat = createAxis(lats, latBounds, id="latitude")
     lat.designateLatitude()
     lat.units = "degrees_north"
     return lat
 
 # Generate an equal-area latitude axis, north-to-south
+
+
 def createEqualAreaAxis(nlat):
     import regrid2._regrid
 
-    lats,wts,bnds = regrid2._regrid.gridattr(nlat,'equalarea')
-    latBounds = numpy.zeros((nlat,2),numpy.float)
-    latBounds[:,0] = bnds[:-1]
-    latBounds[:,1] = bnds[1:]
-    lat = createAxis(lats,latBounds,id="latitude")
+    lats, wts, bnds = regrid2._regrid.gridattr(nlat, 'equalarea')
+    latBounds = numpy.zeros((nlat, 2), numpy.float)
+    latBounds[:, 0] = bnds[:-1]
+    latBounds[:, 1] = bnds[1:]
+    lat = createAxis(lats, latBounds, id="latitude")
     lat.designateLatitude()
     lat.units = "degrees_north"
     return lat
 
 # Generate a uniform latitude axis
+
+
 def createUniformLatitudeAxis(startLat, nlat, deltaLat):
-    latArray = startLat + deltaLat*numpy.arange(nlat)
-    lat = createAxis(latArray,id="latitude")
+    latArray = startLat + deltaLat * numpy.arange(nlat)
+    lat = createAxis(latArray, id="latitude")
     lat.designateLatitude()
     lat.units = "degrees_north"
     latBounds = lat.genGenericBounds(width=deltaLat)
@@ -151,18 +172,21 @@ def createUniformLatitudeAxis(startLat, nlat, deltaLat):
     return lat
 
 # Generate a uniform longitude axis
+
+
 def createUniformLongitudeAxis(startLon, nlon, deltaLon):
-    lonArray = startLon + deltaLon*numpy.arange(nlon)
-    lon = createAxis(lonArray,id="longitude")
+    lonArray = startLon + deltaLon * numpy.arange(nlon)
+    lon = createAxis(lonArray, id="longitude")
     lon.designateLongitude()
     lon.units = "degrees_east"
     lonBounds = lon.genGenericBounds(width=deltaLon)
     lon.setBounds(lonBounds)
     return lon
 
-def mapLinearIntersection(xind,yind,iind,
-                          aMinusEps,aPlusEps,bPlusEps,bMinusEps,
-                          boundLeft,nodeSubI,boundRight):
+
+def mapLinearIntersection(xind, yind, iind,
+                          aMinusEps, aPlusEps, bPlusEps, bMinusEps,
+                          boundLeft, nodeSubI, boundRight):
     """
 
     Return true iff the coordinate interval (a,b) intersects the node
@@ -182,34 +206,35 @@ def mapLinearIntersection(xind,yind,iind,
     """
 
     if(iind == 'n' or iind == 'e'):
-        testC_ = ( aMinusEps  <= nodeSubI   )
-        test_C = (  nodeSubI  <= bPlusEps   )
-        testO_ = (  aPlusEps  <  nodeSubI   )
-        test_O = (  nodeSubI  <  bMinusEps  )
+        testC_ = (aMinusEps <= nodeSubI)
+        test_C = (nodeSubI <= bPlusEps)
+        testO_ = (aPlusEps < nodeSubI)
+        test_O = (nodeSubI < bMinusEps)
     elif(iind == 'b'):
-        testC_ = ( aMinusEps  <= boundRight )
-        test_C = ( boundLeft  <= bPlusEps   )
-        testO_ = ( aPlusEps   <  boundRight )
-        test_O = ( boundLeft  <  bMinusEps  )
+        testC_ = (aMinusEps <= boundRight)
+        test_C = (boundLeft <= bPlusEps)
+        testO_ = (aPlusEps < boundRight)
+        test_O = (boundLeft < bMinusEps)
     elif(iind == 's'):
-        testC_ = ( aMinusEps  <= boundLeft  )
-        test_C = ( boundRight <= bPlusEps   )
-        testO_ = ( aPlusEps   <  boundLeft  )
-        test_O = ( boundRight <  bMinusEps  )
+        testC_ = (aMinusEps <= boundLeft)
+        test_C = (boundRight <= bPlusEps)
+        testO_ = (aPlusEps < boundLeft)
+        test_O = (boundRight < bMinusEps)
 
     if(xind == 'c' and yind == 'c'):
-        test=(testC_ and test_C)
+        test = (testC_ and test_C)
     elif(xind == 'c' and yind == 'o'):
-        test=(testC_ and test_O)
+        test = (testC_ and test_O)
     elif(xind == 'o' and yind == 'c'):
-        test=(testO_ and test_C)
+        test = (testO_ and test_C)
     elif(xind == 'o' and yind == 'o'):
-        test=(testO_ and test_O)
+        test = (testO_ and test_O)
 
     return(test)
 
-def mapLinearExt(axis, bounds, interval, indicator ='ccn', epsilon=None, stride=1, wrapped=0):
 
+def mapLinearExt(axis, bounds, interval,
+                 indicator='ccn', epsilon=None, stride=1, wrapped=0):
     """Map coordinate interval to index interval, without
     wraparound. interval has the form (x,y) where x and y are the
     endpoints in coordinate space. indicator is a three-character
@@ -222,137 +247,139 @@ def mapLinearExt(axis, bounds, interval, indicator ='ccn', epsilon=None, stride=
     'b' - the interval intersects the cell bounds
     's' - the cell bounds are a subset of the interval
     'e' - same as 'n', plus an extra node on either side.
-    
+
     Returns the corresponding index interval (i,j), where i<j,
     indicating the half-open index interval [i,j), or None if the
     intersection is empty.
     """
-    
+
     indicator = indicator.lower()
     length = len(axis)
 
     # Make the interval and search array non-decreasing
-    x,y = interval
-
-    iind  = indicator[2]
-    
-    if x>y:
-        x,y = y,x
-        xind  = indicator[1]
-        yind  = indicator[0]
-        
+    x, y = interval
+
+    iind = indicator[2]
+
+    if x > y:
+        x, y = y, x
+        xind = indicator[1]
+        yind = indicator[0]
+
     else:
         xind = indicator[0]
         yind = indicator[1]
 
-    if axis[0]>axis[-1]:
+    if axis[0] > axis[-1]:
         ar = axis[::-1]
-        if bounds[0,0]<bounds[0,1]:
+        if bounds[0, 0] < bounds[0, 1]:
             bd = bounds[::-1]
         else:
-            bd = bounds[::-1,::-1]
+            bd = bounds[::-1, ::-1]
         direc = 'dec'
     else:
         ar = axis
-        if bounds[0,0]<bounds[0,1]:
+        if bounds[0, 0] < bounds[0, 1]:
             bd = bounds
         else:
-            bd = bounds[:,::-1]
+            bd = bounds[:, ::-1]
         direc = 'inc'
 
     if(epsilon is None):
-        eps=1.0e-5
-        if len(ar)>1:
-            epsilon = eps * min(abs(ar[1]-ar[0]), abs(ar[-1]-ar[-2]))
+        eps = 1.0e-5
+        if len(ar) > 1:
+            epsilon = eps * min(abs(ar[1] - ar[0]), abs(ar[-1] - ar[-2]))
         else:
-            epsilon=eps
+            epsilon = eps
 
     #
     #  interval bound +/- epsilon
     #
 
-    aMinusEps=(x-epsilon)
-    aPlusEps=(x+epsilon)
-    bMinusEps=(y-epsilon)
-    bPlusEps=(y+epsilon)
-
+    aMinusEps = (x - epsilon)
+    aPlusEps = (x + epsilon)
+    bMinusEps = (y - epsilon)
+    bPlusEps = (y + epsilon)
 
-    #oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
+    # oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
     #
     # out-of-bounds requests
     #
-    #oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
+    # oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
 
-    if iind in ['n','e']:
+    if iind in ['n', 'e']:
         mina = ar[0]
         maxa = ar[-1]
     else:
-        mina = bd[0,0]
-        maxa = bd[-1,1]
-        
+        mina = bd[0, 0]
+        maxa = bd[-1, 1]
+
     if(bPlusEps < mina or aMinusEps > maxa):
         return None
 
-    #nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn
+    # nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn
     #
     # empty node check
     #
-    #nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn
+    # nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn
 
     # Handle empty intersections
     if (
-        ( ((aPlusEps)  >  ar[-1]) and (iind == 'n') and (xind == 'o') ) or
-        ( ((aMinusEps) >= ar[-1]) and (iind == 'n') and (xind == 'c') ) or
-        ( ((bMinusEps) <  ar[0] ) and (iind == 'n') and (yind == 'o') ) or
-        ( ((bPlusEps)  <= ar[0] ) and (iind == 'n') and (yind == 'c') ) 
+        (((aPlusEps) > ar[-1]) and (iind == 'n') and (xind == 'o')) or
+        (((aMinusEps) >= ar[-1]) and (iind == 'n') and (xind == 'c')) or
+        (((bMinusEps) < ar[0]) and (iind == 'n') and (yind == 'o')) or
+        (((bPlusEps) <= ar[0]) and (iind == 'n') and (yind == 'c'))
         ):
         return None
 
-
-    bdMaxRight=max(bd[-1][0],bd[-1][1])
-    bdMinLeft=min(bd[0][0],bd[0][1])
+    bdMaxRight = max(bd[-1][0], bd[-1][1])
+    bdMinLeft = min(bd[0][0], bd[0][1])
     if (
-        ( ( (aMinusEps) >  bdMaxRight ) and (iind != 'n') and (xind == 'o') ) or
-        ( ( (aMinusEps) >= bdMaxRight ) and (iind != 'n') and (xind == 'c') ) or
-        ( ( (bPlusEps)  <  bdMinLeft  ) and (iind != 'n') and (yind == 'o') ) or
-        ( ( (bPlusEps)  <= bdMinLeft  ) and (iind != 'n') and (yind == 'c') ) 
-        ): 
+        (((aMinusEps) > bdMaxRight) and (iind != 'n') and (xind == 'o')) or
+        (((aMinusEps) >= bdMaxRight) and (iind != 'n') and (xind == 'c')) or
+        (((bPlusEps) < bdMinLeft) and (iind != 'n') and (yind == 'o')) or
+        (((bPlusEps) <= bdMinLeft) and (iind != 'n') and (yind == 'c'))
+        ):
         return None
-    
-    # The intersection is nonempty; use searchsorted to get left/right limits for testing
 
-    ii,jj = numpy.searchsorted(ar,(x,y))
+    # The intersection is nonempty; use searchsorted to get left/right limits
+    # for testing
+
+    ii, jj = numpy.searchsorted(ar, (x, y))
 
     #
     #  find index range for left (iStart,iEnd) and right (jStart,jEnd)
     #
-    
-    # iEnd + 2 because last point in loop not done
-    iStart=ii-1
-    iEnd=ii+2
-    if(iStart < 0): iStart=0
-    if( iEnd >= length ): iEnd = length - 1
 
-    jStart=jj-1
-    jEnd=jj+2
-    if( jStart < 0 ): jStart=0
-    if( jEnd >= length ): jEnd = length - 1
+    # iEnd + 2 because last point in loop not done
+    iStart = ii - 1
+    iEnd = ii + 2
+    if(iStart < 0):
+        iStart = 0
+    if(iEnd >= length):
+        iEnd = length - 1
+
+    jStart = jj - 1
+    jEnd = jj + 2
+    if(jStart < 0):
+        jStart = 0
+    if(jEnd >= length):
+        jEnd = length - 1
 
     #
     #  initialise the index to -1 (does not exist)
     #
 
-    iInterval=-1
-    jInterval=-1
-    iIntervalB=-1
-    jIntervalB=-1
+    iInterval = -1
+    jInterval = -1
+    iIntervalB = -1
+    jIntervalB = -1
 
-    #pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp
+    # pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp
     #
     #  preliminary checks
     #
-    #pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp
-
+    # pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp
 
     if(iStart == jStart == iEnd == jEnd):
         iInterval = jInterval = iStart
@@ -362,172 +389,173 @@ def mapLinearExt(axis, bounds, interval, indicator ='ccn', epsilon=None, stride=
 
     else:
 
-        #llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllll
+        # llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllll
         #
         #  left interval check
         #
-        #llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllll
+        # llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllll
 
         # - user check
-        
-        for i in range(iStart,iEnd+1):
 
-            nodeSubI=ar[i]
-            boundLeft=bd[i][0]
-            boundRight=bd[i][1]
+        for i in range(iStart, iEnd + 1):
+
+            nodeSubI = ar[i]
+            boundLeft = bd[i][0]
+            boundRight = bd[i][1]
 
-            test=mapLinearIntersection(xind,yind,iind,
-                                       aMinusEps,aPlusEps,bPlusEps,bMinusEps,
-                                       boundLeft,nodeSubI,boundRight)
+            test = mapLinearIntersection(xind, yind, iind,
+                                       aMinusEps, aPlusEps, bPlusEps, bMinusEps,
+                                       boundLeft, nodeSubI, boundRight)
 
-            if( iInterval == -1 and test ):
+            if(iInterval == -1 and test):
                 iInterval = i
                 break
 
         # - "B" check for extension
-        
-        for i in range(iStart,iEnd+1):
 
-            nodeSubI=ar[i]
-            boundLeft=bd[i][0]
-            boundRight=bd[i][1]
+        for i in range(iStart, iEnd + 1):
 
-            testB=mapLinearIntersection(xind,yind,'b',
-                                       aMinusEps,aPlusEps,bPlusEps,bMinusEps,
-                                       boundLeft,nodeSubI,boundRight)
+            nodeSubI = ar[i]
+            boundLeft = bd[i][0]
+            boundRight = bd[i][1]
 
-            if( iIntervalB == -1 and testB ):
+            testB = mapLinearIntersection(xind, yind, 'b',
+                                       aMinusEps, aPlusEps, bPlusEps, bMinusEps,
+                                       boundLeft, nodeSubI, boundRight)
+
+            if(iIntervalB == -1 and testB):
                 iIntervalB = i
                 break
 
-        #rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
+        # rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
         #
         #  right interval check
         #
-        #rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
+        # rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
 
-        for j in range(jStart,jEnd+1):
+        for j in range(jStart, jEnd + 1):
 
-            nodeSubI=ar[j]
-            boundLeft=bd[j][0]
-            boundRight=bd[j][1]
+            nodeSubI = ar[j]
+            boundLeft = bd[j][0]
+            boundRight = bd[j][1]
 
             #
             #  user test
             #
 
-            test=mapLinearIntersection(xind,yind,iind,
-                                       aMinusEps,aPlusEps,bPlusEps,bMinusEps,
-                                       boundLeft,nodeSubI,boundRight)
+            test = mapLinearIntersection(xind, yind, iind,
+                                       aMinusEps, aPlusEps, bPlusEps, bMinusEps,
+                                       boundLeft, nodeSubI, boundRight)
 
-            if( ( jInterval == -1 and iInterval != -1 and test == 0  and j <= jEnd ) ):
-                jInterval = j-1
+            if((jInterval == -1 and iInterval != -1 and test == 0 and j <= jEnd)):
+                jInterval = j - 1
 
-            if( (j == length-1 and test == 1) ):
+            if((j == length - 1 and test == 1)):
                 jInterval = j
-                
+
                 # no break here...
 
         #
-        #  B test on right 
+        #  B test on right
         #
 
-        for j in range(jStart,jEnd+1):
+        for j in range(jStart, jEnd + 1):
 
-            nodeSubI=ar[j]
-            boundLeft=bd[j][0]
-            boundRight=bd[j][1]
+            nodeSubI = ar[j]
+            boundLeft = bd[j][0]
+            boundRight = bd[j][1]
 
-            testB=mapLinearIntersection(xind,yind,'b',
-                                       aMinusEps,aPlusEps,bPlusEps,bMinusEps,
-                                       boundLeft,nodeSubI,boundRight)
+            testB = mapLinearIntersection(xind, yind, 'b',
+                                       aMinusEps, aPlusEps, bPlusEps, bMinusEps,
+                                       boundLeft, nodeSubI, boundRight)
 
-            if( ( jIntervalB == -1 and iIntervalB != -1 and testB == 0  and j <= jEnd ) ):
-                jIntervalB = j-1
+            if((jIntervalB == -1 and iIntervalB != -1 and testB == 0 and j <= jEnd)):
+                jIntervalB = j - 1
 
-            if( ( j == length-1 and testB == 1) ):
+            if((j == length - 1 and testB == 1)):
                 jIntervalB = j
 
-
-    #eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
+    # eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
     #
     #  extension check
     #
-    #eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
+    # eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
 
     if(iind == 'e'):
 
         # if B index does not exist return
-        if(iIntervalB < 0 or jIntervalB <0):
+        if(iIntervalB < 0 or jIntervalB < 0):
             return None
 
         # if user index exists:
-        elif ( ( iInterval > -1 and jInterval > -1 ) ):
+        elif ((iInterval > -1 and jInterval > -1)):
 
             if(jInterval < iInterval):
-                
-                npoints=iInterval-jInterval
+
+                npoints = iInterval - jInterval
                 if(npoints > 0):
-                    (iInterval,jInterval)=(jInterval+1,iInterval+1)
-                    
+                    (iInterval, jInterval) = (jInterval + 1, iInterval + 1)
+
                 else:
-                    jInterval=iInterval
-                    iInterval=jInterval+1
-                    
+                    jInterval = iInterval
+                    iInterval = jInterval + 1
+
             else:
 
-                iInterval = iInterval-1
-                jInterval = jInterval+1
-                
+                iInterval = iInterval - 1
+                jInterval = jInterval + 1
+
         # else set index interval to B index interval
         else:
-            
-            iInterval=iIntervalB
-            jInterval=jIntervalB
+
+            iInterval = iIntervalB
+            jInterval = jIntervalB
 
         if(iInterval == jInterval):
-            if( x < ar[iInterval] and iInterval > 0 ):
-                iInterval=jInterval-1
-            elif( jIntervalB < length-1 ): 
-                jInterval=iInterval+1
+            if(x < ar[iInterval] and iInterval > 0):
+                iInterval = jInterval - 1
+            elif(jIntervalB < length - 1):
+                jInterval = iInterval + 1
 
         if(jInterval < iInterval):
-            npoints=jInterval-iInterval
+            npoints = jInterval - iInterval
             if(npoints > 2):
-                jInterval=iIntervalB
-                iInterval=jIntervalB
+                jInterval = iIntervalB
+                iInterval = jIntervalB
             else:
-                jInterval=iIntervalB
-                iInterval=jIntervalB+1
-
-        # Since the lookup is linear, ensure that the result is in range [0..length)
-        iInterval = max(iInterval,0)
-        jInterval = min(jInterval,length-1)
-            
-    #ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
+                jInterval = iIntervalB
+                iInterval = jIntervalB + 1
+
+        # Since the lookup is linear, ensure that the result is in range
+        # [0..length)
+        iInterval = max(iInterval, 0)
+        jInterval = min(jInterval, length - 1)
+
+    # ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
     #
     # final checks
     #
-    #ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
+    # ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
 
     # if jInteval < iInterval have a single point; set to iInterval
 
     if(jInterval < iInterval):
-        jInterval=iInterval
-        
+        jInterval = iInterval
+
     elif(jInterval < 0 and iInterval < 0):
         return None
-    
+
     # Reverse back if necessary
-    if direc=='dec':
-        iInterval,jInterval = length-jInterval-1,length-iInterval-1
-    
-    iReturn=iInterval
-    jReturn=jInterval+1
+    if direc == 'dec':
+        iInterval, jInterval = length - jInterval - 1, length - iInterval - 1
+
+    iReturn = iInterval
+    jReturn = jInterval + 1
+
+    return (iReturn, jReturn)
 
-    return (iReturn,jReturn)
 
-def lookupArray(ar,value):
+def lookupArray(ar, value):
     """Lookup value in array ar. Return index such that:
     (a) ar is monotonically increasing:
     value <= ar[index], index==0..len(ar)-1
@@ -537,42 +565,46 @@ def lookupArray(ar,value):
     value < ar[index], index==len(ar)
     """
     ar = numpy.ma.filled(ar)
-    ascending = (ar[0]<ar[-1]) or len(ar)==1
+    ascending = (ar[0] < ar[-1]) or len(ar) == 1
     if ascending:
-        index = numpy.searchsorted(ar,value)
+        index = numpy.searchsorted(ar, value)
     else:
         revar = ar[::-1]
-        index = numpy.searchsorted(revar,value)
-        if index<len(revar) and value==revar[index]:
-            index = len(ar)-index-1
+        index = numpy.searchsorted(revar, value)
+        if index < len(revar) and value == revar[index]:
+            index = len(ar) - index - 1
         else:
-            index = len(ar)-index
+            index = len(ar) - index
     return index
 
-## # Lookup a value in a monotonic 1-D array. value is a scalar
-## # Always returns a valid index for ar
-## def lookupArray(ar,value):
-##     ascending = (ar[0]<ar[-1])
-##     if ascending:
-##         index = numpy.searchsorted(ar,value)
-##     else:
-##         index = numpy.searchsorted(ar[::-1],value)
-##         index = len(ar)-index-1
-##     index = max(index,0)
-##     index = min(index,len(ar))
-##     return index
+# Lookup a value in a monotonic 1-D array. value is a scalar
+# Always returns a valid index for ar
+# def lookupArray(ar,value):
+# ascending = (ar[0]<ar[-1])
+# if ascending:
+# index = numpy.searchsorted(ar,value)
+# else:
+# index = numpy.searchsorted(ar[::-1],value)
+# index = len(ar)-index-1
+# index = max(index,0)
+# index = min(index,len(ar))
+# return index
 
 # Return true if vector vec1 is a subset of vec2, within tolerance tol.
 # Return second arg of index, if it is a subset
+
+
 def isSubsetVector(vec1, vec2, tol):
-    index = lookupArray(vec2,vec1[0])
-    if index>(len(vec2)-len(vec1)):
-        return (0,-1)                   # vec1 is too large, cannot be a subset
-    issubset = numpy.alltrue(numpy.less(numpy.absolute(vec1-vec2[index:index+len(vec1)]),tol))
+    index = lookupArray(vec2, vec1[0])
+    if index > (len(vec2) - len(vec1)):
+        return (0, -1)                   # vec1 is too large, cannot be a subset
+    issubset = numpy.alltrue(
+    numpy.less(numpy.absolute(vec1 - vec2[index:index + len(vec1)]), tol))
     if issubset:
-        return (issubset,index)
+        return (issubset, index)
     else:
-        return (0,-1)
+        return (0, -1)
+
 
 def isOverlapVector(vec1, vec2, atol=1.e-8):
     """Returns (isoverlap, index) where:
@@ -580,32 +612,35 @@ def isOverlapVector(vec1, vec2, atol=1.e-8):
     index is the index such that vec1[0]<=vec2[index]. If index==len(vec2),
     then vec1[0]>vec2[len(vec2)-1]
     """
-    index = lookupArray(vec2,vec1[0])
-    if index==0 and abs(vec1[0]-vec2[0]):
-        return (0,index)
-    elif index==len(vec2):
-        return (1,index)
+    index = lookupArray(vec2, vec1[0])
+    if index == 0 and abs(vec1[0] - vec2[0]):
+        return (0, index)
+    elif index == len(vec2):
+        return (1, index)
     else:
-        ar2 = vec2[index:index+len(vec1)]
+        ar2 = vec2[index:index + len(vec1)]
         ar1 = vec1[:len(ar2)]
         isoverlap = numpy.ma.allclose(ar1, ar2, atol=atol)
     if isoverlap:
-        return (isoverlap,index)
+        return (isoverlap, index)
     else:
-        return (0,index)
+        return (0, index)
+
 
 def allclose(ax1, ax2, rtol=1.e-5, atol=1.e-8):
     """True if all elements of axes ax1 and ax2 are close,
     in the sense of numpy.ma.allclose."""
-    return ((ax1 is ax2) or numpy.ma.allclose(ax1[:],ax2[:],rtol=rtol,atol=atol))
+    return ((ax1 is ax2) or numpy.ma.allclose(ax1[:], ax2[:], rtol=rtol, atol=atol))
 
-# AbstractAxis defines the common axis interface. 
+# AbstractAxis defines the common axis interface.
 # Concrete axis classes are derived from this class.
 
+
 class AbstractAxis(CdmsObj):
-    def __init__ (self, parent, node):
-        CdmsObj.__init__ (self, node)
-        val = self.__cdms_internals__ + ['id',]
+
+    def __init__(self, parent, node):
+        CdmsObj.__init__(self, node)
+        val = self.__cdms_internals__ + ['id', ]
         self.___cdms_internals__ = val
         self.parent = parent
         self.id = id
@@ -613,11 +648,11 @@ class AbstractAxis(CdmsObj):
         self._data_ = None
         # Cached wraparound values for circular axes
         self._doubledata_ = None
-        
-    def __str__ (self):
+
+    def __str__(self):
         return "\n".join(self.listall() + "\n"
 
-    __repr__ = __str__
+    __repr__=__str__
 
     def __len__(self):
         raise CDMSError, MethodNotImplemented
@@ -626,7 +661,7 @@ class AbstractAxis(CdmsObj):
         return (len(self),)
 
     def _getdtype(self, name):
-        tc = self.typecode()
+        tc=self.typecode()
         return numpy.dtype(tc)
 
     def __getitem__(self, key):
@@ -648,17 +683,17 @@ class AbstractAxis(CdmsObj):
     # If persistent is true, write metadata to the container.
     def designateLatitude(self, persistent=0):
         if persistent:
-            self.axis = "Y"
+            self.axis="Y"
         else:
-            self.__dict__['axis'] = "Y"
+            self.__dict__['axis']="Y"
             self.attributes['axis']="Y"
 
     # Return true iff the axis is a latitude axis
     def isLatitude(self):
-        id = self.id.strip().lower()
-        if (hasattr(self,'axis') and self.axis=='Y'): return 1
-        units = getattr(self,"units","").strip().lower()
-        if units in ["degrees_north","degree_north","degree_n","degrees_n","degreen","degreesn"]:
+        id=self.id.strip().lower()
+        if (hasattr(self, 'axis') and self.axis == 'Y'): return 1
+        units=getattr(self, "units", "").strip().lower()
+        if units in ["degrees_north", "degree_north", "degree_n", "degrees_n", "degreen", "degreesn"]:
           return 1
         return (id[0:3] == 'lat') or (id in latitude_aliases)
 
@@ -666,25 +701,25 @@ class AbstractAxis(CdmsObj):
     # If persistent is true, write metadata to the container.
     def designateLevel(self, persistent=0):
         if persistent:
-            self.axis = "Z"
+            self.axis="Z"
         else:
-            self.__dict__['axis'] = "Z"
+            self.__dict__['axis']="Z"
             self.attributes['axis']="Z"
 
     # Return true iff the axis is a level axis
     def isLevel(self):
-        id = self.id.strip().lower()
-        if (hasattr(self,'axis') and self.axis=='Z'): return 1
-        if getattr(self,"positive","").strip().lower() in ["up","down"]:
+        id=self.id.strip().lower()
+        if (hasattr(self, 'axis') and self.axis == 'Z'): return 1
+        if getattr(self, "positive", "").strip().lower() in ["up", "down"]:
           return 1
         try:
-          #Ok let's see if this thing as pressure units
+          # Ok let's see if this thing has pressure units
           import genutil
-          p=genutil.udunits(1,"Pa")
-          units=getattr(self,'units',"").strip()
+          p=genutil.udunits(1, "Pa")
+          units=getattr(self, 'units', "").strip()
           p2=p.to(units)
           return 1
-        except Exception,err:
+        except Exception, err:
           pass
         return ((id[0:3] == 'lev') or (id[0:5] == 'depth') or (id in level_aliases))
 
@@ -693,30 +728,30 @@ class AbstractAxis(CdmsObj):
     # If modulo is defined, set as circular
     def designateLongitude(self, persistent=0, modulo=360.0):
         if persistent:
-            self.axis = "X"
+            self.axis="X"
             if modulo is None:
-                self.topology = 'linear'
+                self.topology='linear'
             else:
-                self.modulo = modulo
-                self.topology = 'circular'
+                self.modulo=modulo
+                self.topology='circular'
         else:
-            self.__dict__['axis'] = "X"
+            self.__dict__['axis']="X"
             self.attributes['axis']="X"
             if modulo is None:
-                self.__dict__['topology'] = 'linear'
-                self.attributes['topology'] = 'linear'
+                self.__dict__['topology']='linear'
+                self.attributes['topology']='linear'
             else:
-                self.__dict__['modulo'] = modulo
-                self.__dict__['topology'] = 'circular'
-                self.attributes['modulo'] = modulo
-                self.attributes['topology'] = 'circular'
+                self.__dict__['modulo']=modulo
+                self.__dict__['topology']='circular'
+                self.attributes['modulo']=modulo
+                self.attributes['topology']='circular'
 
     # Return true iff the axis is a longitude axis
     def isLongitude(self):
-        id = self.id.strip().lower()
-        if (hasattr(self,'axis') and self.axis=='X'): return 1
-        units = getattr(self,"units","").strip().lower()
-        if units in ["degrees_east","degree_east","degree_e","degrees_e","degreee","degreese"]:
+        id=self.id.strip().lower()
+        if (hasattr(self, 'axis') and self.axis == 'X'): return 1
+        units=getattr(self, "units", "").strip().lower()
+        if units in ["degrees_east", "degree_east", "degree_e", "degrees_e", "degreee", "degreese"]:
           return 1
         return (id[0:3] == 'lon') or (id in longitude_aliases)
 
@@ -724,63 +759,65 @@ class AbstractAxis(CdmsObj):
     # If persistent is true, write metadata to the container.
     def designateTime(self, persistent=0, calendar=None):
         if calendar is None:
-            calendar = cdtime.DefaultCalendar
+            calendar=cdtime.DefaultCalendar
         if persistent:
-            self.axis = "T"
+            self.axis="T"
             if calendar is not None:
                 self.setCalendar(calendar, persistent)
         else:
-            self.__dict__['axis'] = "T"
-            self.attributes['axis'] = "T"
+            self.__dict__['axis']="T"
+            self.attributes['axis']="T"
             if calendar is not None:
                 self.setCalendar(calendar, persistent)
 
     # For isTime(), keep track of whether each id is for a time axis or not, for better performance.
-    # This dictionary is a class variable (not a member of any particular instance).
-    idtaxis = {}  # id:type where type is 'T' for time, 'O' for other
+    # This dictionary is a class variable (not a member of any particular
+    # instance).
+    idtaxis={}  # id:type where type is 'T' for time, 'O' for other
 
     # Return true iff the axis is a time axis
     def isTime(self):
-        id = self.id.strip().lower()
-        if hasattr(self,'axis'):
-            if self.axis=='T': return 1
+        id=self.id.strip().lower()
+        if hasattr(self, 'axis'):
+            if self.axis == 'T': return 1
             elif self.axis is not None: return 0
         # Have we saved the id-to-axis type information already?
         if id in self.idtaxis:
-            if self.idtaxis[id]=='T':
+            if self.idtaxis[id] == 'T':
                 return 1
             else:
                 return 0
-        ## Try to figure it out from units
+        # Try to figure it out from units
         try:
           import genutil
-          units=getattr(self,"units","").lower()
-          sp = units.split("since")
-          if len(sp)>1:
-            t=genutil.udunits(1,"day")
-            s = sp[0].strip()
-            if s in t.available_units() and t.known_units()[s]=="TIME":
-              self.idtaxis[id] = 'T'
+          units=getattr(self, "units", "").lower()
+          sp=units.split("since")
+          if len(sp) > 1:
+            t=genutil.udunits(1, "day")
+            s=sp[0].strip()
+            if s in t.available_units() and t.known_units()[s] == "TIME":
+              self.idtaxis[id]='T'
               return 1
-            #try the plural version since udunits only as singular (day noy days)
-            s=s+"s"
-            if s in t.available_units() and t.known_units()[s]=="TIME":
-              self.idtaxis[id] = 'T'
+            # try the plural version since udunits only has singular (day not
+            # days)
+            s=s + "s"
+            if s in t.available_units() and t.known_units()[s] == "TIME":
+              self.idtaxis[id]='T'
               return 1
         except:
           pass
-        #return (id[0:4] == 'time') or (id in time_aliases)
+        # return (id[0:4] == 'time') or (id in time_aliases)
         if (id[0:4] == 'time') or (id in time_aliases):
             self.idtaxis[id]='T'
             return 1
         else:
-            self.idtaxis[id] = 'O'
+            self.idtaxis[id]='O'
             return 0
 
     # Return true iff the axis is a forecast axis
     def isForecast(self):
-        id = self.id.strip().lower()
-        if (hasattr(self,'axis') and self.axis=='F'): return 1
+        id=self.id.strip().lower()
+        if (hasattr(self, 'axis') and self.axis == 'F'): return 1
         return (id[0:6] == 'fctau0') or (id in forecast_aliases)
     def isForecastTime(self):
         return self.isForecast()
@@ -790,15 +827,15 @@ class AbstractAxis(CdmsObj):
         if not hasattr(self, 'units'):
             raise CDMSError, "No time units defined"
         if calendar is None:
-            calendar = self.getCalendar()
+            calendar=self.getCalendar()
         if self.isForecast():
-            result = [ forecast.comptime(t) for t in self[:] ]
+            result=[forecast.comptime(t) for t in self[:]]
         else:
-            result = []
+            result=[]
             for val in self[:]:
                 result.append(cdtime.reltime(val, self.units).tocomp(calendar))
         return result
-    
+
     #
     #  mf 20010418 -- output DTGs (YYYYMMDDHH)
     #
@@ -806,22 +843,22 @@ class AbstractAxis(CdmsObj):
         "Array version of cdtime tocomp. Returns a list of component times in DTG format."
         if not hasattr(self, 'units'):
             raise CDMSError, "No time units defined"
-        result = []
+        result=[]
         if calendar is None:
-            calendar = self.getCalendar()
+            calendar=self.getCalendar()
         for val in self[:]:
             comptime=cdtime.reltime(val, self.units).tocomp(calendar)
             s=repr(comptime)
             tt=s.split(' ')
-        
+
             ttt=tt[0].split('-')
             yr=int(ttt[0])
             mo=int(ttt[1])
             da=int(ttt[2])
-        
+
             ttt=tt[1].split(':')
             hr=int(ttt[0])
-            dtg="%04d%02d%02d%02d"%(yr,mo,da,hr)
+            dtg="%04d%02d%02d%02d" % (yr, mo, da, hr)
             result.append(dtg)
 
         return result
@@ -831,27 +868,35 @@ class AbstractAxis(CdmsObj):
         import datetime
         if not hasattr(self, 'units'):
             raise CDMSError, "No time units defined"
-        result = []
+        result=[]
         if calendar is None:
-            calendar = self.getCalendar()
+            calendar=self.getCalendar()
         for val in self[:]:
             c=cdtime.reltime(val, self.units).tocomp(calendar)
-            dtg = datetime.datetime(c.year,c.month,c.day,c.hour,c.minute,int(c.second),int((c.second-int(c.second))*1000))
+            dtg=datetime.datetime(
+    c.year,
+     c.month,
+     c.day,
+     c.hour,
+     c.minute,
+     int(c.second),
+     int((c.second - int(c.second)) * 1000))
             result.append(dtg)
         return result
 
-    def asRelativeTime( self, units=None ):
+    def asRelativeTime(self, units=None):
         "Array version of cdtime torel. Returns a list of relative times."
-        sunits = getattr(self,'units',None)
-        if sunits==None or sunits=='None':
+        sunits=getattr(self, 'units', None)
+        if sunits is None or sunits == 'None':
             raise CDMSError, "No time units defined"
-        if units==None or units=='None':
+        if units is None or units == 'None':
             units=sunits
         if self.isForecast():
-            result = [ forecast.comptime(t).torel(units) for t in self[:] ]
+            result=[forecast.comptime(t).torel(units) for t in self[:]]
         else:
-            cal = self.getCalendar()
-            result = [ cdtime.reltime(t,sunits).torel(units,cal) for t in self[:] ]
+            cal=self.getCalendar()
+            result=[cdtime.reltime(t, sunits).torel(units, cal)
+                                     for t in self[:]]
         return result
 
     def toRelativeTime(self, units, calendar=None):
@@ -860,31 +905,35 @@ class AbstractAxis(CdmsObj):
             raise CDMSError, "No time units defined"
         n=len(self[:])
         b=self.getBounds()
-        scal = self.getCalendar()
+        scal=self.getCalendar()
         if calendar is None:
-            calendar = scal
+            calendar=scal
         else:
             self.setCalendar(calendar)
         for i in range(n):
             tmp=cdtime.reltime(self[i], self.units).tocomp(scal)
-            tmp2 = numpy.array(float(tmp.torel(units, calendar).value)).astype(self[:].dtype.char)
-            ## if i==1 : print self[:].dtype.char,'tmp2:',tmp2,tmp2.astype('f'),self[i],self[i].astype('f')
+            tmp2=numpy.array(float(tmp.torel(units, calendar).value)).astype(
+                self[:].dtype.char)
+            # if i==1 : print
+            # self[:].dtype.char,'tmp2:',tmp2,tmp2.astype('f'),self[i],self[i].astype('f')
             self[i]=tmp2
             if b is not None:
-                tmp=cdtime.reltime(b[i,0], self.units).tocomp(scal)
-                b[i,0]=numpy.array(float(tmp.torel(units, calendar).value)).astype(b.dtype.char)
-                tmp=cdtime.reltime(b[i,1], self.units).tocomp(scal)
-                b[i,1]=numpy.array(float(tmp.torel(units, calendar).value)).astype(b.dtype.char)
+                tmp=cdtime.reltime(b[i, 0], self.units).tocomp(scal)
+                b[i, 0]=numpy.array(
+                    float(tmp.torel(units, calendar).value)).astype(b.dtype.char)
+                tmp=cdtime.reltime(b[i, 1], self.units).tocomp(scal)
+                b[i, 1]=numpy.array(
+                    float(tmp.torel(units, calendar).value)).astype(b.dtype.char)
         if b is not None:
             self.setBounds(b)
         self.units=units
         return
 
-#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 #
 # mf 20010412 -- test if an Axis is intrinsically circular
 #
-#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 
 
     # Return true iff the axis wraps around
@@ -892,22 +941,22 @@ class AbstractAxis(CdmsObj):
     # (1) self.topology=='circular', or
     # (2) self.topology is undefined, and the axis is a longitude
     def isCircularAxis(self):
-        
-        if hasattr(self,'topology'):
-            iscircle = (self.topology=='circular')
+
+        if hasattr(self, 'topology'):
+            iscircle=(self.topology == 'circular')
         elif self.isLongitude():
-            iscircle = 1
+            iscircle=1
         else:
-            iscircle = 0
+            iscircle=0
 
         return iscircle
 
 
-#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 #
 # mf 20010405 -- test if an transient Axis is REALLY circular
 #
-#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 
 
     # Return true iff the axis wraps around
@@ -916,45 +965,45 @@ class AbstractAxis(CdmsObj):
     # (2) self.topology is undefined, and the axis is a longitude
     def isCircular(self):
 
-        if hasattr(self,'realtopology'):
-            if self.realtopology=='circular': return 1
-            elif self.realtopology=='linear': return 0
+        if hasattr(self, 'realtopology'):
+            if self.realtopology == 'circular': return 1
+            elif self.realtopology == 'linear': return 0
         if(len(self) < 2):
             return 0
-        
-        baxis = self[0]
-        eaxis = self[-1]
-        deltaend = self[-1] - self[-2]
-        eaxistest = eaxis + deltaend - baxis
+
+        baxis=self[0]
+        eaxis=self[-1]
+        deltaend=self[-1] - self[-2]
+        eaxistest=eaxis + deltaend - baxis
 
         cycle=self.getModuloCycle()
 
-        tol=0.01*deltaend
+        tol=0.01 * deltaend
 
         test=0
         if(abs(eaxistest - cycle) < tol): test=1
-        
-        if hasattr(self,'topology') and test == 1:
-            iscircle = (self.topology=='circular')
+
+        if hasattr(self, 'topology') and test == 1:
+            iscircle=(self.topology == 'circular')
         elif (self.isLongitude() and test == 1):
-            iscircle = 1
+            iscircle=1
         else:
-            iscircle = 0
+            iscircle=0
 
         # save realtopology attribute in __dict__, don't write it to the file
-        if iscircle==1:  self.__dict__['realtopology'] = 'circular'
-        elif iscircle==0: self.__dict__['realtopology'] = 'linear'
+        if iscircle == 1: self.__dict__['realtopology']='circular'
+        elif iscircle == 0: self.__dict__['realtopology']='linear'
         return iscircle
 
     def designateCircular(self, modulo, persistent=0):
         if persistent:
-            self.topology = 'circular'
-            self.modulo = modulo
+            self.topology='circular'
+            self.modulo=modulo
         else:
-            self.__dict__['topology'] = 'circular'
-            self.__dict__['modulo'] = modulo
-            self.attributes['modulo'] = modulo
-            self.attributes['topology'] = 'linear'
+            self.__dict__['topology']='circular'
+            self.__dict__['modulo']=modulo
+            self.attributes['modulo']=modulo
+            self.attributes['topology']='linear'
 
     def isLinear(self):
         raise CDMSError, MethodNotImplemented
@@ -973,62 +1022,62 @@ class AbstractAxis(CdmsObj):
     # or None. If the axis does not have a calendar attribute, return the global
     # calendar.
     def getCalendar(self):
-        if hasattr(self,'calendar'):
-            calendar = self.calendar.lower()
+        if hasattr(self, 'calendar'):
+            calendar=self.calendar.lower()
         else:
-            calendar = None
+            calendar=None
 
-        cdcal = tagToCalendar.get(calendar, cdtime.DefaultCalendar)
+        cdcal=tagToCalendar.get(calendar, cdtime.DefaultCalendar)
         return cdcal
 
     # Set the calendar
     def setCalendar(self, calendar, persistent=1):
         if persistent:
-            self.calendar = calendarToTag.get(calendar, None)
+            self.calendar=calendarToTag.get(calendar, None)
             self.attributes['calendar']=self.calendar
             if self.calendar is None:
                 raise CDMSError, InvalidCalendar % calendar
         else:
-            self.__dict__['calendar'] = calendarToTag.get(calendar, None)
+            self.__dict__['calendar']=calendarToTag.get(calendar, None)
             self.attributes['calendar']=self.calendar
             if self.__dict__['calendar'] is None:
                 raise CDMSError, InvalidCalendar % calendar
 
     def getData(self):
         raise CDMSError, MethodNotImplemented
- 
+
     # Return the entire array
     def getValue(self):
         return self.__getitem__(slice(None))
 
-    def assignValue(self,data):
-        self.__setitem__(slice(None),data)
+    def assignValue(self, data):
+        self.__setitem__(slice(None), data)
 
     def _time2value(self, value):
         """ Map value of type comptime, reltime, or string of form "yyyy-mm-dd hh:mi:ss" to value"""
         if self.isTime():
             if type(value) in CdtimeTypes:
-                value = value.torel(self.units, self.getCalendar()).value
-            elif isinstance(value, basestring) and value not in [':',unspecified]:
-                cal = self.getCalendar()
-                value = cdtime.s2c(value, cal).torel(self.units, cal).value
+                value=value.torel(self.units, self.getCalendar()).value
+            elif isinstance(value, basestring) and value not in [':', unspecified]:
+                cal=self.getCalendar()
+                value=cdtime.s2c(value, cal).torel(self.units, cal).value
         return value
 
 
     def getModuloCycle(self):
 
-        if hasattr(self,'modulo'):
-            cycle = self.modulo
+        if hasattr(self, 'modulo'):
+            cycle=self.modulo
             #
             # mf 20010419 test if attribute is a string (non CF), set to 360.0
             #
            if isinstance(cycle, basestring):
-                cycle = 360.0
+                cycle=360.0
         else:
-            cycle = 360.0
+            cycle=360.0
 
         if isinstance(cycle, numpy.ndarray):
-            cycle = cycle[0]
+            cycle=cycle[0]
 
         return(cycle)
 
@@ -1040,7 +1089,7 @@ class AbstractAxis(CdmsObj):
 
         return(self.getModuloCycle())
 
-    def mapInterval(self,interval,indicator='ccn',cycle=None):
+    def mapInterval(self, interval, indicator='ccn', cycle=None):
         """
         Map coordinate interval to index interval. interval has one of the forms:
 
@@ -1071,295 +1120,301 @@ class AbstractAxis(CdmsObj):
         Note: if the interval is interior to the axis, but does not span any
         axis element, a singleton (i,i+1) indicating an adjacent index is returned.
         """
-        i,j,k = self.mapIntervalExt(interval,indicator,cycle)
-        j = min(j, i+len(self))
-        #i=i-1
-        return (i,j)
+        i, j, k=self.mapIntervalExt(interval, indicator, cycle)
+        j=min(j, i + len(self))
+        # i=i-1
+        return (i, j)
 
 
-#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 #
 # mf 20010308 - 20010412 -- general handing of wrapping
 #
-#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 
-    def mapIntervalExt(self,interval,indicator='ccn',cycle=None,epsilon=None):
+    def mapIntervalExt(
+        self, interval, indicator='ccn', cycle=None, epsilon=None):
         """Like mapInterval, but returns (i,j,k) where k is stride,
         and (i,j) is not restricted to one cycle."""
 
         # nCycleMax : max number of cycles a user a specify in wrapping
 
         nCycleMax=6
-        
+
         # interval is None returns the full interval
-        if interval is None or interval==':':
+        if interval is None or interval == ':':
             return (0, len(self), 1)
 
         # Allow intervals of the same form as getRegion.
-        if len(interval)==3:
-            x,y,indicator = interval
-            interval = (x,y)
-        elif len(interval)==4:
-            x,y,indicator,cycle = interval
-            interval = (x,y)
+        if len(interval) == 3:
+            x, y, indicator=interval
+            interval=(x, y)
+        elif len(interval) == 4:
+            x, y, indicator, cycle=interval
+            interval=(x, y)
 
         # check length of indicator if overridden by user
         #
 
-        indicator = indicator.lower()
-        if len(indicator)==2: indicator += 'n'
+        indicator=indicator.lower()
+        if len(indicator) == 2: indicator += 'n'
 
-        if( ( len(indicator) != 3 ) or
-               ( (indicator[0] != 'c' and indicator[0] != 'o') or
+        if((len(indicator) != 3) or
+               ((indicator[0] != 'c' and indicator[0] != 'o') or
                  (indicator[1] != 'c' and indicator[1] != 'o') or
                  (indicator[2] != 'n' and indicator[2] != 'b' and indicator[2] != 's' and
                   indicator[2] != 'e')
                  )
             ):
             raise CDMSError, "EEE: 3-character interval/intersection indicator incomplete or incorrect = "\
-                 +indicator
-       
+                 + indicator
+
         if self._data_ is None:
-            self._data_ = self.getData()
+            self._data_=self.getData()
 
-        #ttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt
+        # ttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt
         # Handle time types
-        interval = (self._time2value(interval[0]), self._time2value(interval[1]))
+        interval=(
+    self._time2value(interval[0]),
+     self._time2value(interval[1]))
 
         # If the interval is reversed wrt self, reverse the interval and
         # set the stride to -1
-        if (interval[0]<=interval[1])==(self[0]<=self[-1]):
+        if (interval[0] <= interval[1]) == (self[0] <= self[-1]):
             stride=1
         else:
             stride=-1
-            interval = (interval[1],interval[0])
-            indicator = indicator[1]+indicator[0]+indicator[2]
+            interval=(interval[1], interval[0])
+            indicator=indicator[1] + indicator[0] + indicator[2]
 
-        #mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm
+        # mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm
         #
         # basic test for wrapping - is axis REALLY circular?
         #
-        #ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
+        # ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
 
-        xind = indicator[0]
-        yind = indicator[1]
-        iind = indicator[2]
+        xind=indicator[0]
+        yind=indicator[1]
+        iind=indicator[2]
 
-        xi,yi = interval
+        xi, yi=interval
 
-        length = len(self)
-        ar = self[:]
-        ar0 = ar[0]
-        arn = ar[-1]
-        armin = min(ar0,arn)
-        armax = max(ar0,arn)
+        length=len(self)
+        ar=self[:]
+        ar0=ar[0]
+        arn=ar[-1]
+        armin=min(ar0, arn)
+        armax=max(ar0, arn)
 
         # Wrapped if circular and at least one value is outside the axis range.
-        wraptest1 = self.isCircular()
-        wraptest2 = not ((armin <= xi <= armax) and (armin <= yi <= armax))
-        
+        wraptest1=self.isCircular()
+        wraptest2=not ((armin <= xi <= armax) and (armin <= yi <= armax))
+
         if (wraptest1 and wraptest2):
 
             #
-            #  find cycle and calc # of cycles in the interval
+            # find cycle and calc # of cycles in the interval
             #
-            
+
             cycle=self.getModulo()
-            
-            intervalLength=yi-xi
-            intervalCycles=intervalLength/cycle
-            
-            bd = self.getBounds()
-            
-            nPointsCycle = len(ar)
 
-            ar0 = ar[0]
-            ar1 = ar[-1]
+            intervalLength=yi - xi
+            intervalCycles=intervalLength / cycle
+
+            bd=self.getBounds()
+
+            nPointsCycle=len(ar)
+
+            ar0=ar[0]
+            ar1=ar[-1]
 
             #
             # test for reversed coordinates
             #
-            
-            if ar0>ar1:
-                cycle = -1 * abs(cycle)
 
-            #eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
+            if ar0 > ar1:
+                cycle=-1 * abs(cycle)
+
+            # eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
             #
             #  make sure xi<yi and shift to positive axis indices
             #
-            #eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
-            
+            # eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
+
             # Ensure that xi<yi
-            
-            if cycle>0 and yi<xi: xi,yi = yi,xi
-            if cycle<0 and yi>xi: xi,yi = yi,xi
+
+            if cycle > 0 and yi < xi: xi, yi=yi, xi
+            if cycle < 0 and yi > xi: xi, yi=yi, xi
 
             # calculate the number of cycles to shift to positive side
-            
-            nCycleShift=numpy.floor((xi-ar0)/cycle)
-            xp = xi - cycle * nCycleShift
-            yp = xp + intervalLength
 
-            # Extend the data vector with wraparound number of cycles in interval and shifts
+            nCycleShift=numpy.floor((xi - ar0) / cycle)
+            xp=xi - cycle * nCycleShift
+            yp=xp + intervalLength
+
+            # Extend the data vector with wraparound number of cycles in
+            # interval and shifts
+
+            nCycle=int(intervalCycles + 1.0 + 0.5) + abs(nCycleShift)
+
 
-            nCycle = int(intervalCycles + 1.0 + 0.5) + abs(nCycleShift)
-            
-                
             #
             # check if nCycle is > nCycleMax
             #
             if(nCycle >= nCycleMax):
                 raise CDMSError, InvalidNCycles + repr(nCycle)
 
-            self._doubledata_ = numpy.concatenate(( ar, ar + cycle ))
+            self._doubledata_=numpy.concatenate((ar, ar + cycle))
             k=2
-            while(k<nCycle):
-                self._doubledata_ = numpy.concatenate(( self._doubledata_, ar + k*cycle ) )
-                k=k+1
+            while(k < nCycle):
+                self._doubledata_=numpy.concatenate(
+                    (self._doubledata_, ar + k * cycle))
+                k=k + 1
 
             # Map the canonical coordinate interval (xp,yp) in the 'extended' data array
             # create axis to get the bounds array
 
-            bigar = self._doubledata_
+            bigar=self._doubledata_
             bigarAxis=createAxis(bigar)
-            bd = bigarAxis.getBounds()
+            bd=bigarAxis.getBounds()
             if bd is None:              # In case autobounds is off
-                bd = bigarAxis.genGenericBounds()
+                bd=bigarAxis.genGenericBounds()
 
             # run the more general mapLinearExt to get the indices
-            
-            indexInterval= mapLinearExt(bigar,bd,(xp,yp),indicator,wrapped=1)
+
+            indexInterval=mapLinearExt(
+                bigar, bd, (xp, yp), indicator, wrapped=1)
 
             #
             # check to make sure we got an interval
             #
-            
+
             if(indexInterval is None):
                 return None
 
-            i,j=indexInterval
+            i, j=indexInterval
 
             #
             #  now shift i back
             #
 
-            i = i + int(nCycleShift*float(nPointsCycle))
+            i=i + int(nCycleShift * float(nPointsCycle))
 
-            #   
+            #
             #  adjust the length of the output interval by the indicator
             #  mapLinear does the calc correctly, we have to modify because we
             #  are overriding with the (float)(number of cycles) in the interval
             #
-            
-            j = j + int(nCycleShift*float(nPointsCycle))
-            retval = (i,j)
-            
+
+            j=j + int(nCycleShift * float(nPointsCycle))
+            retval=(i, j)
+
         else:
-            bd = self.getBounds()
+            bd=self.getBounds()
             if bd is None:              # In case autobounds is off
-                bd = self.genGenericBounds()
-            retval = mapLinearExt(ar, bd, interval, indicator)
+                bd=self.genGenericBounds()
+            retval=mapLinearExt(ar, bd, interval, indicator)
 
         if retval is not None:
-            i,j = retval
-            if stride==-1:
+            i, j=retval
+            if stride == -1:
                 if(j == length):
-                    i,j=j-1,i-1
+                    i, j=j - 1, i - 1
                 else:
-                    i,j=j-1,i-1
-                if j==-1:
+                    i, j=j - 1, i - 1
+                if j == -1:
                     j=None
 
-            retval = (i,j,stride)
+            retval=(i, j, stride)
 
         return retval
 
-    def subaxis(self,i,j,k=1, wrap=True):
+    def subaxis(self, i, j, k=1, wrap=True):
         """Create a transient axis for the index slice [i:j:k]
         The stride k can be positive or negative. Wraparound is
         supported for longitude dimensions or those with a modulus attribute.
         """
-        fullBounds = self.getBounds()
+        fullBounds=self.getBounds()
         _debug=0
         _debugprefix="SS__XX subaxis "
-        
+
 
         # Handle wraparound
-        modulo = None
-        size = len(self)
-        
+        modulo=None
+        size=len(self)
+
         #----------------------------------------------------------------------
-        # mf 20010328 negative stride i >= vice i > 
+        # mf 20010328 negative stride i >= vice i >
         #----------------------------------------------------------------------
-        
-        if wrap and ((k>0 and j>size) or (k<0 and i >= size)) and self.isCircular():
-            modulo = self.getModuloCycle()
+
+        if wrap and ((k > 0 and j > size) or (k < 0 and i >= size)) and self.isCircular():
+            modulo=self.getModuloCycle()
 
         if modulo is not None:
             # If self is decreasing and stride is positive,
             # or self is increasing and stride is negative, subtract the modulus,
             # otherwise add it.
-            if (self[0]>self[-1])==(k>0):
-                modulo = -modulo
+            if (self[0] > self[-1]) == (k > 0):
+                modulo=-modulo
 
             #----------------------------------------------------------------------
             #
             #  mf 20010329 -- N vice two slice scheme (more general)
             #
-            #----------------------------------------------------------------------
+            #------------------------------------------------------------------
 
             donew=1
 
             if(donew):
 
-                sn = splitSliceExt(slice(i,j,k),size)
-                if(_debug): print "SSSS1-------------------- ",sn,len(sn)
+                sn=splitSliceExt(slice(i, j, k), size)
+                if(_debug): print "SSSS1-------------------- ", sn, len(sn)
 
-                for kk in range(0,len(sn)):
+                for kk in range(0, len(sn)):
                     sl=sn[kk]
-                    if(_debug): print "SSSSSSSS kk = ",kk,sl
-                    part = self[sl] + kk*modulo
-                    if(_debug): print "SSSSSSSSSSSSSSS modulo",part[0],part[-1],modulo
+                    if(_debug): print "SSSSSSSS kk = ", kk, sl
+                    part=self[sl] + kk * modulo
+                    if(_debug): print "SSSSSSSSSSSSSSS modulo", part[0], part[-1], modulo
                     if(kk == 0):
-                        data = part
+                        data=part
                     else:
-                        data = numpy.concatenate((data,part))
+                        data=numpy.concatenate((data, part))
 
                     if fullBounds is not None:
-                        bound = fullBounds[sl] + kk*modulo
+                        bound=fullBounds[sl] + kk * modulo
                         if (kk == 0):
-                            bounds = bound
+                            bounds=bound
                         else:
-                            bounds = numpy.concatenate((bounds,bound))
+                            bounds=numpy.concatenate((bounds, bound))
                     else:
-                        bounds = None
-                        
-            
+                        bounds=None
+
+
             else:
-                
-                s1, s2 = splitSlice(slice(i,j,k),size)
-                if(_debug): print "SSSS0: original ",s1,s2
-                
-                part1 = self[s1]
-                part2 = self[s2]+modulo
-                if(_debug): print "SSSSSSSSSSSSSSS modulo",self[0],self[-1],modulo
-                data = numpy.concatenate((part1,part2))
+
+                s1, s2=splitSlice(slice(i, j, k), size)
+                if(_debug): print "SSSS0: original ", s1, s2
+
+                part1=self[s1]
+                part2=self[s2] + modulo
+                if(_debug): print "SSSSSSSSSSSSSSS modulo", self[0], self[-1], modulo
+                data=numpy.concatenate((part1, part2))
                 if fullBounds is not None:
-                    bounds1 = fullBounds[s1]
-                    bounds2 = fullBounds[s2]+modulo
-                    bounds = numpy.concatenate((bounds1,bounds2))
+                    bounds1=fullBounds[s1]
+                    bounds2=fullBounds[s2] + modulo
+                    bounds=numpy.concatenate((bounds1, bounds2))
                 else:
-                    bounds = None
-            
+                    bounds=None
+
 
         else:                           # no wraparound
-            data = self[i:j:k]
+            data=self[i:j:k]
             if fullBounds is not None:
-                bounds = fullBounds[i:j:k]
+                bounds=fullBounds[i:j:k]
             else:
-                bounds = None
-        
-        newaxis = TransientAxis(data, bounds, id=self.id, copy=1)
+                bounds=None
+
+        newaxis=TransientAxis(data, bounds, id=self.id, copy=1)
 
         if self.isLatitude(): newaxis.designateLatitude()
         if self.isLongitude(): newaxis.designateLongitude()
@@ -1367,190 +1422,201 @@ class AbstractAxis(CdmsObj):
         if self.isTime(): newaxis.designateTime()
 
         for attname in self.attributes.keys():
-            if attname not in ["datatype", "length","isvar","name_in_file","partition","partition_length"]:
+            if attname not in ["datatype", "length", "isvar", "name_in_file", "partition", "partition_length"]:
                 setattr(newaxis, attname, getattr(self, attname))
-                newaxis.attributes[attname]=getattr(self,attname)
+                newaxis.attributes[attname]=getattr(self, attname)
 
         # Change circular topology to linear if a strict subset was copied
-        if hasattr(self,"topology") and self.topology=="circular" and len(newaxis)<len(self):
+        if hasattr(self, "topology") and self.topology == "circular" and len(newaxis) < len(self):
             newaxis.topology="linear"
 
         return newaxis
-    
+
 #----------------------------------------------------------------------
 # mf 2001 set calls to subAxis as subaxis
 #----------------------------------------------------------------------
 
-    subAxis = subaxis
+    subAxis=subaxis
 
     def typecode(self):
         raise CDMSError, MethodNotImplemented
 
-    # Check that a boundary array is valid, raise exception if not. bounds is an array of shape (n,2)
-    def validateBounds(self,bounds):
-        requiredShape = (len(self),2)
-        requiredShape2 = (len(self)+1,)
-        if bounds.shape!=requiredShape and bounds.shape!=requiredShape2:
+    # Check that a boundary array is valid, raise exception if not. bounds is
+    # an array of shape (n,2)
+    def validateBounds(self, bounds):
+        requiredShape=(len(self), 2)
+        requiredShape2=(len(self) + 1,)
+        if bounds.shape != requiredShape and bounds.shape != requiredShape2:
             raise CDMSError, InvalidBoundsArray + \
-                 'shape is %s, should be %s or %s'%(`bounds.shape`,`requiredShape`,`requiredShape2`)
-        if bounds.shape==requiredShape2: # case of "n+1" bounds
+                 'shape is %s, should be %s or %s' % (
+    `bounds.shape`,
+     `requiredShape`,
+     `requiredShape2`)
+        if bounds.shape == requiredShape2:  # case of "n+1" bounds
             bounds2=numpy.zeros(requiredShape)
-            bounds2[:,0]=bounds[:-1]
-            bounds2[:,1]=bounds[1::]
+            bounds2[:, 0]=bounds[:-1]
+            bounds2[:, 1]=bounds[1::]
             bounds=bounds2
-        mono = (bounds[0,0]<=bounds[0,1])
+        mono=(bounds[0, 0] <= bounds[0, 1])
         if mono:
             for i in range(bounds.shape[0]):
-                if not bounds[i,0]<=self[i]<=bounds[i,1]:
+                if not bounds[i, 0] <= self[i] <= bounds[i, 1]:
                     raise CDMSError, InvalidBoundsArray + \
-'bounds[%i]=%f is not in the range [%f,%f]'%(i,self[i],bounds[i,0],bounds[i,1])
+'bounds[%i]=%f is not in the range [%f,%f]' % (
+    i, self[i], bounds[i, 0], bounds[i, 1])
         else:
             for i in range(bounds.shape[0]):
-                if not bounds[i,0]>=self[i]>=bounds[i,1]:
+                if not bounds[i, 0] >= self[i] >= bounds[i, 1]:
                     raise CDMSError, InvalidBoundsArray + \
-'bounds[%i]=%f is not in the range [%f,%f]'%(i,self[i],bounds[i,1],bounds[i,0])
+'bounds[%i]=%f is not in the range [%f,%f]' % (
+    i, self[i], bounds[i, 1], bounds[i, 0])
         return bounds
 
-    # Generate bounds from midpoints. width is the width of the zone if the axis has one value.
+    # Generate bounds from midpoints. width is the width of the zone if the
+    # axis has one value.
     def genGenericBounds(self, width=1.0):
         if self._data_ is None:
-            self._data_ = self.getData()
-        ar = self._data_
-        if len(self)>1:
-            leftPoint = numpy.array([1.5*ar[0]-0.5*ar[1]])
-            midArray = (ar[0:-1]+ar[1:])/2.0
-            rightPoint = numpy.array([1.5*ar[-1]-0.5*ar[-2]])
-            bnds = numpy.concatenate((leftPoint,midArray,rightPoint))
+            self._data_=self.getData()
+        ar=self._data_
+        if len(self) > 1:
+            leftPoint=numpy.array([1.5 * ar[0] - 0.5 * ar[1]])
+            midArray=(ar[0:-1] + ar[1:]) / 2.0
+            rightPoint=numpy.array([1.5 * ar[-1] - 0.5 * ar[-2]])
+            bnds=numpy.concatenate((leftPoint, midArray, rightPoint))
         else:
-            delta = width/2.0
-            bnds = numpy.array([self[0]-delta,self[0]+delta])
+            delta=width / 2.0
+            bnds=numpy.array([self[0] - delta, self[0] + delta])
 
         # Transform to (n,2) array
-        retbnds = numpy.zeros((len(ar),2),numpy.float)
-        retbnds[:,0] = bnds[:-1]
-        retbnds[:,1] = bnds[1:]
+        retbnds=numpy.zeros((len(ar), 2), numpy.float)
+        retbnds[:, 0]=bnds[:-1]
+        retbnds[:, 1]=bnds[1:]
 
         if self.isLatitude():
-            retbnds[0,:] = numpy.maximum(-90.0, numpy.minimum(90.0,retbnds[0,:]))
-            retbnds[-1,:] = numpy.maximum(-90.0, numpy.minimum(90.0,retbnds[-1,:]))
+            retbnds[0, :]=numpy.maximum(
+    -90.0, numpy.minimum(90.0, retbnds[0, :]))
+            retbnds[-1, :]=numpy.maximum(
+    -90.0, numpy.minimum(90.0, retbnds[-1, :]))
 
         return retbnds
 
-    def clone (self, copyData=1):
+    def clone(self, copyData=1):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
-        b = self.getBounds()
-        if copyData==1:
-            mycopy = createAxis(copy.copy(self[:]))
+        b=self.getBounds()
+        if copyData == 1:
+            mycopy=createAxis(copy.copy(self[:]))
         else:
-            mycopy = createAxis(self[:])
-        mycopy.id = self.id
+            mycopy=createAxis(self[:])
+        mycopy.id=self.id
         try:
             mycopy.setBounds(b)
         except CDMSError:
-            b = mycopy.genGenericBounds()
+            b=mycopy.genGenericBounds()
             mycopy.setBounds(b)
         for k, v in self.attributes.items():
            setattr(mycopy, k, v)
         return mycopy
 
-    def listall (self, all=None):
+    def listall(self, all=None):
         "Get list of info about this axis."
-        aname = self.id
-        result = []
+        aname=self.id
+        result=[]
         result.append('   id: ' + aname)
         if self.isLatitude(): result.append('   Designated a latitude axis.')
         if self.isLongitude(): result.append('   Designated a longitude axis.')
         if self.isTime(): result.append('   Designated a time axis.')
         if self.isLevel(): result.append('   Designated a level axis.')
         try:
-            units = self.units
+            units=self.units
             result.append('   units:  ' + units)
         except:
             pass
-        d = self.getValue()
+        d=self.getValue()
         result.append('   Length: ' + str(len(d)))
         result.append('   First:  ' + str(d[0]))
         result.append('   Last:   ' + str(d[-1]))
-        flag = 1
+        flag=1
         for k in self.attributes.keys():
             if k in std_axis_attributes: continue
             if flag:
                 result.append('   Other axis attributes:')
-                flag = 0
-            result.append('      '+k+': '+str(self.attributes[k]))
+                flag=0
+            result.append('      ' + k + ': ' + str(self.attributes[k]))
         result.append('   Python id:  %s' % hex(id(self)))
 
         if all:
             result.append("   Values:")
             result.append(str(d))
-            b = self.getBounds()
+            b=self.getBounds()
             result.append("   Bounds:")
             result.append(str(b))
         return result
 
     def info(self, flag=None, device=None):
         "Write info about axis; include dimension values and weights if flag"
-        if device is None: device = sys.stdout
+        if device is None: device=sys.stdout
         device.write(str(self))
 
     def isVirtual(self):
         "Return true iff coordinate values are implicitly defined."
         return 0
 
-    shape = property(_getshape,None)
-    dtype = _getdtype
+    shape=property(_getshape, None)
+    dtype=_getdtype
 
-## PropertiedClasses.set_property(AbstractAxis, 'shape', 
-##                         AbstractAxis._getshape, nowrite=1, nodelete=1)
-## PropertiedClasses.set_property(AbstractAxis, 'dtype', 
-##                         AbstractAxis._getdtype, nowrite=1, nodelete=1)
-## internattr.add_internal_attribute (AbstractAxis, 'id', 'parent')
+# PropertiedClasses.set_property(AbstractAxis, 'shape',
+# AbstractAxis._getshape, nowrite=1, nodelete=1)
+# PropertiedClasses.set_property(AbstractAxis, 'dtype',
+# AbstractAxis._getdtype, nowrite=1, nodelete=1)
+# internattr.add_internal_attribute (AbstractAxis, 'id', 'parent')
 
 # One-dimensional coordinate axis in a dataset
 class Axis(AbstractAxis):
-    def __init__(self,parent,axisNode=None):
+    def __init__(self, parent, axisNode=None):
         if axisNode is not None and axisNode.tag != 'axis':
                raise CDMSError, 'Creating axis, node is not an axis node.'
         AbstractAxis.__init__(self, parent, axisNode)
         if axisNode is not None:
             if axisNode.partition is not None:
-                flatpart = axisNode.partition
-                self.__dict__['partition']=numpy.reshape(flatpart,(len(flatpart)/2,2))
+                flatpart=axisNode.partition
+                self.__dict__['partition']=numpy.reshape(
+    flatpart, (len(flatpart) / 2, 2))
                 self.attributes['partition']=self.partition
-        self.id = axisNode.id
-    
+        self.id=axisNode.id
+
     def typecode(self):
         return cdmsNode.CdToNumericType.get(self._node_.datatype)
 
     # Handle slices of the form x[i], x[i:j:k], x[(slice(i,j,k),)], and x[...]
     def __getitem__(self, key):
-        node = self._node_
-        length = len(node)
+        node=self._node_
+        length=len(node)
 
         # Allow key of form (slice(i,j),) etc.
-        if isinstance(key, tuple) and len(key)==1:
-            key = key[0]
+        if isinstance(key, tuple) and len(key) == 1:
+            key=key[0]
 
-        if isinstance(key, (int, numpy.int,numpy.int32)):  # x[i]
-            if key>=length:
+        if isinstance(key, (int, numpy.int, numpy.int32)):  # x[i]
+            if key >= length:
                 raise IndexError, 'index out of bounds'
             else:
-                # Don't generate the entire array (if linear) just for one value
-                return node.data[key%length]
-        elif isinstance(key, slice): # x[i:j:k]
+                # Don't generate the entire array (if linear) just for one
+                # value
+                return node.data[key % length]
+        elif isinstance(key, slice):  # x[i:j:k]
             if self._data_ is None:
-                self._data_ = node.getData()
+                self._data_=node.getData()
             return self._data_[key.start:key.stop:key.step]
-        elif isinstance(key, Ellipsis.__class__): # x[...]
+        elif isinstance(key, Ellipsis.__class__):  # x[...]
             if self._data_ is None:
-                self._data_ = node.getData()
+                self._data_=node.getData()
             return self._data_
         elif isinstance(key, tuple):
-            raise IndexError,'axis is one-dimensional'
+            raise IndexError, 'axis is one-dimensional'
         else:
-            raise IndexError,'index must be an integer: %s'%`key`
+            raise IndexError, 'index must be an integer: %s' % `key`
 
     # Get axis data
     def getData(self):
@@ -1559,7 +1625,7 @@ class Axis(AbstractAxis):
     # Handle slices of the form x[i:j]
     def __getslice__(self, low, high):
         if self._data_ is None:
-            self._data_ = self.getData()
+            self._data_=self.getData()
         return self._data_[low:high]
 
     def __len__(self):
@@ -1567,85 +1633,85 @@ class Axis(AbstractAxis):
 
     # Return true iff the axis representation is linear
     def isLinear(self):
-        return self._node_.dataRepresent==cdmsNode.CdLinear
+        return self._node_.dataRepresent == cdmsNode.CdLinear
 
     # Return the bounds array, or generate a default if autoBounds mode is on
     def getBounds(self):
-        boundsArray = self.getExplicitBounds()
+        boundsArray=self.getExplicitBounds()
         try:
             self.validateBounds(boundsArray)
         except:
-            boundsArray = None
-        abopt = getAutoBounds()
-        if boundsArray is None and (abopt==1 or (abopt==2 and (self.isLatitude() or self.isLongitude()))) :
-            boundsArray = self.genGenericBounds()
-            
+            boundsArray=None
+        abopt=getAutoBounds()
+        if boundsArray is None and (abopt == 1 or (abopt == 2 and (self.isLatitude() or self.isLongitude()))):
+            boundsArray=self.genGenericBounds()
+
         return boundsArray
 
     # Return the bounds array, or None
     def getExplicitBounds(self):
-        boundsArray = None
-        if hasattr(self,'bounds'):
-            boundsName = self.bounds
+        boundsArray=None
+        if hasattr(self, 'bounds'):
+            boundsName=self.bounds
             try:
-                boundsVar = self.parent.variables[boundsName]
-                boundsArray = numpy.ma.filled(boundsVar.getSlice())
+                boundsVar=self.parent.variables[boundsName]
+                boundsArray=numpy.ma.filled(boundsVar.getSlice())
             except KeyError:
-                boundsArray = None
+                boundsArray=None
 
         return boundsArray
 
     def getCalendar(self):
-        if hasattr(self,'calendar'):
-            calendar = self.calendar.lower()
+        if hasattr(self, 'calendar'):
+            calendar=self.calendar.lower()
         elif self.parent is not None and hasattr(self.parent, 'calendar'):
-            calendar = self.parent.calendar.lower()
+            calendar=self.parent.calendar.lower()
         else:
-            calendar = None
+            calendar=None
 
-        cdcal = tagToCalendar.get(calendar, cdtime.DefaultCalendar)
+        cdcal=tagToCalendar.get(calendar, cdtime.DefaultCalendar)
         return cdcal
 
 # In-memory coordinate axis
 class TransientAxis(AbstractAxis):
-    axis_count = 0
+    axis_count=0
     def __init__(self, data, bounds=None, id=None, attributes=None, copy=0):
         AbstractAxis.__init__(self, None, None)
         if id is None:
-            TransientAxis.axis_count = TransientAxis.axis_count + 1
-            id = 'axis_' + str(TransientAxis.axis_count)
+            TransientAxis.axis_count=TransientAxis.axis_count + 1
+            id='axis_' + str(TransientAxis.axis_count)
         if attributes is None:
-            if hasattr(data, 'attributes'): attributes = data.attributes
+            if hasattr(data, 'attributes'): attributes=data.attributes
         if attributes is not None:
             for name, value in attributes.items():
                 if name not in ['missing_value', 'name']:
                     setattr(self, name, value)
-        self.id = id
+        self.id=id
         if isinstance(data, AbstractAxis):
             if copy == 0:
-                self._data_ = data[:]
+                self._data_=data[:]
             else:
-                self._data_ = numpy.array(data[:])
+                self._data_=numpy.array(data[:])
         elif isinstance(data, numpy.ndarray):
             if copy == 0:
-                self._data_ = data
+                self._data_=data
             else:
-                self._data_ = numpy.array(data)
+                self._data_=numpy.array(data)
         elif isinstance(data, numpy.ma.MaskedArray):
             if numpy.ma.getmask(data) is not numpy.ma.nomask:
                 raise CDMSError, \
                       'Cannot construct an axis with a missing value.'
-            data = data.data
+            data=data.data
             if copy == 0:
-                self._data_ = data
+                self._data_=data
             else:
-                self._data_ = numpy.array(data)
+                self._data_=numpy.array(data)
         elif data is None:
-            self._data_ = None
+            self._data_=None
         else:
-            self._data_ = numpy.array(data)
+            self._data_=numpy.array(data)
 
-        self._doubledata_ = None
+        self._doubledata_=None
         self.setBounds(bounds)
 
     def __getitem__(self, key):
@@ -1655,10 +1721,10 @@ class TransientAxis(AbstractAxis):
         return self._data_[low:high]
 
     def __setitem__(self, index, value):
-        self._data_[index] = numpy.ma.filled(value)
+        self._data_[index]=numpy.ma.filled(value)
 
     def __setslice__(self, low, high, value):
-        self._data_[low:high] = numpy.ma.filled(value)
+        self._data_[low:high]=numpy.ma.filled(value)
 
     def __len__(self):
         return len(self._data_)
@@ -1666,7 +1732,7 @@ class TransientAxis(AbstractAxis):
     def getBounds(self):
         if self._bounds_ is not None:
             return copy.copy(self._bounds_)
-        elif (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
+        elif (getAutoBounds() == 1 or (getAutoBounds() == 2 and (self.isLatitude() or self.isLongitude()))):
             return self.genGenericBounds()
         else:
             return None
@@ -1682,29 +1748,33 @@ class TransientAxis(AbstractAxis):
     #
     # mf 20010308 - add validate key word, by default do not validate
     #
-    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None):
+    def setBounds(self, bounds, persistent=0,
+                  validate=0, index=None, boundsid=None):
         if bounds is not None:
             if isinstance(bounds, numpy.ma.MaskedArray):
-                bounds = numpy.ma.filled(bounds)
+                bounds=numpy.ma.filled(bounds)
             if validate:
-                bounds = self.validateBounds(bounds)
+                bounds=self.validateBounds(bounds)
             else:                       # Just do the absolute minimum validation
-                requiredShape = (len(self),2)
-                requiredShape2 = (len(self)+1,)
-                if bounds.shape!=requiredShape and bounds.shape!=requiredShape2:
+                requiredShape=(len(self), 2)
+                requiredShape2=(len(self) + 1,)
+                if bounds.shape != requiredShape and bounds.shape != requiredShape2:
                     raise CDMSError, InvalidBoundsArray + \
-                          'shape is %s, should be %s or %s'%(`bounds.shape`,`requiredShape`,`requiredShape2`)
-                if bounds.shape==requiredShape2: # case of "n+1" bounds
+                          'shape is %s, should be %s or %s' % (
+    `bounds.shape`,
+     `requiredShape`,
+     `requiredShape2`)
+                if bounds.shape == requiredShape2:  # case of "n+1" bounds
                     bounds2=numpy.zeros(requiredShape)
-                    bounds2[:,0]=bounds[:-1]
-                    bounds2[:,1]=bounds[1::]
+                    bounds2[:, 0]=bounds[:-1]
+                    bounds2[:, 1]=bounds[1::]
                     bounds=bounds2
-            self._bounds_ = copy.copy(bounds)
+            self._bounds_=copy.copy(bounds)
         else:
-            if (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
-                self._bounds_ = self.genGenericBounds()
+            if (getAutoBounds() == 1 or (getAutoBounds() == 2 and (self.isLatitude() or self.isLongitude()))):
+                self._bounds_=self.genGenericBounds()
             else:
-                self._bounds_ = None
+                self._bounds_=None
 
     def isLinear(self):
         return 0
@@ -1719,17 +1789,17 @@ class TransientVirtualAxis(TransientAxis):
 
     def __init__(self, axisname, axislen):
         TransientAxis.__init__(self, None, id=axisname)
-        self._virtualLength = axislen # length of the axis
+        self._virtualLength=axislen  # length of the axis
 
     def __len__(self):
         return self._virtualLength
 
-    def __str__ (self):
-        return "<TransientVirtualAxis %s(%d)>"%(self.id, self._virtualLength)
+    def __str__(self):
+        return "<TransientVirtualAxis %s(%d)>" % (self.id, self._virtualLength)
 
-    __repr__ = __str__
+    __repr__=__str__
 
-    def clone (self, copyData=1):
+    def clone(self, copyData=1):
         """clone (self, copyData=1)
         Return a copy of self as a transient virtual axis.
         If copyData is 1, make a separate copy of the data."""
@@ -1747,7 +1817,7 @@ class TransientVirtualAxis(TransientAxis):
 
     def setBounds(self, bounds):
         "No boundaries on virtual axes"
-        self._bounds_ = None
+        self._bounds_=None
 
     def __getitem__(self, key):
         return self.getData()[key]
@@ -1755,79 +1825,84 @@ class TransientVirtualAxis(TransientAxis):
     def __getslice__(self, low, high):
         return self.getData()[low:high]
 
-## PropertiedClasses.initialize_property_class (TransientVirtualAxis)
+# PropertiedClasses.initialize_property_class (TransientVirtualAxis)
 
 # One-dimensional coordinate axis in a CdmsFile.
 class FileAxis(AbstractAxis):
-    
+
     def __init__(self, parent, axisname, obj=None):
-        AbstractAxis.__init__ (self, parent, None)
-        val = self.__cdms_internals__ +['name_in_file',]
-        self.___cdms_internals__ = val
-        self.id = axisname
-        self._obj_ = obj
+        AbstractAxis.__init__(self, parent, None)
+        val=self.__cdms_internals__ + ['name_in_file', ]
+        self.___cdms_internals__=val
+        self.id=axisname
+        self._obj_=obj
         # Overshadows file boundary data, if not None
-        self._boundsArray_ = None
-        (units,typecode,name_in_file,parent_varname,dimtype,ncid) = \
-                   parent._file_.dimensioninfo[axisname]
-        self.__dict__['_units'] = units
-        att = self.attributes
+        self._boundsArray_=None
+        (units, typecode, name_in_file, parent_varname,
+         dimtype, ncid)=parent._file_.dimensioninfo[axisname]
+        self.__dict__['_units']=units
+        att=self.attributes
         att['units']=units
-        self.attributes = att
-        self.name_in_file = self.id
+        self.attributes=att
+        self.name_in_file=self.id
         if name_in_file:
-            self.name_in_file = name_in_file
+            self.name_in_file=name_in_file
         # Combine the attributes of the variable object, if any
         if obj is not None:
             for attname in self._obj_.__dict__.keys():
-                attval = getattr(self._obj_,attname)
+                attval=getattr(self._obj_, attname)
                 if not callable(attval):
-                    self.__dict__[attname]  = attval
-                    att = self.attributes
+                    self.__dict__[attname]=attval
+                    att=self.attributes
                     att[attname]=attval
-                    self.attributes= att
-        
+                    self.attributes=att
+
     def getData(self):
-        if cdmsobj._debug==1:
-            print 'Getting array for axis',self.id
+        if cdmsobj._debug == 1:
+            print 'Getting array for axis', self.id
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
         try:
-            result = self.parent._file_.readDimension(self.id)
+            result=self.parent._file_.readDimension(self.id)
         except:
             try:
-                result = apply(self._obj_.getitem, (slice(None,None),))
+                result=apply(self._obj_.getitem, (slice(None, None),))
             except:
-                raise CDMSError,'Data for dimension %s not found'%self.id
+                raise CDMSError, 'Data for dimension %s not found' % self.id
         return result
 
     def typecode(self):
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
-        (units,typecode,name_in_file,parent_varname,dimtype,ncid) = \
-                             self.parent._file_.dimensioninfo[self.id]
+        (units, typecode, name_in_file, parent_varname,
+         dimtype, ncid)=self.parent._file_.dimensioninfo[self.id]
         return typecode
-    
+
     def _setunits(self, value):
-        self._units = value
+        self._units=value
         self.attributes['units']=value
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
         setattr(self._obj_, 'units', value)
-        (units,typecode,name_in_file,parent_varname,dimtype,ncid) = \
-            self.parent._file_.dimensioninfo[self.id]
-        self.parent._file_.dimensioninfo[self.id] = \
-                  (value,typecode,name_in_file,parent_varname,dimtype,ncid)
+        (units, typecode, name_in_file, parent_varname,
+         dimtype, ncid)=self.parent._file_.dimensioninfo[self.id]
+        self.parent._file_.dimensioninfo[self.id]=(
+    value,
+     typecode,
+     name_in_file,
+     parent_varname,
+     dimtype,
+     ncid)
     def _getunits(self):
         return self._units
 
     def _delunits(self):
         del(self._units)
         del(self.attributes['units'])
-        delattr(self._obj_,'units')
+        delattr(self._obj_, 'units')
 
 
-    def __getattr__(self,name):
+    def __getattr__(self, name):
         if name == 'units':
             return self._units
         try:
@@ -1835,27 +1910,27 @@ class FileAxis(AbstractAxis):
         except:
             raise AttributeError
     # setattr writes external attributes to the file
-    def __setattr__ (self, name, value):
+    def __setattr__(self, name, value):
         if name == 'units':
             self._setunits(value)
             return
         if hasattr(self, 'parent') and self.parent is None:
             raise CDMSError, FileWasClosed + self.id
-##         s = self.get_property_s (name)
-##         if s is not None:
-##             s(self, name, value)
-##             return
-        if not name in self.__cdms_internals__ and name[0]!='_':
+# s = self.get_property_s (name)
+# if s is not None:
+# s(self, name, value)
+# return
+        if not name in self.__cdms_internals__ and name[0] != '_':
             setattr(self._obj_, name, value)
             self.attributes[name]=value
-        self.__dict__[name]  = value
+        self.__dict__[name]=value
 
     # delattr deletes external global attributes in the file
     def __delattr__(self, name):
-##         d = self.get_property_d(name)
-##         if d is not None:
-##             d(self, name)
-##             return
+# d = self.get_property_d(name)
+# if d is not None:
+# d(self, name)
+# return
         if name == "units":
             self._delunits()
             return
@@ -1870,83 +1945,84 @@ class FileAxis(AbstractAxis):
 
     # Read data
     # If the axis has a related Cdunif variable object, just read that variable
-    # otherwise, cache the Cdunif (read-only) data values in self._data_. in this case, 
-    # the axis is not extensible, so it is not necessary to reread it each time.
+    # otherwise, cache the Cdunif (read-only) data values in self._data_. in this case,
+    # the axis is not extensible, so it is not necessary to reread it each
+    # time.
     def __getitem__(self, key):
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
         # See __getslice__ comment below.
-        if (self._obj_ is not None) and (self.parent._mode_!='r') and not (hasattr(self.parent,'format') and self.parent.format=="DRS"):
+        if (self._obj_ is not None) and (self.parent._mode_ != 'r') and not (hasattr(self.parent, 'format') and self.parent.format == "DRS"):
             # For negative strides, get the equivalent slice with positive stride,
             # then reverse the result.
-            if isinstance(key, slice) and (key.step is not None) and key.step<0:
-                posslice = reverseSlice(key,len(self))
-                result = apply(self._obj_.getitem, (posslice,))
+            if isinstance(key, slice) and (key.step is not None) and key.step < 0:
+                posslice=reverseSlice(key, len(self))
+                result=apply(self._obj_.getitem, (posslice,))
                 return result[::-1]
             else:
-                if isinstance(key, int) and key>=len(self):
-                    raise IndexError, 'Index out of bounds: %d'%key
+                if isinstance(key, int) and key >= len(self):
+                    raise IndexError, 'Index out of bounds: %d' % key
                 if isinstance(key, tuple):
-                    key = (key,)
+                    key=(key,)
                 return apply(self._obj_.getitem, key)
         if self._data_ is None:
-            self._data_ = self.getData()
-        length = len(self._data_)
+            self._data_=self.getData()
+        length=len(self._data_)
         if isinstance(key, int):  # x[i]
-            if key>=length:
+            if key >= length:
                 raise IndexError, 'index out of bounds'
             else:
-                return self._data_[key%length]
-        elif isinstance(key, slice): # x[i:j:k]
+                return self._data_[key % length]
+        elif isinstance(key, slice):  # x[i:j:k]
             return self._data_[key.start:key.stop:key.step]
-        elif isinstance(key, Ellipsis.__class__): # x[...]
+        elif isinstance(key, Ellipsis.__class__):  # x[...]
             return self._data_
         elif isinstance(key, tuple):
-            raise IndexError,'axis is one-dimensional'
+            raise IndexError, 'axis is one-dimensional'
         else:
-            raise IndexError,'index must be an integer or slice: %s'%`key`
+            raise IndexError, 'index must be an integer or slice: %s' % `key`
 
     def __getslice__(self, low, high):
         # Hack to prevent netCDF overflow error on 64-bit architectures
-        high = min(Max32int, high)
-        
+        high=min(Max32int, high)
+
         # Hack to fix a DRS bug: force use of readDimension for DRS axes.
         # Since DRS is read-only here, it is OK just to cache all dimensions.
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
-        if (self._obj_ is not None) and (self.parent._mode_!='r') and not (hasattr(self.parent,'format') and self.parent.format=="DRS"):
-            return apply(self._obj_.getslice,(low,high))
+        if (self._obj_ is not None) and (self.parent._mode_ != 'r') and not (hasattr(self.parent, 'format') and self.parent.format == "DRS"):
+            return apply(self._obj_.getslice, (low, high))
         else:
             if self._data_ is None:
-                self._data_ = self.getData()
+                self._data_=self.getData()
             return self._data_[low:high]
 
     def __setitem__(self, index, value):
         if self._obj_ is None:
             raise CDMSError, ReadOnlyAxis + self.id
         if self.parent is None:
-            raise CDMSError, FileWasClosed+self.id
-        return apply(self._obj_.setitem,(index,numpy.ma.filled(value)))
+            raise CDMSError, FileWasClosed + self.id
+        return apply(self._obj_.setitem, (index, numpy.ma.filled(value)))
 
     def __setslice__(self, low, high, value):
         # Hack to prevent netCDF overflow error on 64-bit architectures
-        high = min(Max32int, high)
+        high=min(Max32int, high)
         if self._obj_ is None:
             raise CDMSError, ReadOnlyAxis + self.id
         if self.parent is None:
-            raise CDMSError, FileWasClosed+self.id
-        return apply(self._obj_.setslice,(low,high,numpy.ma.filled(value)))
+            raise CDMSError, FileWasClosed + self.id
+        return apply(self._obj_.setslice, (low, high, numpy.ma.filled(value)))
 
     def __len__(self):
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
         if self._obj_ is not None:
-            length = len(self._obj_)
+            length=len(self._obj_)
         elif self._data_ is None:
-            self._data_ = self.getData()
-            length = len(self._data_)
+            self._data_=self.getData()
+            length=len(self._data_)
         else:
-            length = len(self._data_)
+            length=len(self._data_)
         return length
 
     def isLinear(self):
@@ -1954,31 +2030,31 @@ class FileAxis(AbstractAxis):
 
     # Return the bounds array, or generate a default if autobounds mode is set
     def getBounds(self):
-        boundsArray = self.getExplicitBounds()
+        boundsArray=self.getExplicitBounds()
         try:
-            boundsArray = self.validateBounds(boundsArray)
-        except Exception,err:
-            boundsArray = None
-        if boundsArray is None and (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
-            boundsArray = self.genGenericBounds()
-            
+            boundsArray=self.validateBounds(boundsArray)
+        except Exception, err:
+            boundsArray=None
+        if boundsArray is None and (getAutoBounds() == 1 or (getAutoBounds() == 2 and (self.isLatitude() or self.isLongitude()))):
+            boundsArray=self.genGenericBounds()
+
         return boundsArray
 
     # Return the bounds array, or None
     def getExplicitBounds(self):
         if self._boundsArray_ is None:
-            boundsArray = None
-            if hasattr(self,'bounds'):
-                boundsName = self.bounds
+            boundsArray=None
+            if hasattr(self, 'bounds'):
+                boundsName=self.bounds
                 try:
-                    boundsVar = self.parent[boundsName]
-                    boundsArray = numpy.ma.filled(boundsVar)
-                    self._boundsArray_ = boundsArray  # for climatology performance
-                except KeyError,err:
+                    boundsVar=self.parent[boundsName]
+                    boundsArray=numpy.ma.filled(boundsVar)
+                    self._boundsArray_=boundsArray  # for climatology performance
+                except KeyError, err:
                     print err
-                    boundsArray = None
+                    boundsArray=None
         else:
-            boundsArray = self._boundsArray_
+            boundsArray=self._boundsArray_
 
         return boundsArray
 
@@ -1989,58 +2065,63 @@ class FileAxis(AbstractAxis):
     # index in the extended dimension (default is index=0).
     # If the bounds variable is new, use the name boundsid, or 'bounds_<varid>'
     # if unspecified.
-    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None):
+    def setBounds(self, bounds, persistent=0,
+                  validate=0, index=None, boundsid=None):
         if persistent:
             if index is None:
                 if validate:
-                    bounds = self.validateBounds(bounds)
-                index = 0
+                    bounds=self.validateBounds(bounds)
+                index=0
 
             # Create the bound axis, if necessary
-            file = self.parent
+            file=self.parent
             if file._boundAxis_ is None:
 
                 # First look for 'bound' of length two
-                if file.axes.has_key("bound") and len(file.axes["bound"])==2:
-                    file._boundAxis_ = file.axes["bound"]
+                if file.axes.has_key("bound") and len(file.axes["bound"]) == 2:
+                    file._boundAxis_=file.axes["bound"]
                 else:
-                    file._boundAxis_ = file.createVirtualAxis("bound",2)
+                    file._boundAxis_=file.createVirtualAxis("bound", 2)
 
             # Create the boundary variable if necessary
-            if hasattr(self,'bounds'):
-                boundName = self.bounds
-                boundVar = file.variables[boundName]
+            if hasattr(self, 'bounds'):
+                boundName=self.bounds
+                boundVar=file.variables[boundName]
             else:
                 if boundsid is None:
-                    boundName = "bounds_"+self.id
+                    boundName="bounds_" + self.id
                 else:
-                    boundName = boundsid
-                boundVar = file.createVariable(boundName, cdmsNode.NumericToCdType.get(bounds.dtype.char), (self,file._boundAxis_))
+                    boundName=boundsid
+                boundVar=file.createVariable(
+    boundName,
+     cdmsNode.NumericToCdType.get(bounds.dtype.char),
+     (self,
+     file._boundAxis_))
                 # And link to self
-                self.bounds = boundName
-                self._boundsArray_ = None
+                self.bounds=boundName
+                self._boundsArray_=None
 
-            boundVar[index:index+len(bounds)] = bounds
+            boundVar[index:index + len(bounds)]=bounds
 
         else:
-            self._boundsArray_ = copy.copy(bounds)
+            self._boundsArray_=copy.copy(bounds)
 
     def getCalendar(self):
-        if hasattr(self,'calendar'):
-            calendar = self.calendar.lower()
+        if hasattr(self, 'calendar'):
+            calendar=self.calendar.lower()
         elif self.parent is not None and hasattr(self.parent, 'calendar'):
-            calendar = self.parent.calendar.lower()
+            calendar=self.parent.calendar.lower()
         else:
-            calendar = None
+            calendar=None
 
-        cdcal = tagToCalendar.get(calendar, cdtime.DefaultCalendar)
+        cdcal=tagToCalendar.get(calendar, cdtime.DefaultCalendar)
         return cdcal
 
     def isVirtual(self):
         "Return true iff coordinate values are implicitly defined."
 
         # No virtual axes in GrADS files
-        if self.parent is not None and hasattr(self.parent, 'format') and self.parent.format=='GRADS':
+        if self.parent is not None and hasattr(self.parent, 'format') and self.parent.format == 'GRADS':
             return 0
         return (self._obj_ is None)
 
@@ -2050,11 +2131,11 @@ class FileAxis(AbstractAxis):
             return (self.parent._file_.dimensions[self.id] is None)
         else:
             return False
-## PropertiedClasses.set_property (FileAxis, 'units', 
-##                                 acts=FileAxis._setunits,
-##                                 nodelete=1
-##                                )
-## internattr.add_internal_attribute(FileAxis, 'name_in_file')
+# PropertiedClasses.set_property (FileAxis, 'units',
+# acts=FileAxis._setunits,
+# nodelete=1
+# )
+# internattr.add_internal_attribute(FileAxis, 'name_in_file')
 
 class FileVirtualAxis(FileAxis):
     """An axis with no explicit representation of data values in the file.
@@ -2066,8 +2147,8 @@ class FileVirtualAxis(FileAxis):
 
     def __init__(self, parent, axisname, axislen):
         FileAxis.__init__(self, parent, axisname)
-        self._virtualLength = axislen # length of the axis
-        
+        self._virtualLength=axislen  # length of the axis
+
     def __len__(self):
         return self._virtualLength
 
@@ -2078,95 +2159,95 @@ class FileVirtualAxis(FileAxis):
         "Return true iff coordinate values are implicitly defined."
         return 1
 
-## PropertiedClasses.initialize_property_class (FileVirtualAxis)
+# PropertiedClasses.initialize_property_class (FileVirtualAxis)
 
-######## Functions for selecting axes
-def axisMatchAxis (axes, specifications=None, omit=None, order=None):
-    """Given a list of axes and a specification or list of 
+# Functions for selecting axes
+def axisMatchAxis(axes, specifications=None, omit=None, order=None):
+    """Given a list of axes and a specification or list of
      specificatons, and a specification or list of specifications
-     of those axes to omit, return a list of 
-     those axes in the list that match the specification but 
-     do not include in the list any axes that matches an omit 
+     of those axes to omit, return a list of
+     those axes in the list that match the specification but
+     do not include in the list any axes that matches an omit
      specification.
 
      If specifications is None, include all axes less the omitted ones.
 
-     Individual specifications must be integer indices into axes or 
+     Individual specifications must be integer indices into axes or
      matching criteria as detailed in axisMatches.
 
      Axes are returned in the order they occur in the axes argument unless
-     order is given. 
+     order is given.
 
-     order can be a string containing the symbols t,x,y,z, or -. 
-     If a - is given, any elements of the result not chosen otherwise are 
+     order can be a string containing the symbols t,x,y,z, or -.
+     If a - is given, any elements of the result not chosen otherwise are
      filled in from left to right with remaining candidates.
     """
     return [axes[i] for i in \
             axisMatchIndex(axes, specifications, omit, order)]
 
-def axisMatchIndex (axes, specifications=None, omit=None, order=None):
-    """Given a list of axes and a specification or list of 
+def axisMatchIndex(axes, specifications=None, omit=None, order=None):
+    """Given a list of axes and a specification or list of
      specificatons, and a specification or list of specifications
-     of those axes to omit, return a list of the indices of 
-     those axes in the list that match the specification but 
-     do not include in the list any axes that matches an omit 
+     of those axes to omit, return a list of the indices of
+     those axes in the list that match the specification but
+     do not include in the list any axes that matches an omit
      specification.
 
      If specifications is None, include all axes less the omitted ones.
 
-     Individual specifications must be integer indices into axes or 
+     Individual specifications must be integer indices into axes or
      matching criteria as detailed in axisMatches.
 
      The indices of axes are returned in the order the axes
      occur in the axes argument, unless order is given.
 
-     order can be a string containing the symbols t,x,y,z, or -. 
-     If a - is given, any elements of the result not chosen otherwise are 
+     order can be a string containing the symbols t,x,y,z, or -.
+     If a - is given, any elements of the result not chosen otherwise are
      filled in from left to right with remaining candidates.
     """
     if specifications is None:
-        speclist = axes
+        speclist=axes
     elif isinstance(specifications, basestring):
-        speclist = [specifications]
+        speclist=[specifications]
     elif isinstance(specifications, list):
-        speclist = specifications
+        speclist=specifications
     elif isinstance(specifications, tuple):
         speclist=list(specifications)
     elif isinstance(specifications, int):
-        speclist = [specifications]
+        speclist=[specifications]
     elif callable(specifications):
-        speclist = [specifications]
-    else: # to allow arange, etc.
-        speclist = list(numpy.ma.filled(specifications))
+        speclist=[specifications]
+    else:  # to allow arange, etc.
+        speclist=list(numpy.ma.filled(specifications))
 
-    candidates = []
+    candidates=[]
     for i in range(len(axes)):
         for s in speclist:
             if isinstance(s, int):
-                r = (s == i)
+                r=(s == i)
             else:
-                r = axisMatches(axes[i], s)
+                r=axisMatches(axes[i], s)
             if r:
                 candidates.append(i)
                 break
 
     if not candidates:
-        return candidates   #list empty
+        return candidates  # list empty
 
     if omit is None:
-        omitlist = []
+        omitlist=[]
     elif isinstance(omit, basestring):
-        omitlist = [omit]
+        omitlist=[omit]
     elif isinstance(omit, list):
-        omitlist = omit
+        omitlist=omit
     elif isinstance(omit, tuple):
         omitlist=list(omit)
     elif isinstance(omit, int):
-        omitlist = [omit]
+        omitlist=[omit]
     elif callable(omit):
-        omitlist = [omit]
+        omitlist=[omit]
     elif isinstance(omit, AbstractAxis):
-        omitlist = [omit]
+        omitlist=[omit]
     else:
         raise CDMSError, 'Unknown type of omit specifier.'
 
@@ -2179,11 +2260,11 @@ def axisMatchIndex (axes, specifications=None, omit=None, order=None):
         elif isinstance(s, AbstractAxis):
             for i in range(len(candidates)):
                 if s is axes[candidates[i]]:
-                    del candidates[i] 
+                    del candidates[i]
                     break
         else:
             for i in range(len(candidates)):
-                r = axisMatches(axes[candidates[i]], s)
+                r=axisMatches(axes[candidates[i]], s)
                 if r:
                     del candidates[i]
                     break
@@ -2191,31 +2272,31 @@ def axisMatchIndex (axes, specifications=None, omit=None, order=None):
     if order is None:
         return candidates
 
-    n = len(candidates)
-    m = len(order)
-    result = [None]*n
+    n=len(candidates)
+    m=len(order)
+    result=[None] * n
 # this loop is done this way for future escapes where elements of order
 # are not single chars.
-    j = 0
-    io = 0
+    j=0
+    io=0
     while j < n:
         if j >= m or order[io] == '-':
-            result[j] = candidates[0]
+            result[j]=candidates[0]
             del candidates[0]
             j += 1
             io += 1
             continue
         elif order[j] == 't':
-            oj = 'time'
+            oj='time'
             io += 1
         elif order[j] == 'x':
-            oj = 'longitude'
+            oj='longitude'
             io += 1
         elif order[j] == 'y':
-            oj = 'latitude'
+            oj='latitude'
             io += 1
         elif order[j] == 'z':
-            oj = 'level'
+            oj='level'
             io += 1
         else:
             # later could put in escaped ids or indices
@@ -2223,21 +2304,21 @@ def axisMatchIndex (axes, specifications=None, omit=None, order=None):
 
         for i in range(n):
             if axisMatches(axes[candidates[i]], oj):
-                result[j] = candidates[i]
+                result[j]=candidates[i]
                 del candidates[i]
                 break
         else:
             raise CDMSError, "Axis requested in order specification not there"
-        j += 1    
+        j += 1
     return result
-            
+
 
 def axisMatches(axis, specification):
     """Return 1 or 0 depending on whether axis matches the specification.
        Specification must be one of:
-       1. a string representing an axis id or one of 
-          the keywords time, fctau0, latitude or lat, longitude or lon, or 
-          lev or level. 
+       1. a string representing an axis id or one of
+          the keywords time, fctau0, latitude or lat, longitude or lon, or
+          lev or level.
 
           axis may be surrounded with parentheses or spaces.
 
@@ -2249,20 +2330,20 @@ def axisMatches(axis, specification):
           if the value returned is true, the axis matches.
 
        3. an axis object; will match if it is the same object as axis.
-    """   
+    """
     if isinstance(specification, basestring):
-        s = specification.lower()
-        s = s.strip()
+        s=specification.lower()
+        s=s.strip()
         while s[0] == '(':
             if s[-1] != ')':
                 raise CDMSError, 'Malformed axis spec, ' + specification
-            s = s[1:-1].strip()
+            s=s[1:-1].strip()
         if axis.id.lower() == s:
             return 1
         elif (s == 'time') or (s in time_aliases):
-            return axis.isTime() 
+            return axis.isTime()
         elif (s == 'fctau0') or (s in forecast_aliases):
-            return axis.isForecast() 
+            return axis.isForecast()
         elif (s[0:3] == 'lat') or (s in latitude_aliases):
             return axis.isLatitude()
         elif (s[0:3] == 'lon') or (s in longitude_aliases):
@@ -2273,10 +2354,10 @@ def axisMatches(axis, specification):
             return 0
 
     elif callable(specification):
-        r = specification(axis)
-        if r: 
+        r=specification(axis)
+        if r:
             return 1
-        else: 
+        else:
             return 0
 
     elif isinstance(specification, AbstractAxis):
@@ -2284,26 +2365,26 @@ def axisMatches(axis, specification):
 
     raise CDMSError, "Specification not acceptable: "\
           + str(type(specification)) + ', ' + str(specification)
-    
+
 def concatenate(axes, id=None, attributes=None):
     """Concatenate the axes, return a transient axis."""
-    
-    data = numpy.ma.concatenate([ax[:] for ax in axes])
-    boundsArray = [ax.getBounds() for ax in axes]
+
+    data=numpy.ma.concatenate([ax[:] for ax in axes])
+    boundsArray=[ax.getBounds() for ax in axes]
     if None in boundsArray:
-        bounds = None
+        bounds=None
     else:
-        bounds = numpy.ma.concatenate(boundsArray)
+        bounds=numpy.ma.concatenate(boundsArray)
     return TransientAxis(data, bounds=bounds, id=id, attributes=attributes)
 
 def take(ax, indices):
     """Take values indicated by indices list, return a transient axis."""
 
     # Bug in ma compatibility module
-    data = numpy.ma.take(ax[:], indices)
-    abounds = ax.getBounds()
+    data=numpy.ma.take(ax[:], indices)
+    abounds=ax.getBounds()
     if abounds is not None:
-        bounds = numpy.ma.take(abounds, indices, axis=0)
+        bounds=numpy.ma.take(abounds, indices, axis=0)
     else:
-        bounds = None
+        bounds=None
     return TransientAxis(data, bounds=bounds, id=ax.id, attributes=ax.attributes)
diff --git a/Packages/cdms2/Lib/bindex.py b/Packages/cdms2/Lib/bindex.py
index 9156c9709..913d6fdbd 100644
--- a/Packages/cdms2/Lib/bindex.py
+++ b/Packages/cdms2/Lib/bindex.py
@@ -1,9 +1,11 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 """Bin index for non-rectilinear grids"""
 
-import _bindex, numpy 
+import _bindex
+import numpy
+
 
 def bindexHorizontalGrid(latlin, lonlin):
     """Create a bin index for a horizontal grid.
@@ -12,14 +14,16 @@ def bindexHorizontalGrid(latlin, lonlin):
 
     Returns the index.
     """
-    lonlin = numpy.mod(lonlin,360)
-    NI,NJ = _bindex.getLens()
-    head = numpy.zeros(NI*NJ,dtype='l')       # This should match NBINI, NBINJ in bindex.c
-    next = numpy.zeros(len(latlin),dtype='l')
+    lonlin = numpy.mod(lonlin, 360)
+    NI, NJ = _bindex.getLens()
+    head = numpy.zeros(NI * NJ, dtype='l')
+                       # This should match NBINI, NBINJ in bindex.c
+    next = numpy.zeros(len(latlin), dtype='l')
     _bindex.bindex(latlin, lonlin, head, next)
-    
+
     return (head, next)
 
+
 def intersectHorizontalGrid(latspecs, lonspecs, latlin, lonlin, index):
     """Intersect a horizontal grid with a lat-lon region.
 
@@ -31,7 +35,7 @@ def intersectHorizontalGrid(latspecs, lonspecs, latlin, lonlin, index):
     Returns an array of indices, in latlin/lonlin, of the points in
     the intersection.
     """
-    points = numpy.zeros(len(latlin),dtype='l')
+    points = numpy.zeros(len(latlin), dtype='l')
     if latspecs is None:
         slat = -90.0
         elat = 90.0
@@ -41,14 +45,14 @@ def intersectHorizontalGrid(latspecs, lonspecs, latlin, lonlin, index):
         elat = latspecs[1]
         latopt = latspecs[2]
 
-    if slat>elat:
+    if slat > elat:
         tmp = slat
         slat = elat
         elat = tmp
 
     # If the longitude range is >=360.0, just intersect with the full range.
     # Otherwise, the points array could overflow and generate a seg fault.
-    if lonspecs is None or abs(lonspecs[1]-lonspecs[0])>=360.0:
+    if lonspecs is None or abs(lonspecs[1] - lonspecs[0]) >= 360.0:
         slon = 0.0
         elon = 360.0
         lonopt = 'co'
@@ -57,11 +61,22 @@ def intersectHorizontalGrid(latspecs, lonspecs, latlin, lonlin, index):
         elon = lonspecs[1]
         lonopt = lonspecs[2]
 
-    if slon>elon:
+    if slon > elon:
         tmp = slon
         slon = elon
         elon = tmp
 
-    npoints = _bindex.intersect(slat, slon, elat, elon, latlin, lonlin, index[0], index[1], points, latopt, lonopt)
+    npoints = _bindex.intersect(
+        slat,
+        slon,
+     elat,
+     elon,
+     latlin,
+     lonlin,
+     index[0],
+     index[1],
+     points,
+     latopt,
+     lonopt)
 
     return points[:npoints]
diff --git a/Packages/cdms2/Lib/cache.py b/Packages/cdms2/Lib/cache.py
index c7456b503..12163fe94 100644
--- a/Packages/cdms2/Lib/cache.py
+++ b/Packages/cdms2/Lib/cache.py
@@ -1,8 +1,16 @@
 """
 CDMS cache management and file movement objects
 """
-import cdurllib, urlparse, tempfile, os, time, cdmsobj, sys, errno, shelve
-from error import CDMSError
+import cdurllib
+import urlparse
+import tempfile
+import os
+import time
+import cdmsobj
+import sys
+import errno
+import shelve
+from .error import CDMSError
 MethodNotImplemented = "Method not yet implemented"
 SchemeNotSupported = "Scheme not supported: "
 LockError = "Lock error:"
@@ -14,6 +22,7 @@ _lock_max_tries = 10                    # Number of tries for a lock
 _lock_naptime = 1                       # Seconds between lock tries
 _cache_tempdir = None                   # Default temporary directory
 
+
 def lock(filename):
     """
     Acquire a file-based lock with the given name.
@@ -32,48 +41,50 @@ def lock(filename):
     while (not success) and (tries < _lock_max_tries):
         try:
             if cdmsobj._debug:
-                print 'Process %d: Trying to acquire lock %s'%(os.getpid(),path)
-            fd = os.open(path, os.O_CREAT | os.O_WRONLY | os.O_EXCL, 0666)
+                print 'Process %d: Trying to acquire lock %s' % (os.getpid(), path)
+            fd = os.open(path, os.O_CREAT | os.O_WRONLY | os.O_EXCL, 0o666)
 
         # If the open failed because the file already exists, keep trying, otherwise
         # reraise the error
         except OSError:
-            if sys.exc_value.errno!=errno.EEXIST:
+            if sys.exc_value.errno != errno.EEXIST:
                 raise
             tries = tries + 1
         else:
             if cdmsobj._debug:
-                print 'Process %d: Acquired lock %s after %d tries'%(os.getpid(),path,tries)
+                print 'Process %d: Acquired lock %s after %d tries' % (os.getpid(), path, tries)
             success = 1
             break
 
         # Sleep until next retry
         if cdmsobj._debug:
-            print 'Process %d: Failed to acquire lock %s, sleeping'%(os.getpid(),path)
+            print 'Process %d: Failed to acquire lock %s, sleeping' % (os.getpid(), path)
         time.sleep(_lock_naptime)
 
     # Error if the lock could not be acquired
     if not success:
-        raise CDMSError, LockError + 'Could not acquire a lock on %s'%path
+        raise CDMSError(LockError + 'Could not acquire a lock on %s' % path)
 
     # The lock succeeded, so just close the file - we don't need to write
     # anything here
     else:
         os.close(fd)
 
+
 def unlock(filename):
     """
     Delete a file-based lock with the given name.
-    Usage:unlock(filename) 
+    Usage: unlock(filename)
     If the function returns, the lock was successfully deleted.
     Note: This function is UNIX-specific.
     """
 
     path = lockpath(filename)
     if cdmsobj._debug:
-        print 'Process %d: Unlocking %s'%(os.getpid(),path)
+        print 'Process %d: Unlocking %s' % (os.getpid(), path)
     os.unlink(path)
 
+
 def lockpath(filename):
     """
     Generate the pathname of a lock. Creates the directory containing the lock
@@ -84,12 +95,12 @@ def lockpath(filename):
 
     if not _cache_tempdir:
         tempfile.mktemp()
-        _cache_tempdir = os.path.join(tempfile.tempdir,'cdms')
+        _cache_tempdir = os.path.join(tempfile.tempdir, 'cdms')
         if not os.path.isdir(_cache_tempdir):
             if cdmsobj._debug:
-                print 'Process %d: Creating cache directory %s'%(os.getpid(),_cache_tempdir)
-            os.mkdir(_cache_tempdir,0777)
-    return os.path.join(_cache_tempdir,filename)
+                print 'Process %d: Creating cache directory %s' % (os.getpid(), _cache_tempdir)
+            os.mkdir(_cache_tempdir, 0o777)
+    return os.path.join(_cache_tempdir, filename)
 
 _useWindow = 0                          # If true, use a progress dialog
 _pythonTransfer = 0
@@ -97,6 +108,7 @@ _globusTransfer = 1
 _requestManagerTransfer = 2
 _transferMethod = _pythonTransfer       # Method of transferring files
 
+
 def useWindow():
     """
     Specify that dialog windows should be used if possible. Do not call this directly, use
@@ -105,6 +117,7 @@ def useWindow():
     global _useWindow
     _useWindow = 1
 
+
 def useTTY():
     """
     Informational messages such as FTP status should be sent to the terminal. See useWindow.
@@ -112,6 +125,7 @@ def useTTY():
     global _useWindow
     _useWindow = 0
 
+
 def useGlobusTransfer():
     """
     Specify that file transfers should use the Globus storage API (SC-API). See usePythonTransfer.
@@ -119,6 +133,7 @@ def useGlobusTransfer():
     global _transferMethod
     _transferMethod = _globusTransfer
 
+
 def usePythonTransfer():
     """
     Specify that file transfers should use the Python libraries urllib, ftplib. See useGlobusTransfer.
@@ -126,15 +141,18 @@ def usePythonTransfer():
     global _transferMethod
     _transferMethod = _pythonTransfer
 
+
 def useRequestManagerTransfer():
     try:
         import reqm
     except ImportError:
-        raise CDMSError, RequestManagerNotSupported
+        raise CDMSError(RequestManagerNotSupported)
     global _transferMethod
     _transferMethod = _requestManagerTransfer
 
-def copyFile(fromURL, toURL, callback=None, lcpath=None, userid=None, useReplica=1):
+
+def copyFile(fromURL, toURL, callback=None,
+             lcpath=None, userid=None, useReplica=1):
     """
     Copy file <fromURL> to local file <toURL>. For FTP transfers, if cache._useWindow is true,
     display a progress dialog, otherwise just print progress messages.
@@ -145,15 +163,16 @@ def copyFile(fromURL, toURL, callback=None, lcpath=None, userid=None, useReplica
     """
     if callback is None:
         if _useWindow:
-            import gui
+            from . import gui
             dialogParent = gui.getProgressParent()
             dialog = gui.CdProgressDialog(dialogParent, fromURL)
             callback = gui.updateProgressGui
         else:
             callback = cdurllib.sampleReportHook
-    (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(fromURL)
-    if scheme=='ftp':
-        if _transferMethod==_pythonTransfer:
+    (scheme, netloc, path, parameters, query,
+     fragment) = urlparse.urlparse(fromURL)
+    if scheme == 'ftp':
+        if _transferMethod == _pythonTransfer:
             urlopener = cdurllib.CDURLopener()
 
             # In window environment, attach the dialog to the opener. This will
@@ -167,51 +186,57 @@ def copyFile(fromURL, toURL, callback=None, lcpath=None, userid=None, useReplica
             except:
                 if _useWindow:
                     dialog.Destroy()
-                raise 
-        elif _transferMethod==_globusTransfer: # Transfer via Globus SC-API
+                raise
+        elif _transferMethod == _globusTransfer:  # Transfer via Globus SC-API
             try:
                 import globus.storage
             except ImportError:
-                raise CDMSError, GlobusNotSupported
+                raise CDMSError(GlobusNotSupported)
 
-            globus.storage.transfer(fromURL, "file:"+toURL)
+            globus.storage.transfer(fromURL, "file:" + toURL)
         else:
-            raise CDMSError, SchemeNotSupported + scheme
+            raise CDMSError(SchemeNotSupported + scheme)
         return
-    elif _transferMethod==_requestManagerTransfer: # Request manager gransfer
-        import reqm, signal
+    elif _transferMethod == _requestManagerTransfer:  # Request manager transfer
+        import reqm
+        import signal
 
         # Define an alarm handler, to poll the request manager
         def handler(signum, frame):
             pass
 
-        # Obtain server reference from environment variable ESG_REQM_REF if present
+        # Obtain server reference from environment variable ESG_REQM_REF if
+        # present
         serverRef = os.environ.get('ESG_REQM_REF', '/tmp/esg_rqm.ref')
         server = reqm.RequestManager(iorFile=serverRef)
-        result, token = server.requestFile(userid, lcpath, path, toURL, useReplica)
+        result, token = server.requestFile(
+            userid, lcpath, path, toURL, useReplica)
         server.execute(token)
 
         # Poll the request manager for completion, signalled by estim<=0.0
-        while 1:
+        while True:
             signal.signal(signal.SIGALRM, handler)
             estim = server.estimate(token)
-            print 'Estimate: ',estim
-            if estim<=0.0: break
+            print 'Estimate: ', estim
+            if estim <= 0.0:
+                break
             signal.alarm(3)             # Number of seconds between polls
             signal.pause()
 
         #!!!! Remove this when gsincftp uses the right target name !!!
-            
-##         oldpath = os.path.join(os.path.dirname(toURL),path)
-##         os.rename(oldpath,toURL)
+
+# oldpath = os.path.join(os.path.dirname(toURL),path)
+# os.rename(oldpath,toURL)
 
         #!!!!
-        
+
         return
     else:
-        raise CDMSError, SchemeNotSupported + scheme
+        raise CDMSError(SchemeNotSupported + scheme)
 
 # A simple data cache
+
+
 class Cache:
 
     indexpath = None                    # Path of data cache index
@@ -227,9 +252,10 @@ class Cache:
             except:
                 pass
             lock("index_lock")
-            self.index = shelve.open(self.indexpath) # Persistent cache index
+            self.index = shelve.open(self.indexpath)  # Persistent cache index
             try:
-                os.chmod(self.indexpath,0666) # Make index file world writeable
+                os.chmod(self.indexpath, 0o666)
+                         # Make index file world writeable
             except:
                 pass
             self.index.close()
@@ -237,7 +263,7 @@ class Cache:
             # Clean up pending read notifications in the cache. This will also
             # mess up tranfers in progress...
             self.clean()
-            self.direc = os.path.dirname(self.indexpath) # Cache directory
+            self.direc = os.path.dirname(self.indexpath)  # Cache directory
 
     def get(self, filekey):
         """
@@ -269,7 +295,7 @@ class Cache:
         lock("index_lock")
         try:
             if cdmsobj._debug:
-                print 'Process %d: Adding cache file %s,\n   key %s'%(os.getpid(),path,filekey)
+                print 'Process %d: Adding cache file %s,\n   key %s' % (os.getpid(), path, filekey)
             self.index = shelve.open(self.indexpath)
             self.index[filekey] = path
         except:
@@ -294,7 +320,8 @@ class Cache:
             pass
         unlock("index_lock")
 
-    def copyFile(self, fromURL, filekey, lcpath=None, userid=None, useReplica=None):
+    def copyFile(self, fromURL, filekey,
+                 lcpath=None, userid=None, useReplica=None):
         """
         Copy the file <fromURL> into the cache. Return the result path.
 
@@ -302,9 +329,9 @@ class Cache:
         <userid> is the string user ID, <useReplica> is true iff the request manager should
         search the replica catalog for the actual file to transfer.
         """
-        
+
         # Put a notification into the cache, that this file is being read.
-        self.put(filekey,"__READ_PENDING__")
+        self.put(filekey, "__READ_PENDING__")
 
         # Get a temporary file in the cache
         tempdir = tempfile.tempdir
@@ -314,21 +341,29 @@ class Cache:
 
         # Copy to the temporary file
         try:
-            copyFile(fromURL, toPath, lcpath=lcpath, userid=userid, useReplica=useReplica)
-            os.chmod(toPath,0666)           # Make cache files world writeable
+            copyFile(
+                fromURL,
+                toPath,
+                lcpath=lcpath,
+                userid=userid,
+                useReplica=useReplica)
+            os.chmod(toPath, 0o666)
+                     # Make cache files world writeable
         except:
-            # Remove the notification on error, and the temp file, then re-raise
+            # Remove the notification on error, and the temp file, then
+            # re-raise
             self.deleteEntry(filekey)
             if os.path.isfile(toPath):
                 os.unlink(toPath)
             raise
 
         # Add to the cache index
-        self.put(filekey,toPath)
+        self.put(filekey, toPath)
 
         return toPath
 
-    def getFile(self, fromURL, filekey, naptime=5, maxtries=60, lcpath=None, userid=None, useReplica=None):
+    def getFile(self, fromURL, filekey, naptime=5,
+                maxtries=60, lcpath=None, userid=None, useReplica=None):
         """
         Get the file with <fileURL>. If the file is in the cache, read it.
         If another process is transferring it into the cache, wait for the
@@ -351,23 +386,34 @@ class Cache:
         """
         # If the file is being read into the cache, just wait for it
         tempname = self.get(filekey)
-        # Note: This is not bulletproof: another process could set the cache at this point
+        # Note: This is not bulletproof: another process could set the cache at
+        # this point
         if tempname is None:
-            fpath = self.copyFile(fromURL,filekey,lcpath=lcpath,userid=userid,useReplica=useReplica)
-        elif tempname=="__READ_PENDING__":
+            fpath = self.copyFile(
+                fromURL,
+                filekey,
+                lcpath=lcpath,
+                userid=userid,
+                useReplica=useReplica)
+        elif tempname == "__READ_PENDING__":
             success = 0
             for i in range(maxtries):
                 if cdmsobj._debug:
-                    print 'Process %d: Waiting for read completion, %s'%(os.getpid(),`filekey`)
+                    print 'Process %d: Waiting for read completion, %s' % (os.getpid(), repr(filekey))
                 time.sleep(naptime)
                 tempname = self.get(filekey)
 
                 # The read failed, or the entry was deleted.
                 if tempname is None:
-                    fpath = self.copyFile(fromURL,filekey,lcpath=lcpath,userid=userid,useReplica=useReplica)
+                    fpath = self.copyFile(
+                        fromURL,
+                        filekey,
+                        lcpath=lcpath,
+                        userid=userid,
+                        useReplica=useReplica)
 
                 # The read is not yet complete
-                elif tempname=="__READ_PENDING__":
+                elif tempname == "__READ_PENDING__":
                     continue
 
                 # The read is finished.
@@ -376,13 +422,13 @@ class Cache:
                     success = 1
                     break
             if not success:
-                raise CDMSError, TimeOutError +`filekey`
+                raise CDMSError(TimeOutError + repr(filekey))
 
         else:
             fpath = tempname
 
         if cdmsobj._debug:
-            print 'Process %d: Got file %s from cache %s'%(os.getpid(),fromURL,fpath)
+            print 'Process %d: Got file %s from cache %s' % (os.getpid(), fromURL, fpath)
         return fpath
 
     def delete(self):
@@ -394,10 +440,11 @@ class Cache:
             self.index = shelve.open(self.indexpath)
             for key in self.index.keys():
                 path = self.index[key]
-                if path=="__READ_PENDING__": continue # Don't remove read-pending notifications
+                if path == "__READ_PENDING__":
+                    continue  # Don't remove read-pending notifications
                 try:
                     if cdmsobj._debug:
-                        print 'Process %d: Deleting cache file %s'%(os.getpid(),path)
+                        print 'Process %d: Deleting cache file %s' % (os.getpid(), path)
                     os.unlink(path)
                 except:
                     pass
@@ -415,7 +462,7 @@ class Cache:
         self.index = shelve.open(self.indexpath)
         for key in self.index.keys():
             path = self.index[key]
-            if path=="__READ_PENDING__":
+            if path == "__READ_PENDING__":
                 del self.index[key]
         self.index.close()
         unlock("index_lock")
diff --git a/Packages/cdms2/Lib/cdmsNode.py b/Packages/cdms2/Lib/cdmsNode.py
index 5d82a9526..3e34f34f7 100644
--- a/Packages/cdms2/Lib/cdmsNode.py
+++ b/Packages/cdms2/Lib/cdmsNode.py
@@ -1,5 +1,5 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 """
 CDMS node classes
@@ -13,10 +13,12 @@ import sys
 from error import CDMSError
 
 # Regular expressions
-_Name = re.compile('[a-zA-Z0-9_:][-a-zA-Z0-9._:]*$') # Note: allows digit as first character
+_Name = re.compile('[a-zA-Z0-9_:][-a-zA-Z0-9._:]*$')
+                   # Note: allows digit as first character
 _Integer = re.compile('[0-9]+$')
 _ArraySep = re.compile('[\[\],\s]+')
-_Illegal = re.compile('([<>&\"\'])|([^\t\r\n -\176\240-\377])')   #" illegal chars in content
+_Illegal = re.compile('([<>&\"\'])|([^\t\r\n -\176\240-\377])')
+                      #" illegal chars in content
 
 # Data types
 
@@ -31,42 +33,51 @@ CdDouble = CDML.CdDouble
 CdString = CDML.CdString
 CdFromObject = CDML.CdFromObject
 CdAny = CDML.CdAny
-CdDatatypes = [CdChar,CdByte,CdShort,CdInt,CdLong,CdInt64,CdFloat,CdDouble,CdString]
+CdDatatypes = [
+    CdChar,
+     CdByte,
+     CdShort,
+     CdInt,
+     CdLong,
+     CdInt64,
+     CdFloat,
+     CdDouble,
+     CdString]
 
 CdScalar = CDML.CdScalar
 CdArray = CDML.CdArray
 
-NumericToCdType = {numpy.sctype2char(numpy.float32):CdFloat,
-                   numpy.sctype2char(numpy.float):CdDouble,
-                   numpy.sctype2char(numpy.int16):CdShort,
-                   numpy.sctype2char(numpy.int32):CdInt,
-                   numpy.sctype2char(numpy.int):CdLong,
-                   numpy.sctype2char(numpy.int64):CdInt64,
-                   numpy.sctype2char(numpy.intc):CdLong,
-                   numpy.sctype2char(numpy.int8):CdByte,
-                   'c':CdChar,
-                   'B':'B',
-                   'H':'H',
-                   'L':'L',
-                   'q':CdInt64,
-                   'Q':'Q',
-                   'S':'S'
+NumericToCdType = {numpy.sctype2char(numpy.float32): CdFloat,
+                   numpy.sctype2char(numpy.float): CdDouble,
+                   numpy.sctype2char(numpy.int16): CdShort,
+                   numpy.sctype2char(numpy.int32): CdInt,
+                   numpy.sctype2char(numpy.int): CdLong,
+                   numpy.sctype2char(numpy.int64): CdInt64,
+                   numpy.sctype2char(numpy.intc): CdLong,
+                   numpy.sctype2char(numpy.int8): CdByte,
+                   'c': CdChar,
+                   'B': 'B',
+                   'H': 'H',
+                   'L': 'L',
+                   'q': CdInt64,
+                   'Q': 'Q',
+                   'S': 'S'
                    }
 
-CdToNumericType = {CdChar:'c',
-                   CdByte:numpy.int8,
-                   CdShort:numpy.int16,
-                   CdInt:numpy.int32,
-                   CdLong:numpy.int,
-                   CdInt64:numpy.int64,
-                   CdFloat:numpy.float32,
-                   CdDouble:numpy.float}
+CdToNumericType = {CdChar: 'c',
+                   CdByte: numpy.int8,
+                   CdShort: numpy.int16,
+                   CdInt: numpy.int32,
+                   CdLong: numpy.int,
+                   CdInt64: numpy.int64,
+                   CdFloat: numpy.float32,
+                   CdDouble: numpy.float}
 
 # Grid types
 UnknownGridType = "unknown"
 GaussianGridType = "gaussian"
 UniformGridType = "uniform"
-CdGridtypes = [UnknownGridType,GaussianGridType,UniformGridType]
+CdGridtypes = [UnknownGridType, GaussianGridType, UniformGridType]
 
 DuplicateIdError = "Duplicate identifier: "
 InvalidArgumentError = "Invalid argument: "
@@ -74,6 +85,8 @@ InvalidDatatype = "Invalid datatype: "
 InvalidGridtype = "Invalid grid type: "
 InvalidIdError = "Invalid identifier: "
 NotMonotonic = "Result array is not monotonic "
+
+
 class NotMonotonicError(CDMSError):
     pass
 
@@ -94,6 +107,8 @@ CdSingleton = 2
 # '"' --> &quot;
 # "'" --> &apos;
 # all other illegal characters are removed #"
+
+
 def mapIllegalToEntity(matchobj):
     s = matchobj.group(0)
     if s == '<':
@@ -102,49 +117,55 @@ def mapIllegalToEntity(matchobj):
         return '&gt;'
     elif s == '&':
         return '&amp;'
-    elif s == '"':   #"
+    elif s == '"':  # "
         return '&quot;'
-    elif s=="'":
+    elif s == "'":
         return '&apos;'
     else:
         return ""
 
 # Named node
+
+
 class CdmsNode:
 
     def __init__(self, tag, id=None, parent=None):
         if id and _Name.match(id) is None:
             raise CDMSError, InvalidIdError + id
-        self.attribute = {}             # External attributes, attribute[name]=(value,cdDatatype)
+        self.attribute = {}
+            # External attributes, attribute[name]=(value,cdDatatype)
         self.child = []                 # Children
         self.id = id                    # Identifier string
         self.parent = parent            # Parent node in a tree, None for root
         self.tag = tag                  # XML tag string
         self.content = None             # XML content string
-        self.dtd = CDML.CDML().dtd.get(self.tag)  # CDML Document Type Definition for this tag
-        self.extra = CDML.CDML().extra.get(self.tag) # Extra datatype constraints
+        self.dtd = CDML.CDML().dtd.get(self.tag)
+                             # CDML Document Type Definition for this tag
+        self.extra = CDML.CDML().extra.get(
+            self.tag)  # Extra datatype constraints
         CdmsNode.mapToExternal(self)    # Don't call subclass mapToExternal!
 
     # Map to external attributes
     def mapToExternal(self):
         if self.id is not None and _Name.match(self.id) is None:
             raise CDMSError, InvalidIdError + self.id
-        if self.id is not None: self.setExternalAttr('id',self.id)
+        if self.id is not None:
+            self.setExternalAttr('id', self.id)
 
     # Set content from a string. The interpretation
     # of content is class-dependent
-    def setContentFromString(self,content):
-        self.content=content
+    def setContentFromString(self, content):
+        self.content = content
 
     # Get content
     def getContent(self):
         return self.content
 
     # Add a child node
-    def add(self,child):
+    def add(self, child):
         if child is not None:
             self.child.append(child)
-            child.parent=self
+            child.parent = self
         return child
 
     # Return a list of child nodes
@@ -152,13 +173,13 @@ class CdmsNode:
         return self.child
 
     # Get the child node at index k
-    def getChildAt(self,index):
+    def getChildAt(self, index):
         return self.child[index]
 
     # Remove and return the child at index k
-    def removeChildAt(self,index):
+    def removeChildAt(self, index):
         child = self.child[index]
-        self.child = self.child[:index]+self.child[index+1:]
+        self.child = self.child[:index] + self.child[index + 1:]
         return child
 
     # Get the number of children
@@ -166,7 +187,7 @@ class CdmsNode:
         return len(self.child)
 
     # Get the index of a node
-    def getIndex(self,node):
+    def getIndex(self, node):
         index = -1
         for i in range(len(self.child)):
             if node is self.child[i]:
@@ -180,20 +201,21 @@ class CdmsNode:
 
     # True iff node is a leaf node
     def isLeaf(self):
-        return self.child==[]
+        return self.child == []
 
     # Set an external attribute
     # 'attr' is an Attr object
     def setExternalAttrFromAttr(self, attr):
-        if attr.value is None: return
-        self.attribute[attr.name]=(attr.value,attr.getDatatype())
+        if attr.value is None:
+            return
+        self.attribute[attr.name] = (attr.value, attr.getDatatype())
 
     # Get an external attribute, as an Attr instance
     def getExternalAttrAsAttr(self, name):
         attrPair = self.attribute.get(name)
         if attrPair:
-            (value,datatype) = attrPair
-            attr = AttrNode(name,value)
+            (value, datatype) = attrPair
+            attr = AttrNode(name, value)
             attr.datatype = datatype
             return attr
         else:
@@ -201,7 +223,7 @@ class CdmsNode:
 
     # Set an external attribute
     def setExternalAttr(self, name, value, datatype=None):
-        attr = AttrNode(name,value)
+        attr = AttrNode(name, value)
         attr.datatype = datatype
         self.setExternalAttrFromAttr(attr)
 
@@ -209,7 +231,7 @@ class CdmsNode:
     def getExternalAttr(self, name):
         attrPair = self.attribute.get(name)
         if attrPair:
-            (value,datatype) = attrPair
+            (value, datatype) = attrPair
             return value
         else:
             return None
@@ -220,71 +242,88 @@ class CdmsNode:
 
     # Set the external attribute dictionary. The input dictionary
     # is of the form {name:value,...} where value is a string.
-    def setExternalDict(self,dict):
+    def setExternalDict(self, dict):
         for key in dict.keys():
-            self.attribute[key]=(dict[key],CdString)
+            self.attribute[key] = (dict[key], CdString)
 
     # Write to a file, with formatting.
     # tablevel is the start number of tabs
-    def write(self,fd=None,tablevel=0,format=1):
-        if fd is None: fd = sys.stdout
+    def write(self, fd=None, tablevel=0, format=1):
+        if fd is None:
+            fd = sys.stdout
         printLimit = get_printoptions()['threshold']
-        set_printoptions(threshold=inf)            # Ensure that all Numeric array values will be printed
+        set_printoptions(threshold=inf)
+                         # Ensure that all Numeric array values will be printed
         if self.dtd:
             validAttrs = self.dtd.keys()
         else:
             validAttrs = None
 
-        if format: fd.write(tablevel*'\t')
-        fd.write('<'+self.tag)
-        if format: fd.write('\n')
+        if format:
+            fd.write(tablevel * '\t')
+        fd.write('<' + self.tag)
+        if format:
+            fd.write('\n')
 
         # Write valid attributes
         for attname in self.attribute.keys():
             if (validAttrs and (attname in validAttrs)) or (not validAttrs):
-                if format: fd.write((tablevel+1)*'\t')
-                (attval,datatype)=self.attribute[attname]
+                if format:
+                    fd.write((tablevel + 1) * '\t')
+                (attval, datatype) = self.attribute[attname]
                 # attvalstr = string.replace(str(attval),'"',"'") # Map " to '
-                attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  # Map illegal chars to entities
+                attvalstr = _Illegal.sub(
+    mapIllegalToEntity,
+     str(attval))  # Map illegal chars to entities
                 if format:
-                    fd.write(attname+'\t="'+attvalstr+'"')
+                    fd.write(attname + '\t="' + attvalstr + '"')
                 else:
-                    fd.write(' '+attname+'="'+attvalstr+'"')
-                if format: fd.write('\n')
-        if format: fd.write((tablevel+1)*'\t')
+                    fd.write(' ' + attname + '="' + attvalstr + '"')
+                if format:
+                    fd.write('\n')
+        if format:
+            fd.write((tablevel + 1) * '\t')
         fd.write('>')
-        if format: fd.write('\n')
+        if format:
+            fd.write('\n')
 
         # Write extra attributes
         for attname in self.attribute.keys():
             if validAttrs and (attname not in validAttrs):
-                (attval,datatype)=self.attribute[attname]
-                attr = AttrNode(attname,attval)
-                attr.datatype=datatype
+                (attval, datatype) = self.attribute[attname]
+                attr = AttrNode(attname, attval)
+                attr.datatype = datatype
                 attr.mapToExternal()
-                attr.write(fd,tablevel+1,format)
+                attr.write(fd, tablevel + 1, format)
 
         # Write content
         content = self.getContent()
         if content is not None:
-            content = _Illegal.sub(mapIllegalToEntity,content)  # Map illegal chars to entities
-            if format: fd.write((tablevel+1)*'\t')
+            content = _Illegal.sub(
+    mapIllegalToEntity,
+     content)  # Map illegal chars to entities
+            if format:
+                fd.write((tablevel + 1) * '\t')
             fd.write(content)
-            if format: fd.write('\n')
+            if format:
+                fd.write('\n')
 
         # Write children
         for node in self.child:
-            node.write(fd,tablevel+1,format)
+            node.write(fd, tablevel + 1, format)
 
-        if format: fd.write((tablevel+1)*'\t')
-        fd.write('</'+self.tag+'>')
-        if format: fd.write('\n')
+        if format:
+            fd.write((tablevel + 1) * '\t')
+        fd.write('</' + self.tag + '>')
+        if format:
+            fd.write('\n')
         set_printoptions(threshold=printLimit)  # Restore original
 
-    # Write to a file without formatting. 
-    def write_raw(self,fd=None):
-        if fd is None: fd = sys.stdout
-        self.write(fd,0,0)
+    # Write to a file without formatting.
+    def write_raw(self, fd=None):
+        if fd is None:
+            fd = sys.stdout
+        self.write(fd, 0, 0)
 
     # Write an LDIF (LDAP interchange format) entry
     # parentdn is the parent LDAP distinguished name
@@ -292,41 +331,50 @@ class CdmsNode:
     # A trailing newline is added iff format==1
     # Note: unlike write, this does not write children as well
     def write_ldif(self, parentdn, userAttrs=[], fd=None, format=1):
-        if fd is None: fd = sys.stdout
+        if fd is None:
+            fd = sys.stdout
         if self.dtd:
             validAttrs = self.dtd.keys()
         else:
             validAttrs = None
 
         # Write distinguished name
-        newdn = "%s=%s,%s"%(self.tag,self.id,parentdn)
-        fd.write("dn: %s\n"%newdn)
+        newdn = "%s=%s,%s" % (self.tag, self.id, parentdn)
+        fd.write("dn: %s\n" % newdn)
 
         # Write valid attributes
         for attname in self.attribute.keys():
             if (validAttrs and (attname in validAttrs)) or (not validAttrs):
-                (attval,datatype)=self.attribute[attname]
-                # attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  # Map illegal chars to entities
+                (attval, datatype) = self.attribute[attname]
+                # attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  #
+                # Map illegal chars to entities
                 if not isinstance(attval, basestring):
                     attval = `attval`
                 attvalstr = attval.strip()
-                attvalstr = re.sub('\n','\n ',attvalstr) # Make sure continuation lines are preceded with a space
-                if attvalstr=='': attvalstr = "none"
-                fd.write("%s: %s\n"%(attname,attvalstr))
-        
+                attvalstr = re.sub(
+    '\n',
+     '\n ',
+     attvalstr)  # Make sure continuation lines are preceded with a space
+                if attvalstr == '':
+                    attvalstr = "none"
+                fd.write("%s: %s\n" % (attname, attvalstr))
+
         # Write extra attributes
         for attname in self.attribute.keys():
             if validAttrs and (attname not in validAttrs):
-                (attval,datatype)=self.attribute[attname]
+                (attval, datatype) = self.attribute[attname]
                 if not isinstance(attval, basestring):
                     attval = `attval`
-                attval = re.sub('\n','\n ',attval) # Make sure continuation lines are preceded with a space
-                fd.write("attr: %s=%s\n"%(attname,attval))
+                attval = re.sub(
+    '\n',
+     '\n ',
+     attval)  # Make sure continuation lines are preceded with a space
+                fd.write("attr: %s=%s\n" % (attname, attval))
 
         # Write content
         # content = self.getContent()
         # if content is not None:
-        #     content = _Illegal.sub(mapIllegalToEntity,content)  # Map illegal chars to entities
+        # content = _Illegal.sub(mapIllegalToEntity,content)  # Map illegal chars to entities
         #     fd.write("value: %s"%(content,))
 
         # Write user attributes
@@ -335,61 +383,64 @@ class CdmsNode:
         else:
             newAttrs = userAttrs
         for entry in list(newAttrs):
-            fd.write("%s\n"%entry)
+            fd.write("%s\n" % entry)
 
         # Write classes
         fd.write("objectclass: top\n")
-        fd.write("objectclass: %s\n"%(self.tag))
+        fd.write("objectclass: %s\n" % (self.tag))
 
-        if format==1:
+        if format == 1:
             fd.write('\n')
 
         return newdn
 
     # Validate attributes
-    def validate(self,idtable=None):
+    def validate(self, idtable=None):
 
         # Check validity of enumerated values and references
         validKeys = self.dtd.keys()
         for attname in self.attribute.keys():
             if attname in validKeys:
-                (atttype,default)=self.dtd[attname]
-                if isinstance(atttype,tuple):
-                    attval=self.getExternalAttr(attname)
-                    assert attval in atttype, 'Invalid attribute %s=%s must be in %s'%(attname,attval,`atttype`)
-                elif atttype==CDML.Idref:
-                    attval=self.getExternalAttr(attname)
+                (atttype, default) = self.dtd[attname]
+                if isinstance(atttype, tuple):
+                    attval = self.getExternalAttr(attname)
+                    assert attval in atttype, 'Invalid attribute %s=%s must be in %s' % (
+                        attname, attval, `atttype`)
+                elif atttype == CDML.Idref:
+                    attval = self.getExternalAttr(attname)
                     if idtable:
                         if not idtable.has_key(attval):
-                            print 'Warning: ID reference not found: %s=%s'%(attname,attval)
-            
+                            print 'Warning: ID reference not found: %s=%s' % (attname, attval)
+
         # Validate children
         for node in self.children():
             node.validate(idtable)
-    
+
 # Container object for other CDMS objects
+
+
 class DatasetNode(CdmsNode):
 
     def __init__(self, id):
-        CdmsNode.__init__(self,"dataset",id )
+        CdmsNode.__init__(self, "dataset", id)
         self.idtable = {}
 
     # Validate the dataset and all child nodes
-    def validate(self,idtable=None):
+    def validate(self, idtable=None):
         if not idtable:
-            idtable=self.idtable
-        CdmsNode.validate(self,idtable)
+            idtable = self.idtable
+        CdmsNode.validate(self, idtable)
 
     # Add a child node with an ID
-    def addId(self,id,child):
-        if self.idtable.has_key(id): 
-            raise CDMSError, DuplicateIdError +id
-        CdmsNode.add(self,child)
-        self.idtable[id]=child
+    def addId(self, id, child):
+        if self.idtable.has_key(id):
+            raise CDMSError, DuplicateIdError + id
+        CdmsNode.add(self, child)
+        self.idtable[id] = child
         return child
 
     # Get a child node from its ID
-    def getChildNamed(self,id):
+    def getChildNamed(self, id):
         return self.idtable.get(id)
 
     # Get the ID table
@@ -399,66 +450,80 @@ class DatasetNode(CdmsNode):
     # Dump to a CDML file.
     # path is the file to dump to, or None for standard output.
     # if format is true, write with tab, newline formatting
-    def dump(self,path=None,format=1):
+    def dump(self, path=None, format=1):
         if path:
             try:
-                fd = open(path,'w')
+                fd = open(path, 'w')
             except IOError:
-                raise IOError,'%s: %s'%(sys.exc_value,path)
+                raise IOError, '%s: %s' % (sys.exc_value, path)
         else:
             fd = sys.stdout
         fd.write('<?xml version="1.0"?>')
-        if format: fd.write('\n')
-        fd.write('<!DOCTYPE dataset SYSTEM "http://www-pcmdi.llnl.gov/software/cdms/cdml.dtd">')
-        if format: fd.write('\n')
-        self.write(fd,0,format)
-        if fd!=sys.stdout: fd.close()
+        if format:
+            fd.write('\n')
+        fd.write(
+            '<!DOCTYPE dataset SYSTEM "http://www-pcmdi.llnl.gov/software/cdms/cdml.dtd">')
+        if format:
+            fd.write('\n')
+        self.write(fd, 0, format)
+        if fd != sys.stdout:
+            fd.close()
 
 # Spatio-temporal variable
 # Two ways to create a variable:
 # (1) var = VariableNode(id,datatype,domain)
 # (2) var = VariableNode(id,datatype)
 #     var.setDomain(domain)
+
+
 class VariableNode(CdmsNode):
 
     # Create a variable.
     # If validate is true, validate immediately
+
     def __init__(self, id, datatype, domain):
-        assert isinstance(datatype,basestring), 'Invalid datatype: '+`datatype`
-        assert datatype in CdDatatypes, 'Invalid datatype: '+`datatype`
-        CdmsNode.__init__(self,"variable",id)
+        assert isinstance(
+    datatype, basestring), 'Invalid datatype: ' + `datatype`
+        assert datatype in CdDatatypes, 'Invalid datatype: ' + `datatype`
+        CdmsNode.__init__(self, "variable", id)
         self.datatype = datatype
         self.setDomain(domain)
         VariableNode.mapToExternal(self)
 
     # Set the domain
-    def setDomain(self,domain):
+    def setDomain(self, domain):
         if not self.isLeaf():
             self.removeChildAt(0)
         self.add(domain)
 
     # Get the domain
     def getDomain(self):
-        if self.getChildCount()>0:
+        if self.getChildCount() > 0:
             return self.getChildAt(0)
         else:
             return None
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr('datatype',self.datatype)
-        
+        self.setExternalAttr('datatype', self.datatype)
+
 # Coordinate axis
+
+
 class AxisNode(CdmsNode):
 
     # If datatype is None, assume values [0,1,..,length-1]
     # data is a numpy array, if specified
-    def __init__(self, id, length, datatype=CdLong,data=None):
-        assert isinstance(length, int), 'Invalid length: '+`length`
-        assert isinstance(datatype, basestring), 'Invalid datatype: '+`datatype`
-        assert datatype in CdDatatypes, 'Invalid datatype: '+`datatype`
-        if data is not None: assert isinstance(data, numpy.ndarray), 'data must be a 1-D Numeric array'
-        CdmsNode.__init__(self,"axis",id)
+
+    def __init__(self, id, length, datatype=CdLong, data=None):
+        assert isinstance(length, int), 'Invalid length: ' + `length`
+        assert isinstance(
+    datatype, basestring), 'Invalid datatype: ' + `datatype`
+        assert datatype in CdDatatypes, 'Invalid datatype: ' + `datatype`
+        if data is not None:
+            assert isinstance(
+    data, numpy.ndarray), 'data must be a 1-D Numeric array'
+        CdmsNode.__init__(self, "axis", id)
         self.datatype = datatype
         self.data = data
         # data representation is CdLinear or CdVector
@@ -477,48 +542,51 @@ class AxisNode(CdmsNode):
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr('datatype',self.datatype)
-        self.setExternalAttr('length',self.length)
+        self.setExternalAttr('datatype', self.datatype)
+        self.setExternalAttr('length', self.length)
 
     # Set data from content string
     # The content of an axis is the data array.
-    def setContentFromString(self,datastring):
+    def setContentFromString(self, datastring):
         datatype = self.datatype
         numericType = CdToNumericType.get(datatype)
-        if numericType is None: raise CDMSError, InvalidDatatype + datatype
+        if numericType is None:
+            raise CDMSError, InvalidDatatype + datatype
         stringlist = _ArraySep.split(datastring)
         numlist = []
         for numstring in stringlist:
-            if numstring=='': continue
+            if numstring == '':
+                continue
             numlist.append(float(numstring))
-        if len(numlist)>0:
+        if len(numlist) > 0:
             # NB! len(zero-length array) causes IndexError on Linux!
-            dataArray = numpy.array(numlist,numericType)
+            dataArray = numpy.array(numlist, numericType)
             self.data = dataArray
             self.length = len(self.data)
 
     # Set the partition from a string. This does not
     # set the external string representation
-    def setPartitionFromString(self,partstring):
+    def setPartitionFromString(self, partstring):
         stringlist = _ArraySep.split(partstring)
         numlist = []
         for numstring in stringlist:
-            if numstring=='': continue
+            if numstring == '':
+                continue
             numlist.append(int(numstring))
-        dataArray = numpy.array(numlist,numpy.int)
-        if len(dataArray)>0:
+        dataArray = numpy.array(numlist, numpy.int)
+        if len(dataArray) > 0:
             self.partition = dataArray
 
     # Get the content string: the data values if the representation
     # is as a vector, or ane empty string otherwise
     def getContent(self):
-        if self.data is None or self.dataRepresent==CdLinear:
+        if self.data is None or self.dataRepresent == CdLinear:
             return ''
         else:
             return str(self.data)
 
     # Set the data as an array, check for monotonicity
-    def setData(self,data):
+    def setData(self, data):
 
         # If this axis is currently linear, remove the linear node
         if self.dataRepresent == CdLinear:
@@ -527,8 +595,8 @@ class AxisNode(CdmsNode):
         self.data = data
         self.dataRepresent = CdVector
         self.length = len(data)
-        self.setExternalAttr('length',self.length)
-        if self.monotonicity()==CdNotMonotonic:
+        self.setExternalAttr('length', self.length)
+        if self.monotonicity() == CdNotMonotonic:
             raise NotMonotonicError, NotMonotonic
 
     # Get the data as an array
@@ -540,9 +608,9 @@ class AxisNode(CdmsNode):
 
     # Set the data as a linear vector
     # If the partition is set, derive the vector length from it
-    def setLinearData(self,linearNode, partition=None):
+    def setLinearData(self, linearNode, partition=None):
         self.data = linearNode
-        if self.getChildCount()>0:
+        if self.getChildCount() > 0:
             self.removeChildAt(0)           # Remove the previous linear node
         self.add(linearNode)
         self.dataRepresent = CdLinear
@@ -553,23 +621,23 @@ class AxisNode(CdmsNode):
             self.partition = partition
             self.length = partition[-1]
             linearNode.length = self.length
-            self.setExternalAttr('partition',str(self.partition))
-        self.setExternalAttr('length',self.length)
+            self.setExternalAttr('partition', str(self.partition))
+        self.setExternalAttr('length', self.length)
 
     # Test if axis data vectors are equal
-    def equal(self,axis):
+    def equal(self, axis):
         # Require that partitions (if any) are equal
         if self.partition is not None and axis.partition is not None:
-            if len(self.partition)!=len(axis.partition):
+            if len(self.partition) != len(axis.partition):
                 return 0
-            if not numpy.alltrue(numpy.equal(self.partition,axis.partition)):
+            if not numpy.alltrue(numpy.equal(self.partition, axis.partition)):
                 return 0
         elif self.partition is not None or axis.partition is not None:
             return 0
-        
+
         if self.dataRepresent == axis.dataRepresent == CdVector:
             try:
-                return numpy.alltrue(numpy.equal(self.data,axis.data))
+                return numpy.alltrue(numpy.equal(self.data, axis.data))
             except ValueError:
                 return 0
         elif self.dataRepresent == axis.dataRepresent == CdLinear:
@@ -581,34 +649,34 @@ class AxisNode(CdmsNode):
 
     # Test if axis data vectors are element-wise close
     # True iff for each respective element a and b, abs((b-a)/b)<=eps
-    def isClose(self,axis,eps):
-        if eps==0:
+    def isClose(self, axis, eps):
+        if eps == 0:
             return self.equal(axis)
         if self.dataRepresent == axis.dataRepresent == CdVector:
             try:
-                return numpy.alltrue(numpy.less_equal(numpy.absolute(self.data-axis.data),numpy.absolute(eps*self.data)))
+                return numpy.alltrue(numpy.less_equal(numpy.absolute(self.data - axis.data), numpy.absolute(eps * self.data)))
             except ValueError:
                 return 0
         elif self.dataRepresent == axis.dataRepresent == CdLinear:
-            return self.data.isClose(axis.data,eps)
+            return self.data.isClose(axis.data, eps)
         elif self.dataRepresent == CdVector:
-            return axis.data.isCloseVector(self.data,eps)
+            return axis.data.isCloseVector(self.data, eps)
         else:
-            return self.data.isCloseVector(axis.data,eps)
+            return self.data.isCloseVector(axis.data, eps)
 
     # Test for strict monotonicity.
     # Returns CdNotMonotonic, CdIncreasing, CdDecreasing, or CdSingleton
     def monotonicity(self):
         if self.dataRepresent == CdLinear:
             return self.data.monotonicity()
-        elif self.length==1:
+        elif self.length == 1:
             return CdSingleton
         else:
             first = self.data[:-1]
             second = self.data[1:]
-            if numpy.alltrue(numpy.less(first,second)):
+            if numpy.alltrue(numpy.less(first, second)):
                 return CdIncreasing
-            elif numpy.alltrue(numpy.greater(first,second)):
+            elif numpy.alltrue(numpy.greater(first, second)):
                 return CdDecreasing
             else:
                 return CdNotMonotonic
@@ -616,24 +684,24 @@ class AxisNode(CdmsNode):
     # Extend axes. 'isreltime' is true iff
     # the axes are relative time axes
     # If allowgaps is true, allow gaps when extending linear vectors
-    def extend(self,axis,isreltime=0,allowgaps=0):
+    def extend(self, axis, isreltime=0, allowgaps=0):
         # Set trylin true if should try to catenate linear vectors
-        if self.dataRepresent==CdLinear:
+        if self.dataRepresent == CdLinear:
             anode = self.data
-            if axis.dataRepresent==CdLinear:
+            if axis.dataRepresent == CdLinear:
                 bnode = axis.data
                 trylin = 1
-            elif axis.length==1:
+            elif axis.length == 1:
                 bnode = LinearDataNode(axis.data[0], 0.0, 1)
                 trylin = 1
             else:
                 trylin = 0
-        elif self.length==1: 
+        elif self.length == 1:
             anode = LinearDataNode(self.data[0], 0.0, 1)
-            if axis.dataRepresent==CdLinear:
+            if axis.dataRepresent == CdLinear:
                 bnode = axis.data
                 trylin = 1
-            elif axis.length==1:
+            elif axis.length == 1:
                 bnode = LinearDataNode(axis.data[0], 0.0, 1)
                 trylin = 1
             else:
@@ -641,25 +709,25 @@ class AxisNode(CdmsNode):
         else:
             trylin = 0
 
-        if isreltime==1:
+        if isreltime == 1:
             units1 = self.getExternalAttr('units')
             units2 = axis.getExternalAttr('units')
         else:
             units1 = units2 = None
 
-        if trylin==1:
+        if trylin == 1:
             try:
                 aindex = 0
                 alength = anode.length
                 bindex = alength
                 blength = bnode.length
-                if isreltime==1 and units1 and units2 and units1!=units2:
-                    rtime = cdtime.reltime(bnode.start,units2)
+                if isreltime == 1 and units1 and units2 and units1 != units2:
+                    rtime = cdtime.reltime(bnode.start, units2)
                     offset = rtime.torel(units1).value
-                    bnode.start = bnode.start+offset
+                    bnode.start = bnode.start + offset
                 else:
                     offset = None
-                linNode = anode.concatenate(bnode,allowgaps)
+                linNode = anode.concatenate(bnode, allowgaps)
             except NotMonotonicError:
                 # The dimensions cannot be extended as linear arrays,
                 # so try to extend them as vectors
@@ -667,15 +735,20 @@ class AxisNode(CdmsNode):
             else:
                 # Extend the partition attribute
                 if offset is not None:
-                    bindex = int(offset/linNode.delta+0.5)
-                if self.partition  is None:
-                    partition = numpy.array([aindex,aindex+alength,bindex,bindex+blength])
-                    self.partition_length = alength+blength
+                    bindex = int(offset / linNode.delta + 0.5)
+                if self.partition is None:
+                    partition = numpy.array(
+                        [aindex, aindex + alength, bindex, bindex + blength])
+                    self.partition_length = alength + blength
                 else:
-                    partition = numpy.concatenate((self.partition,[bindex,bindex+blength]))
-                    self.partition_length = self.partition_length+blength
-                self.setLinearData(linNode,partition)
-                self.setExternalAttr('partition_length',self.partition_length)
+                    partition = numpy.concatenate(
+                        (self.partition, [bindex, bindex + blength]))
+                    self.partition_length = self.partition_length + blength
+                self.setLinearData(linNode, partition)
+                self.setExternalAttr('partition_length', self.partition_length)
                 return self
 
         # Else get both axis vectors, concatenate
@@ -688,29 +761,31 @@ class AxisNode(CdmsNode):
         blength = len(ar2)
 
         # Adjust array2 if relative time and units differ
-        if isreltime==1:
+        if isreltime == 1:
             if units1 and units2 and units1 != units2:
-                rtime = cdtime.reltime(0.0,units2)
+                rtime = cdtime.reltime(0.0, units2)
                 delta = rtime.torel(units1).value
-                ar2 = ar2+delta
+                ar2 = ar2 + delta
 
-        ar = numpy.concatenate((ar1,ar2))
+        ar = numpy.concatenate((ar1, ar2))
         try:
             self.setData(ar)
         except NotMonotonicError:
             # Restore original array and resignal
             self.setData(ar1)
-            raise NotMonotonicError, NotMonotonic+`ar`
+            raise NotMonotonicError, NotMonotonic + `ar`
 
         # Extend the partition attribute
-        if self.partition  is None:
-            self.partition = numpy.array([aindex,aindex+alength,bindex,bindex+blength])
-            self.partition_length = alength+blength
+        if self.partition is None:
+            self.partition = numpy.array(
+                [aindex, aindex + alength, bindex, bindex + blength])
+            self.partition_length = alength + blength
         else:
-            self.partition = numpy.concatenate((self.partition,[bindex,bindex+blength]))
-            self.partition_length = self.partition_length+blength
-        self.setExternalAttr('partition',str(self.partition))
-        self.setExternalAttr('partition_length',self.partition_length)
+            self.partition = numpy.concatenate(
+                (self.partition, [bindex, bindex + blength]))
+            self.partition_length = self.partition_length + blength
+        self.setExternalAttr('partition', str(self.partition))
+        self.setExternalAttr('partition_length', self.partition_length)
 
         return self
 
@@ -718,16 +793,21 @@ class AxisNode(CdmsNode):
         return len(self.data)
 
 # Linear data element
+
+
 class LinearDataNode(CdmsNode):
 
-    validStartTypes = [int, float, type(cdtime.comptime(0)), type(cdtime.reltime(0,"hours"))]
+    validStartTypes = [
+        int, float, type(cdtime.comptime(0)), type(cdtime.reltime(0, "hours"))]
     validDeltaTypes = [int, float, list]
 
     def __init__(self, start, delta, length):
-        assert isinstance(start, numpy.floating) or isinstance(start, numpy.integer) or (type(start) in self.validStartTypes), 'Invalid start argument: '+`start`
-        assert isinstance(start, numpy.floating) or isinstance(start, numpy.integer) or (type(delta) in self.validDeltaTypes), 'Invalid delta argument: '+`delta`
-        assert isinstance(length, int), 'Invalid length argument: '+`length`
-        CdmsNode.__init__(self,"linear")
+        assert isinstance(start, numpy.floating) or isinstance(start, numpy.integer) or (
+            type(start) in self.validStartTypes), 'Invalid start argument: ' + `start`
+        assert isinstance(delta, numpy.floating) or isinstance(delta, numpy.integer) or (
+            type(delta) in self.validDeltaTypes), 'Invalid delta argument: ' + `delta`
+        assert isinstance(length, int), 'Invalid length argument: ' + `length`
+        CdmsNode.__init__(self, "linear")
         self.delta = delta
         self.length = length
         self.start = start
@@ -735,53 +815,57 @@ class LinearDataNode(CdmsNode):
 
     # Get an indexed value
     def __getitem__(self, index):
-        return self.start + index*self.delta
+        return self.start + index * self.delta
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr("start",self.start)
-        self.setExternalAttr("delta",self.delta)
-        self.setExternalAttr("length",self.length)
+        self.setExternalAttr("start", self.start)
+        self.setExternalAttr("delta", self.delta)
+        self.setExternalAttr("length", self.length)
 
     # Equality of linear vectors
-    def equal(self,axis):
-        return self.delta==axis.delta and self.length==axis.length and self.start==axis.start
+    def equal(self, axis):
+        return self.delta == axis.delta and self.length == axis.length and self.start == axis.start
 
     # Closeness of linear vectors
-    def isClose(self,axis,eps):
-        if eps==0:
+    def isClose(self, axis, eps):
+        if eps == 0:
             return self.equal(axis)
         else:
-            return self.delta==axis.delta and self.length==axis.length and abs(self.start-axis.start)<=abs(eps*self.start)
+            return self.delta == axis.delta and self.length == axis.length and abs(self.start - axis.start) <= abs(eps * self.start)
 
     # Equality of linear vector and array
-    def equalVector(self,ar):
-        diff = ar[1:]-ar[:-1]
+    def equalVector(self, ar):
+        diff = ar[1:] - ar[:-1]
         try:
-            comp = numpy.alltrue(numpy.equal((self.delta)*numpy.ones(self.length-1),diff))
+            comp = numpy.alltrue(
+                numpy.equal((self.delta) * numpy.ones(self.length - 1), diff))
         except ValueError:
             return 0
         return comp
 
     # Closeness of linear vector and array
-    def isCloseVector(self,ar,eps):
-        if eps==0:
+    def isCloseVector(self, ar, eps):
+        if eps == 0:
             return self.equalVector(ar)
-        diff = ar[1:]-ar[:-1]
-        diff2 = self.delta*numpy.ones(self.length-1)
+        diff = ar[1:] - ar[:-1]
+        diff2 = self.delta * numpy.ones(self.length - 1)
         try:
-            comp = numpy.alltrue(numpy.less_equal(numpy.absolute(diff2-diff),numpy.absolute(eps*diff2)))
+            comp = numpy.alltrue(
+                numpy.less_equal(numpy.absolute(diff2 - diff),
+                                 numpy.absolute(eps * diff2)))
         except ValueError:
             return 0
         return comp
 
-    # Return monotonicity: CdNotMonotonic, CdIncreasing, CdDecreasing, or CdSingleton
+    # Return monotonicity: CdNotMonotonic, CdIncreasing, CdDecreasing, or
+    # CdSingleton
     def monotonicity(self):
-        if self.length==1:
+        if self.length == 1:
             return CdSingleton
-        elif self.delta>0.0:
+        elif self.delta > 0.0:
             return CdIncreasing
-        elif self.delta<0.0:
+        elif self.delta < 0.0:
             return CdDecreasing
         else:
             return CdNotMonotonic
@@ -789,34 +873,39 @@ class LinearDataNode(CdmsNode):
     # Return a vector representation, given a CDMS datatype
     def toVector(self, datatype):
         numericType = CdToNumericType.get(datatype)
-        if numericType is None: raise CDMSError, InvalidDatatype + datatype
+        if numericType is None:
+            raise CDMSError, InvalidDatatype + datatype
         start = self.start
         delta = self.delta
         length = self.length
-        if length>1:
-            stop = start + (length-0.99)*delta
-            if delta==0.0: delta=1.0
-            ar = numpy.arange(start,stop,delta,numericType)
+        if length > 1:
+            stop = start + (length - 0.99) * delta
+            if delta == 0.0:
+                delta = 1.0
+            ar = numpy.arange(start, stop, delta, numericType)
         else:
-            ar = numpy.array([start],numericType)
+            ar = numpy.array([start], numericType)
         return ar
 
     # Concatenate linear arrays, preserving linearity
     # If allowgaps is set, don't require that the linear arrays be contiguous
     # Return a new linear node
-    def concatenate(self,linearNode,allowgaps=0):
-        if self.length>1 and linearNode.length>1 and self.delta != linearNode.delta:
-            raise NotMonotonicError, NotMonotonic + 'linear vector deltas do not match: %s,%s'%(`self.delta`,`linearNode.delta`)
+    def concatenate(self, linearNode, allowgaps=0):
+        if self.length > 1 and linearNode.length > 1 and self.delta != linearNode.delta:
+            raise NotMonotonicError, NotMonotonic + \
+                'linear vector deltas do not match: %s,%s' % (
+                    `self.delta`, `linearNode.delta`)
 
-        if self.length>1:
+        if self.length > 1:
             delta = self.delta
-        elif linearNode.length>1:
+        elif linearNode.length > 1:
             delta = linearNode.delta
         else:
             delta = linearNode.start - self.start
-        if allowgaps==0:
-            if linearNode.start-self.start != self.length*delta:
-                raise NotMonotonicError, NotMonotonic + 'linear vectors are not contiguous'
+        if allowgaps == 0:
+            if linearNode.start - self.start != self.length * delta:
+                raise NotMonotonicError, NotMonotonic + \
+                    'linear vectors are not contiguous'
         length = self.length + linearNode.length
         return LinearDataNode(self.start, delta, length)
 
@@ -824,12 +913,16 @@ class LinearDataNode(CdmsNode):
         return self.length
 
 # Rectilinear lat-lon grid
+
+
 class RectGridNode(CdmsNode):
 
     # Create a grid
     # All arguments are strings
-    def __init__(self, id, latitude, longitude, gridtype=UnknownGridType, order="yx", mask=None):
-        CdmsNode.__init__(self,"rectGrid",id)
+
+    def __init__(self, id, latitude, longitude,
+                 gridtype=UnknownGridType, order="yx", mask=None):
+        CdmsNode.__init__(self, "rectGrid", id)
         self.latitude = latitude
         self.longitude = longitude
         self.gridtype = gridtype
@@ -839,17 +932,20 @@ class RectGridNode(CdmsNode):
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr('type',self.gridtype)
+        self.setExternalAttr('type', self.gridtype)
         self.setExternalAttr('latitude', self.latitude)
-        self.setExternalAttr('longitude',self.longitude)
-        self.setExternalAttr('order',self.order)
-        if self.mask is not None: self.setExternalAttr('mask',self.mask)
+        self.setExternalAttr('longitude', self.longitude)
+        self.setExternalAttr('order', self.order)
+        if self.mask is not None:
+            self.setExternalAttr('mask', self.mask)
 
 # Link to an external element
+
+
 class XLinkNode(CdmsNode):
 
     def __init__(self, id, uri, contentRole, content=''):
-        CdmsNode.__init__(self,"xlink",id)
+        CdmsNode.__init__(self, "xlink", id)
         self.uri = uri
         self.contentRole = contentRole
         self.content = content
@@ -857,33 +953,39 @@ class XLinkNode(CdmsNode):
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr("href",self.uri,CdString)
-        self.setExternalAttr("content-role",self.contentRole,CdString)
+        self.setExternalAttr("href", self.uri, CdString)
+        self.setExternalAttr("content-role", self.contentRole, CdString)
 
 # Link to a document
+
+
 class DocLinkNode(CdmsNode):
 
     def __init__(self, uri, content=''):
-        CdmsNode.__init__(self,"doclink")
+        CdmsNode.__init__(self, "doclink")
         self.uri = uri
         self.content = content
         DocLinkNode.mapToExternal(self)
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr("href",self.uri,CdString)
+        self.setExternalAttr("href", self.uri, CdString)
 
 # Domain
+
+
 class DomainNode(CdmsNode):
 
     def __init__(self):
-        CdmsNode.__init__(self,"domain")
+        CdmsNode.__init__(self, "domain")
 
 # Domain element
+
+
 class DomElemNode(CdmsNode):
 
     def __init__(self, name, start=None, length=None):
-        CdmsNode.__init__(self,"domElem")
+        CdmsNode.__init__(self, "domElem")
         self.name = name
         self.start = start
         self.length = length
@@ -891,14 +993,16 @@ class DomElemNode(CdmsNode):
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr('name',self.name)
-        if self.start is not None: self.setExternalAttr('start',self.start)
-        if self.length is not None: self.setExternalAttr('length',self.length)
+        self.setExternalAttr('name', self.name)
+        if self.start is not None:
+            self.setExternalAttr('start', self.start)
+        if self.length is not None:
+            self.setExternalAttr('length', self.length)
 
     # Set the name
-    def setName(self,name):
+    def setName(self, name):
         self.name = name
-        self.setExternalAttr('name',self.name)
+        self.setExternalAttr('name', self.name)
 
     # Get the name
     def getName(self):
@@ -906,78 +1010,85 @@ class DomElemNode(CdmsNode):
 
     # Write to a file, with formatting.
     # tablevel is the start number of tabs
-    def write(self,fd=None,tablevel=0,format=1):
-        if fd is None: fd = sys.stdout
-        if format: fd.write(tablevel*'\t')
-        fd.write('<'+self.tag)
+    def write(self, fd=None, tablevel=0, format=1):
+        if fd is None:
+            fd = sys.stdout
+        if format:
+            fd.write(tablevel * '\t')
+        fd.write('<' + self.tag)
         for attname in self.attribute.keys():
-            (attval,datatype)=self.attribute[attname]
+            (attval, datatype) = self.attribute[attname]
             # attvalstr = string.replace(str(attval),'"',"'") # Map " to '
-            attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  # Map illegal chars to entities
-            fd.write(' '+attname+'="'+attvalstr+'"')
+            attvalstr = _Illegal.sub(
+                mapIllegalToEntity,
+                str(attval))  # Map illegal chars to entities
+            fd.write(' ' + attname + '="' + attvalstr + '"')
         fd.write('/>')
-        if format: fd.write('\n')
+        if format:
+            fd.write('\n')
 
 # Attribute node - only used as a placeholder during parse and write
 #   Attr nodes are not placed on the tree
 #
 # Two ways to create an Attr object:
 # (1) attr = AttrNode(name,value)
-#     datatype = sometype # optionally, to override intrinsic type
+# datatype = sometype # optionally, to override intrinsic type
 # (2) attr = AttrNode(name,None)
 #     attr.setValueFromString(somestring,sometype)
+
+
 class AttrNode(CdmsNode):
 
     def __init__(self, name, value=None):
-        CdmsNode.__init__(self,"attr")
-        if not (isinstance(value,(int, numpy.integer, float, numpy.floating, basestring)) \
+        CdmsNode.__init__(self, "attr")
+        if not (isinstance(value, (int, numpy.integer, float, numpy.floating, basestring))
                 or value is None:
-            raise CDMSError, 'Invalid attribute type: '+`value`
-        self.name = name
-        self.value = value
-        self.datatype = None            # CDMS datatype, use getDatatype to retrieve
-        self.content = ''             # string content
+            raise CDMSError, 'Invalid attribute type: ' + `value`
+        self.name = name
+        self.value = value
+        self.datatype = None            # CDMS datatype, use getDatatype to retrieve
+        self.content = ''             # string content
 
     # Note: mapToExternal is not called at init time, must be called explicitly
     #   if needed
     def mapToExternal(self):
-        self.attribute['name']=(self.name,CdString)
-        self.attribute['datatype']=(self.getDatatype(),CdString)
-        self.content = self.getValueAsString()
+        self.attribute['name'] = (self.name, CdString)
+        self.attribute['datatype'] = (self.getDatatype(), CdString)
+        self.content = self.getValueAsString()
 
     def getDatatype(self):
         if self.datatype:
             return self.datatype
         elif isinstance(self.value, basestring):
             return CdString
-        elif isinstance(self.value, (float,numpy.floating)):
+        elif isinstance(self.value, (float, numpy.floating)):
             return CdDouble
         elif isinstance(self.value, (int, numpy.integer)):
             return CdLong
         else:
-            raise CDMSError, 'Invalid attribute type: '+`self.value`
+            raise CDMSError, 'Invalid attribute type: ' + `self.value`
 
     def getLength(self):
         return 1
 
     # Map a string of a given datatype to a value
     #   Returns ValueError if the conversion fails
-    def setValueFromString(self,valString,datatype):
-        val = None
+    def setValueFromString(self, valString, datatype):
+        val = None
         if not isinstance(valString, basestring):
             raise CDMSError, 'input value is not a string'
         if datatype == CdString:
             val=valString
-        elif datatype in (CdShort,CdInt,CdLong):
+        elif datatype in (CdShort, CdInt, CdLong):
             try:
                 val=int(valString)
             except ValueError:
-                raise CDMSError, 'value is not an integer: '+valString
-        elif datatype in (CdFloat,CdDouble):
+                raise CDMSError, 'value is not an integer: ' + valString
+        elif datatype in (CdFloat, CdDouble):
             try:
                 val=float(valString)
             except ValueError:
-                raise CDMSError, 'value is not floating-point: '+valString
+                raise CDMSError, 'value is not floating-point: ' + valString
         self.value=val
         self.datatype=datatype
         return val
@@ -987,36 +1098,40 @@ class AttrNode(CdmsNode):
 
     # Set content
     # This may be called multiple times, so append
-    def setContentFromString(self,content):
-        self.content = self.content+content
+    def setContentFromString(self, content):
+        self.content = self.content + content
 
     # Write to a file, with formatting.
     # tablevel is the start number of tabs
-    def write(self,fd=None,tablevel=0,format=1):
-        if fd is None: fd = sys.stdout
+    def write(self, fd=None, tablevel=0, format=1):
+        if fd is None:
+            fd = sys.stdout
         if self.dtd:
-            validAttrs = self.dtd.keys()
+            validAttrs = self.dtd.keys()
         else:
-            validAttrs = None
+            validAttrs = None
 
-        if format: fd.write(tablevel*'\t')
-        fd.write('<'+self.tag)
+        if format:
+            fd.write(tablevel * '\t')
+        fd.write('<' + self.tag)
 
         # Write valid attributes
         for attname in self.attribute.keys():
             if (validAttrs and (attname in validAttrs)) or (not validAttrs):
-                (attval,datatype)=self.attribute[attname]
+                (attval, datatype) = self.attribute[attname]
                 # attvalstr = string.replace(str(attval),'"',"'") # Map " to '
-                attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  # Map illegal chars to entities
-                fd.write(' '+attname+'="'+attvalstr+'"')
+                attvalstr = _Illegal.sub(
+                    mapIllegalToEntity,
+                    str(attval))  # Map illegal chars to entities
+                fd.write(' ' + attname + '="' + attvalstr + '"')
         fd.write('>')
 
         # Write content
         if self.content is not None:
-            content = _Illegal.sub(mapIllegalToEntity,self.content)  # Map illegal chars to entities
+            content = _Illegal.sub(
+                mapIllegalToEntity,
+                self.content)  # Map illegal chars to entities
             fd.write(content)
 
-        fd.write('</'+self.tag+'>')
+        fd.write('</' + self.tag + '>')
+        if format:
+            fd.write('\n')
 
 if __name__ == '__main__':
@@ -1049,7 +1164,7 @@ if __name__ == '__main__':
     # hAxis = AxisNode('h',len(h),CdDouble,h)
     # jAxis = AxisNode('j',len(j),CdDouble); jAxis.setLinearData(j)
     # kAxis = AxisNode('k',len(k),CdDouble,k)
-    
+
     # print aAxis.monotonicity()
     # print hAxis.monotonicity()
     # print kAxis.monotonicity()
@@ -1057,29 +1172,29 @@ if __name__ == '__main__':
     # print dAxis.monotonicity()
     # print jAxis.monotonicity()
 
-    m = LinearDataNode(1,2,3)
-    n = LinearDataNode(11,2,3)
-    p = LinearDataNode(15,-4,3)
-    q = numpy.array([4.,2.,1.])
-    r = numpy.array([11.,9.,8.])
-    s = numpy.array([7.])
-    t = numpy.array([9.])
-    v = numpy.array([5.])
-
-    mAxis = AxisNode('m',len(m),CdLong); mAxis.setLinearData(m)
-    nAxis = AxisNode('n',len(n),CdLong); nAxis.setLinearData(n)
-    pAxis = AxisNode('p',len(p),CdLong); pAxis.setLinearData(p)
-    qAxis = AxisNode('q',len(q),CdDouble,q)
-    rAxis = AxisNode('r',len(r),CdDouble,r)
-    sAxis = AxisNode('s',len(s),CdDouble,s)
-    tAxis = AxisNode('t',len(t),CdDouble,t)
-    vAxis = AxisNode('v',len(v),CdDouble,v)
+    m = LinearDataNode(1, 2, 3)
+    n = LinearDataNode(11, 2, 3)
+    p = LinearDataNode(15, -4, 3)
+    q = numpy.array([4., 2., 1.])
+    r = numpy.array([11., 9., 8.])
+    s = numpy.array([7.])
+    t = numpy.array([9.])
+    v = numpy.array([5.])
+
+    mAxis = AxisNode('m', len(m), CdLong)
+    mAxis.setLinearData(m)
+    nAxis = AxisNode('n', len(n), CdLong)
+    nAxis.setLinearData(n)
+    pAxis = AxisNode('p', len(p), CdLong)
+    pAxis.setLinearData(p)
+    qAxis = AxisNode('q', len(q), CdDouble, q)
+    rAxis = AxisNode('r', len(r), CdDouble, r)
+    sAxis = AxisNode('s', len(s), CdDouble, s)
+    tAxis = AxisNode('t', len(t), CdDouble, t)
+    vAxis = AxisNode('v', len(v), CdDouble, v)
 
     def printType(axis):
-        if axis.dataRepresent==CdLinear: print 'linear'
+        if axis.dataRepresent == CdLinear: print 'linear'
         else: print 'vector'
-        
-    def testit(a,b):
+
+    def testit(a, b):
         import copy
         x=copy.copy(a)
         print x.extend(b).getData()
@@ -1095,6 +1210,6 @@ if __name__ == '__main__':
     # testit(mAxis,rAxis)
     # testit(vAxis,nAxis)
     # testit(sAxis,rAxis)
-    
+
     # Errors:
     # testit(mAxis,nAxis)
diff --git a/Packages/cdms2/Lib/cdmsURLopener.py b/Packages/cdms2/Lib/cdmsURLopener.py
index f401bf8d5..e0b710131 100644
--- a/Packages/cdms2/Lib/cdmsURLopener.py
+++ b/Packages/cdms2/Lib/cdmsURLopener.py
@@ -3,15 +3,16 @@
 
 import urllib
 
+
 class CDMSURLopener(urllib.FancyURLopener):
 
-	# Override FancyURLopener error handling - raise an exception
-        # Can also define function http_error_DDD where DDD is the 3-digit error code,
-        # to handle specific errors.
-	def http_error_default(self, url, fp, errcode, errmsg, headers):
-		void = fp.read()
-		fp.close()
-		raise IOError, ('http error', errcode, errmsg, headers)
+    # Override FancyURLopener error handling - raise an exception
+    # Can also define function http_error_DDD where DDD is the 3-digit error code,
+    # to handle specific errors.
 
-urllib._urlopener = CDMSURLopener()
+    def http_error_default(self, url, fp, errcode, errmsg, headers):
+        void = fp.read()
+        fp.close()
+        raise IOError('http error', errcode, errmsg, headers)
 
+urllib._urlopener = CDMSURLopener()
diff --git a/Packages/cdms2/Lib/cdmsobj.py b/Packages/cdms2/Lib/cdmsobj.py
index 8fad6b3ab..30961e26b 100644
--- a/Packages/cdms2/Lib/cdmsobj.py
+++ b/Packages/cdms2/Lib/cdmsobj.py
@@ -2,7 +2,7 @@
 CDMS module-level functions and definitions
 """
 
-import cdmsNode
+from . import cdmsNode
 import cdtime
 import glob
 import os
@@ -26,7 +26,7 @@ CdArray = cdmsNode.CdArray
 
 Unlimited = 1                           # Unlimited axis designator
 
-Max32int = 2**31-1                      # Maximum 32-bit integer
+Max32int = 2**31 - 1                      # Maximum 32-bit integer
 
 # Regular expressions for each template specifier
 _Daynum = '[0-3][0-9]'
@@ -41,7 +41,7 @@ _Percent = '%'
 _Second = '[0-5][0-9]'
 _Year2 = '[0-9][0-9]'
 _Year4 = '[0-9]{4,4}'
-_Zulu = _Hour+'[z|Z]'+_Year4+_Monthnum+_Daynum
+_Zulu = _Hour + '[z|Z]' + _Year4 + _Monthnum + _Daynum
 
 # Positions for time lists
 _yr = 0
@@ -93,42 +93,67 @@ _specifierMap = {
     '%v': (_Name, 'name', 'var', None),
     '%y': (_Year2, 'year2', 'time', _yr),
     '%z': (_Zulu, 'zulu', 'time', None),
-    }
-
-_monthListUpper = ['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC']
+}
+
+_monthListUpper = [
+    'JAN',
+     'FEB',
+     'MAR',
+     'APR',
+     'MAY',
+     'JUN',
+     'JUL',
+     'AUG',
+     'SEP',
+     'OCT',
+     'NOV',
+     'DEC']
 _monthMapUpper = {
-    'JAN':1,
-    'FEB':2,
-    'MAR':3,
-    'APR':4,
-    'MAY':5,
-    'JUN':6,
-    'JUL':7,
-    'AUG':8,
-    'SEP':9,
-    'OCT':10,
-    'NOV':11,
-    'DEC':12,
-    }
-
-_monthListLower = ['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec']
+    'JAN': 1,
+    'FEB': 2,
+    'MAR': 3,
+    'APR': 4,
+    'MAY': 5,
+    'JUN': 6,
+    'JUL': 7,
+    'AUG': 8,
+    'SEP': 9,
+    'OCT': 10,
+    'NOV': 11,
+    'DEC': 12,
+}
+
+_monthListLower = [
+    'jan',
+     'feb',
+     'mar',
+     'apr',
+     'may',
+     'jun',
+     'jul',
+     'aug',
+     'sep',
+     'oct',
+     'nov',
+     'dec']
 _monthMapLower = {
-    'jan':1,
-    'feb':2,
-    'mar':3,
-    'apr':4,
-    'may':5,
-    'jun':6,
-    'jul':7,
-    'aug':8,
-    'sep':9,
-    'oct':10,
-    'nov':11,
-    'dec':12,
-    }
-
-_specre = re.compile('(%%|%G|%H|%L|%M|%S|%Y|%d|%eG|%eH|%eL|%eM|%eS|%eY|%ed|%ef|%eg|%eh|%em|%en|%ey|%ez|%f|%g|%h|%m|%n|%v|%y|%z)')
-_filere = re.compile('[^'+os.sep+']+')
+    'jan': 1,
+    'feb': 2,
+    'mar': 3,
+    'apr': 4,
+    'may': 5,
+    'jun': 6,
+    'jul': 7,
+    'aug': 8,
+    'sep': 9,
+    'oct': 10,
+    'nov': 11,
+    'dec': 12,
+}
+
+_specre = re.compile(
+    '(%%|%G|%H|%L|%M|%S|%Y|%d|%eG|%eH|%eL|%eM|%eS|%eY|%ed|%ef|%eg|%eh|%em|%en|%ey|%ez|%f|%g|%h|%m|%n|%v|%y|%z)')
+_filere = re.compile('[^' + os.sep + ']+')
 
 _debug = 0                              # Print debug info
 
@@ -137,12 +162,14 @@ AbsoluteTemplate = "Template must be a relative path: "
 # Module-level functions
 
 # Set debug mode, to 'on' or 'off'
+
+
 def setDebugMode(mode):
     global _debug
-    if mode=='on':
-        _debug=1
-    elif mode=='off':
-        _debug=0
+    if mode == 'on':
+        _debug = 1
+    elif mode == 'off':
+        _debug = 0
 
 # Map a template to a regular expression
 # Returns (regex,dimtypes), where regex is the regular expression
@@ -151,206 +178,226 @@ def setDebugMode(mode):
 # where specStrings is the specifier associated with the dimension type,
 # or for time, the list of specifiers in the order (yr,mo,dy,hr,mi,se)
 # where each element is the specifier for that time element
+
+
 def templateToRegex(template):
 
     matchspec = {}
-    dimtypes = {'var':None,
-                'time':[None,None,None,None,None,None],
-                'etime':[None,None,None,None,None,None],
-                'level':None,
-                'elevel':None,
+    dimtypes = {'var': None,
+                'time': [None, None, None, None, None, None],
+                'etime': [None, None, None, None, None, None],
+                'level': None,
+                'elevel': None,
                 }
 
     # Map spec to regex
     # (The default arg bring matchspec and dimtypes into the local scope)
     def retspec(matchobj, matchspec=matchspec, dimtypes=dimtypes):
         spec = matchobj.group(0)
-        pat,name,dimtype,pos = _specifierMap[spec]
+        pat, name, dimtype, pos = _specifierMap[spec]
 
-        if dimtype=='var':
-            dimtypes['var']=spec
-        elif dimtype in ('time','etime'):
+        if dimtype == 'var':
+            dimtypes['var'] = spec
+        elif dimtype in ('time', 'etime'):
             if pos is not None:
-                dimtypes[dimtype][pos]=spec
-            elif name in ('zulu','ezulu'):
-                pass # Crack Zulu time
-        elif dimtype in ('level','elevel'):
-            dimtypes[dimtype]=spec
-
-        if matchspec.has_key(spec):
-            return '(?P='+name+')'
+                dimtypes[dimtype][pos] = spec
+            elif name in ('zulu', 'ezulu'):
+                pass  # Crack Zulu time
+        elif dimtype in ('level', 'elevel'):
+            dimtypes[dimtype] = spec
+
+        if spec in matchspec:
+            return '(?P=' + name + ')'
         else:
             matchspec[spec] = 1
-            return '(?P<'+name+'>'+pat+')'
-        
-    templatere = _specre.sub(retspec,template)
-    return (templatere,dimtypes)
+            return '(?P<' + name + '>' + pat + ')'
+
+    templatere = _specre.sub(retspec, template)
+    return (templatere, dimtypes)
+
 
 def retglob(matchobj):
     return '*'
 
 # Generate a component time from a matchobj and list of specs
-def generateTime(matchobj,timespecs):
+
+
+def generateTime(matchobj, timespecs):
     iyr = 0
     imo = 1
     idy = 1
     ihr = 0
     imi = 0
     ise = 0
-    yrspec,mospec,dyspec,hrspec,mispec,sespec = timespecs
+    yrspec, mospec, dyspec, hrspec, mispec, sespec = timespecs
     if yrspec:
-        pat,name,dimtype,pos = _specifierMap[yrspec]
+        pat, name, dimtype, pos = _specifierMap[yrspec]
         yrstr = matchobj.group(name)
         iyr = int(yrstr)
 
         # Map 2-digit year to [1950,2049)
-        if yrspec in ('%y','%ey'):
-            if iyr<50:
-                iyr = iyr+2000
+        if yrspec in ('%y', '%ey'):
+            if iyr < 50:
+                iyr = iyr + 2000
             else:
-                iyr = iyr+1900
+                iyr = iyr + 1900
     if mospec:
-        pat,name,dimtype,pos = _specifierMap[mospec]
+        pat, name, dimtype, pos = _specifierMap[mospec]
         mostr = matchobj.group(name)
-        if mospec in ('%G','%eG'):
+        if mospec in ('%G', '%eG'):
             imo = _monthMapUpper[mostr]
-        elif mospec in ('%g','%eg'):
+        elif mospec in ('%g', '%eg'):
             imo = _monthMapLower[mostr]
-        elif mospec in ('%m','%em','%n','%en'):
+        elif mospec in ('%m', '%em', '%n', '%en'):
             imo = int(mostr)
     if dyspec:
-        pat,name,dimtype,pos = _specifierMap[dyspec]
+        pat, name, dimtype, pos = _specifierMap[dyspec]
         dystr = matchobj.group(name)
         idy = int(dystr)
     if hrspec:
-        pat,name,dimtype,pos = _specifierMap[hrspec]
+        pat, name, dimtype, pos = _specifierMap[hrspec]
         hrstr = matchobj.group(name)
         ihr = int(hrstr)
     if mispec:
-        pat,name,dimtype,pos = _specifierMap[mispec]
+        pat, name, dimtype, pos = _specifierMap[mispec]
         mistr = matchobj.group(name)
         imi = int(mistr)
     if sespec:
-        pat,name,dimtype,pos = _specifierMap[sespec]
+        pat, name, dimtype, pos = _specifierMap[sespec]
         sestr = matchobj.group(name)
         ise = int(sestr)
-    return cdtime.comptime(iyr,imo,idy,ihr,imi,ise)   
+    return cdtime.comptime(iyr, imo, idy, ihr, imi, ise)
 
 # Find all files in 'direc' which match 'template'.
 # template is a relative path, and may contain specifiers
 # in directory names. Returns a list [(f,m),..,(f,m)] where
 # f is a matching file name, and m is a list [var,time,etime,level,elevel]
 # of matching values in f. Any or all elems of the list may be None.
-def matchingFiles(direc,template):
+
+
+def matchingFiles(direc, template):
 
     if os.path.isabs(template):
-        raise AbsoluteTemplate, template
+        raise AbsoluteTemplate(template)
 
     # Generate a glob pattern
     normTemplate = os.path.normpath(template)
-    globPattern = _filere.sub(retglob,normTemplate)
+    globPattern = _filere.sub(retglob, normTemplate)
 
     # Map the template to a regular expression
-    templatere,dimtypes = templateToRegex(template)
+    templatere, dimtypes = templateToRegex(template)
     ctre = re.compile(templatere)
 
     # Get a list of candidate files
     try:
         os.chdir(direc)
     except os.error:
-        raise IOError,'%s: %s'%(sys.exc_value,direc)
-        
+        raise IOError('%s: %s' % (sys.exc_info()[1], direc))
+
     candlist = glob.glob(globPattern)
 
-    # Look for matches 
+    # Look for matches
     matchfiles = []
     for candfile in candlist:
         matchobj = ctre.match(candfile)
 
         # Create matching values
-        if matchobj is None: continue
-        matchnames = [None,None,None,None,None]
+        if matchobj is None:
+            continue
+        matchnames = [None, None, None, None, None]
         if dimtypes['var'] is not None:
             matchnames[_var] = matchobj.group('name')
-        if dimtypes['time'] != [None,None,None,None,None,None]:
-            matchnames[_time] = generateTime(matchobj,dimtypes['time'])
-        if dimtypes['etime'] != [None,None,None,None,None,None]:
-            matchnames[_etime] = generateTime(matchobj,dimtypes['etime'])
+        if dimtypes['time'] != [None, None, None, None, None, None]:
+            matchnames[_time] = generateTime(matchobj, dimtypes['time'])
+        if dimtypes['etime'] != [None, None, None, None, None, None]:
+            matchnames[_etime] = generateTime(matchobj, dimtypes['etime'])
         if dimtypes['level'] is not None:
             matchnames[_level] = int(matchobj.group('level'))
         if dimtypes['elevel'] is not None:
             matchnames[_elevel] = int(matchobj.group('elevel'))
-        matchfiles.append((candfile,matchnames))
+        matchfiles.append((candfile, matchnames))
 
     return matchfiles
 
 # Get a string time component from a spec and a component time
-def getTimeAsString(spec,time):
 
-    if spec in ('%G','%eG'):
+
+def getTimeAsString(spec, time):
+
+    if spec in ('%G', '%eG'):
         imo = time.month
-        specstr = _monthListUpper[imo-1]
-    elif spec in ('%H','%eH'):
+        specstr = _monthListUpper[imo - 1]
+    elif spec in ('%H', '%eH'):
         specstr = str(time.hour)
-    elif spec in ('%M','%eM'):
+    elif spec in ('%M', '%eM'):
         specstr = str(time.minute)
-    elif spec in ('%S','%eS'):
+    elif spec in ('%S', '%eS'):
         specstr = str(int(time.second))
-    elif spec in ('%Y','%eY'):
+    elif spec in ('%Y', '%eY'):
         specstr = str(time.year).zfill(4)
-    elif spec in ('%d','%ed'):
+    elif spec in ('%d', '%ed'):
         specstr = str(time.day)
-    elif spec in ('%f','%ef'):
+    elif spec in ('%f', '%ef'):
         specstr = str(time.day).zfill(2)
-    elif spec in ('%g','%eg'):
+    elif spec in ('%g', '%eg'):
         imo = time.month
-        specstr = _monthListLower[imo-1]
-    elif spec in ('%h','%eh'):
+        specstr = _monthListLower[imo - 1]
+    elif spec in ('%h', '%eh'):
         specstr = str(time.hour).zfill(2)
-    elif spec in ('%m','%em'):
+    elif spec in ('%m', '%em'):
         specstr = str(time.month)
-    elif spec in ('%n','%en'):
+    elif spec in ('%n', '%en'):
         specstr = str(time.month).zfill(2)
-    elif spec in ('%y','%ey'):
-        specstr = str(time.year%100).zfill(2)
-    elif spec in ('%z','%ez'):
-        specstr = getTimeAsString('%H',time)+'Z'+getTimeAsString('%Y',time)+getTimeAsString('%n',time)+getTimeAsString('%d',time)
+    elif spec in ('%y', '%ey'):
+        specstr = str(time.year % 100).zfill(2)
+    elif spec in ('%z', '%ez'):
+        specstr = getTimeAsString(
+            '%H',
+            time) + 'Z' + getTimeAsString(
+                '%Y',
+                time) + getTimeAsString(
+            '%n',
+             time) + getTimeAsString(
+                '%d',
+                 time)
     return specstr
 
 # Generate a file path, given a template and matchname list.
 # matchnames is a list [varname,time,etime,level,elevel], where
 # any or all elems may be None.  If matchnames be a longer list,
 # it is not an error but the additional elements are ignored.
-def getPathFromTemplate(template,matchnames):
+
+
+def getPathFromTemplate(template, matchnames):
 
     # Map spec to value string
     # (Default arg brings matchnames into the local scope)
     def retpath(matchobj, matchnames=matchnames):
         spec = matchobj.group(0)
-        pat,name,dimtype,pos = _specifierMap[spec]
-        var,time,etime,level,elevel = matchnames[0:5]
+        pat, name, dimtype, pos = _specifierMap[spec]
+        var, time, etime, level, elevel = matchnames[0:5]
 
-        if dimtype=='var':
+        if dimtype == 'var':
             if var is None:
                 specstr = spec
             else:
                 specstr = var
-        elif dimtype=='time':
+        elif dimtype == 'time':
             if time is None:
                 specstr = spec
             else:
-                specstr = getTimeAsString(spec,time)
-        elif dimtype=='etime':
+                specstr = getTimeAsString(spec, time)
+        elif dimtype == 'etime':
             if etime is None:
                 specstr = spec
             else:
-                specstr = getTimeAsString(spec,etime)
-        elif dimtype=='level':
+                specstr = getTimeAsString(spec, etime)
+        elif dimtype == 'level':
             if level is None:
                 specstr = spec
             else:
                 specstr = str(level)
-        elif dimtype=='elevel':
+        elif dimtype == 'elevel':
             if elevel is None:
                 specstr = spec
             else:
@@ -358,7 +405,7 @@ def getPathFromTemplate(template,matchnames):
 
         return specstr
 
-    path = _specre.sub(retpath,template)
+    path = _specre.sub(retpath, template)
     return path
 
 # Search an object or list of objects for a string attribute which
@@ -369,6 +416,7 @@ def getPathFromTemplate(template,matchnames):
 # 'rectGrid','xlink', or None.  If tag is None, all object classes are
 # searched.  If attribute is None, all attributes are searched.
 
+
 def searchPattern(objlist, pattern, attribute=None, tag=None):
     if tag is not None:
         tag = tag.lower()
@@ -378,7 +426,7 @@ def searchPattern(objlist, pattern, attribute=None, tag=None):
 
     returnlist = []
     for obj in objlist:
-        returnlist = returnlist + obj.searchPattern(regexp,attribute,tag)
+        returnlist = returnlist + obj.searchPattern(regexp, attribute, tag)
 
     return returnlist
 
@@ -390,6 +438,7 @@ def searchPattern(objlist, pattern, attribute=None, tag=None):
 # 'rectGrid','xlink', or None.  If tag is None, all object classes are
 # searched.  If attribute is None, all attributes are searched.
 
+
 def matchPattern(objlist, pattern, attribute=None, tag=None):
     if tag is not None:
         tag = tag.lower()
@@ -399,7 +448,7 @@ def matchPattern(objlist, pattern, attribute=None, tag=None):
 
     returnlist = []
     for obj in objlist:
-        returnlist = returnlist + obj.matchPattern(regexp,attribute,tag)
+        returnlist = returnlist + obj.matchPattern(regexp, attribute, tag)
 
     return returnlist
 
@@ -418,7 +467,9 @@ def matchPattern(objlist, pattern, attribute=None, tag=None):
 #
 #   lambda obj: obj.partition_length > 1000
 #
-# is sufficient, it is not necessary to test for the existence of the attribute.
+# is sufficient, it is not necessary to test for the existence of the
+# attribute.
+
 
 def searchPredicate(objlist, predicate, tag=None):
     if tag is not None:
@@ -428,7 +479,7 @@ def searchPredicate(objlist, predicate, tag=None):
 
     returnlist = []
     for obj in objlist:
-        returnlist = returnlist + obj.searchPredicate(predicate,tag)
+        returnlist = returnlist + obj.searchPredicate(predicate, tag)
 
     return returnlist
 
@@ -436,74 +487,80 @@ def searchPredicate(objlist, predicate, tag=None):
 # Classes
 
 # Generic CDMS object has a tree node, attributes
+
+
 class CdmsObj (object):
-##     def __setattr__(self,name,value):
-##         object.__setattr__(self,name,value)
-##         if not name in self.__cdms_internals__ and not name[0]=='_':
-##             self.attributes[name]=value
-## ##             if name == 'shape' :
-## ##                 print self.__class__,name,value
+# def __setattr__(self,name,value):
+# object.__setattr__(self,name,value)
+# if not name in self.__cdms_internals__ and not name[0]=='_':
+# self.attributes[name]=value
+# if name == 'shape' :
+# print self.__class__,name,value
 
     def _listatts(self):
-        dic={}
-        for nm,val in self.__dict__.items():
-            if (nm[0]!='_' and not nm in self.__cdms_internals__) or nm in ['_FillValue']:
-                dic[nm]=val
+        dic = {}
+        for nm, val in self.__dict__.items():
+            if (nm[0] != '_' and not nm in self.__cdms_internals__) or nm in ['_FillValue']:
+                dic[nm] = val
             if nm == '_units':
-                dic['units']=val
+                dic['units'] = val
         return dic
-    def _setatts(self,value):
+
+    def _setatts(self, value):
         return
 
-    attributes = property(_listatts,_setatts)
-    
-        
-    def __init__(self, node = None):
-        if not hasattr(self,'___cdms_internals__'):
-            self.__dict__['___cdms_internals__']=[
-                '__cdms_internals__','___cdms_internals__',
-                '_node_','_obj_',
-                '_numericType_','_grid_','_bounds_',
-                'parent','attributes','shape','autoApiInfo']
-        self.attributes={}
+    attributes = property(_listatts, _setatts)
+
+    def __init__(self, node=None):
+        if not hasattr(self, '___cdms_internals__'):
+            self.__dict__['___cdms_internals__'] = [
+                '__cdms_internals__', '___cdms_internals__',
+                '_node_', '_obj_',
+                '_numericType_', '_grid_', '_bounds_',
+                'parent', 'attributes', 'shape', 'autoApiInfo']
+        self.attributes = {}
         self._node_ = node
         if node is not None:
-            # Build an attribute dictionary from the node, 
+            # Build an attribute dictionary from the node,
             # CDML datatype constraints
 
-            if hasattr(node,'datatype'):
+            if hasattr(node, 'datatype'):
                 parenttype = node.datatype
             else:
                 parenttype = None
             atts = node.getExternalDict()
             adict = self.__dict__
             for attname in atts.keys():
-                (attval,datatype)=atts[attname] # (XML value, datatype)
+                (attval, datatype) = atts[attname]  # (XML value, datatype)
                 constraint = node.extra.get(attname)
                 if constraint is not None:
-                    (scaletype,reqtype)=constraint # (CdScalar|CdArray, required type)
-                    if reqtype==CdFromObject:
+                    (scaletype,
+                     reqtype) = constraint  # (CdScalar|CdArray, required type)
+                    if reqtype == CdFromObject:
                         reqtype = parenttype
-                    if reqtype!=datatype and datatype==CdString and scaletype==CdScalar:
-                        if reqtype in (CdFloat,CdDouble):
+                    if reqtype != datatype and datatype == CdString and scaletype == CdScalar:
+                        if reqtype in (CdFloat, CdDouble):
                             try:
                                 attval = float(attval)
                             except:
-                                raise RuntimeError,"%s=%s must be a float"%(attname,attval)
-                        elif reqtype in (CdShort,CdInt,CdLong,CdInt64):
+                                raise RuntimeError(
+                                    "%s=%s must be a float" %
+                                    (attname, attval))
+                        elif reqtype in (CdShort, CdInt, CdLong, CdInt64):
                             try:
                                 attval = int(attval)
                             except:
-                                raise RuntimeError,"%s=%s must be an integer"%(attname,attval)
+                                raise RuntimeError(
+                                    "%s=%s must be an integer" %
+                                    (attname, attval))
                 adict[attname] = attval
                 self.attributes[attname] = attval
 
-
     def searchone(self, pattern, attname):
         """Return true if the attribute with name attname is a string
         attribute which contains the compiled regular expression pattern, or
         if attname is None and pattern matches at least one string
-        attribute. Return false if the attribute is not found or is not 
+        attribute. Return false if the attribute is not found or is not
         a string.
         :::
         Input:::
@@ -516,12 +573,12 @@ class CdmsObj (object):
         """
         if attname is None:
             for attval in self.attributes.values():
-                if isinstance(attval,basestring) and pattern.search(attval) is not None:
+                if isinstance(attval, basestring) and pattern.search(attval) is not None:
                     return 1
             return 0
-        elif self.attributes.has_key(attname):
+        elif attname in self.attributes:
             attval = self.attributes[attname]
-            return isinstance(attval,basestring) and pattern.search(attval) is not None
+            return isinstance(attval, basestring) and pattern.search(attval) is not None
         else:
             return 0
 
@@ -546,18 +603,19 @@ class CdmsObj (object):
         """
         if attname is None:
             for attval in self.attributes.values():
-                if isinstance(attval,basestring) and pattern.match(attval) is not None:
+                if isinstance(attval, basestring) and pattern.match(attval) is not None:
                     return 1
             return 0
-        elif self.attributes.has_key(attname):
+        elif attname in self.attributes:
             attval = self.attributes[attname]
-            return isinstance(attval,basestring) and pattern.match(attval) is not None
+            return isinstance(attval, basestring) and pattern.match(attval) is not None
         else:
             return 0
 
     # Search for a pattern in a string-valued attribute. If attribute is None,
-    # search all string attributes. If tag is not None, it must match the internal node tag.
-    def searchPattern(self,pattern,attribute,tag):
+    # search all string attributes. If tag is not None, it must match the
+    # internal node tag.
+    def searchPattern(self, pattern, attribute, tag):
         """
         Search for a pattern in a string-valued attribute. If attribute is None, search all string attributes. If tag is not None, it must match the internal node tag.
         :::
@@ -567,11 +625,11 @@ class CdmsObj (object):
         tag :: (str/None) (2) node tag
         :::
         Output:::
-        result :: (list) (0) 
+        result :: (list) (0)
         :::
         """
-        if tag is None or tag.lower()==self._node_.tag:
-            if self.searchone(pattern,attribute):
+        if tag is None or tag.lower() == self._node_.tag:
+            if self.searchone(pattern, attribute):
                 return [self]
             else:
                 return []
@@ -579,8 +637,9 @@ class CdmsObj (object):
             return []
 
     # Match a pattern in a string-valued attribute. If attribute is None,
-    # search all string attributes. If tag is not None, it must match the internal node tag.
-    def matchPattern(self,pattern,attribute,tag):
+    # search all string attributes. If tag is not None, it must match the
+    # internal node tag.
+    def matchPattern(self, pattern, attribute, tag):
         """
         Match for a pattern in a string-valued attribute. If attribute is None, search all string attributes. If tag is not None, it must match the internal node tag.
         :::
@@ -590,11 +649,11 @@ class CdmsObj (object):
         tag :: (str/None) (2) node tag
         :::
         Output:::
-        result :: (list) (0) 
+        result :: (list) (0)
         :::
         """
-        if tag is None or tag.lower()==self._node_.tag:
-            if self.matchone(pattern,attribute):
+        if tag is None or tag.lower() == self._node_.tag:
+            if self.matchone(pattern, attribute):
                 return [self]
             else:
                 return []
@@ -604,7 +663,7 @@ class CdmsObj (object):
     # Apply a truth-valued predicate. Return a list containing a single instance: [self]
     # if the predicate is true and either tag is None or matches the object node tag.
     # If the predicate returns false, return an empty list
-    def searchPredicate(self,predicate,tag):
+    def searchPredicate(self, predicate, tag):
         """
         Apply a truth-valued predicate. Return a list containing a single instance: [self] if the predicate is true and either tag is None or matches the object node tag. If the predicate returns false, return an empty list
         :::
@@ -613,12 +672,12 @@ class CdmsObj (object):
         tag :: (str/None) (1) node tag
         :::
         Output:::
-        result :: (list) (0) 
+        result :: (list) (0)
         :::
         """
-        if tag is None or tag.lower()==self._node_.tag:
+        if tag is None or tag.lower() == self._node_.tag:
             try:
-                if apply(predicate,(self,))==1:
+                if predicate(*(self,)) == 1:
                     result = [self]
             except:
                 result = []
@@ -626,7 +685,7 @@ class CdmsObj (object):
             result = []
         return result
 
-    def dump(self,path=None,format=1):
+    def dump(self, path=None, format=1):
         """ dump(self,path=None,format=1)
         Dump an XML representation of this object to a file.
         'path' is the result file name, None for standard output.
@@ -641,24 +700,21 @@ class CdmsObj (object):
         :::
         """
         if self._node_ is None:
-            raise CDMSError, "No tree node found"
-        self._node_.dump(path,format)
+            raise CDMSError("No tree node found")
+        self._node_.dump(path, format)
 
     def _getinternals(self):
         return self.___cdms_internals__
-    def _setinternals(self,value):
+
+    def _setinternals(self, value):
         self.___cdms_internals__ = value
-    __cdms_internals__ = property(_getinternals,_setinternals)
-#internattr.add_internal_attribute(CdmsObj)
+    __cdms_internals__ = property(_getinternals, _setinternals)
+# internattr.add_internal_attribute(CdmsObj)
 
 if __name__ == '__main__':
     x = CdmsObj(None)
     x.someatt = 1
     assert x.attributes['someatt'] == x.someatt
-    assert not x.attributes.has_key('_node')
+    assert '_node' not in x.attributes
     # need tests for the search routines...
     print "Test passed."
-
-
-
-
diff --git a/Packages/cdms2/Lib/cdurllib.py b/Packages/cdms2/Lib/cdurllib.py
index 6ae1848b3..eefccb114 100644
--- a/Packages/cdms2/Lib/cdurllib.py
+++ b/Packages/cdms2/Lib/cdurllib.py
@@ -1,13 +1,18 @@
 """Customized URLopener"""
 
-import urllib, getpass, socket, string, sys
+import urllib
+import getpass
+import socket
+import string
+import sys
 
 MAXFTPCACHE = 10        # Trim the ftp cache beyond this size
 
+
 class CDURLopener(urllib.URLopener):
 
     def __init__(self, proxies=None):
-        urllib.URLopener.__init__(self,proxies)
+        urllib.URLopener.__init__(self, proxies)
         self._userObject = None
 
     # Attach an object to be returned with callbacks
@@ -17,12 +22,15 @@ class CDURLopener(urllib.URLopener):
     # Use FTP protocol
     def open_ftp(self, url):
         host, path = urllib.splithost(url)
-        if not host: raise IOError, ('ftp error', 'no host given')
+        if not host:
+            raise IOError, ('ftp error', 'no host given')
         host, port = urllib.splitport(host)
         user, host = urllib.splituser(host)
         # if user: user, passwd = splitpasswd(user)
-        if user: passwd = getpass.getpass()
-        else: passwd = None
+        if user:
+            passwd = getpass.getpass()
+        else:
+            passwd = None
         host = urllib.unquote(host)
         user = urllib.unquote(user or '')
         passwd = urllib.unquote(passwd or '')
@@ -36,7 +44,8 @@ class CDURLopener(urllib.URLopener):
         path = urllib.unquote(path)
         dirs = string.splitfields(path, '/')
         dirs, file = dirs[:-1], dirs[-1]
-        if dirs and not dirs[0]: dirs = dirs[1:]
+        if dirs and not dirs[0]:
+            dirs = dirs[1:]
         key = (user, host, port, string.joinfields(dirs, '/'))
         # XXX thread unsafe!
         if len(self.ftpcache) > MAXFTPCACHE:
@@ -47,12 +56,14 @@ class CDURLopener(urllib.URLopener):
                     del self.ftpcache[k]
                     v.close()
         try:
-            if not self.ftpcache.has_key(key):
-                print 'Creating ftpwrapper: ',user,host,port,dirs
+            if key not in self.ftpcache:
+                print 'Creating ftpwrapper: ', user, host, port, dirs
                 self.ftpcache[key] = \
                     urllib.ftpwrapper(user, passwd, host, port, dirs)
-            if not file: type = 'D'
-            else: type = 'I'
+            if not file:
+                type = 'D'
+            else:
+                type = 'I'
             for attr in attrs:
                 attr, value = urllib.splitvalue(attr)
                 if string.lower(attr) == 'type' and \
@@ -60,18 +71,19 @@ class CDURLopener(urllib.URLopener):
                     type = string.upper(value)
             (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
             if retrlen is not None and retrlen >= 0:
-                import mimetools, StringIO
+                import mimetools
+                import StringIO
                 headers = mimetools.Message(StringIO.StringIO(
                     'Content-Length: %d\n' % retrlen))
             else:
                 headers = noheaders()
             return urllib.addinfourl(fp, headers, "ftp:" + url)
-        except urllib.ftperrors(), msg:
+        except urllib.ftperrors() as msg:
             raise IOError, ('ftp error', msg), sys.exc_info()[2]
 
     def retrieve(self, url, filename=None, reporthook=None, blocksize=262144):
         url = urllib.unwrap(url)
-        if self.tempcache and self.tempcache.has_key(url):
+        if self.tempcache and url in self.tempcache:
             return self.tempcache[url]
         type, url1 = urllib.splittype(url)
         if not filename and (not type or type == 'file'):
@@ -80,7 +92,7 @@ class CDURLopener(urllib.URLopener):
                 hdrs = fp.info()
                 del fp
                 return url2pathname(urllib.splithost(url1)[1]), hdrs
-            except IOError, msg:
+            except IOError as msg:
                 pass
         fp = self.open(url)
         headers = fp.info()
@@ -101,28 +113,28 @@ class CDURLopener(urllib.URLopener):
         size = -1
         blocknum = 1
         if reporthook:
-            if headers.has_key("content-length"):
+            if "content-length" in headers:
                 size = int(headers["Content-Length"])
             stayopen = reporthook(0, bs, size, self._userObject)
-            if stayopen==0:
+            if stayopen == 0:
                 raise KeyboardInterrupt
         bytesread = 0
         block = fp.read(bs)
         if reporthook:
             stayopen = reporthook(1, bs, size, self._userObject)
-            if stayopen==0:
+            if stayopen == 0:
                 raise KeyboardInterrupt
         while block:
             tfp.write(block)
             bytesread = bytesread + len(block)
-##             print blocknum, bytesread, size,
-##             if blocknum*blocksize!=bytesread:
-##                 print ' (*)'
-##             else:
-##                 print
+# print blocknum, bytesread, size,
+# if blocknum*blocksize!=bytesread:
+# print ' (*)'
+# else:
+# print
             if block and reporthook:
                 stayopen = reporthook(blocknum, bs, size, self._userObject)
-                if stayopen==0:
+                if stayopen == 0:
                     raise KeyboardInterrupt
             blocknum = blocknum + 1
             block = fp.read(bs)
@@ -132,23 +144,25 @@ class CDURLopener(urllib.URLopener):
         del tfp
         return result
 
+
 def sampleReportHook(blocknum, blocksize, size, userObj):
-    sizekb = size/1024
-    percent = min(100,int(100.0*float(blocknum*blocksize)/float(size)))
-    print "Read: %3d%% of %dK"%(percent,sizekb)
+    sizekb = size / 1024
+    percent = min(100, int(100.0 * float(blocknum * blocksize) / float(size)))
+    print "Read: %3d%% of %dK" % (percent, sizekb)
     return 1
 
 if __name__ == '__main__':
 
     import sys
-    if len(sys.argv)!=4:
+    if len(sys.argv) != 4:
         print 'Usage: cdurllib.py URL filename blocksize'
         sys.exit(1)
 
     url = sys.argv[1]
     filename = sys.argv[2]
     blocksize = int(sys.argv[3])
-    
+
     urlopener = CDURLopener()
-    fname, headers = urlopener.retrieve(url, filename, sampleReportHook, blocksize)
+    fname, headers = urlopener.retrieve(
+        url, filename, sampleReportHook, blocksize)
     print fname, 'written'
diff --git a/Packages/cdms2/Lib/cdurlparse.py b/Packages/cdms2/Lib/cdurlparse.py
index e743ebbc8..d88dd4087 100644
--- a/Packages/cdms2/Lib/cdurlparse.py
+++ b/Packages/cdms2/Lib/cdurlparse.py
@@ -9,25 +9,26 @@ from string import joinfields, splitfields, find, rfind
 
 # A classification of schemes ('' means apply by default)
 uses_relative = ['ftp', 'http', 'ldap', 'gopher', 'nntp', 'wais', 'file',
-		 'https', 'shttp',
-		 'prospero', '']
+                 'https', 'shttp',
+                 'prospero', '']
 uses_netloc = ['ftp', 'http', 'ldap', 'gopher', 'nntp', 'telnet', 'wais',
-	       'file',
-	       'https', 'shttp', 'snews',
-	       'prospero', '']
+               'file',
+               'https', 'shttp', 'snews',
+               'prospero', '']
 non_hierarchical = ['gopher', 'hdl', 'mailto', 'news', 'telnet', 'wais',
-		    'snews',
-		    ]
+                    'snews',
+                    ]
 uses_params = ['ftp', 'hdl', 'prospero', 'http', 'ldap',
-	       'https', 'shttp',
-	       '']
+               'https', 'shttp',
+               '']
 uses_query = ['http', 'ldap', 'wais',
-	      'https', 'shttp',
-	      'gopher',
-	      '']
-uses_fragment = ['ftp', 'hdl', 'http', 'ldap', 'gopher', 'news', 'nntp', 'wais',
-		 'https', 'shttp', 'snews',
-		 'file', 'prospero', '']
+              'https', 'shttp',
+              'gopher',
+              '']
+uses_fragment = [
+    'ftp', 'hdl', 'http', 'ldap', 'gopher', 'news', 'nntp', 'wais',
+                 'https', 'shttp', 'snews',
+                 'file', 'prospero', '']
 
 # Characters valid in scheme names
 scheme_chars = string.letters + string.digits + '+-.'
@@ -35,6 +36,7 @@ scheme_chars = string.letters + string.digits + '+-.'
 MAX_CACHE_SIZE = 20
 _parse_cache = {}
 
+
 def clear_cache():
     """Clear the parse cache."""
     global _parse_cache
@@ -46,134 +48,141 @@ def clear_cache():
 # Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
 # Note that we don't break the components up in smaller bits
 # (e.g. netloc is a single string) and we don't expand % escapes.
-def urlparse(url, scheme = '', allow_fragments = 1):
-	key = url, scheme, allow_fragments
-	cached = _parse_cache.get(key, None)
-	if cached:
-		return cached
-	if len(_parse_cache) >= MAX_CACHE_SIZE:	# avoid runaway growth
-	    clear_cache()
-	find = string.find
-	netloc = path = params = query = fragment = ''
-	i = find(url, ':')
-	if i > 0:
-		if url[:i] in ['http','ldap']: # optimize the common case
-			scheme = string.lower(url[:i])
-			url = url[i+1:]
-			if url[:2] == '//':
-				i = find(url, '/', 2)
-				if i < 0:
-					i = len(url)
-				netloc = url[2:i]
-				url = url[i:]
-			if allow_fragments:
-				i = string.rfind(url, '#')
-				if i >= 0:
-					fragment = url[i+1:]
-					url = url[:i]
-			i = find(url, '?')
-			if i >= 0:
-				query = url[i+1:]
-				url = url[:i]
-			i = find(url, ';')
-			if i >= 0:
-				params = url[i+1:]
-				url = url[:i]
-			tuple = scheme, netloc, url, params, query, fragment
-			_parse_cache[key] = tuple
-			return tuple
-		for c in url[:i]:
-			if c not in scheme_chars:
-				break
-		else:
-			scheme, url = string.lower(url[:i]), url[i+1:]
-	if scheme in uses_netloc:
-		if url[:2] == '//':
-			i = find(url, '/', 2)
-			if i < 0:
-				i = len(url)
-			netloc, url = url[2:i], url[i:]
-	if allow_fragments and scheme in uses_fragment:
-		i = string.rfind(url, '#')
-		if i >= 0:
-			url, fragment = url[:i], url[i+1:]
-	if scheme in uses_query:
-		i = find(url, '?')
-		if i >= 0:
-			url, query = url[:i], url[i+1:]
-	if scheme in uses_params:
-		i = find(url, ';')
-		if i >= 0:
-			url, params = url[:i], url[i+1:]
-	tuple = scheme, netloc, url, params, query, fragment
-	_parse_cache[key] = tuple
-	return tuple
+def urlparse(url, scheme='', allow_fragments=1):
+    key = url, scheme, allow_fragments
+    cached = _parse_cache.get(key, None)
+    if cached:
+        return cached
+    if len(_parse_cache) >= MAX_CACHE_SIZE:  # avoid runaway growth
+        clear_cache()
+    find = string.find
+    netloc = path = params = query = fragment = ''
+    i = find(url, ':')
+    if i > 0:
+        if url[:i] in ['http', 'ldap']:  # optimize the common case
+            scheme = string.lower(url[:i])
+            url = url[i + 1:]
+            if url[:2] == '//':
+                i = find(url, '/', 2)
+                if i < 0:
+                    i = len(url)
+                netloc = url[2:i]
+                url = url[i:]
+            if allow_fragments:
+                i = string.rfind(url, '#')
+                if i >= 0:
+                    fragment = url[i + 1:]
+                    url = url[:i]
+            i = find(url, '?')
+            if i >= 0:
+                query = url[i + 1:]
+                url = url[:i]
+            i = find(url, ';')
+            if i >= 0:
+                params = url[i + 1:]
+                url = url[:i]
+            tuple = scheme, netloc, url, params, query, fragment
+            _parse_cache[key] = tuple
+            return tuple
+        for c in url[:i]:
+            if c not in scheme_chars:
+                break
+        else:
+            scheme, url = string.lower(url[:i]), url[i + 1:]
+    if scheme in uses_netloc:
+        if url[:2] == '//':
+            i = find(url, '/', 2)
+            if i < 0:
+                i = len(url)
+            netloc, url = url[2:i], url[i:]
+    if allow_fragments and scheme in uses_fragment:
+        i = string.rfind(url, '#')
+        if i >= 0:
+            url, fragment = url[:i], url[i + 1:]
+    if scheme in uses_query:
+        i = find(url, '?')
+        if i >= 0:
+            url, query = url[:i], url[i + 1:]
+    if scheme in uses_params:
+        i = find(url, ';')
+        if i >= 0:
+            url, params = url[:i], url[i + 1:]
+    tuple = scheme, netloc, url, params, query, fragment
+    _parse_cache[key] = tuple
+    return tuple
 
 # Put a parsed URL back together again.  This may result in a slightly
 # different, but equivalent URL, if the URL that was parsed originally
 # had redundant delimiters, e.g. a ? with an empty query (the draft
 # states that these are equivalent).
-def urlunparse((scheme, netloc, url, params, query, fragment)):
-	if netloc or (scheme in uses_netloc and url[:2] == '//'):
-		if url[:1] != '/': url = '/' + url
-		url = '//' + (netloc or '') + url
-	if scheme:
-		url = scheme + ':' + url
-	if params:
-		url = url + ';' + params
-	if query:
-		url = url + '?' + query
-	if fragment:
-		url = url + '#' + fragment
-	return url
+
+
+def urlunparse(xxx_todo_changeme):
+    (scheme, netloc, url, params, query, fragment) = xxx_todo_changeme
+    if netloc or (scheme in uses_netloc and url[:2] == '//'):
+        if url[:1] != '/':
+            url = '/' + url
+        url = '//' + (netloc or '') + url
+    if scheme:
+        url = scheme + ':' + url
+    if params:
+        url = url + ';' + params
+    if query:
+        url = url + '?' + query
+    if fragment:
+        url = url + '#' + fragment
+    return url
 
 # Join a base URL and a possibly relative URL to form an absolute
 # interpretation of the latter.
-def urljoin(base, url, allow_fragments = 1):
-	if not base:
-		return url
-	bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
-		urlparse(base, '', allow_fragments)
-	scheme, netloc, path, params, query, fragment = \
-		urlparse(url, bscheme, allow_fragments)
-	if scheme != bscheme or scheme not in uses_relative:
-		return urlunparse((scheme, netloc, path,
-				   params, query, fragment))
-	if scheme in uses_netloc:
-		if netloc:
-			return urlunparse((scheme, netloc, path,
-					   params, query, fragment))
-		netloc = bnetloc
-	if path[:1] == '/':
-		return urlunparse((scheme, netloc, path,
-				   params, query, fragment))
-	if not path:
-		return urlunparse((scheme, netloc, bpath,
-				   params, query or bquery, fragment))
-	i = rfind(bpath, '/')
-	if i >= 0:
-		path = bpath[:i] + '/' + path
-	segments = splitfields(path, '/')
-	if segments[-1] == '.':
-		segments[-1] = ''
-	while '.' in segments:
-		segments.remove('.')
-	while 1:
-		i = 1
-		n = len(segments) - 1
-		while i < n:
-			if segments[i] == '..' and segments[i-1]:
-				del segments[i-1:i+1]
-				break
-			i = i+1
-		else:
-			break
-	if len(segments) == 2 and segments[1] == '..' and segments[0] == '':
-		segments[-1] = ''
-	elif len(segments) >= 2 and segments[-1] == '..':
-		segments[-2:] = ['']
-	return urlunparse((scheme, netloc, joinfields(segments, '/'),
-			   params, query, fragment))
+
+
+def urljoin(base, url, allow_fragments=1):
+    if not base:
+        return url
+    bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
+        urlparse(base, '', allow_fragments)
+    scheme, netloc, path, params, query, fragment = \
+        urlparse(url, bscheme, allow_fragments)
+    if scheme != bscheme or scheme not in uses_relative:
+        return urlunparse((scheme, netloc, path,
+                           params, query, fragment))
+    if scheme in uses_netloc:
+        if netloc:
+            return urlunparse((scheme, netloc, path,
+                               params, query, fragment))
+        netloc = bnetloc
+    if path[:1] == '/':
+        return urlunparse((scheme, netloc, path,
+                           params, query, fragment))
+    if not path:
+        return urlunparse((scheme, netloc, bpath,
+                           params, query or bquery, fragment))
+    i = rfind(bpath, '/')
+    if i >= 0:
+        path = bpath[:i] + '/' + path
+    segments = splitfields(path, '/')
+    if segments[-1] == '.':
+        segments[-1] = ''
+    while '.' in segments:
+        segments.remove('.')
+    while True:
+        i = 1
+        n = len(segments) - 1
+        while i < n:
+            if segments[i] == '..' and segments[i - 1]:
+                del segments[i - 1:i + 1]
+                break
+            i = i + 1
+        else:
+            break
+    if len(segments) == 2 and segments[1] == '..' and segments[0] == '':
+        segments[-1] = ''
+    elif len(segments) >= 2 and segments[-1] == '..':
+        segments[-2:] = ['']
+    return urlunparse((scheme, netloc, joinfields(segments, '/'),
+                       params, query, fragment))
+
 
 def urldefrag(url):
     """Removes any existing fragment from URL.
@@ -222,35 +231,37 @@ test_input = """
 """
 # XXX The result for //g is actually http://g/; is this a problem?
 
+
 def test():
-	import sys
-	base = ''
-	if sys.argv[1:]:
-		fn = sys.argv[1]
-		if fn == '-':
-			fp = sys.stdin
-		else:
-			fp = open(fn)
-	else:
-		import StringIO
-		fp = StringIO.StringIO(test_input)
-	while 1:
-		line = fp.readline()
-		if not line: break
-		words = string.split(line)
-		if not words:
-			continue
-		url = words[0]
-		parts = urlparse(url)
-		print '%-10s : %s' % (url, parts)
-		abs = urljoin(base, url)
-		if not base:
-			base = abs
-		wrapped = '<URL:%s>' % abs
-		print '%-10s = %s' % (url, wrapped)
-		if len(words) == 3 and words[1] == '=':
-			if wrapped != words[2]:
-				print 'EXPECTED', words[2], '!!!!!!!!!!'
+    import sys
+    base = ''
+    if sys.argv[1:]:
+        fn = sys.argv[1]
+        if fn == '-':
+            fp = sys.stdin
+        else:
+            fp = open(fn)
+    else:
+        import StringIO
+        fp = StringIO.StringIO(test_input)
+    while True:
+        line = fp.readline()
+        if not line:
+            break
+        words = string.split(line)
+        if not words:
+            continue
+        url = words[0]
+        parts = urlparse(url)
+        print '%-10s : %s' % (url, parts)
+        abs = urljoin(base, url)
+        if not base:
+            base = abs
+        wrapped = '<URL:%s>' % abs
+        print '%-10s = %s' % (url, wrapped)
+        if len(words) == 3 and words[1] == '=':
+            if wrapped != words[2]:
+                print 'EXPECTED', words[2], '!!!!!!!!!!'
 
 if __name__ == '__main__':
-	test()
+    test()
diff --git a/Packages/cdms2/Lib/cdxmllib.py b/Packages/cdms2/Lib/cdxmllib.py
index 7c1aff308..70b91d9ba 100644
--- a/Packages/cdms2/Lib/cdxmllib.py
+++ b/Packages/cdms2/Lib/cdxmllib.py
@@ -11,6 +11,7 @@ import re
 
 version = '0.3'
 
+
 class Error(RuntimeError):
     pass
 
@@ -20,7 +21,7 @@ _S = '[ \t\r\n]+'                       # white space
 _opS = '[ \t\r\n]*'                     # optional white space
 _Name = '[a-zA-Z_:][-a-zA-Z0-9._:]*'    # valid XML name
 _QStr = "(?:'[^']*'|\"[^\"]*\")"        # quoted XML string
-illegal = re.compile('[^\t\r\n -\176\240-\377]') # illegal chars in content
+illegal = re.compile('[^\t\r\n -\176\240-\377]')  # illegal chars in content
 interesting = re.compile('[]&<]')
 
 amp = re.compile('&')
@@ -33,37 +34,37 @@ newline = re.compile('\n')
 attrfind = re.compile(
     _S + '(?P<name>' + _Name + ')'
     '(' + _opS + '=' + _opS +
-    '(?P<value>'+_QStr+'|[-a-zA-Z0-9.:+*%?!\(\)_#=~]+))?')
+    '(?P<value>' + _QStr + '|[-a-zA-Z0-9.:+*%?!\(\)_#=~]+))?')
 starttagopen = re.compile('<' + _Name)
 starttagend = re.compile(_opS + '(?P<slash>/?)>')
-starttagmatch = re.compile('<(?P<tagname>'+_Name+')'
-                      '(?P<attrs>(?:'+attrfind.pattern+')*)'+
-                      starttagend.pattern)
+starttagmatch = re.compile('<(?P<tagname>' + _Name + ')'
+                           '(?P<attrs>(?:' + attrfind.pattern + ')*)' +
+                           starttagend.pattern)
 endtagopen = re.compile('</')
 endbracket = re.compile(_opS + '>')
-endbracketfind = re.compile('(?:[^>\'"]|'+_QStr+')*>')
+endbracketfind = re.compile('(?:[^>\'"]|' + _QStr + ')*>')
 tagfind = re.compile(_Name)
 cdataopen = re.compile(r'<!\[CDATA\[')
 cdataclose = re.compile(r'\]\]>')
 # this matches one of the following:
 # SYSTEM SystemLiteral
 # PUBLIC PubidLiteral SystemLiteral
-_SystemLiteral = '(?P<%s>'+_QStr+')'
+_SystemLiteral = '(?P<%s>' + _QStr + ')'
 _PublicLiteral = '(?P<%s>"[-\'\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*"|' \
-                        "'[-\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*')"
+    "'[-\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*')"
 _ExternalId = '(?:SYSTEM|' \
-                 'PUBLIC'+_S+_PublicLiteral%'pubid'+ \
-              ')'+_S+_SystemLiteral%'syslit'
-doctype = re.compile('<!DOCTYPE'+_S+'(?P<name>'+_Name+')'
-                     '(?:'+_S+_ExternalId+')?'+_opS)
-xmldecl = re.compile('<\?xml'+_S+
-                     'version'+_opS+'='+_opS+'(?P<version>'+_QStr+')'+
-                     '(?:'+_S+'encoding'+_opS+'='+_opS+
-                        "(?P<encoding>'[A-Za-z][-A-Za-z0-9._]*'|"
-                        '"[A-Za-z][-A-Za-z0-9._]*"))?'
-                     '(?:'+_S+'standalone'+_opS+'='+_opS+
-                        '(?P<standalone>\'(?:yes|no)\'|"(?:yes|no)"))?'+
-                     _opS+'\?>')
+    'PUBLIC' + _S + _PublicLiteral % 'pubid' + \
+              ')' + _S + _SystemLiteral % 'syslit'
+doctype = re.compile('<!DOCTYPE' + _S + '(?P<name>' + _Name + ')'
+                     '(?:' + _S + _ExternalId + ')?' + _opS)
+xmldecl = re.compile('<\?xml' + _S +
+                     'version' + _opS + '=' + _opS + '(?P<version>' + _QStr + ')' +
+                     '(?:' + _S + 'encoding' + _opS + '=' + _opS +
+                     "(?P<encoding>'[A-Za-z][-A-Za-z0-9._]*'|"
+                     '"[A-Za-z][-A-Za-z0-9._]*"))?'
+                     '(?:' + _S + 'standalone' + _opS + '=' + _opS +
+                     '(?P<standalone>\'(?:yes|no)\'|"(?:yes|no)"))?' +
+                     _opS + '\?>')
 procopen = re.compile(r'<\?(?P<proc>' + _Name + ')' + _opS)
 procclose = re.compile(_opS + r'\?>')
 commentopen = re.compile('<!--')
@@ -74,10 +75,10 @@ attrtrans = string.maketrans(' \r\n\t', '    ')
 # definitions for XML namespaces
 _NCName = '[a-zA-Z_][-a-zA-Z0-9._]*'    # XML Name, minus the ":"
 ncname = re.compile(_NCName + '$')
-qname = re.compile('(?:(?P<prefix>' + _NCName + '):)?' # optional prefix
+qname = re.compile('(?:(?P<prefix>' + _NCName + '):)?'  # optional prefix
                    '(?P<local>' + _NCName + ')$')
 
-xmlns = re.compile('xmlns(?::(?P<ncname>'+_NCName+'))?$')
+xmlns = re.compile('xmlns(?::(?P<ncname>' + _NCName + '))?$')
 
 # XML parser base class -- find tags and call handler functions.
 # Usage: p = XMLParser(); p.feed(data); ...; p.close().
@@ -87,6 +88,7 @@ xmlns = re.compile('xmlns(?::(?P<ncname>'+_NCName+'))?$')
 # parser by calling self.handle_data() with some data as argument (the
 # data may be split up in arbitrary chunks).
 
+
 class XMLParser:
     attributes = {}                     # default, to be overridden
     elements = {}                       # default, to be overridden
@@ -102,15 +104,18 @@ class XMLParser:
     def __init__(self, **kw):
         self.__fixed = 0
         if 'accept_unquoted_attributes' in kw:
-            self.__accept_unquoted_attributes = kw['accept_unquoted_attributes']
+            self.__accept_unquoted_attributes = kw[
+                'accept_unquoted_attributes']
         if 'accept_missing_endtag_name' in kw:
-            self.__accept_missing_endtag_name = kw['accept_missing_endtag_name']
+            self.__accept_missing_endtag_name = kw[
+                'accept_missing_endtag_name']
         if 'map_case' in kw:
             self.__map_case = kw['map_case']
         if 'accept_utf8' in kw:
             self.__accept_utf8 = kw['accept_utf8']
         if 'translate_attribute_references' in kw:
-            self.__translate_attribute_references = kw['translate_attribute_references']
+            self.__translate_attribute_references = kw[
+                'translate_attribute_references']
         self.reset()
 
     def __fixelements(self):
@@ -148,7 +153,7 @@ class XMLParser:
         self.__seen_doctype = None
         self.__seen_starttag = 0
         self.__use_namespaces = 0
-        self.__namespaces = {'xml':None}   # xml is implicitly declared
+        self.__namespaces = {'xml': None}   # xml is implicitly declared
         # backward compatibility hack: if elements not overridden,
         # fill it in ourselves
         if self.elements is XMLParser.elements:
@@ -179,11 +184,11 @@ class XMLParser:
             del self.elements
 
     # Interface -- translate references
-    def translate_references(self, data, all = 1):
+    def translate_references(self, data, all=1):
         if not self.__translate_attribute_references:
             return data
         i = 0
-        while 1:
+        while True:
             res = amp.search(data, i)
             if res is None:
                 return data
@@ -191,7 +196,7 @@ class XMLParser:
             res = ref.match(data, s)
             if res is None:
                 self.syntax_error("bogus `&'")
-                i = s+1
+                i = s + 1
                 continue
             i = res.end(0)
             str = res.group(1)
@@ -203,21 +208,23 @@ class XMLParser:
                     str = chr(int(str[1:]))
                 if data[i - 1] != ';':
                     self.syntax_error("`;' missing after char reference")
-                    i = i-1
+                    i = i - 1
             elif all:
                 if str in self.entitydefs:
                     str = self.entitydefs[str]
                     rescan = 1
                 elif data[i - 1] != ';':
                     self.syntax_error("bogus `&'")
-                    i = s + 1 # just past the &
+                    i = s + 1  # just past the &
                     continue
                 else:
-                    self.syntax_error("reference to unknown entity `&%s;'" % str)
+                    self.syntax_error(
+                        "reference to unknown entity `&%s;'" %
+                        str)
                     str = '&' + str + ';'
             elif data[i - 1] != ';':
                 self.syntax_error("bogus `&'")
-                i = s + 1 # just past the &
+                i = s + 1  # just past the &
                 continue
 
             # when we get here, str contains the translated text and i points
@@ -268,63 +275,72 @@ class XMLParser:
                 self.handle_data(data)
                 self.lineno = self.lineno + data.count('\n')
             i = j
-            if i == n: break
+            if i == n:
+                break
             if rawdata[i] == '<':
                 if starttagopen.match(rawdata, i):
                     if self.literal:
                         data = rawdata[i]
                         self.handle_data(data)
                         self.lineno = self.lineno + data.count('\n')
-                        i = i+1
+                        i = i + 1
                         continue
                     k = self.parse_starttag(i)
-                    if k < 0: break
+                    if k < 0:
+                        break
                     self.__seen_starttag = 1
                     self.lineno = self.lineno + rawdata[i:k].count('\n')
                     i = k
                     continue
                 if endtagopen.match(rawdata, i):
                     k = self.parse_endtag(i)
-                    if k < 0: break
+                    if k < 0:
+                        break
                     self.lineno = self.lineno + rawdata[i:k].count('\n')
-                    i =  k
+                    i = k
                     continue
                 if commentopen.match(rawdata, i):
                     if self.literal:
                         data = rawdata[i]
                         self.handle_data(data)
                         self.lineno = self.lineno + data.count('\n')
-                        i = i+1
+                        i = i + 1
                         continue
                     k = self.parse_comment(i)
-                    if k < 0: break
+                    if k < 0:
+                        break
                     self.lineno = self.lineno + rawdata[i:k].count('\n')
                     i = k
                     continue
                 if cdataopen.match(rawdata, i):
                     k = self.parse_cdata(i)
-                    if k < 0: break
+                    if k < 0:
+                        break
                     self.lineno = self.lineno + rawdata[i:k].count('\n')
                     i = k
                     continue
                 res = xmldecl.match(rawdata, i)
                 if res:
                     if not self.__at_start:
-                        self.syntax_error("<?xml?> declaration not at start of document")
+                        self.syntax_error(
+                            "<?xml?> declaration not at start of document")
                     version, encoding, standalone = res.group('version',
                                                               'encoding',
                                                               'standalone')
                     if version[1:-1] != '1.0':
                         raise Error('only XML version 1.0 supported')
-                    if encoding: encoding = encoding[1:-1]
-                    if standalone: standalone = standalone[1:-1]
+                    if encoding:
+                        encoding = encoding[1:-1]
+                    if standalone:
+                        standalone = standalone[1:-1]
                     self.handle_xml(encoding, standalone)
                     i = res.end(0)
                     continue
                 res = procopen.match(rawdata, i)
                 if res:
                     k = self.parse_proc(i)
-                    if k < 0: break
+                    if k < 0:
+                        break
                     self.lineno = self.lineno + rawdata[i:k].count('\n')
                     i = k
                     continue
@@ -334,14 +350,16 @@ class XMLParser:
                         data = rawdata[i]
                         self.handle_data(data)
                         self.lineno = self.lineno + data.count('\n')
-                        i = i+1
+                        i = i + 1
                         continue
                     if self.__seen_doctype:
                         self.syntax_error('multiple DOCTYPE elements')
                     if self.__seen_starttag:
-                        self.syntax_error('DOCTYPE not at beginning of document')
+                        self.syntax_error(
+                            'DOCTYPE not at beginning of document')
                     k = self.parse_doctype(res)
-                    if k < 0: break
+                    if k < 0:
+                        break
                     self.__seen_doctype = res.group('name')
                     if self.__map_case:
                         self.__seen_doctype = self.__seen_doctype.lower()
@@ -352,14 +370,14 @@ class XMLParser:
                 if self.literal:
                     data = rawdata[i]
                     self.handle_data(data)
-                    i = i+1
+                    i = i + 1
                     continue
                 res = charref.match(rawdata, i)
                 if res is not None:
                     i = res.end(0)
-                    if rawdata[i-1] != ';':
+                    if rawdata[i - 1] != ';':
                         self.syntax_error("`;' missing in charref")
-                        i = i-1
+                        i = i - 1
                     if not self.stack:
                         self.syntax_error('data not in content')
                     self.handle_charref(res.group('char')[:-1])
@@ -368,14 +386,15 @@ class XMLParser:
                 res = entityref.match(rawdata, i)
                 if res is not None:
                     i = res.end(0)
-                    if rawdata[i-1] != ';':
+                    if rawdata[i - 1] != ';':
                         self.syntax_error("`;' missing in entityref")
-                        i = i-1
+                        i = i - 1
                     name = res.group('name')
                     if self.__map_case:
                         name = name.lower()
                     if name in self.entitydefs:
-                        self.rawdata = rawdata = rawdata[:res.start(0)] + self.entitydefs[name] + rawdata[i:]
+                        self.rawdata = rawdata = rawdata[
+                            :res.start(0)] + self.entitydefs[name] + rawdata[i:]
                         n = len(rawdata)
                         i = res.start(0)
                     else:
@@ -386,14 +405,14 @@ class XMLParser:
                 if self.literal:
                     data = rawdata[i]
                     self.handle_data(data)
-                    i = i+1
+                    i = i + 1
                     continue
-                if n-i < 3:
+                if n - i < 3:
                     break
                 if cdataclose.match(rawdata, i):
                     self.syntax_error("bogus `]]>'")
                 self.handle_data(rawdata[i])
-                i = i+1
+                i = i + 1
                 continue
             else:
                 raise Error('neither < nor & ??')
@@ -410,7 +429,7 @@ class XMLParser:
                 self.syntax_error('illegal character in content')
             self.handle_data(data)
             self.lineno = self.lineno + data.count('\n')
-            self.rawdata = rawdata[i+1:]
+            self.rawdata = rawdata[i + 1:]
             return self.goahead(end)
         self.rawdata = rawdata[i:]
         if end:
@@ -424,19 +443,19 @@ class XMLParser:
     # Internal -- parse comment, return length or -1 if not terminated
     def parse_comment(self, i):
         rawdata = self.rawdata
-        if rawdata[i:i+4] != '<!--':
+        if rawdata[i:i + 4] != '<!--':
             raise Error('unexpected call to handle_comment')
-        res = commentclose.search(rawdata, i+4)
+        res = commentclose.search(rawdata, i + 4)
         if res is None:
             return -1
-        if doubledash.search(rawdata, i+4, res.start(0)):
+        if doubledash.search(rawdata, i + 4, res.start(0)):
             self.syntax_error("`--' inside comment")
-        if rawdata[res.start(0)-1] == '-':
+        if rawdata[res.start(0) - 1] == '-':
             self.syntax_error('comment cannot end in three dashes')
         if not self.__accept_utf8 and \
-           illegal.search(rawdata, i+4, res.start(0)):
+           illegal.search(rawdata, i + 4, res.start(0)):
             self.syntax_error('illegal character in comment')
-        self.handle_comment(rawdata[i+4: res.start(0)])
+        self.handle_comment(rawdata[i + 4: res.start(0)])
         return res.end(0)
 
     # Internal -- handle DOCTYPE tag, return length or -1 if not terminated
@@ -449,14 +468,15 @@ class XMLParser:
         pubid, syslit = res.group('pubid', 'syslit')
         if pubid is not None:
             pubid = pubid[1:-1]         # remove quotes
-            pubid = ' '.join(pubid.split()) # normalize
-        if syslit is not None: syslit = syslit[1:-1] # remove quotes
+            pubid = ' '.join(pubid.split())  # normalize
+        if syslit is not None:
+            syslit = syslit[1:-1]  # remove quotes
         j = k = res.end(0)
         if k >= n:
             return -1
         if rawdata[k] == '[':
             level = 0
-            k = k+1
+            k = k + 1
             dq = sq = 0
             while k < n:
                 c = rawdata[k]
@@ -467,10 +487,10 @@ class XMLParser:
                 elif sq or dq:
                     pass
                 elif level <= 0 and c == ']':
-                    res = endbracket.match(rawdata, k+1)
+                    res = endbracket.match(rawdata, k + 1)
                     if res is None:
                         return -1
-                    self.handle_doctype(name, pubid, syslit, rawdata[j+1:k])
+                    self.handle_doctype(name, pubid, syslit, rawdata[j + 1:k])
                     return res.end(0)
                 elif c == '<':
                     level = level + 1
@@ -478,7 +498,7 @@ class XMLParser:
                     level = level - 1
                     if level < 0:
                         self.syntax_error("bogus `>' in DOCTYPE")
-                k = k+1
+                k = k + 1
         res = endbracketfind.match(rawdata, k)
         if res is None:
             return -1
@@ -490,30 +510,31 @@ class XMLParser:
     # Internal -- handle CDATA tag, return length or -1 if not terminated
     def parse_cdata(self, i):
         rawdata = self.rawdata
-        if rawdata[i:i+9] != '<![CDATA[':
+        if rawdata[i:i + 9] != '<![CDATA[':
             raise Error('unexpected call to parse_cdata')
-        res = cdataclose.search(rawdata, i+9)
+        res = cdataclose.search(rawdata, i + 9)
         if res is None:
             return -1
         if not self.__accept_utf8 and \
-           illegal.search(rawdata, i+9, res.start(0)):
+           illegal.search(rawdata, i + 9, res.start(0)):
             self.syntax_error('illegal character in CDATA')
         if not self.stack:
             self.syntax_error('CDATA not in content')
-        self.handle_cdata(rawdata[i+9:res.start(0)])
+        self.handle_cdata(rawdata[i + 9:res.start(0)])
         return res.end(0)
 
-    __xml_namespace_attributes = {'ns':None, 'src':None, 'prefix':None}
+    __xml_namespace_attributes = {'ns': None, 'src': None, 'prefix': None}
     # Internal -- handle a processing instruction tag
+
     def parse_proc(self, i):
         rawdata = self.rawdata
         end = procclose.search(rawdata, i)
         if end is None:
             return -1
         j = end.start(0)
-        if not self.__accept_utf8 and illegal.search(rawdata, i+2, j):
+        if not self.__accept_utf8 and illegal.search(rawdata, i + 2, j):
             self.syntax_error('illegal character in processing instruction')
-        res = tagfind.match(rawdata, i+2)
+        res = tagfind.match(rawdata, i + 2)
         if res is None:
             raise Error('unexpected call to parse_proc')
         k = res.end(0)
@@ -527,13 +548,17 @@ class XMLParser:
             # this must come after the <?xml?> declaration (if any)
             # and before the <!DOCTYPE> (if any).
             if self.__seen_doctype or self.__seen_starttag:
-                self.syntax_error('xml:namespace declaration too late in document')
+                self.syntax_error(
+                    'xml:namespace declaration too late in document')
             attrdict, namespace, k = self.parse_attributes(name, k, j)
             if namespace:
-                self.syntax_error('namespace declaration inside namespace declaration')
+                self.syntax_error(
+                    'namespace declaration inside namespace declaration')
             for attrname in attrdict.keys():
                 if not attrname in self.__xml_namespace_attributes:
-                    self.syntax_error("unknown attribute `%s' in xml:namespace tag" % attrname)
+                    self.syntax_error(
+                        "unknown attribute `%s' in xml:namespace tag" %
+                        attrname)
             if not 'ns' in attrdict or not 'prefix' in attrdict:
                 self.syntax_error('xml:namespace without required attributes')
             prefix = attrdict.get('prefix')
@@ -563,10 +588,12 @@ class XMLParser:
                 attrname = attrname.lower()
             i = res.end(0)
             if attrvalue is None:
-                self.syntax_error("no value specified for attribute `%s'" % attrname)
+                self.syntax_error(
+                    "no value specified for attribute `%s'" %
+                    attrname)
                 attrvalue = attrname
             elif attrvalue[:1] == "'" == attrvalue[-1:] or \
-                 attrvalue[:1] == '"' == attrvalue[-1:]:
+                    attrvalue[:1] == '"' == attrvalue[-1:]:
                 attrvalue = attrvalue[1:-1]
             elif not self.__accept_unquoted_attributes:
                 self.syntax_error("attribute `%s' value not quoted" % attrname)
@@ -576,7 +603,7 @@ class XMLParser:
                 ncname = res.group('ncname')
                 namespace[ncname or ''] = attrvalue or None
                 if not self.__use_namespaces:
-                    self.__use_namespaces = len(self.stack)+1
+                    self.__use_namespaces = len(self.stack) + 1
                 continue
             if '<' in attrvalue:
                 self.syntax_error("`<' illegal in attribute value")
@@ -590,7 +617,7 @@ class XMLParser:
     def parse_starttag(self, i):
         rawdata = self.rawdata
         # i points to start of tag
-        end = endbracketfind.match(rawdata, i+1)
+        end = endbracketfind.match(rawdata, i + 1)
         if end is None:
             return -1
         tag = starttagmatch.match(rawdata, i)
@@ -625,10 +652,11 @@ class XMLParser:
             if ns is not None:
                 nstag = ns + ' ' + nstag
             elif prefix != '':
-                nstag = prefix + ':' + nstag # undo split
+                nstag = prefix + ':' + nstag  # undo split
             self.stack[-1] = tagname, nsdict, nstag
         # translate namespace of attributes
-        attrnamemap = {} # map from new name to old name (used for error reporting)
+        attrnamemap = {}
+            # map from new name to old name (used for error reporting)
         for key in attrdict.keys():
             attrnamemap[key] = key
         if self.__use_namespaces:
@@ -661,7 +689,9 @@ class XMLParser:
         if attributes is not None:
             for key in attrdict.keys():
                 if not key in attributes:
-                    self.syntax_error("unknown attribute `%s' in tag `%s'" % (attrnamemap[key], tagname))
+                    self.syntax_error(
+                        "unknown attribute `%s' in tag `%s'" %
+                        (attrnamemap[key], tagname))
             for key, val in attributes.items():
                 if val is not None and not key in attrdict:
                     attrdict[key] = val
@@ -674,18 +704,18 @@ class XMLParser:
     # Internal -- parse endtag
     def parse_endtag(self, i):
         rawdata = self.rawdata
-        end = endbracketfind.match(rawdata, i+1)
+        end = endbracketfind.match(rawdata, i + 1)
         if end is None:
             return -1
-        res = tagfind.match(rawdata, i+2)
+        res = tagfind.match(rawdata, i + 2)
         if res is None:
             if self.literal:
                 self.handle_data(rawdata[i])
-                return i+1
+                return i + 1
             if not self.__accept_missing_endtag_name:
                 self.syntax_error('no name specified in end tag')
             tag = self.stack[-1][0]
-            k = i+2
+            k = i + 2
         else:
             tag = res.group(0)
             if self.__map_case:
@@ -693,7 +723,7 @@ class XMLParser:
             if self.literal:
                 if not self.stack or tag != self.stack[-1][0]:
                     self.handle_data(rawdata[i])
-                    return i+1
+                    return i + 1
             k = res.end(0)
         if endbracket.match(rawdata, k) is None:
             self.syntax_error('garbage in end tag')
@@ -726,7 +756,9 @@ class XMLParser:
                 return
         while len(self.stack) > found:
             if found < len(self.stack) - 1:
-                self.syntax_error('missing close tag for %s' % self.stack[-1][2])
+                self.syntax_error(
+                    'missing close tag for %s' %
+                    self.stack[-1][2])
             nstag = self.stack[-1][2]
             method = self.elements.get(nstag, (None, None))[1]
             if method is not None:
@@ -798,8 +830,11 @@ class XMLParser:
 
     # To be overridden -- handlers for unknown objects
     def unknown_starttag(self, tag, attrs): pass
+
     def unknown_endtag(self, tag): pass
+
     def unknown_charref(self, ref): pass
+
     def unknown_entityref(self, name):
         self.syntax_error("reference to unknown entity `&%s;'" % name)
 
@@ -812,34 +847,34 @@ class TestXMLParser(XMLParser):
 
     def handle_xml(self, encoding, standalone):
         self.flush()
-        print 'xml: encoding =',encoding,'standalone =',standalone
+        print 'xml: encoding =', encoding, 'standalone =', standalone
 
     def handle_doctype(self, tag, pubid, syslit, data):
         self.flush()
-        print 'DOCTYPE:',tag, `data`
+        print 'DOCTYPE:', tag, repr(data)
 
     def handle_data(self, data):
         self.testdata = self.testdata + data
-        if len(`self.testdata`) >= 70:
+        if len(repr(self.testdata)) >= 70:
             self.flush()
 
     def flush(self):
         data = self.testdata
         if data:
             self.testdata = ""
-            print 'data:', `data`
+            print 'data:', repr(data)
 
     def handle_cdata(self, data):
         self.flush()
-        print 'cdata:', `data`
+        print 'cdata:', repr(data)
 
     def handle_proc(self, name, data):
         self.flush()
-        print 'processing:',name,`data`
+        print 'processing:', name, repr(data)
 
     def handle_comment(self, data):
         self.flush()
-        r = `data`
+        r = repr(data)
         if len(r) > 68:
             r = r[:32] + '...' + r[-32:]
         print 'comment:', r
@@ -873,8 +908,10 @@ class TestXMLParser(XMLParser):
         XMLParser.close(self)
         self.flush()
 
-def test(args = None):
-    import sys, getopt
+
+def test(args=None):
+    import sys
+    import getopt
     from time import time
 
     if not args:
@@ -899,7 +936,7 @@ def test(args = None):
     else:
         try:
             f = open(file, 'r')
-        except IOError, msg:
+        except IOError as msg:
             print file, ":", msg
             sys.exit(1)
 
@@ -917,15 +954,15 @@ def test(args = None):
             for c in data:
                 x.feed(c)
             x.close()
-    except Error, msg:
+    except Error as msg:
         t1 = time()
         print msg
         if do_time:
-            print 'total time: %g' % (t1-t0)
+            print 'total time: %g' % (t1 - t0)
         sys.exit(1)
     t1 = time()
     if do_time:
-        print 'total time: %g' % (t1-t0)
+        print 'total time: %g' % (t1 - t0)
 
 
 if __name__ == '__main__':
diff --git a/Packages/cdms2/Lib/convention.py b/Packages/cdms2/Lib/convention.py
index 0ed475877..43086d8e4 100644
--- a/Packages/cdms2/Lib/convention.py
+++ b/Packages/cdms2/Lib/convention.py
@@ -1,6 +1,6 @@
 """ metadata conventions """
 
-from error import CDMSError
+from .error import CDMSError
 from UserList import UserList
 
 # On in order to turn off some warnings
@@ -8,13 +8,18 @@ WITH_GRIDSPEC_SUPPORT = True
 
 MethodNotImplemented = "Method not yet implemented"
 
+
 class AliasList (UserList):
+
     def __init__(self, alist):
-        UserList.__init__(self,alist)
-    def __setitem__ (self, i, value):
+        UserList.__init__(self, alist)
+
+    def __setitem__(self, i, value):
         self.data[i] = value.lower()
+
     def __setslice(self, i, j, values):
         self.data[i:j] = map(lambda x: x.lower(), values)
+
     def append(self, value):
         self.data.append(value.lower())
 
@@ -24,16 +29,17 @@ latitude_aliases = AliasList([])
 time_aliases = AliasList([])
 forecast_aliases = AliasList([])
 
+
 class AbstractConvention:
 
     def getAxisIds(self, vardict):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def getAxisAuxIds(self, vardict, axiskeys):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def getDsetnodeAuxAxisIds(self, dsetnode):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def axisIsLatitude(self, axis):
         id = axis.id.lower()
@@ -49,7 +55,7 @@ class AbstractConvention:
 
         for obj in [d[0] for d in var.getDomain()]:
             if self.axisIsLatitude(obj):
-                if nlat==0:
+                if nlat == 0:
                     lat = obj
                 nlat += 1
         return (lat, nlat)
@@ -59,11 +65,12 @@ class AbstractConvention:
         nlon = 0
         for obj in [d[0] for d in var.getDomain()]:
             if self.axisIsLongitude(obj):
-                if nlon==0:
+                if nlon == 0:
                     lon = obj
                 nlon += 1
         return (lon, nlon)
 
+
 class NUGConvention(AbstractConvention):
 
     def __init__(self, version=None):
@@ -74,18 +81,20 @@ class NUGConvention(AbstractConvention):
         result = []
         for name in vardict.keys():
             dimensions = vardict[name].dimensions
-            if len(dimensions)==1 and (name in dimensions):
+            if len(dimensions) == 1 and (name in dimensions):
                 result.append(name)
         return result
 
     def getAxisAuxIds(self, vardict, axiskeys):
         return []
 
+
 class COARDSConvention(NUGConvention):
 
     def __init__(self, version=None):
         NUGConvention.__init__(self, version)
 
+
 class CFConvention(COARDSConvention):
 
     current = 'CF-1.0'
@@ -111,11 +120,11 @@ class CFConvention(COARDSConvention):
                 # Note: not everything referenced by .coordinates attribute is
                 # in fact a coordinate axis, e.g., scalar coordinates
                 if not WITH_GRIDSPEC_SUPPORT:
-                    print 'Warning: coordinate attribute points to non-existent variable: %s'%key
+                    print 'Warning: coordinate attribute points to non-existent variable: %s' % key
                 del coorddict[key]
                 continue
             # Omit scalar dimensions, and dimensions greater than 2-D
-            if len(coord.shape) not in [1,2]:
+            if len(coord.shape) not in [1, 2]:
                 del coorddict[key]
         return coorddict.keys()
 
@@ -129,12 +138,13 @@ class CFConvention(COARDSConvention):
                 coordnames = coordnames.split()
                 for item in coordnames:
                     # Don't include if already a 1D coordinate axis.
-                    if dsetdict.has_key(item) and dsetdict[item].tag=='axis':
+                    if item in dsetdict and dsetdict[item].tag == 'axis':
                         continue
                     # It's not an axis node, so must be a variable, so getDomain is defined.
-                    # Check the rank, don't include if not 1D or 2D (e.g., scalar coordinate)
+                    # Check the rank, don't include if not 1D or 2D (e.g.,
+                    # scalar coordinate)
                     domnode = dsetdict[item].getDomain()
-                    if domnode.getChildCount() not in [1,2]:
+                    if domnode.getChildCount() not in [1, 2]:
                         continue
                     coorddict[item] = 1
         return coorddict.keys()
@@ -155,14 +165,14 @@ class CFConvention(COARDSConvention):
                 # Note: not everything referenced by .coordinates attribute is
                 # in fact a coordinate axis, e.g., scalar coordinates
                 if coord is not None and hasattr(coord, 'isLatitude') and coord.isLatitude():
-                    if nlat==0:
+                    if nlat == 0:
                         lat = coord
                     nlat += 1
         if lat is None:
             lat, nlat = AbstractConvention.getVarLatId(self, var, vardict)
 
         return (lat, nlat)
-                
+
     def getVarLonId(self, var, vardict):
         lon = None
         nlon = 0
@@ -179,30 +189,30 @@ class CFConvention(COARDSConvention):
                 # Note: not everything referenced by .coordinates attribute is
                 # in fact a coordinate axis, e.g., scalar coordinates
                 if coord is not None and hasattr(coord, 'isLongitude') and coord.isLongitude():
-                    if nlon==0:
+                    if nlon == 0:
                         lon = coord
                     nlon += 1
         if lon is None:
             lon, nlon = AbstractConvention.getVarLonId(self, var, vardict)
 
         return (lon, nlon)
-                
+
     def axisIsLatitude(self, axis):
-        if (hasattr(axis,'axis') and axis.axis=='Y'):
+        if (hasattr(axis, 'axis') and axis.axis == 'Y'):
             return 1
         elif (hasattr(axis, 'units') and axis.units.lower() in ['degrees_north', 'degree_north', 'degree_n', 'degrees_n', 'degreen', 'degreesn']):
             return 1
-        elif (hasattr(axis, 'standard_name') and axis.standard_name.lower()=='latitude'):
+        elif (hasattr(axis, 'standard_name') and axis.standard_name.lower() == 'latitude'):
             return 1
         else:
             return AbstractConvention.axisIsLatitude(self, axis)
-        
+
     def axisIsLongitude(self, axis):
-        if (hasattr(axis,'axis') and axis.axis=='X'):
+        if (hasattr(axis, 'axis') and axis.axis == 'X'):
             return 1
         elif (hasattr(axis, 'units') and axis.units.lower() in ['degrees_east', 'degree_east', 'degree_e', 'degrees_e', 'degreee', 'degreese']):
             return 1
-        elif (hasattr(axis, 'standard_name') and axis.standard_name.lower()=='longitude'):
+        elif (hasattr(axis, 'standard_name') and axis.standard_name.lower() == 'longitude'):
             return 1
         else:
             return AbstractConvention.axisIsLongitude(self, axis)
@@ -211,10 +221,10 @@ class CFConvention(COARDSConvention):
         """Get the bounds variable for the variable, from a dataset or file."""
         if hasattr(var, 'bounds'):
             boundsid = var.bounds
-            if dset.variables.has_key(boundsid):
+            if boundsid in dset.variables:
                 result = dset[boundsid]
             else:
-                print 'Warning: bounds variable not found in %s: %s'%(dset.id, boundsid)
+                print 'Warning: bounds variable not found in %s: %s' % (dset.id, boundsid)
                 result = None
         else:
             result = None
@@ -225,7 +235,7 @@ NUG = NUGConvention()
 COARDS = COARDSConvention()
 CF1 = CFConvention('CF-1')
 
+
 def getDatasetConvention(dset):
     "Return an appropriate convention object. dset is a file or dataset object"
     return CF1
-
diff --git a/Packages/cdms2/Lib/coord.py b/Packages/cdms2/Lib/coord.py
index 8460485e6..8935220af 100644
--- a/Packages/cdms2/Lib/coord.py
+++ b/Packages/cdms2/Lib/coord.py
@@ -1,24 +1,25 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 """
 CDMS CoordinateAxis objects
 """
 import sys
-import cdmsNode
+from . import cdmsNode
 import cdtime
 import copy
 import numpy
-#import internattr
-from cdmsobj import CdmsObj
-from axis import createAxis, TransientVirtualAxis
-from error import CDMSError
-from convention import AliasList, level_aliases, longitude_aliases, latitude_aliases, time_aliases,\
-     forecast_aliases
-from fvariable import FileVariable
-from variable import DatasetVariable
-from tvariable import TransientVariable
-from avariable import AbstractVariable
+# import internattr
+from .cdmsobj import CdmsObj
+from .axis import createAxis, TransientVirtualAxis
+from .error import CDMSError
+from .convention import AliasList, level_aliases, longitude_aliases, latitude_aliases, time_aliases,\
+    forecast_aliases
+from .fvariable import FileVariable
+from .variable import DatasetVariable
+from .tvariable import TransientVariable
+from .avariable import AbstractVariable
+from functools import reduce
 
 MethodNotImplemented = "Method not yet implemented"
 NoSuchAxisOrGrid = "No such axis or grid: "
@@ -28,34 +29,38 @@ std_axis_attributes = ['name', 'units', 'length', 'values', 'bounds']
 
 # Map between cdtime calendar and CF tags
 calendarToTag = {
-    cdtime.MixedCalendar : 'gregorian',
-    cdtime.NoLeapCalendar : 'noleap',
-    cdtime.GregorianCalendar : 'proleptic_gregorian',
-    cdtime.JulianCalendar : 'julian',
-    cdtime.Calendar360 : '360_day'
-    }
+    cdtime.MixedCalendar: 'gregorian',
+    cdtime.NoLeapCalendar: 'noleap',
+    cdtime.GregorianCalendar: 'proleptic_gregorian',
+    cdtime.JulianCalendar: 'julian',
+    cdtime.Calendar360: '360_day'
+}
 
 tagToCalendar = {
-    'gregorian' : cdtime.MixedCalendar,
-    'standard' : cdtime.GregorianCalendar,
-    'noleap' : cdtime.NoLeapCalendar,
-    'julian' : cdtime.JulianCalendar,
-    'proleptic_gregorian' : cdtime.GregorianCalendar,
-    '360_day' : cdtime.Calendar360,
-    '360' : cdtime.Calendar360,
-    '365_day' : cdtime.NoLeapCalendar,
-    }
+    'gregorian': cdtime.MixedCalendar,
+    'standard': cdtime.GregorianCalendar,
+    'noleap': cdtime.NoLeapCalendar,
+    'julian': cdtime.JulianCalendar,
+    'proleptic_gregorian': cdtime.GregorianCalendar,
+    '360_day': cdtime.Calendar360,
+    '360': cdtime.Calendar360,
+    '365_day': cdtime.NoLeapCalendar,
+}
 
 # This is not an error message, it is used to detect which things have
 # been left as default indices or coordinates.
 unspecified = "No value specified."
 
 # Create a transient axis
+
+
 def createCoordinateAxis(data, bounds=None, id=None, copy=0):
     return TransientAxis(data, bounds, id, copy=copy)
 
 # AbstractCoordinateAxis defines the common interface
 # for coordinate variables/axes.
+
+
 class AbstractCoordinateAxis(CdmsObj):
 
     axis_count = 0                      # Transient axis count
@@ -63,15 +68,15 @@ class AbstractCoordinateAxis(CdmsObj):
     def __init__(self, parent=None, variableNode=None, bounds=None):
         CdmsObj.__init__(self, variableNode)
         self._bounds_ = bounds
-        
+
     def isAbstractCoordinate(self):
         return 1
 
-    def clone (self, copyData=1):
+    def clone(self, copyData=1):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     # Designate axis as a latitude axis.
     # If persistent is true, write metadata to the container.
@@ -115,17 +120,17 @@ class AbstractCoordinateAxis(CdmsObj):
     # or None. If the axis does not have a calendar attribute, return the global
     # calendar.
     def getCalendar(self):
-        if hasattr(self,'calendar'):
+        if hasattr(self, 'calendar'):
             calendar = self.calendar.lower()
         else:
             calendar = None
 
-        cdcal = tagToCalendar.get(calendar,None)
+        cdcal = tagToCalendar.get(calendar, None)
         return cdcal
 
     def getData(self):
-        raise CDMSError, MethodNotImplemented
- 
+        raise CDMSError(MethodNotImplemented)
+
     # Return None if not explicitly defined
     def getExplicitBounds(self):
         if self._bounds_ is not None:
@@ -135,7 +140,8 @@ class AbstractCoordinateAxis(CdmsObj):
 
     def info(self, flag=None, device=None):
         "Write info about axis; include dimension values and weights if flag"
-        if device is None: device = sys.stdout
+        if device is None:
+            device = sys.stdout
         device.write(str(self))
 
     # Return true iff the axis is a latitude axis
@@ -146,7 +152,8 @@ class AbstractCoordinateAxis(CdmsObj):
     # Return true iff the axis is a level axis
     def isLevel(self):
         id = self.id.lower()
-        if (hasattr(self,'axis') and self.axis=='Z'): return 1
+        if (hasattr(self, 'axis') and self.axis == 'Z'):
+            return 1
         return ((id[0:3] == 'lev') or (id[0:5] == 'depth') or (id in level_aliases))
 
     # Return true iff the axis is a longitude axis
@@ -157,24 +164,30 @@ class AbstractCoordinateAxis(CdmsObj):
     # Return true iff the axis is a time axis
     def isTime(self):
         id = self.id.lower()
-        if (hasattr(self,'axis') and self.axis=='T'): return 1
+        if (hasattr(self, 'axis') and self.axis == 'T'):
+            return 1
         return (id[0:4] == 'time') or (id in time_aliases)
 
     # Return true iff the axis is a forecast axis
     def isForecast(self):
         id = self.id.lower()
-        if (hasattr(self,'axis') and self.axis=='F'): return 1
+        if (hasattr(self, 'axis') and self.axis == 'F'):
+            return 1
         return (id[0:6] == 'fctau0') or (id in forecast_aliases)
 
-    def listall (self, all=None):
+    def listall(self, all=None):
         "Get list of info about this axis."
         aname = self.id
         result = []
         result.append('   id: ' + aname)
-        if self.isLatitude(): result.append('   Designated a latitude axis.')
-        if self.isLongitude(): result.append('   Designated a longitude axis.')
-        if self.isTime(): result.append('   Designated a time axis.')
-        if self.isLevel(): result.append('   Designated a level axis.')
+        if self.isLatitude():
+            result.append('   Designated a latitude axis.')
+        if self.isLongitude():
+            result.append('   Designated a longitude axis.')
+        if self.isTime():
+            result.append('   Designated a time axis.')
+        if self.isLevel():
+            result.append('   Designated a level axis.')
         try:
             units = self.units
             result.append('   units:  ' + units)
@@ -184,11 +197,12 @@ class AbstractCoordinateAxis(CdmsObj):
         result.append('   Shape: ' + str(d.shape))
         flag = 1
         for k in self.attributes.keys():
-            if k in std_axis_attributes: continue
+            if k in std_axis_attributes:
+                continue
             if flag:
                 result.append('   Other axis attributes:')
                 flag = 0
-            result.append('      '+k+': '+str(self.attributes[k]))
+            result.append('      ' + k + ': ' + str(self.attributes[k]))
         result.append('   Python id:  %s' % hex(id(self)))
 
         if all:
@@ -213,86 +227,106 @@ class AbstractCoordinateAxis(CdmsObj):
         if persistent:
             self.calendar = calendarToTag.get(calendar, None)
             if self.calendar is None:
-                raise CDMSError, InvalidCalendar + calendar
+                raise CDMSError(InvalidCalendar + calendar)
         else:
             self.__dict__['calendar'] = calendarToTag.get(calendar, None)
             if self.__dict__['calendar'] is None:
-                raise CDMSError, InvalidCalendar + calendar
+                raise CDMSError(InvalidCalendar + calendar)
 
-    def size (self, axis = None):
+    def size(self, axis=None):
         "Number of elements in array, or in a particular axis."
         s = self.shape
         if axis is None:
             if len(s) == 0:
                 return 1
             else:
-                return reduce(lambda x,y: x*y, s)
+                return reduce(lambda x, y: x * y, s)
         else:
             return s[axis]
-        
+
     def writeToFile(self, file):
 
         if self._bounds_ is not None:
-            if hasattr(self,"bounds"):
+            if hasattr(self, "bounds"):
                 boundsid = self.bounds
             else:
-                boundsid = "bounds_"+self.id
+                boundsid = "bounds_" + self.id
             self.bounds = boundsid
 
         fvar = file.write(self)
 
-        # Create the bounds variable 
-        if (self._bounds_ is not None) and not file.variables.has_key(boundsid):
+        # Create the bounds variable
+        if (self._bounds_ is not None) and boundsid not in file.variables:
             boundslen = self._bounds_.shape[-1]
             try:
                 boundid = self._bounds_.getAxis(-1).id
-                boundsaxis = file.getBoundsAxis(boundslen,boundid=boundid)
+                boundsaxis = file.getBoundsAxis(boundslen, boundid=boundid)
             except:
                 boundsaxis = file.getBoundsAxis(boundslen)
-            
+
             axislist = fvar.getAxisList()
             axislist.append(boundsaxis)
-            boundsvar = file.createVariable(boundsid, cdmsNode.NumericToCdType.get(self.dtype.char), axislist)
+            boundsvar = file.createVariable(
+                boundsid,
+                cdmsNode.NumericToCdType.get(self.dtype.char),
+                axislist)
             boundsvar[:] = self._bounds_.astype(boundsvar.dtype)
         return fvar
 
+
 class AbstractAxis2D(AbstractCoordinateAxis):
 
-    def __init__ (self, parent=None, variableNode=None, bounds=None):
-        AbstractCoordinateAxis.__init__(self, parent, variableNode, bounds=bounds)
+    def __init__(self, parent=None, variableNode=None, bounds=None):
+        AbstractCoordinateAxis.__init__(
+            self,
+            parent,
+            variableNode,
+            bounds=bounds)
 
-    def clone (self, copyData=1):
+    def clone(self, copyData=1):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
-        result = TransientAxis2D(self[:], copy=copyData, axes=self.getAxisList(), attributes=self.attributes, bounds=self.getBounds())
+        result = TransientAxis2D(
+            self[:],
+            copy=copyData,
+            axes=self.getAxisList(),
+            attributes=self.attributes,
+            bounds=self.getBounds())
         return result
 
     def setBounds(self, bounds):
         if bounds is not None:
-            if len(bounds.shape)!=3:
-                raise CDMSError, 'Bounds must have rank=3'
-            if bounds.shape[0:2]!=self.shape:
-                raise CDMSError, 'Bounds shape %s is inconsistent with axis shape %s'%(`bounds.shape`,`self.shape`)
+            if len(bounds.shape) != 3:
+                raise CDMSError('Bounds must have rank=3')
+            if bounds.shape[0:2] != self.shape:
+                raise CDMSError(
+                    'Bounds shape %s is inconsistent with axis shape %s' %
+                    (repr(bounds.shape), repr(self.shape)))
         AbstractCoordinateAxis.setBounds(self, bounds)
 
-    def subSlice (self, *specs, **keys):
+    def subSlice(self, *specs, **keys):
         # Take a subslice, returning a TransientAxis2D
         avar = AbstractVariable.subSlice(self, *specs, **keys)
         bounds = self.getBounds()
         if bounds is None:
             newbounds = None
         else:
-            newbounds = bounds[specs]   # bounds can be a numarray or DatasetVariable
+            newbounds = bounds[
+                specs]   # bounds can be a numarray or DatasetVariable
 
-        # Note: disable axis copy to preserve identity of grid and variable domains
-        result = TransientAxis2D(avar, bounds=newbounds, copyaxes=0)    
+        # Note: disable axis copy to preserve identity of grid and variable
+        # domains
+        result = TransientAxis2D(avar, bounds=newbounds, copyaxes=0)
         return result
 
 # Two-dimensional coordinate axis in a dataset.
+
+
 class DatasetAxis2D(AbstractAxis2D, DatasetVariable):
 
     # Note: node is a VariableNode
+
     def __init__(self, parent, id=None, variableNode=None, bounds=None):
         AbstractAxis2D.__init__(self, parent, variableNode, bounds=bounds)
         DatasetVariable.__init__(self, parent, id, variableNode)
@@ -300,13 +334,16 @@ class DatasetAxis2D(AbstractAxis2D, DatasetVariable):
 
     def __repr__(self):
         if self.parent is not None:
-            return "<DatasetAxis2D: %s, file: %s, shape: %s>"%(self.id, self.parent.id, `self.shape`)
+            return "<DatasetAxis2D: %s, file: %s, shape: %s>" % (self.id, self.parent.id, repr(self.shape))
         else:
-            return "<DatasetAxis2D: %s, file: **CLOSED**>"%self.id
+            return "<DatasetAxis2D: %s, file: **CLOSED**>" % self.id
 
-## internattr.initialize_internal_attributes(DatasetAxis2D) Copy internal attrs from parents
+# internattr.initialize_internal_attributes(DatasetAxis2D) Copy internal
+# attrs from parents
 
 # Two-dimensional coordinate axis in a file.
+
+
 class FileAxis2D(AbstractAxis2D, FileVariable):
 
     def __init__(self, parent, id, obj=None, bounds=None):
@@ -316,15 +353,18 @@ class FileAxis2D(AbstractAxis2D, FileVariable):
 
     def __repr__(self):
         if self.parent is not None:
-            return "<FileAxis2D: %s, file: %s, shape: %s>"%(self.id, self.parent.id, `self.shape`)
+            return "<FileAxis2D: %s, file: %s, shape: %s>" % (self.id, self.parent.id, repr(self.shape))
         else:
-            return "<FileAxis2D: %s, file: **CLOSED**>"%self.id
+            return "<FileAxis2D: %s, file: **CLOSED**>" % self.id
+
+# internattr.initialize_internal_attributes(FileAxis2D) # Copy internal
+# attrs from parents
 
-## internattr.initialize_internal_attributes(FileAxis2D) # Copy internal attrs from parents
 
 class TransientAxis2D(AbstractAxis2D, TransientVariable):
 
-    def __init__(self, data, typecode=None, copy=0, savespace=0, mask=None, fill_value=None,
+    def __init__(
+        self, data, typecode=None, copy=0, savespace=0, mask=None, fill_value=None,
                  axes=None, attributes=None, id=None, copyaxes=1, bounds=None):
         """Create a transient 2D axis.
         All arguments are as for TransientVariable.
@@ -332,11 +372,12 @@ class TransientAxis2D(AbstractAxis2D, TransientVariable):
           nvert is the max number of vertices per cell.
         """
         AbstractAxis2D.__init__(self, None, None, bounds=bounds)
-        TransientVariable.__init__(self, data, typecode=typecode, copy=copy, savespace=savespace,
+        TransientVariable.__init__(
+            self, data, typecode=typecode, copy=copy, savespace=savespace,
                                    mask=mask, fill_value=fill_value, axes=axes, attributes=attributes,
                                    id=id, copyaxes=copyaxes)
         if axes is not None:
             self.setBounds(bounds)
 
-## internattr.initialize_internal_attributes(TransientAxis2D) # Copy internal attrs from parents
-
+# internattr.initialize_internal_attributes(TransientAxis2D) # Copy
+# internal attrs from parents
diff --git a/Packages/cdms2/Lib/cudsinterface.py b/Packages/cdms2/Lib/cudsinterface.py
index 4232bdf44..04673c28e 100644
--- a/Packages/cdms2/Lib/cudsinterface.py
+++ b/Packages/cdms2/Lib/cudsinterface.py
@@ -1,42 +1,46 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 "Emulation of old cu package"
 import sys
-from error import CDMSError
-from dataset import openDataset, createDataset
-from tvariable import createVariable
+from .error import CDMSError
+from .dataset import openDataset, createDataset
+from .tvariable import createVariable
 import numpy
 
+
 class cuDataset():
+
     "A mixin class to support the old cu interface"
-    def __init__ (self):
+
+    def __init__(self):
         self.cleardefault()
 
-    def __call__ (self, id, *args, **kwargs):
+    def __call__(self, id, *args, **kwargs):
         """Call a variable object with the given id. Exception if not found.
            Call the variable with the other arguments.
         """
-# note defined here because this is the class all the dataset-type classes inherit
+# note defined here because this is the class all the dataset-type classes
+# inherit
         v = self.variables.get(id)
         if v is None:
             try:
-                if ( self.is_gridspec_grid_file() and
-                     ( id=='' or id=='grid' or id=='gridspec' ) and
-                     len(args)==0 and len(kwargs)==0
-                     ):
+                if (self.is_gridspec_grid_file() and
+                     (id == '' or id == 'grid' or id == 'gridspec') and
+                     len(args) == 0 and len(kwargs) == 0
+                    ):
                     return self.readg()
                 else:
-                    raise CDMSError, "No such variable or grid, " + id
-            except ( AttributeError, TypeError ):
-                raise CDMSError, "No such variable, " + id
+                    raise CDMSError("No such variable or grid, " + id)
+            except (AttributeError, TypeError):
+                raise CDMSError("No such variable, " + id)
         return v(*args, **kwargs)
 
     def __getitem__(self, key):
         """Implement f['varname'] for file/dataset f.
         """
         for d in [self.variables, self.axes, self.grids]:
-            if d.has_key(key):
+            if key in d:
                 result = d[key]
                 break
         else:
@@ -48,19 +52,19 @@ class cuDataset():
         try:
             v = self.variables[vname]
         except KeyError:
-            raise CDMSError, "No variable named " + vname + " in file " + \
-                  self.id
+            raise CDMSError("No variable named " + vname + " in file " +
+                  self.id)
         return v
 
-    def default_variable (self, vname):
+    def default_variable(self, vname):
         "Set the default variable name."
         self.__dict__['default_variable_name'] = vname
 
-    def cleardefault (self):
+    def cleardefault(self):
         "Clear the default variable name."
         self.default_variable("no_default_variable_name_specified")
-    
-    def listall (self, vname=None, all=None):
+
+    def listall(self, vname=None, all=None):
         """Get info about data from the file.
         :::
         Options:::
@@ -68,11 +72,12 @@ class cuDataset():
         all :: (None/True/False/int) (None) include axes information
         :::
         """
-        if vname is None: vname = self.default_variable_name
+        if vname is None:
+            vname = self.default_variable_name
         try:
             m = numpy.get_printoptions()['threshold']
             result = []
-            result.append('*** Description of slab %s in file %s***' % \
+            result.append('*** Description of slab %s in file %s***' %
                           (vname, self.id))
             result.append('Name: ' + vname)
             v = self._v(vname)
@@ -94,23 +99,24 @@ class cuDataset():
                 result.append('Last: ' + str(axis[-1]))
                 if all:
                     result.append(str(self.dimensionarray(axis.id, vname)))
-            result.append ('*** End of description of %s ***' %vname)
+            result.append('*** End of description of %s ***' % vname)
             return result
         finally:
-            numpy.set_printoptions (threshold=m)
-        
-    def listattribute (self, vname=None):
+            numpy.set_printoptions(threshold=m)
+
+    def listattribute(self, vname=None):
         """Get attributes of data from the file.
         :::
         Options:::
         vname :: (str/None) (None) variable name
         :::
         """
-        if vname is None: vname = self.default_variable_name
+        if vname is None:
+            vname = self.default_variable_name
         v = self._v(vname)
         return v.attributes.keys()
 
-    def listdimension (self, vname=None):
+    def listdimension(self, vname=None):
         """Return a list of the dimension names associated with a variable.
            If no argument, return the file.axes.keys()
         :::
@@ -118,20 +124,20 @@ class cuDataset():
         vname :: (str/None) (None) variable name
         :::
         """
-        if vname is None: 
+        if vname is None:
             return self.axes.keys()
         v = self._v(vname)
         d = v.getDomain()
         x = map(lambda n: n[0], d)
-        return map (lambda n: getattr(n, 'id'), x)
+        return map(lambda n: getattr(n, 'id'), x)
 
-    def listglobal (self):
+    def listglobal(self):
         """Returns a list of the global attributes in the file.
         :::
         """
         return self.attributes.keys()
 
-    def listvariable (self):
+    def listvariable(self):
         """Return a list of the variables in the file.
         :::
         """
@@ -139,35 +145,37 @@ class cuDataset():
 
     listvariables = listvariable
 
-    def showglobal (self, device=None):
+    def showglobal(self, device=None):
         """Show the global attributes in the file.
         :::
         Options:::
         device :: (None/file) (None) output device
         :::
         """
-        if device is None: device=sys.stdout
+        if device is None:
+            device = sys.stdout
         device.write("Global attributes in file ")
         device.write(self.id)
         device.write(":\n")
         device.write(str(self.listglobal()))
         device.write("\n")
 
-    def showvariable (self, device=None):
+    def showvariable(self, device=None):
         """Show the variables in the file.
         :::
         Options:::
         device :: (None/file) (None) output device
         :::
         """
-        if device is None: device=sys.stdout
+        if device is None:
+            device = sys.stdout
         device.write("Variables in file ")
         device.write(self.id)
         device.write(":\n")
         device.write(str(self.listvariable()))
         device.write("\n")
 
-    def showattribute (self, vname=None, device=None):
+    def showattribute(self, vname=None, device=None):
         """Show the attributes of vname.
         :::
         Options:::
@@ -175,8 +183,10 @@ class cuDataset():
         device :: (None/file) (None) output device
         :::
         """
-        if device is None: device=sys.stdout
-        if vname is None: vname = self.default_variable_name
+        if device is None:
+            device = sys.stdout
+        if vname is None:
+            vname = self.default_variable_name
         device.write("Attributes of ")
         device.write(vname)
         device.write(" in file ")
@@ -184,8 +194,8 @@ class cuDataset():
         device.write(":\n")
         device.write(str(self.listattribute(vname)))
         device.write("\n")
-        
-    def showdimension (self, vname=None, device=None):
+
+    def showdimension(self, vname=None, device=None):
         """Show the dimension names associated with a variable.
         :::
         Options:::
@@ -193,8 +203,10 @@ class cuDataset():
         device :: (None/file) (None) output device
         :::
         """
-        if device is None: device=sys.stdout
-        if vname is None: vname = self.default_variable_name
+        if device is None:
+            device = sys.stdout
+        if vname is None:
+            vname = self.default_variable_name
         device.write("Dimension names of ")
         device.write(vname)
         device.write(" in file ")
@@ -202,8 +214,8 @@ class cuDataset():
         device.write(":\n")
         device.write(str(self.listdimension(vname)))
         device.write("\n")
-        
-    def showall (self, vname=None, all=None, device=None):
+
+    def showall(self, vname=None, all=None, device=None):
         """Show a full description of the variable.
         :::
         Options:::
@@ -212,13 +224,15 @@ class cuDataset():
         device :: (None/file) (None) output device
         :::
         """
-        if device is None: device=sys.stdout
-        if vname is None: vname = self.default_variable_name
+        if device is None:
+            device = sys.stdout
+        if vname is None:
+            vname = self.default_variable_name
         alist = self.listall(vname, all=all)
         device.write("\n".join(alist))
         device.write("\n")
 
-    def dimensionobject (self, dname, vname=None):
+    def dimensionobject(self, dname, vname=None):
         """CDMS axis object for the dimension named dname.
         :::
         Options:::
@@ -231,12 +245,12 @@ class cuDataset():
         axis :: (cdms2.axis.FileAxis) (0) file axis whose id is vname
         :::
         """
-        if vname is None: 
+        if vname is None:
             try:
                 return self.axes[dname]
             except KeyError:
-                raise CDMSError, "No axis named " + dname + " in file " +\
-                                self.id + "."
+                raise CDMSError("No axis named " + dname + " in file " +
+                                self.id + ".")
         else:
             v = self._v(vname)
             d = v.getDomain()
@@ -244,10 +258,10 @@ class cuDataset():
                 if x[0].id == dname:
                     return x[0]
             else:
-                raise CDMSError, vname + " has no axis named " + dname + \
-                                " in file " + self.id + "."
-        
-    def dimensionarray (self, dname, vname=None):
+                raise CDMSError(vname + " has no axis named " + dname +
+                                " in file " + self.id + ".")
+
+    def dimensionarray(self, dname, vname=None):
         """Values of the dimension named dname.
         :::
         Options:::
@@ -261,8 +275,8 @@ class cuDataset():
         :::
         """
         return self.dimensionobject(dname, vname).getValue()
-    
-    def getdimensionunits (self, dname, vname=None):
+
+    def getdimensionunits(self, dname, vname=None):
         """Get the units for the given dimension.
         :::
         Options:::
@@ -278,7 +292,7 @@ class cuDataset():
         x = self.dimensionobject(dname, vname)
         return x.units
 
-    def getglobal (self, attribute):
+    def getglobal(self, attribute):
         """Get the value of the global attribute.
         :::
         Input:::
@@ -292,8 +306,8 @@ class cuDataset():
             return self.attributes[attribute]
         except KeyError:
             return None
-    
-    def getattribute (self, vname, attribute):
+
+    def getattribute(self, vname, attribute):
         """Get the value of attribute for variable vname
         :::
         Input:::
@@ -306,8 +320,8 @@ class cuDataset():
         """
         v = self._v(vname)
         return getattr(v, attribute)
-            
-    def getslab (self, vname, *args,**keys):
+
+    def getslab(self, vname, *args, **keys):
         """getslab('name', arg1, arg2, ....) returns a cdms variable
            containing the data.
 
@@ -337,40 +351,42 @@ class cuDataset():
         nargs = len(args)
         v = self._v(vname)
         if nargs == 0:
-           return v.subRegion()
+            return v.subRegion()
 # note CDMS treats None as a colon in getRegion and mapInterval
         ndims = v.rank()
-        cdms_args = [':'] * ndims 
+        cdms_args = [':'] * ndims
         i = 0
         idim = 0
         ne = 0
         while i < nargs:
             if not (idim < ndims):
-                raise CDMSError, "Too many arguments to getslab."
+                raise CDMSError("Too many arguments to getslab.")
             x = args[i]
-            if x == ':' or x == None:
+            if x == ':' or x is None:
                 i = i + 1
                 idim = idim + 1
                 continue
             elif x == Ellipsis:
-                if ne: raise CDMSError, "Only one ellipsis allowed."
+                if ne:
+                    raise CDMSError("Only one ellipsis allowed.")
                 idim = ndims - (nargs - i - 1)
                 i = i + 1
                 ne = 1
-            elif isinstance(x,tuple):
+            elif isinstance(x, tuple):
                 cdms_args[idim] = x
                 idim = idim + 1
                 i = i + 1
             else:
-                if not ((i+1) < nargs):
-                    raise CDMSError, "Arguments to getslab not paired properly."
+                if not ((i + 1) < nargs):
+                    raise CDMSError(
+                        "Arguments to getslab not paired properly.")
                 low = float(x)
-                high = float(args[i+1])
+                high = float(args[i + 1])
                 cdms_args[idim] = (low, high, 'cc')
                 idim = idim + 1
                 i = i + 2
         sq = keys.get('squeeze', 0)
-        result = apply(v.subRegion, tuple(cdms_args), {'squeeze':sq})
+        result = v.subRegion(*tuple(cdms_args), **{'squeeze': sq})
         result.parent = self
         result.id = vname
         return result
@@ -391,8 +407,8 @@ class cuDataset():
         grid :: (cdms2.hgrid.TransientCurveGrid/cdms2.gengrid.TransientGenericGrid) (0) variable requested
         :::
         """
-        
-        import hgrid, gengrid
+
+        from . import hgrid, gengrid
 
         # Grid file
         if 'grid_dims' in self.variables.keys():
@@ -400,7 +416,7 @@ class cuDataset():
             whichType = "grid"
 
         # Destination grid from mapping file
-        elif whichGrid=="destination":
+        elif whichGrid == "destination":
             dims = self('dst_grid_dims')
             whichType = "mapping"
 
@@ -409,16 +425,16 @@ class cuDataset():
             dims = self('src_grid_dims')
             whichType = "mapping"
 
-        if len(dims)==2:
+        if len(dims) == 2:
             result = hgrid.readScripCurveGrid(self, dims, whichType, whichGrid)
-        elif len(dims)==1:
-            result = gengrid.readScripGenericGrid(self, dims, whichType, whichGrid)
+        elif len(dims) == 1:
+            result = gengrid.readScripGenericGrid(
+                self, dims, whichType, whichGrid)
         else:
-            raise CDMSError, "Grid rank must be 1 or 2, found: %d"%len(dims)
+            raise CDMSError("Grid rank must be 1 or 2, found: %d" % len(dims))
 
-        if checkGrid==1:
+        if checkGrid == 1:
             nonConvexCells = result.checkConvex()
             result.fixCutCells(nonConvexCells)
 
         return result
-
diff --git a/Packages/cdms2/Lib/database.py b/Packages/cdms2/Lib/database.py
index ac8b0dc7a..682f14628 100644
--- a/Packages/cdms2/Lib/database.py
+++ b/Packages/cdms2/Lib/database.py
@@ -1,16 +1,16 @@
 """CDMS database objects"""
 
-from error import CDMSError
-import cdmsobj
-import cdurlparse
-## import internattr
+from .error import CDMSError
+from . import cdmsobj
+from . import cdurlparse
+# import internattr
 import copy
 import os
 import re
 import sys
-from CDMLParser import CDMLParser
-from cdmsobj import CdmsObj
-from dataset import Dataset
+from .CDMLParser import CDMLParser
+from .cdmsobj import CdmsObj
+from .dataset import Dataset
 
 try:
     import ldap
@@ -31,9 +31,11 @@ MethodNotImplemented = "Method not yet implemented"
 PermissionError = "No permission to access"
 SchemeNotSupported = "Scheme not supported"
 
-_Att = re.compile('([a-zA-Z_:][-a-zA-Z0-9._:]*)=(.*)',re.DOTALL)
+_Att = re.compile('([a-zA-Z_:][-a-zA-Z0-9._:]*)=(.*)', re.DOTALL)
 
 # Open a database connection
+
+
 def connect(uri=None, user="", password=""):
     """
     Method:
@@ -62,36 +64,48 @@ def connect(uri=None, user="", password=""):
         try:
             uri = os.environ['CDMSROOT']
         except KeyError:
-            raise CDMSError, ConnectError + '%s\nSet environment variable CDMSROOT to default database location'%uri
-    (scheme,netloc,path,parameters,query,fragment)=cdurlparse.urlparse(uri)
-
-    if scheme in ['','ldap']:
+            raise CDMSError(
+                ConnectError +
+                '%s\nSet environment variable CDMSROOT to default database location' %
+                uri)
+    (scheme, netloc, path, parameters, query,
+     fragment) = cdurlparse.urlparse(uri)
+
+    if scheme in ['', 'ldap']:
         try:
             ldapdb = ldap.open(netloc)
         except:
-            raise CDMSError, ConnectError +"%s\n%s"%(uri,sys.exc_value)
+            raise CDMSError(ConnectError + "%s\n%s" % (uri, sys.exc_info()[1]))
 
         try:
-            ldapdb.simple_bind_s(user,password)
+            ldapdb.simple_bind_s(user, password)
         except:
-            raise CDMSError, AuthenticationError + "%s\n%s"%(uri,sys.exc_value)
+            raise CDMSError(
+                AuthenticationError + "%s\n%s" %
+                (uri, sys.exc_info()[1]))
 
         try:
-            result = ldapdb.search_s(path[1:], ldap.SCOPE_SUBTREE, "objectclass=database")
+            result = ldapdb.search_s(
+                path[1:],
+                ldap.SCOPE_SUBTREE,
+                "objectclass=database")
         except:
-            raise CDMSError, DatabaseNotFound + "%s\n%s"%(uri,sys.exc_value)
+            raise CDMSError(
+                DatabaseNotFound + "%s\n%s" %
+                (uri, sys.exc_info()[1]))
 
         try:
             dn, attrs = result[0]
         except:
-            raise CDMSError, PermissionError + uri
-        newuri = "ldap://%s/%s"%(netloc,dn)
+            raise CDMSError(PermissionError + uri)
+        newuri = "ldap://%s/%s" % (netloc, dn)
         db = LDAPDatabase(newuri, ldapdb)
         db.setExternalDict(attrs)
         return db
 
     else:
-        raise CDMSError, SchemeNotSupported +  scheme
+        raise CDMSError(SchemeNotSupported + scheme)
+
 
 def loadString(text, uri, parent=None, datapath=None):
     """ Create a dataset from a text string. <text> is the string in CDML format.
@@ -99,59 +113,61 @@ def loadString(text, uri, parent=None, datapath=None):
         <parent> is the containing database object, if any.
         <datapath> is the location of data files relative to the parent database URL.
     """
-    p=CDMLParser()
+    p = CDMLParser()
     p.feed(text)
     p.close()
-    return Dataset(uri,'r',p.getRoot(),parent,datapath)
-    
+    return Dataset(uri, 'r', p.getRoot(), parent, datapath)
+
 
 class AbstractDatabase(CdmsObj):
-    """AbstractDatabase defines the common database interface. Concrete database classes are 
+
+    """AbstractDatabase defines the common database interface. Concrete database classes are
        derived from this class.
     """
 
     def __init__(self, uri, path):
-        CdmsObj.__init__(self,None)
+        CdmsObj.__init__(self, None)
         self.uri = uri
         self.path = path
         self._cache_ = {}
         self._cdmlcache_ = {}
-        self._datacache_ = None # datasetdn: obj # Remote file data cache
+        self._datacache_ = None  # datasetdn: obj # Remote file data cache
         self.lcBaseDN = None            # Logical Collection base distinguished name
         self.useReplica = None          # Use replica catalog if true (request manager transfers only)
         self.userid = None              # User ID for request manager transfers
 
     def close(self):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def cachecdml(self, name, cdml):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def getDataset(self, name):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def getObjFromDataset(self, name):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def openDataset(self, dsetid, mode='r'):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
-    def searchFilter(self, filter, classtag=None, relbase=None, scope=Subtree, attnames=[]):
-        raise CDMSError, MethodNotImplemented
+    def searchFilter(self, filter, classtag=None,
+                     relbase=None, scope=Subtree, attnames=[]):
+        raise CDMSError(MethodNotImplemented)
 
     def enableCache(self):
         if self._datacache_ is None:
-            import cache
+            from . import cache
             self._datacache_ = cache.Cache()
         return self._datacache_
 
     def disableCache(self):
-        if self._datacache_ != None:
+        if self._datacache_ is not None:
             self._datacache_.delete()
             self._datacache_ = None
 
-    def useRequestManager(self, lcBaseDN, useReplica=1, userid = "anonymous"):
-        import cache
+    def useRequestManager(self, lcBaseDN, useReplica=1, userid="anonymous"):
+        from . import cache
         self.enableCache()
         cache.useRequestManagerTransfer()
         self.lcBaseDN = lcBaseDN
@@ -159,20 +175,23 @@ class AbstractDatabase(CdmsObj):
         self.userid = userid
 
     def usingRequestManager(self):
-        import cache
-        return (cache._transferMethod==cache._requestManagerTransfer)
+        from . import cache
+        return (cache._transferMethod == cache._requestManagerTransfer)
 
     def __repr__(self):
-        return "<Database '%s'>"%(self.uri)
+        return "<Database '%s'>" % (self.uri)
 
-## internattr.add_internal_attribute(AbstractDatabase, 'uri', 'path')
+# internattr.add_internal_attribute(AbstractDatabase, 'uri', 'path')
 
 # Database implemented via LDAP (Lightweight Directory Access Protocol)
+
+
 class LDAPDatabase(AbstractDatabase):
 
     def __init__(self, uri, db):
-        (scheme,netloc,path,parameters,query,fragment)=cdurlparse.urlparse(uri)
-        AbstractDatabase.__init__(self,uri,path[1:])
+        (scheme, netloc, path, parameters, query,
+         fragment) = cdurlparse.urlparse(uri)
+        AbstractDatabase.__init__(self, uri, path[1:])
         self.netloc = netloc
         self.db = db
 
@@ -191,58 +210,64 @@ class LDAPDatabase(AbstractDatabase):
           None
 
         """
-        if self.db != None:
+        if self.db is not None:
             self.db.unbind()
         self.db = None
         self.disableCache()
-        
+
     def __del__(self):
         # if cdmsobj._debug==1:
         #    print 'Deleting object',self
         self.close()
-    
+
     def normalizedn(self, dn):
         explodeddn = ldap.explode_dn(dn)
         return ','.join(explodeddn)
 
     def cachecdml(self, name, cdml, datapath):
         normaldn = self.normalizedn(name)
-        self._cdmlcache_[normaldn] = (cdml,datapath)
+        self._cdmlcache_[normaldn] = (cdml, datapath)
 
     def getDataset(self, dn):
         normaldn = self.normalizedn(dn)
-        if self._cache_.has_key(normaldn):
+        if normaldn in self._cache_:
             dataset = self._cache_[normaldn]
-        elif self._cdmlcache_.has_key(normaldn):
-            (text,datapath) = self._cdmlcache_[normaldn]
-            uri = "ldap://%s/%s"%(self.netloc,normaldn)
-            if cdmsobj._debug==1:
-                print 'Loading %s from cached CDML'%uri
-            dataset = loadString(text,uri,self,datapath)
+        elif normaldn in self._cdmlcache_:
+            (text, datapath) = self._cdmlcache_[normaldn]
+            uri = "ldap://%s/%s" % (self.netloc, normaldn)
+            if cdmsobj._debug == 1:
+                print 'Loading %s from cached CDML' % uri
+            dataset = loadString(text, uri, self, datapath)
             self._cache_[normaldn] = dataset
         else:
-            if cdmsobj._debug==1:
-                print 'Search filter: (objectclass=dataset), scope: base, base: "%s", attributes=["cdml"]'%(dn,)
-            result = self.db.search_s(dn, ldap.SCOPE_BASE, "objectclass=dataset",["cdml","datapath"])
-            resultdn,attrs = result[0]
+            if cdmsobj._debug == 1:
+                print 'Search filter: (objectclass=dataset), scope: base, base: "%s", attributes=["cdml"]' % (dn,)
+            result = self.db.search_s(
+                dn,
+                ldap.SCOPE_BASE,
+                "objectclass=dataset",
+                ["cdml",
+                 "datapath"])
+            resultdn, attrs = result[0]
             text = attrs["cdml"][0]
-            uri = "ldap://%s/%s"%(self.netloc,normaldn)
+            uri = "ldap://%s/%s" % (self.netloc, normaldn)
             datapath = attrs.get("datapath")
-            if datapath: datapath = datapath[0]
-            dataset = loadString(text,uri,self,datapath)
+            if datapath:
+                datapath = datapath[0]
+            dataset = loadString(text, uri, self, datapath)
             self._cache_[normaldn] = dataset
         return dataset
- 
+
     def getObjFromDataset(self, dn):
 
         # Get the parent dataset
         explodeddn = ldap.explode_dn(dn)
-        dsetdn = ','.join(explodeddn[1:]) # Dataset node is parent of variable
+        dsetdn = ','.join(explodeddn[1:])  # Dataset node is parent of variable
         dset = self.getDataset(dsetdn)
         rdn = explodeddn[0]
         matchobj = _Att.match(rdn)
         if matchobj is None:
-            raise CDMSError, InvalidEntryName +  dn
+            raise CDMSError(InvalidEntryName + dn)
         tag, id = matchobj.groups()
 
         # Get the correct dictionary for this tag
@@ -273,7 +298,7 @@ class LDAPDatabase(AbstractDatabase):
 
           dset = db.openDataset('ncep_reanalysis_mo')
         """
-        dn = "dataset=%s,%s"%(dsetid,self.path)
+        dn = "dataset=%s,%s" % (dsetid, self.path)
         dset = self.getDataset(dn)
         return dset
 
@@ -285,23 +310,24 @@ class LDAPDatabase(AbstractDatabase):
     def setExternalDict(self, ldapattrs):
         for attname in ldapattrs.keys():
             attvals = ldapattrs[attname]
-            if attname=='objectclass':
+            if attname == 'objectclass':
                 continue
-            elif attname=='attr':       # Handle attr: name=value
+            elif attname == 'attr':       # Handle attr: name=value
                 for attval in attvals:
                     matchobj = _Att.match(attval)
                     if matchobj is not None:
-                        newname,newval = matchobj.groups()
+                        newname, newval = matchobj.groups()
                         self.attributes[newname] = newval
 
             # If the attribute value is a multi-valued list, keep it as a list
             # otherwise copy the single value from the list.
-            if len(attvals)==1:
+            if len(attvals) == 1:
                 self.attributes[attname] = attvals[0]
             else:
                 self.attributes[attname] = attvals
 
-    def searchFilter(self, filter=None, tag=None, relbase=None, scope=Subtree, attnames=None, timeout=None):
+    def searchFilter(self, filter=None, tag=None,
+                     relbase=None, scope=Subtree, attnames=None, timeout=None):
         """
         Method:
 
@@ -332,7 +358,7 @@ class LDAPDatabase(AbstractDatabase):
                              "<=" |     # lexicographically less than or equal to
                              ">="       # lexicographically greater than or equal to
               value      ::= string, may include '*' as a wild card
-                             
+
           tag: string class tag ("dataset" | "variable" | "database" | "axis" | "grid").
             Restricts the search to a class of objects
           relbase: string search base, relative to the database path
@@ -364,25 +390,26 @@ class LDAPDatabase(AbstractDatabase):
           result = db.searchFilter(relbase="dataset=ncep_reanalysis_mo"), scope=cdms.Onelevel)
 
         """
-        if tag is None: tag='*'
-        newfilter = "(objectclass=%s)"%tag
+        if tag is None:
+            tag = '*'
+        newfilter = "(objectclass=%s)" % tag
         if filter is not None:
-            if filter[0]!='(':
-                filter = "(%s)"%filter
-            newfilter = "(&%s%s)"%(newfilter,filter)
+            if filter[0] != '(':
+                filter = "(%s)" % filter
+            newfilter = "(&%s%s)" % (newfilter, filter)
 
         if relbase is None:
             base = self.path
         else:
-            base = "%s,%s"%(relbase,self.path)
+            base = "%s,%s" % (relbase, self.path)
 
         if attnames is None:
             atts = None
         else:
-            atts = ["objectclass","cdml","id"]+attnames
-            
-        if cdmsobj._debug==1:
-            print 'Search filter:%s, scope %s, base: "%s", attributes=%s'%(newfilter,`scope`,base,`atts`)
+            atts = ["objectclass", "cdml", "id"] + attnames
+
+        if cdmsobj._debug == 1:
+            print 'Search filter:%s, scope %s, base: "%s", attributes=%s' % (newfilter, repr(scope), base, repr(atts))
         if timeout is None:
             result = self.db.search_s(base, scope, newfilter, atts)
         else:
@@ -392,11 +419,12 @@ class LDAPDatabase(AbstractDatabase):
 
     def listDatasets(self):
         """ Return a list of the dataset IDs in this database."""
-        entries = self.searchFilter(tag='dataset', scope=Onelevel )
+        entries = self.searchFilter(tag='dataset', scope=Onelevel)
         result = map(lambda x: x.attributes['id'][0], entries)
         return result
 
-## internattr.add_internal_attribute(LDAPDatabase, 'netloc', 'db')
+# internattr.add_internal_attribute(LDAPDatabase, 'netloc', 'db')
+
 
 class AbstractSearchResult:
 
@@ -409,6 +437,7 @@ class AbstractSearchResult:
     def searchPredicate(self, predicate, tag=None):
         MethodNotImplemented = "Method not yet implemented"
 
+
 class LDAPSearchResult(AbstractSearchResult):
 
     def __init__(self, db, LDAPresult):
@@ -417,15 +446,15 @@ class LDAPSearchResult(AbstractSearchResult):
 
         # Scan the result for CDML attributes, cache them in the database
         for dn, attrs in self.result:
-            if attrs.has_key('cdml') and attrs.has_key('datapath'):
+            if 'cdml' in attrs and 'datapath' in attrs:
                 cdml = attrs['cdml'][0]
                 datapath = attrs['datapath'][0]
-                self.db.cachecdml(dn,cdml,datapath)
+                self.db.cachecdml(dn, cdml, datapath)
                 del attrs['cdml']
 
     def __getitem__(self, key):
-        if key>=len(self):
-            raise IndexError, 'index out of bounds'
+        if key >= len(self):
+            raise IndexError('index out of bounds')
 
         dn, attributes = self.result[key]
 
@@ -471,10 +500,10 @@ class LDAPSearchResult(AbstractSearchResult):
         resultlist = []
         for entry in self:
             obj = entry.getObject()
-            if tag is None or tag==entry.tag:
+            if tag is None or tag == entry.tag:
                 try:
-                    if apply(predicate,(obj,))==1:
-                        resultlist.append((entry.name,entry.attributes))
+                    if predicate(*(obj,)) == 1:
+                        resultlist.append((entry.name, entry.attributes))
                 except:
                     pass
 
@@ -483,6 +512,7 @@ class LDAPSearchResult(AbstractSearchResult):
     def __len__(self):
         return len(self.result)
 
+
 class AbstractResultEntry:
 
     def __init__(self, db):
@@ -504,15 +534,16 @@ class AbstractResultEntry:
 
         """
 
-        if self.tag=="database":
+        if self.tag == "database":
             obj = self.db
-        elif self.tag=="dataset":
+        elif self.tag == "dataset":
             obj = self.db.getDataset(self.name)
         else:
             obj = self.db.getObjFromDataset(self.name)
 
         return obj
 
+
 class LDAPResultEntry(AbstractResultEntry):
 
     def __init__(self, db, dn, attributes):
@@ -525,9 +556,6 @@ class LDAPResultEntry(AbstractResultEntry):
         rdn = explodeddn[0]
         matchobj = _Att.match(rdn)
         if matchobj is None:
-            raise IndexError, InvalidEntryName + dn
+            raise IndexError(InvalidEntryName + dn)
 
         self.tag = matchobj.group(1)
-
-
-
diff --git a/Packages/cdms2/Lib/dataset.py b/Packages/cdms2/Lib/dataset.py
index 6074615de..f9f9562ec 100644
--- a/Packages/cdms2/Lib/dataset.py
+++ b/Packages/cdms2/Lib/dataset.py
@@ -1,37 +1,38 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by foo
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by foo
+# Further modified to be pure new numpy June 24th 2008
 
 """ CDMS dataset and file objects"""
-from error import CDMSError
+from .error import CDMSError
 import Cdunif
 import numpy
-import cdmsNode
-import os, sys
+from . import cdmsNode
+import os
+import sys
 import urllib
-import cdmsURLopener                    # Import after urllib, to handle errors
+from . import cdmsURLopener                    # Import after urllib, to handle errors
 import urlparse
-## import internattr
-import cdmsobj
+# import internattr
+from . import cdmsobj
 import re
-from CDMLParser import CDMLParser
-from cdmsobj import CdmsObj
-from axis import Axis, FileAxis, FileVirtualAxis, isOverlapVector
-from coord import FileAxis2D, DatasetAxis2D
-from auxcoord import FileAuxAxis1D, DatasetAuxAxis1D
-from grid import RectGrid, FileRectGrid
-from hgrid import FileCurveGrid, DatasetCurveGrid
-from gengrid import FileGenericGrid, DatasetGenericGrid
-from variable import DatasetVariable
-from fvariable import FileVariable
-from tvariable import asVariable
-from cdmsNode import CdDatatypes
-import convention
-import typeconv
+from .CDMLParser import CDMLParser
+from .cdmsobj import CdmsObj
+from .axis import Axis, FileAxis, FileVirtualAxis, isOverlapVector
+from .coord import FileAxis2D, DatasetAxis2D
+from .auxcoord import FileAuxAxis1D, DatasetAuxAxis1D
+from .grid import RectGrid, FileRectGrid
+from .hgrid import FileCurveGrid, DatasetCurveGrid
+from .gengrid import FileGenericGrid, DatasetGenericGrid
+from .variable import DatasetVariable
+from .fvariable import FileVariable
+from .tvariable import asVariable
+from .cdmsNode import CdDatatypes
+from . import convention
+from . import typeconv
 
 # Default is serial mode until setNetcdfUseParallelFlag(1) is called
 rk = 0
 sz = 1
-Cdunif.CdunifSetNCFLAGS("use_parallel",0)
+Cdunif.CdunifSetNCFLAGS("use_parallel", 0)
 CdMpi = False
 
 try:
@@ -42,17 +43,19 @@ except:
     rk = 0
 
 try:
-    import gsHost
+    from . import gsHost
     from pycf import libCFConfig as libcf
 except:
     libcf = None
 
 try:
-    import cache
+    from . import cache
 except ImportError:
     pass
 
 DuplicateAxis = "Axis already defined: "
+
+
 class DuplicateAxisError(CDMSError):
     pass
 DuplicateGrid = "Grid already defined: "
@@ -75,12 +78,37 @@ _IndexPat = r'(\d+|-)'
 _FilePath = r"([^\s\]\',]+)"
 # Two file map patterns, _IndexList4 is the original one, _IndexList5 supports
 # forecast data too...
-_IndexList4 = re.compile(_ListStartPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_FilePath+_ListEndPat)
-_IndexList5 = re.compile(_ListStartPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_FilePath+_ListEndPat)
+_IndexList4 = re.compile(
+    _ListStartPat +
+    _IndexPat +
+    _ListSepPat +
+    _IndexPat +
+    _ListSepPat +
+    _IndexPat +
+    _ListSepPat +
+    _IndexPat +
+    _ListSepPat +
+    _FilePath +
+    _ListEndPat)
+_IndexList5 = re.compile(
+    _ListStartPat +
+    _IndexPat +
+    _ListSepPat +
+    _IndexPat +
+    _ListSepPat +
+    _IndexPat +
+    _ListSepPat +
+    _IndexPat +
+    _ListSepPat +
+    _IndexPat +
+    _ListSepPat +
+    _FilePath +
+    _ListEndPat)
 
 _NPRINT = 20
 _showCompressWarnings = True
 
+
 def setCompressionWarnings(value=None):
     """Turn on/off the warnings for compression
     Usage:
@@ -93,49 +121,57 @@ def setCompressionWarnings(value=None):
     global _showCompressWarnings
     if value is None:
         value = not _showCompressWarnings
-    if isinstance(value,str):
-        if not value.slower() in ['y','n','yes','no']:
-            raise CDMSError("setCompressionWarnings flags must be yes/no or 1/0, or None to invert it")
-        if value.lower()[0]=='y':
+    if isinstance(value, str):
+        if not value.lower() in ['y', 'n', 'yes', 'no']:
+            raise CDMSError(
+                "setCompressionWarnings flags must be yes/no or 1/0, or None to invert it")
+        if value.lower()[0] == 'y':
             value = 1
         else:
             value = 0
-    if not isinstance(value, (int,bool)):
-        raise CDMSError("setCompressionWarnings flags must be yes/no or 1/0, or None to invert it")
+    if not isinstance(value, (int, bool)):
+        raise CDMSError(
+            "setCompressionWarnings flags must be yes/no or 1/0, or None to invert it")
 
-    if value in [1,True]:
+    if value in [1, True]:
         _showCompressWarnings = True
-    elif value in [0,False]:
+    elif value in [0, False]:
         _showCompressWarnings = False
     else:
-        raise CDMSError("setCompressionWarnings flags must be yes\/no or 1\/0, or None to invert it")
+        raise CDMSError(
+            "setCompressionWarnings flags must be yes/no or 1/0, or None to invert it")
 
     return _showCompressWarnings
 
+
 def setNetcdfUseNCSwitchModeFlag(value):
     """ Tells cdms2 to switch constantly between netcdf define/write modes"""
 
-    if value not in [True,False,0,1]:
-        raise CDMSError("Error UseNCSwitchMode flag must be 1(can use)/0(do not use) or true/False")
-    if value in [0,False]:
-        Cdunif.CdunifSetNCFLAGS("use_define_mode",0)
+    if value not in [True, False, 0, 1]:
+        raise CDMSError(
+            "Error UseNCSwitchMode flag must be 1(can use)/0(do not use) or true/False")
+    if value in [0, False]:
+        Cdunif.CdunifSetNCFLAGS("use_define_mode", 0)
     else:
-        Cdunif.CdunifSetNCFLAGS("use_define_mode",1)
+        Cdunif.CdunifSetNCFLAGS("use_define_mode", 1)
+
 
 def setNetcdfUseParallelFlag(value):
     """ Sets NetCDF classic flag value"""
     global CdMpi
-    if value not in [True,False,0,1]:
-        raise CDMSError("Error UseParallel flag must be 1(can use)/0(do not use) or true/False")
-    if value in [0,False]:
-        Cdunif.CdunifSetNCFLAGS("use_parallel",0)
+    if value not in [True, False, 0, 1]:
+        raise CDMSError(
+            "Error UseParallel flag must be 1(can use)/0(do not use) or true/False")
+    if value in [0, False]:
+        Cdunif.CdunifSetNCFLAGS("use_parallel", 0)
     else:
-        Cdunif.CdunifSetNCFLAGS("use_parallel",1)
+        Cdunif.CdunifSetNCFLAGS("use_parallel", 1)
         CdMpi = True
         if not MPI.Is_initialized():
             MPI.Init()
         rk = MPI.COMM_WORLD.Get_rank()
 
+
 def getMpiRank():
     ''' Return number of processor available '''
     if CdMpi:
@@ -144,6 +180,7 @@ def getMpiRank():
     else:
         return 0
 
+
 def getMpiSize():
     if CdMpi:
         sz = MPI.COMM_WORLD.Get_size()
@@ -151,75 +188,90 @@ def getMpiSize():
     else:
         return 1
 
+
 def setNetcdf4Flag(value):
     """ Sets NetCDF classic flag value"""
-    if value not in [True,False,0,1]:
+    if value not in [True, False, 0, 1]:
         raise CDMSError("Error NetCDF4 flag must be 1/0 or true/False")
-    if value in [0,False]:
-        Cdunif.CdunifSetNCFLAGS("netcdf4",0)
+    if value in [0, False]:
+        Cdunif.CdunifSetNCFLAGS("netcdf4", 0)
     else:
-        Cdunif.CdunifSetNCFLAGS("netcdf4",1)
+        Cdunif.CdunifSetNCFLAGS("netcdf4", 1)
 
-def setNetcdfClassicFlag(value):        
+
+def setNetcdfClassicFlag(value):
     """ Sets NetCDF classic flag value"""
-    if value not in [True,False,0,1]:
+    if value not in [True, False, 0, 1]:
         raise CDMSError("Error NetCDF Classic flag must be 1/0 or true/False")
-    if value in [0,False]:
-        Cdunif.CdunifSetNCFLAGS("classic",0)
+    if value in [0, False]:
+        Cdunif.CdunifSetNCFLAGS("classic", 0)
     else:
-        Cdunif.CdunifSetNCFLAGS("classic",1)
+        Cdunif.CdunifSetNCFLAGS("classic", 1)
+
 
-def setNetcdfShuffleFlag(value):        
+def setNetcdfShuffleFlag(value):
     """ Sets NetCDF shuffle flag value"""
-    if value not in [True,False,0,1]:
+    if value not in [True, False, 0, 1]:
         raise CDMSError("Error NetCDF Shuffle flag must be 1/0 or true/False")
-    if value in [0,False]:
-        Cdunif.CdunifSetNCFLAGS("shuffle",0)
+    if value in [0, False]:
+        Cdunif.CdunifSetNCFLAGS("shuffle", 0)
     else:
-        Cdunif.CdunifSetNCFLAGS("shuffle",1)
+        Cdunif.CdunifSetNCFLAGS("shuffle", 1)
+
 
 def setNetcdfDeflateFlag(value):
     """ Sets NetCDF deflate flag value"""
-    if value not in [True,False,0,1]:
+    if value not in [True, False, 0, 1]:
         raise CDMSError("Error NetCDF deflate flag must be 1/0 or true/False")
-    if value in [0,False]:
-        Cdunif.CdunifSetNCFLAGS("deflate",0)
+    if value in [0, False]:
+        Cdunif.CdunifSetNCFLAGS("deflate", 0)
     else:
-        Cdunif.CdunifSetNCFLAGS("deflate",1)
-        
+        Cdunif.CdunifSetNCFLAGS("deflate", 1)
+
+
 def setNetcdfDeflateLevelFlag(value):
     """ Sets NetCDF deflate level flag value"""
-    if value not in [0,1,2,3,4,5,6,7,8,9]:
-        raise CDMSError("Error NetCDF deflate_level flag must be an integer < 10")
-    Cdunif.CdunifSetNCFLAGS("deflate_level",value)
+    if value not in [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]:
+        raise CDMSError(
+            "Error NetCDF deflate_level flag must be an integer < 10")
+    Cdunif.CdunifSetNCFLAGS("deflate_level", value)
+
 
 def getNetcdfUseNCSwitchModeFlag():
     """ Returns NetCDF UseParallel flag value"""
     return Cdunif.CdunifGetNCFLAGS("use_define_mode")
 
+
 def getNetcdfUseParallelFlag():
     """ Returns NetCDF UseParallel flag value"""
     return Cdunif.CdunifGetNCFLAGS("use_parallel")
 
+
 def getNetcdf4Flag():
     """ Returns NetCDF4 flag value"""
     return Cdunif.CdunifGetNCFLAGS("netcdf4")
 
+
 def getNetcdfClassicFlag():
     """ Returns NetCDF classic flag value"""
     return Cdunif.CdunifGetNCFLAGS("classic")
 
+
 def getNetcdfShuffleFlag():
     """ Returns NetCDF shuffle flag value"""
     return Cdunif.CdunifGetNCFLAGS("shuffle")
 
+
 def getNetcdfDeflateFlag():
     """ Returns NetCDF deflate flag value"""
     return Cdunif.CdunifGetNCFLAGS("deflate")
 
+
 def getNetcdfDeflateLevelFlag():
     """ Returns NetCDF deflate level flag value"""
     return Cdunif.CdunifGetNCFLAGS("deflate_level")
+
+
 def useNetcdf3():
     """ Turns off (0) NetCDF flags for shuffle/defalte/defaltelevel
     Output files are generated as NetCDF3 Classic after that
@@ -230,11 +282,13 @@ def useNetcdf3():
 
 # Create a tree from a file path.
 # Returns the parse tree root node.
+
+
 def load(path):
     fd = open(path)
     text = fd.read()
     fd.close()
-    p=CDMLParser()
+    p = CDMLParser()
     p.feed(text)
     p.close()
     return p.getRoot()
@@ -243,13 +297,16 @@ def load(path):
 # URI is of the form scheme://netloc/path;parameters?query#fragment
 # where fragment may be an XPointer.
 # Returns the parse tree root node.
+
+
 def loadURI(uri):
-    (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(uri)
-    uripath = urlparse.urlunparse((scheme,netloc,path,'','',''))
+    (scheme, netloc, path, parameters,
+     query, fragment) = urlparse.urlparse(uri)
+    uripath = urlparse.urlunparse((scheme, netloc, path, '', '', ''))
     fd = urllib.urlopen(uripath)
     text = fd.read()
     fd.close()
-    p=CDMLParser()
+    p = CDMLParser()
     p.feed(text)
     p.close()
     return p.getRoot()
@@ -257,14 +314,19 @@ def loadURI(uri):
 # Create a dataset
 # 'path' is the XML file name, or netCDF filename for simple file create
 # 'template' is a string template for the datafile(s), for dataset creation
-def createDataset(path,template=None):
-    return openDataset(path,'w',template)
+
+
+def createDataset(path, template=None):
+    return openDataset(path, 'w', template)
 
 # Open an existing dataset
 # 'uri' is a Uniform Resource Identifier, referring to a cdunif file, XML file,
 #   or LDAP URL of a catalog dataset entry.
 # 'mode' is 'r', 'r+', 'a', or 'w'
-def openDataset(uri,mode='r',template=None,dods=1,dpath=None, hostObj=None):
+
+
+def openDataset(uri, mode='r', template=None,
+                dods=1, dpath=None, hostObj=None):
     """
     Options:::
 mode :: (str) ('r') mode to open the file in read/write/append
@@ -280,41 +342,45 @@ file :: (cdms2.dataset.CdmsFile) (0) file to read from
 :::
     """
     uri = uri.strip()
-    (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(uri)
-    if scheme in ('','file'):
+    (scheme, netloc, path, parameters,
+     query, fragment) = urlparse.urlparse(uri)
+    if scheme in ('', 'file'):
         if netloc:
             # In case of relative path...
             path = netloc + path
         path = os.path.expanduser(path)
         path = os.path.normpath(os.path.join(os.getcwd(), path))
 
-        root,ext = os.path.splitext(path)
-        if ext in ['.xml','.cdml']:
-            if mode!='r': raise ModeNotSupported(mode)
+        root, ext = os.path.splitext(path)
+        if ext in ['.xml', '.cdml']:
+            if mode != 'r':
+                raise ModeNotSupported(mode)
             datanode = load(path)
         else:
             # If the doesn't exist allow it to be created
-            ##Ok mpi has issues with bellow we need to test this only with 1 rank
+            # Ok mpi has issues with below we need to test this only with 1
+            # rank
             if not os.path.exists(path):
-                return CdmsFile(path,mode,mpiBarrier=CdMpi)
-            elif mode=="w":
+                return CdmsFile(path, mode, mpiBarrier=CdMpi)
+            elif mode == "w":
                 try:
                     os.remove(path)
                 except:
                     pass
-                return CdmsFile(path,mode,mpiBarrier=CdMpi)
-            
+                return CdmsFile(path, mode, mpiBarrier=CdMpi)
+
             # The file exists
-            file1 = CdmsFile(path,"r")
+            file1 = CdmsFile(path, "r")
             if libcf is not None:
                 if hasattr(file1, libcf.CF_FILETYPE):
                     if getattr(file1, libcf.CF_FILETYPE) == libcf.CF_GLATT_FILETYPE_HOST:
                         file = gsHost.open(path, mode)
-                    elif mode=='r' and hostObj is None:
-                        # helps performance on machines where file open (in CdmsFile) is costly
+                    elif mode == 'r' and hostObj is None:
+                        # helps performance on machines where file open (in
+                        # CdmsFile) is costly
                         file = file1
                     else:
-                        file = CdmsFile(path, mode, hostObj = hostObj)
+                        file = CdmsFile(path, mode, hostObj=hostObj)
                     file1.close()
                 else:
                     file1.close()
@@ -324,17 +390,18 @@ file :: (cdms2.dataset.CdmsFile) (0) file to read from
                 file1.close()
                 return CdmsFile(path, mode)
     elif scheme in ['http', 'gridftp']:
-        
+
         if (dods):
-            if mode!='r': raise ModeNotSupported(mode)
+            if mode != 'r':
+                raise ModeNotSupported(mode)
             # DODS file?
             try:
-                file = CdmsFile(uri,mode)
+                file = CdmsFile(uri, mode)
                 return file
-            except Exception,err:
-                msg = "Error in DODS open of: "+uri
-                if os.path.exists(os.path.join(os.path.expanduser("~"),".dodsrc")):
-                  msg+="\nYou have a .dodsrc in your HOME directory, try to remove it"
+            except Exception as err:
+                msg = "Error in DODS open of: " + uri
+                if os.path.exists(os.path.join(os.path.expanduser("~"), ".dodsrc")):
+                    msg += "\nYou have a .dodsrc in your HOME directory, try to remove it"
                 raise CDMSError(msg)
         else:
             try:
@@ -342,8 +409,8 @@ file :: (cdms2.dataset.CdmsFile) (0) file to read from
                 return datanode
             except:
                 datanode = loadURI(uri)
-                raise CDMSError("Error in loadURI of: "+uri)
-            
+                raise CDMSError("Error in loadURI of: " + uri)
+
     else:
         raise SchemeNotSupported(scheme)
 
@@ -357,13 +424,13 @@ file :: (cdms2.dataset.CdmsFile) (0) file to read from
     # Note: In general, dset.datapath is relative to the URL of the
     #   enclosing database, but here the database is null, so the
     #   datapath should be absolute.
-    if dpath==None:
+    if dpath is None:
         direc = datanode.getExternalAttr('directory')
         head = os.path.dirname(path)
         if direc and os.path.isabs(direc):
             dpath = direc
         elif direc:
-            dpath = os.path.join(head,direc)
+            dpath = os.path.join(head, direc)
         else:
             dpath = head
 
@@ -371,6 +438,8 @@ file :: (cdms2.dataset.CdmsFile) (0) file to read from
     return dataset
 
 # Functions for parsing the file map.
+
+
 def parselist(text, f):
     """Parse a string of the form [A, A, ...].
     f is a function which parses A and returns (A, nconsumed)
@@ -379,13 +448,13 @@ def parselist(text, f):
     n = 0
     m = _ListStart.match(text)
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near "+text[0:_NPRINT])
+        raise CDMSError("Parsing cdms_filemap near " + text[0:_NPRINT])
     result = []
     n += m.end()
     s, nconsume = f(text[n:])
     result.append(s)
     n += nconsume
-    while 1:
+    while True:
         m = _ListSep.match(text[n:])
         if m is None:
             break
@@ -396,10 +465,11 @@ def parselist(text, f):
         n += nconsume
     m = _ListEnd.match(text[n:])
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near "+text[n:n+_NPRINT])
+        raise CDMSError("Parsing cdms_filemap near " + text[n:n + _NPRINT])
     n += m.end()
     return result, n
 
+
 def parseIndexList(text):
     """Parse a string of the form [i,j,k,l,...,path] where
     i,j,k,l,... are indices or '-', and path is a filename.
@@ -411,45 +481,48 @@ def parseIndexList(text):
         m = _IndexList5.match(text)
         nindices = 5
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near "+text[0:_NPRINT])
-    result = [None]*(nindices+1)
+        raise CDMSError("Parsing cdms_filemap near " + text[0:_NPRINT])
+    result = [None] * (nindices + 1)
     for i in range(nindices):
-        s = m.group(i+1)
-        if s!='-':
+        s = m.group(i + 1)
+        if s != '-':
             result[i] = int(s)
-    result[nindices] = m.group(nindices+1)
+    result[nindices] = m.group(nindices + 1)
     return result, m.end()
 
+
 def parseName(text):
     m = _Name.match(text)
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near "+text[0:_NPRINT])
+        raise CDMSError("Parsing cdms_filemap near " + text[0:_NPRINT])
     return m.group(), m.end()
 
+
 def parseVarMap(text):
     """Parse a string of the form [ namelist, slicelist ]"""
     n = 0
     m = _ListStart.match(text)
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near "+text[0:_NPRINT])
+        raise CDMSError("Parsing cdms_filemap near " + text[0:_NPRINT])
     result = []
     n += m.end()
-    s, nconsume = parselist(text[n:],parseName)
+    s, nconsume = parselist(text[n:], parseName)
     result.append(s)
     n += nconsume
     m = _ListSep.match(text[n:])
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near "+text[n:n+_NPRINT])
+        raise CDMSError("Parsing cdms_filemap near " + text[n:n + _NPRINT])
     n += m.end()
     s, nconsume = parselist(text[n:], parseIndexList)
     result.append(s)
     n += nconsume
     m = _ListEnd.match(text[n:])
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near "+text[n:n+_NPRINT])
+        raise CDMSError("Parsing cdms_filemap near " + text[n:n + _NPRINT])
     n += m.end()
     return result, n
 
+
 def parseFileMap(text):
     """Parse a CDMS filemap. having the form:
     filemap :== [ varmap, varmap, ...]
@@ -459,38 +532,43 @@ def parseFileMap(text):
     indexlist :== [i,j,k,l,path]
     """
     result, n = parselist(text, parseVarMap)
-    if n<len(text):
-        raise CDMSError("Parsing cdms_filemap near "+text[n:n+_NPRINT])
+    if n < len(text):
+        raise CDMSError("Parsing cdms_filemap near " + text[n:n + _NPRINT])
     return result
 
 
 # A CDMS dataset consists of a CDML/XML file and one or more data files
-from cudsinterface import cuDataset
+from .cudsinterface import cuDataset
+
+
 class Dataset(CdmsObj, cuDataset):
-    def __init__(self, uri, mode, datasetNode=None, parent=None, datapath=None):
-        if datasetNode is not None and datasetNode.tag !='dataset':
+
+    def __init__(self, uri, mode,
+                 datasetNode=None, parent=None, datapath=None):
+        if datasetNode is not None and datasetNode.tag != 'dataset':
             raise CDMSError('Node is not a dataset node')
-        CdmsObj.__init__(self,datasetNode)
-        for v in [ 'datapath',
-                   'variables',
-                   'axes',
-                   'grids',
-                   'xlinks',
-                   'dictdict',
-                   'default_variable_name',
-                   'parent',
-                   'uri',
-                   'mode']:
+        CdmsObj.__init__(self, datasetNode)
+        for v in ['datapath',
+                  'variables',
+                  'axes',
+                  'grids',
+                  'xlinks',
+                  'dictdict',
+                  'default_variable_name',
+                  'parent',
+                  'uri',
+                  'mode']:
             if not v in self.__cdms_internals__:
-                val = self.__cdms_internals__+[v,]
+                val = self.__cdms_internals__ + [v, ]
                 self.___cdms_internals__ = val
-                
+
         cuDataset.__init__(self)
         self.parent = parent
         self.uri = uri
         self.mode = mode
             # Path of data files relative to parent db.
-            # Note: .directory is the location of data relative to the location of the XML file
+            # Note: .directory is the location of data relative to the location
+            # of the XML file
         self.datapath = datapath
         self.variables = {}
         self.axes = {}
@@ -498,19 +576,20 @@ class Dataset(CdmsObj, cuDataset):
         self.xlinks = {}
         self._gridmap_ = {}
             # Gridmap:(latname,lonname,order,maskname,gridclass) => grid
-        (scheme,netloc,xmlpath,parameters,query,fragment)=urlparse.urlparse(uri)
+        (scheme, netloc, xmlpath, parameters,
+         query, fragment) = urlparse.urlparse(uri)
         self._xmlpath_ = xmlpath
         # Dictionary of dictionaries, keyed on node tags
-        self.dictdict = {'variable':self.variables, 
-                         'axis':self.axes, 
-                         'rectGrid':self.grids, 
-                         'curveGrid':self.grids,
-                         'genericGrid':self.grids,
-                         'xlink':self.xlinks
-                        }
+        self.dictdict = {'variable': self.variables,
+                         'axis': self.axes,
+                         'rectGrid': self.grids,
+                         'curveGrid': self.grids,
+                         'genericGrid': self.grids,
+                         'xlink': self.xlinks
+                         }
         # Dataset IDs are external, so may not have been defined yet.
-        if not hasattr(self,'id'):
-            self.id='<None>'
+        if not hasattr(self, 'id'):
+            self.id = '<None>'
         self._status_ = 'open'
         self._convention_ = convention.getDatasetConvention(self)
 
@@ -519,7 +598,7 @@ class Dataset(CdmsObj, cuDataset):
             coordsaux = self._convention_.getDsetnodeAuxAxisIds(datasetNode)
 
             for node in datasetNode.getIdDict().values():
-                if node.tag=='variable':
+                if node.tag == 'variable':
                     if node.id in coordsaux:
                         if node.getDomain().getChildCount() == 1:
                             obj = DatasetAuxAxis1D(self, node.id, node)
@@ -527,22 +606,22 @@ class Dataset(CdmsObj, cuDataset):
                             obj = DatasetAxis2D(self, node.id, node)
                     else:
                         obj = DatasetVariable(self, node.id, node)
-                    self.variables[node.id]=obj
-                elif node.tag=='axis':
-                    obj = Axis(self,node)
-                    self.axes[node.id]=obj
-                elif node.tag=='rectGrid':
-                    obj = RectGrid(self,node)
-                    self.grids[node.id]=obj
-                elif node.tag=='xlink':
+                    self.variables[node.id] = obj
+                elif node.tag == 'axis':
+                    obj = Axis(self, node)
+                    self.axes[node.id] = obj
+                elif node.tag == 'rectGrid':
+                    obj = RectGrid(self, node)
+                    self.grids[node.id] = obj
+                elif node.tag == 'xlink':
                     obj = Xlink(node)
-                    self.xlinks[node.id]=obj
+                    self.xlinks[node.id] = obj
                 else:
                     dict = self.dictdict.get(node.tag)
                     if dict is not None:
-                        dict[node.id]=node
+                        dict[node.id] = node
                     else:
-                        self.dictdict[node.tag] = {node.id:node}
+                        self.dictdict[node.tag] = {node.id: node}
 
             # Initialize grid domains
             for grid in self.grids.values():
@@ -554,7 +633,11 @@ class Dataset(CdmsObj, cuDataset):
                     maskname = ""
                 else:
                     maskname = mask.id
-                self._gridmap_[(latname, lonname, grid.getOrder(), maskname)] = grid
+                self._gridmap_[(
+                               latname,
+                               lonname,
+                               grid.getOrder(),
+                               maskname)] = grid
 
             # Initialize variable domains.
             for var in self.variables.values():
@@ -564,40 +647,43 @@ class Dataset(CdmsObj, cuDataset):
 
                 # Get grid information for the variable. gridkey has the form
                 # (latname,lonname,order,maskname,abstract_class).
-                gridkey, lat, lon = var.generateGridkey(self._convention_, self.variables)
+                gridkey, lat, lon = var.generateGridkey(
+                    self._convention_, self.variables)
 
                 # If the variable is gridded, lookup the grid. If no such grid exists,
-                # create a unique gridname, create the grid, and add to the gridmap.
+                # create a unique gridname, create the grid, and add to the
+                # gridmap.
                 if gridkey is None:
                     grid = None
                 else:
                     grid = self._gridmap_.get(gridkey)
                     if grid is None:
-                        if hasattr(var,'grid_type'):
+                        if hasattr(var, 'grid_type'):
                             gridtype = var.grid_type
                         else:
                             gridtype = "generic"
 
                         candidateBasename = None
                         if gridkey[4] == 'rectGrid':
-                            gridshape = (len(lat),len(lon))
+                            gridshape = (len(lat), len(lon))
                         elif gridkey[4] == 'curveGrid':
                             gridshape = lat.shape
                         elif gridkey[4] == 'genericGrid':
                             gridshape = lat.shape
-                            candidateBasename = 'grid_%d'%gridshape
+                            candidateBasename = 'grid_%d' % gridshape
                         else:
-                            gridshape = (len(lat),len(lon))
+                            gridshape = (len(lat), len(lon))
 
                         if candidateBasename is None:
-                            candidateBasename = 'grid_%dx%d'%gridshape
-                        if not self.grids.has_key(candidateBasename):
+                            candidateBasename = 'grid_%dx%d' % gridshape
+                        if candidateBasename not in self.grids:
                             gridname = candidateBasename
                         else:
                             foundname = 0
-                            for i in range(97,123): # Lower-case letters
-                                candidateName = candidateBasename+'_'+chr(i)
-                                if not self.grids.has_key(candidateName):
+                            for i in range(97, 123):  # Lower-case letters
+                                candidateName = candidateBasename + \
+                                    '_' + chr(i)
+                                if candidateName not in self.grids:
                                     gridname = candidateName
                                     foundname = 1
                                     break
@@ -605,11 +691,16 @@ class Dataset(CdmsObj, cuDataset):
                             if not foundname:
                                 print 'Warning: cannot generate a grid for variable', var.id
                                 continue
-                            
+
                         # Create the grid
                         if gridkey[4] == 'rectGrid':
-                            node = cdmsNode.RectGridNode(gridname, lat.id, lon.id, gridtype, gridkey[2])
-                            grid = RectGrid(self,node)
+                            node = cdmsNode.RectGridNode(
+                                gridname,
+                                lat.id,
+                                lon.id,
+                                gridtype,
+                                gridkey[2])
+                            grid = RectGrid(self, node)
                             grid.initDomain(self.axes, self.variables)
                         elif gridkey[4] == 'curveGrid':
                             grid = DatasetCurveGrid(lat, lon, gridname, self)
@@ -650,34 +741,39 @@ class Dataset(CdmsObj, cuDataset):
                     # but now there _may_ be an additional item before path...
                     for varm1 in varmap:
                         tstart, tend, levstart, levend = varm1[0:4]
-                        if (len(varm1)>=6):
+                        if (len(varm1) >= 6):
                             forecast = varm1[4]
                         else:
                             forecast = None
                         path = varm1[-1]
-                        self._filemap_[(varname, tstart, levstart, forecast)] = path
+                        self._filemap_[(
+                                       varname,
+                                       tstart,
+                                       levstart,
+                                       forecast)] = path
                         if tstart is not None:
-                            timemap[(tstart, tend)] = 1 # Collect unique (tstart, tend) tuples
+                            timemap[
+                                (tstart, tend)] = 1  # Collect unique (tstart, tend) tuples
                         if levstart is not None:
                             levmap[(levstart, levend)] = 1
                         if forecast is not None:
-                            fcmap[(forecast,forecast)] = 1
+                            fcmap[(forecast, forecast)] = 1
                     tkeys = timemap.keys()
-                    if len(tkeys)>0:
+                    if len(tkeys) > 0:
                         tkeys.sort()
                         tpart = map(lambda x: list(x), tkeys)
                     else:
                         tpart = None
                     levkeys = levmap.keys()
-                    if len(levkeys)>0:
+                    if len(levkeys) > 0:
                         levkeys.sort()
                         levpart = map(lambda x: list(x), levkeys)
                     else:
                         levpart = None
                     fckeys = fcmap.keys()
-                    if len(fckeys)>0:
+                    if len(fckeys) > 0:
                         fckeys.sort()
-                    if self.variables.has_key(varname):
+                    if varname in self.variables:
                         self.variables[varname]._varpart_ = [tpart, levpart]
 
     def getConvention(self):
@@ -685,7 +781,7 @@ class Dataset(CdmsObj, cuDataset):
         return self._convention_
 
     # Get a dictionary of objects with the given tag
-    def getDictionary(self,tag):
+    def getDictionary(self, tag):
         return self.dictdict[tag]
 
     # Synchronize writes with data/metadata files
@@ -706,24 +802,24 @@ class Dataset(CdmsObj, cuDataset):
         self.parent = None
         self._status_ = 'closed'
 
-##  Note: Removed to allow garbage collection of reference cycles
-##     def __del__(self):
-##         if cdmsobj._debug==1:
-##             print 'Deleting dataset',self.id
-##         self.close()
-    
+# Note: Removed to allow garbage collection of reference cycles
+# def __del__(self):
+# if cdmsobj._debug==1:
+# print 'Deleting dataset',self.id
+# self.close()
+
     # Create an axis
     # 'name' is the string name of the Axis
     # 'ar' is the 1-D data array, or None for an unlimited axis
     # Return an axis object.
-    def createAxis(self,name,ar):
+    def createAxis(self, name, ar):
         pass
 
     # Create an implicit rectilinear grid. lat, lon, and mask are objects.
     # order and type are strings
-    def createRectGrid(self,id, lat, lon, order, type="generic", mask=None):
+    def createRectGrid(self, id, lat, lon, order, type="generic", mask=None):
         node = cdmsNode.RectGridNode(id, lat.id, lon.id, type, order, mask.id)
-        grid = RectGrid(self,node)
+        grid = RectGrid(self, node)
         grid.initDomain(self.axes, self.variables)
         self.grids[grid.id] = grid
 #        self._gridmap_[gridkey] = grid
@@ -733,31 +829,31 @@ class Dataset(CdmsObj, cuDataset):
     # 'datatype' is a CDMS datatype
     # 'axisnames' is a list of axes or grids
     # Return a variable object.
-    def createVariable(self,name,datatype,axisnames):
+    def createVariable(self, name, datatype, axisnames):
         pass
 
     # Search for a pattern in a string-valued attribute. If attribute is None,
     # search all string attributes. If tag is 'dataset', just check the dataset,
     # else check all nodes in the dataset of class type matching the tag. If tag
     # is None, search the dataset and all objects contained in it.
-    def searchPattern(self,pattern,attribute,tag):
+    def searchPattern(self, pattern, attribute, tag):
         resultlist = []
         if tag is not None:
             tag = tag.lower()
-        if tag in ('dataset',None):
-            if self.searchone(pattern,attribute)==1:
+        if tag in ('dataset', None):
+            if self.searchone(pattern, attribute) == 1:
                 resultlist = [self]
             else:
                 resultlist = []
         if tag is None:
             for dict in self.dictdict.values():
                 for obj in dict.values():
-                    if obj.searchone(pattern,attribute):
+                    if obj.searchone(pattern, attribute):
                         resultlist.append(obj)
-        elif tag!='dataset':
+        elif tag != 'dataset':
             dict = self.dictdict[tag]
             for obj in dict.values():
-                if obj.searchone(pattern,attribute):
+                if obj.searchone(pattern, attribute):
                     resultlist.append(obj)
         return resultlist
 
@@ -765,24 +861,24 @@ class Dataset(CdmsObj, cuDataset):
     # search all string attributes. If tag is 'dataset', just check the dataset,
     # else check all nodes in the dataset of class type matching the tag. If tag
     # is None, search the dataset and all objects contained in it.
-    def matchPattern(self,pattern,attribute,tag):
+    def matchPattern(self, pattern, attribute, tag):
         resultlist = []
         if tag is not None:
             tag = tag.lower()
-        if tag in ('dataset',None):
-            if self.matchone(pattern,attribute)==1:
+        if tag in ('dataset', None):
+            if self.matchone(pattern, attribute) == 1:
                 resultlist = [self]
             else:
                 resultlist = []
         if tag is None:
             for dict in self.dictdict.values():
                 for obj in dict.values():
-                    if obj.matchone(pattern,attribute):
+                    if obj.matchone(pattern, attribute):
                         resultlist.append(obj)
-        elif tag!='dataset':
+        elif tag != 'dataset':
             dict = self.dictdict[tag]
             for obj in dict.values():
-                if obj.matchone(pattern,attribute):
+                if obj.matchone(pattern, attribute):
                     resultlist.append(obj)
         return resultlist
 
@@ -793,13 +889,13 @@ class Dataset(CdmsObj, cuDataset):
     # If 'variable', 'axis', etc., it is applied only to that type of object
     # in the dataset. If None, it is applied to all objects, including
     # the dataset itself.
-    def searchPredicate(self,predicate,tag):
+    def searchPredicate(self, predicate, tag):
         resultlist = []
         if tag is not None:
             tag = tag.lower()
-        if tag in ('dataset',None):
+        if tag in ('dataset', None):
             try:
-                if apply(predicate,(self,))==1:
+                if predicate(*(self,)) == 1:
                     resultlist.append(self)
             except AttributeError:
                 pass
@@ -807,15 +903,15 @@ class Dataset(CdmsObj, cuDataset):
             for dict in self.dictdict.values():
                 for obj in dict.values():
                     try:
-                        if apply(predicate,(obj,))==1:
+                        if predicate(*(obj,)) == 1:
                             resultlist.append(obj)
                     except AttributeError:
                         pass
-        elif tag!="dataset":
+        elif tag != "dataset":
             dict = self.dictdict[tag]
             for obj in dict.values():
                 try:
-                    if apply(predicate,(obj,))==1:
+                    if predicate(*(obj,)) == 1:
                         resultlist.append(obj)
                 except:
                     pass
@@ -827,8 +923,7 @@ class Dataset(CdmsObj, cuDataset):
         for var in self.variables.values():
             for path, stuple in var.getPaths():
                 pathdict[path] = 1
-        result = pathdict.keys()
-        result.sort()
+        result = sorted(pathdict.keys())
         return result
 
     # Open a data file associated with this dataset.
@@ -838,33 +933,36 @@ class Dataset(CdmsObj, cuDataset):
 
         # Opened via a local XML file?
         if self.parent is None:
-            path = os.path.join(self.datapath,filename)
-            if cdmsobj._debug==1:
-                sys.stdout.write(path+'\n'); sys.stdout.flush()
-            f = Cdunif.CdunifFile(path,mode)
+            path = os.path.join(self.datapath, filename)
+            if cdmsobj._debug == 1:
+                sys.stdout.write(path + '\n')
+                sys.stdout.flush()
+            f = Cdunif.CdunifFile(path, mode)
             return f
 
         # Opened via a database
         else:
             dburls = self.parent.url
-            if type(dburls)!=type([]):
+            if not isinstance(dburls, type([])):
                 dburls = [dburls]
 
             # Try first to open as a local file
             for dburl in dburls:
                 if os.path.isabs(self.directory):
-                    fileurl = os.path.join(self.directory,filename)
+                    fileurl = os.path.join(self.directory, filename)
                 else:
                     try:
-                        fileurl = os.path.join(dburl,self.datapath,filename)
+                        fileurl = os.path.join(dburl, self.datapath, filename)
                     except:
-                        print 'Error joining',`dburl`,self.datapath,filename
+                        print 'Error joining', repr(dburl), self.datapath, filename
                         raise
-                (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(fileurl)
-                if scheme in ['file',''] and os.path.isfile(path):
-                    if cdmsobj._debug==1:
-                        sys.stdout.write(fileurl+'\n'); sys.stdout.flush()
-                    f = Cdunif.CdunifFile(path,mode)
+                (scheme, netloc, path, parameters, query,
+                 fragment) = urlparse.urlparse(fileurl)
+                if scheme in ['file', ''] and os.path.isfile(path):
+                    if cdmsobj._debug == 1:
+                        sys.stdout.write(fileurl + '\n')
+                        sys.stdout.flush()
+                    f = Cdunif.CdunifFile(path, mode)
                     return f
 
             # See if request manager is being used for file transfer
@@ -874,32 +972,40 @@ class Dataset(CdmsObj, cuDataset):
                 lcbase = db.lcBaseDN
                 lcpath = self.getLogicalCollectionDN(lcbase)
 
-                # File location is logical collection path combined with relative filename
+                # File location is logical collection path combined with
+                # relative filename
                 fileDN = (self.uri, filename)
-                path = cache.getFile(filename, fileDN, lcpath=lcpath, userid=db.userid, useReplica=db.useReplica)
+                path = cache.getFile(
+                    filename,
+                    fileDN,
+                    lcpath=lcpath,
+                    userid=db.userid,
+                    useReplica=db.useReplica)
                 try:
-                    f = Cdunif.CdunifFile(path,mode)
+                    f = Cdunif.CdunifFile(path, mode)
                 except:
                     # Try again, in case another process clobbered this file
-                    path = cache.getFile(fileurl,fileDN)
-                    f = Cdunif.CdunifFile(path,mode)
+                    path = cache.getFile(fileurl, fileDN)
+                    f = Cdunif.CdunifFile(path, mode)
                 return f
 
             # Try to read via FTP:
 
             for dburl in dburls:
-                fileurl = os.path.join(dburl,self.datapath,filename)
-                (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(fileurl)
-                if scheme=='ftp':
+                fileurl = os.path.join(dburl, self.datapath, filename)
+                (scheme, netloc, path, parameters, query,
+                 fragment) = urlparse.urlparse(fileurl)
+                if scheme == 'ftp':
                     cache = self.parent.enableCache()
-                    fileDN = (self.uri, filename) # Global file name
-                    path = cache.getFile(fileurl,fileDN)
+                    fileDN = (self.uri, filename)  # Global file name
+                    path = cache.getFile(fileurl, fileDN)
                     try:
-                        f = Cdunif.CdunifFile(path,mode)
+                        f = Cdunif.CdunifFile(path, mode)
                     except:
-                        # Try again, in case another process clobbered this file
-                        path = cache.getFile(fileurl,fileDN)
-                        f = Cdunif.CdunifFile(path,mode)
+                        # Try again, in case another process clobbered this
+                        # file
+                        path = cache.getFile(fileurl, fileDN)
+                        f = Cdunif.CdunifFile(path, mode)
                     return f
 
             # File not found
@@ -912,9 +1018,9 @@ class Dataset(CdmsObj, cuDataset):
         if hasattr(self, "lc"):
             dn = self.lc
         else:
-            dn = "lc=%s"%self.id
+            dn = "lc=%s" % self.id
         if base is not None:
-            dn = "%s,%s"%(dn,base)
+            dn = "%s,%s" % (dn, base)
         return dn
 
     def getVariable(self, id):
@@ -926,7 +1032,8 @@ class Dataset(CdmsObj, cuDataset):
         axes defined on latitude or longitude, excluding weights and bounds."""
         retval = self.variables.values()
         if spatial:
-            retval = filter(lambda x: x.id[0:7]!="bounds_" and x.id[0:8]!="weights_" and ((x.getLatitude() is not None) or (x.getLongitude() is not None) or (x.getLevel() is not None)), retval)
+            retval = filter(lambda x: x.id[0:7] != "bounds_" and x.id[0:8] != "weights_" and (
+                (x.getLatitude() is not None) or (x.getLongitude() is not None) or (x.getLevel() is not None)), retval)
         return retval
 
     def getAxis(self, id):
@@ -938,38 +1045,40 @@ class Dataset(CdmsObj, cuDataset):
         return self.grids.get(id)
 
     def __repr__(self):
-        return "<Dataset: '%s', URI: '%s', mode: '%s', status: %s>"%(self.id, self.uri, self.mode, self._status_)
-
-## internattr.add_internal_attribute (Dataset, 'datapath',
-##                                             'variables',
-##                                             'axes',
-##                                             'grids',
-##                                             'xlinks',
-##                                             'dictdict',
-##                                             'default_variable_name',
-##                                             'parent',
-##                                             'uri',
-##                                             'mode')
+        return "<Dataset: '%s', URI: '%s', mode: '%s', status: %s>" % (self.id, self.uri, self.mode, self._status_)
+
+# internattr.add_internal_attribute (Dataset, 'datapath',
+# 'variables',
+# 'axes',
+# 'grids',
+# 'xlinks',
+# 'dictdict',
+# 'default_variable_name',
+# 'parent',
+# 'uri',
+# 'mode')
+
 
 class CdmsFile(CdmsObj, cuDataset):
-    def __init__(self, path, mode, hostObj = None, mpiBarrier=False):
+
+    def __init__(self, path, mode, hostObj=None, mpiBarrier=False):
 
         if mpiBarrier:
             MPI.COMM_WORLD.Barrier()
 
         CdmsObj.__init__(self, None)
         cuDataset.__init__(self)
-        value = self.__cdms_internals__+['datapath',
-                                'variables',
-                                'axes',
-                                'grids',
-                                'xlinks',
-                                'dictdict',
-                                'default_variable_name',
-                                'id',
-                                'uri',
-                                'parent',
-                                'mode']
+        value = self.__cdms_internals__ + ['datapath',
+                                           'variables',
+                                           'axes',
+                                           'grids',
+                                           'xlinks',
+                                           'dictdict',
+                                           'default_variable_name',
+                                           'id',
+                                           'uri',
+                                           'parent',
+                                           'mode']
         self.___cdms_internals__ = value
         self.id = path
         if "://" in path:
@@ -978,14 +1087,14 @@ class CdmsFile(CdmsObj, cuDataset):
             self.uri = "file://" + os.path.abspath(os.path.expanduser(path))
         self._mode_ = mode
         try:
-            if mode[0].lower()=="w":
+            if mode[0].lower() == "w":
                 try:
                     os.remove(path)
                 except:
                     pass
-            _fileobj_ = Cdunif.CdunifFile (path, mode)
-        except Exception,err:
-            raise CDMSError('Cannot open file %s (%s)'%(path,err))
+            _fileobj_ = Cdunif.CdunifFile(path, mode)
+        except Exception as err:
+            raise CDMSError('Cannot open file %s (%s)' % (path, err))
         self._file_ = _fileobj_   # Cdunif file object
         self.variables = {}
         self.axes = {}
@@ -993,33 +1102,40 @@ class CdmsFile(CdmsObj, cuDataset):
         self.xlinks = {}
         self._gridmap_ = {}
 
-        # self.attributes returns the Cdunif file dictionary. 
-##         self.replace_external_attributes(self._file_.__dict__)
-        for att in  self._file_.__dict__.keys():
-            self.__dict__.__setitem__(att,self._file_.__dict__[att])
-            self.attributes[att]=self._file_.__dict__[att]
+        # self.attributes returns the Cdunif file dictionary.
+# self.replace_external_attributes(self._file_.__dict__)
+        for att in self._file_.__dict__.keys():
+            self.__dict__.__setitem__(att, self._file_.__dict__[att])
+            self.attributes[att] = self._file_.__dict__[att]
         self._boundAxis_ = None         # Boundary axis for cell vertices
-        if self._mode_=='w':
+        if self._mode_ == 'w':
             self.Conventions = convention.CFConvention.current
         self._status_ = 'open'
         self._convention_ = convention.getDatasetConvention(self)
 
         try:
-            
+
             # A mosaic variable with coordinates attached, but the coordinate variables reside in a
-            # different file. Add the coordinate variables to the mosaic variables list.
+            # different file. Add the coordinate variables to the mosaic
+            # variables list.
             if not hostObj is None:
                 for name in self._file_.variables.keys():
                     if 'coordinates' in dir(self._file_.variables[name]):
-                        coords = self._file_.variables[name].coordinates.split()
+                        coords = self._file_.variables[
+                            name].coordinates.split()
                         for coord in coords:
                             if not coord in self._file_.variables.keys():
-                                cdunifvar = Cdunif.CdunifFile(hostObj.gridVars[coord][0], mode)
-                                self._file_.variables[coord] = cdunifvar.variables[coord]
-                
+                                cdunifvar = Cdunif.CdunifFile(
+                                    hostObj.gridVars[coord][0],
+                                    mode)
+                                self._file_.variables[
+                                    coord] = cdunifvar.variables[
+                                        coord]
+
             # Get lists of 1D and auxiliary coordinate axes
             coords1d = self._convention_.getAxisIds(self._file_.variables)
-            coordsaux = self._convention_.getAxisAuxIds(self._file_.variables, coords1d)
+            coordsaux = self._convention_.getAxisAuxIds(
+                self._file_.variables, coords1d)
 
             # Build variable list
             for name in self._file_.variables.keys():
@@ -1028,12 +1144,15 @@ class CdmsFile(CdmsObj, cuDataset):
                     if name in coordsaux:
                         # Put auxiliary coordinate axes with variables, since there may be
                         # a dimension with the same name.
-                        if len(cdunifvar.shape)==2:
-                            self.variables[name] = FileAxis2D(self, name, cdunifvar)
+                        if len(cdunifvar.shape) == 2:
+                            self.variables[name] = FileAxis2D(
+                                self, name, cdunifvar)
                         else:
-                            self.variables[name] = FileAuxAxis1D(self, name, cdunifvar)
+                            self.variables[name] = FileAuxAxis1D(
+                                self, name, cdunifvar)
                     else:
-                        self.variables[name] = FileVariable(self,name,cdunifvar)
+                        self.variables[name] = FileVariable(
+                            self, name, cdunifvar)
 
             # Build axis list
             for name in self._file_.dimensions.keys():
@@ -1043,7 +1162,7 @@ class CdmsFile(CdmsObj, cuDataset):
                     cdunifvar = self._file_.variables[name]
                 else:
                     cdunifvar = None
-                self.axes[name] = FileAxis(self,name,cdunifvar)
+                self.axes[name] = FileAxis(self, name, cdunifvar)
 
             # Attach boundary variables
             for name in coordsaux:
@@ -1051,7 +1170,12 @@ class CdmsFile(CdmsObj, cuDataset):
                 bounds = self._convention_.getVariableBounds(self, var)
                 var.setBounds(bounds)
 
-            self.dictdict = {'variable':self.variables, 'axis':self.axes, 'rectGrid':self.grids, 'curveGrid':self.grids, 'genericGrid':self.grids}
+            self.dictdict = {
+                'variable': self.variables,
+                'axis': self.axes,
+                'rectGrid': self.grids,
+                'curveGrid': self.grids,
+                'genericGrid': self.grids}
 
             # Initialize variable domains
             for var in self.variables.values():
@@ -1061,41 +1185,44 @@ class CdmsFile(CdmsObj, cuDataset):
             for var in self.variables.values():
                 # Get grid information for the variable. gridkey has the form
                 # (latname,lonname,order,maskname, abstract_class).
-                gridkey, lat, lon = var.generateGridkey(self._convention_, self.variables)
+                gridkey, lat, lon = var.generateGridkey(
+                    self._convention_, self.variables)
 
                 # If the variable is gridded, lookup the grid. If no such grid exists,
-                # create a unique gridname, create the grid, and add to the gridmap.
+                # create a unique gridname, create the grid, and add to the
+                # gridmap.
                 if gridkey is None:
                     grid = None
                 else:
                     grid = self._gridmap_.get(gridkey)
                     if grid is None:
 
-                        if hasattr(var,'grid_type'):
+                        if hasattr(var, 'grid_type'):
                             gridtype = var.grid_type
                         else:
                             gridtype = "generic"
 
                         candidateBasename = None
                         if gridkey[4] == 'rectGrid':
-                            gridshape = (len(lat),len(lon))
+                            gridshape = (len(lat), len(lon))
                         elif gridkey[4] == 'curveGrid':
                             gridshape = lat.shape
                         elif gridkey[4] == 'genericGrid':
                             gridshape = lat.shape
-                            candidateBasename = 'grid_%d'%gridshape
+                            candidateBasename = 'grid_%d' % gridshape
                         else:
-                            gridshape = (len(lat),len(lon))
+                            gridshape = (len(lat), len(lon))
 
                         if candidateBasename is None:
-                            candidateBasename = 'grid_%dx%d'%gridshape
-                        if not self.grids.has_key(candidateBasename):
+                            candidateBasename = 'grid_%dx%d' % gridshape
+                        if candidateBasename not in self.grids:
                             gridname = candidateBasename
                         else:
                             foundname = 0
-                            for i in range(97,123): # Lower-case letters
-                                candidateName = candidateBasename+'_'+chr(i)
-                                if not self.grids.has_key(candidateName):
+                            for i in range(97, 123):  # Lower-case letters
+                                candidateName = candidateBasename + \
+                                    '_' + chr(i)
+                                if candidateName not in self.grids:
                                     gridname = candidateName
                                     foundname = 1
                                     break
@@ -1106,20 +1233,36 @@ class CdmsFile(CdmsObj, cuDataset):
 
                         # Create the grid
                         if gridkey[4] == 'rectGrid':
-                            grid = FileRectGrid(self, gridname, lat, lon, gridkey[2], gridtype)
+                            grid = FileRectGrid(
+                                self,
+                                gridname,
+                                lat,
+                                lon,
+                                gridkey[2],
+                                gridtype)
                         else:
-                            if gridkey[3]!='':
-                                if self.variables.has_key(gridkey[3]):
+                            if gridkey[3] != '':
+                                if gridkey[3] in self.variables:
                                     maskvar = self.variables[gridkey[3]]
                                 else:
-                                    print 'Warning: mask variable %s not found'%gridkey[3]
+                                    print 'Warning: mask variable %s not found' % gridkey[3]
                                     maskvar = None
                             else:
                                 maskvar = None
                             if gridkey[4] == 'curveGrid':
-                                grid = FileCurveGrid(lat, lon, gridname, parent=self, maskvar=maskvar)
+                                grid = FileCurveGrid(
+                                    lat,
+                                    lon,
+                                    gridname,
+                                    parent=self,
+                                    maskvar=maskvar)
                             else:
-                                grid = FileGenericGrid(lat, lon, gridname, parent=self, maskvar=maskvar)
+                                grid = FileGenericGrid(
+                                    lat,
+                                    lon,
+                                    gridname,
+                                    parent=self,
+                                    maskvar=maskvar)
                         self.grids[grid.id] = grid
                         self._gridmap_[gridkey] = grid
 
@@ -1130,45 +1273,45 @@ class CdmsFile(CdmsObj, cuDataset):
             raise
 
     # setattr writes external global attributes to the file
-    def __setattr__ (self, name, value):
-##         s = self.get_property_s(name)
-##         if s is not None:
-##             print '....handler'
-##             s(self, name, value)
-##             return
-        self.__dict__[name] =  value #attributes kept in sync w/file
-        if not name in self.__cdms_internals__ and name[0]!='_':
+    def __setattr__(self, name, value):
+# s = self.get_property_s(name)
+# if s is not None:
+# print '....handler'
+# s(self, name, value)
+# return
+        self.__dict__[name] = value  # attributes kept in sync w/file
+        if not name in self.__cdms_internals__ and name[0] != '_':
             setattr(self._file_, name, value)
-            self.attributes[name]=value
-
-##     # getattr reads external global attributes from the file
-##     def __getattr__ (self, name):
-## ##         g = self.get_property_g(name)
-## ##         if g is not None:
-## ##             return g(self, name)
-##         if name in self.__cdms_internals__:
-##             try:
-##                 return self.__dict__[name]
-##             except KeyError:
-##                 raise AttributeError("%s instance has no attribute %s." % \
-##                            (self.__class__.__name__, name))
-##         else:
-##             return getattr(self._file_,name)
+            self.attributes[name] = value
+
+# getattr reads external global attributes from the file
+# def __getattr__ (self, name):
+# g = self.get_property_g(name)
+# if g is not None:
+# return g(self, name)
+# if name in self.__cdms_internals__:
+# try:
+# return self.__dict__[name]
+# except KeyError:
+# raise AttributeError("%s instance has no attribute %s." % \
+# (self.__class__.__name__, name))
+# else:
+# return getattr(self._file_,name)
 
     # delattr deletes external global attributes in the file
     def __delattr__(self, name):
-##         d = self.get_property_d(name)
-##         if d is not None:
-##             d(self, name)
-##             return
+# d = self.get_property_d(name)
+# if d is not None:
+# d(self, name)
+# return
         try:
             del self.__dict__[name]
         except KeyError:
-            raise AttributeError("%s instance has no attribute %s." % \
-                  (self.__class__.__name__, name))
+            raise AttributeError("%s instance has no attribute %s." %
+                                 (self.__class__.__name__, name))
         if not name in self.__cdms_internals__:
             delattr(self._file_, name)
-            if( name in self.attributes.keys() ):
+            if(name in self.attributes.keys()):
                 del(self.attributes[name])
 
     def sync(self):
@@ -1178,12 +1321,12 @@ class CdmsFile(CdmsObj, cuDataset):
    None :: (None) (0) yep
    :::
    """
-        if self._status_=="closed":
+        if self._status_ == "closed":
             raise CDMSError(FileWasClosed + self.id)
         self._file_.sync()
 
     def close(self):
-        if self._status_=="closed":
+        if self._status_ == "closed":
             return
         if hasattr(self, 'dictdict'):
             for dict in self.dictdict.values():
@@ -1194,21 +1337,21 @@ class CdmsFile(CdmsObj, cuDataset):
         self._file_.close()
         self._status_ = 'closed'
 
-##  Note: Removed to allow garbage collection of reference cycles
-##     def __del__(self):
-##         if cdmsobj._debug==1:
-##             print 'Deleting file',self.id
-##         # If the object has been deallocated due to open error,
-##         # it will not have an attribute .dictdict
-##         if hasattr(self,"dictdict") and self.dictdict != {}:
-##             self.close()
+# Note: Removed to allow garbage collection of reference cycles
+# def __del__(self):
+# if cdmsobj._debug==1:
+# print 'Deleting file',self.id
+# If the object has been deallocated due to open error,
+# it will not have an attribute .dictdict
+# if hasattr(self,"dictdict") and self.dictdict != {}:
+# self.close()
 
     # Create an axis
     # 'name' is the string name of the Axis
     # 'ar' is the 1-D data array, or None for an unlimited axis
     # Set unlimited to true to designate the axis as unlimited
     # Return an axis object.
-    def createAxis(self,name,ar,unlimited=0):
+    def createAxis(self, name, ar, unlimited=0):
         """
         Create an axis
         'name' is the string name of the Axis
@@ -1227,36 +1370,36 @@ class CdmsFile(CdmsObj, cuDataset):
         axis :: (cdms2.axis.FileAxis) (0) file axis whose id is name
         :::
         """
-        if self._status_=="closed":
+        if self._status_ == "closed":
             raise CDMSError(FileWasClosed + self.id)
         cufile = self._file_
-        if ar is None or (unlimited==1 and getNetcdfUseParallelFlag()==0):
-            cufile.createDimension(name,None)
+        if ar is None or (unlimited == 1 and getNetcdfUseParallelFlag() == 0):
+            cufile.createDimension(name, None)
             if ar is None:
                 typecode = numpy.float
             else:
                 typecode = ar.dtype.char
         else:
-            cufile.createDimension(name,len(ar))
+            cufile.createDimension(name, len(ar))
             typecode = ar.dtype.char
 
         # Compatibility: revert to old typecode for cdunif
         typecode = typeconv.oldtypecodes[typecode]
-        cuvar = cufile.createVariable(name,typecode,(name,))
+        cuvar = cufile.createVariable(name, typecode, (name,))
 
         # Cdunif should really create this extra dimension info:
         #   (units,typecode,filename,varname_local,dimension_type,ncid)
-        cufile.dimensioninfo[name] = ('',typecode,name,'','global',-1)
+        cufile.dimensioninfo[name] = ('', typecode, name, '', 'global', -1)
 
         # Note: like netCDF-3, cdunif does not support 64-bit integers.
         # If ar has dtype int64 on a 64-bit machine, cuvar will be a 32-bit int,
         # and ar must be downcast.
         if ar is not None:
-            if ar.dtype.char!='l':
+            if ar.dtype.char != 'l':
                 cuvar[0:len(ar)] = numpy.ma.filled(ar)
             else:
                 cuvar[0:len(ar)] = numpy.ma.filled(ar).astype(cuvar.typecode())
-        axis = FileAxis(self,name,cuvar)
+        axis = FileAxis(self, name, cuvar)
         self.axes[name] = axis
         return axis
 
@@ -1273,23 +1416,24 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         Input:::
         name :: (str) (0) dimension name
-        axislen :: (int) (1) 
+        axislen :: (int) (1)
         :::
         Output:::
         axis :: (cdms2.axis.FileVirtualAxis) (0) file axis whose id is name
         :::
         """
-        if self._status_=="closed":
+        if self._status_ == "closed":
             raise CDMSError(FileWasClosed + self.id)
         cufile = self._file_
         cufile.createDimension(name, axislen)
-        cufile.dimensioninfo[name] = ('','f',name,'','global',-1)
+        cufile.dimensioninfo[name] = ('', 'f', name, '', 'global', -1)
         axis = FileVirtualAxis(self, name, axislen)
         self.axes[name] = axis
         return axis
 
     # Copy axis description and data from another axis
-    def copyAxis(self, axis, newname=None, unlimited=0, index=None, extbounds=None):
+    def copyAxis(self, axis, newname=None,
+                 unlimited=0, index=None, extbounds=None):
         """
         Copy axis description and data from another axis
         :::
@@ -1306,24 +1450,25 @@ class CdmsFile(CdmsObj, cuDataset):
         axis :: (cdms2.axis.FileAxis/cdms2.axis.FileVirtualAxis) (0) copy of input axis
         :::
         """
-        if newname is None: newname=axis.id
+        if newname is None:
+            newname = axis.id
 
         # If the axis already exists and has the same values, return existing
-        if self.axes.has_key(newname):
+        if newname in self.axes:
             newaxis = self.axes[newname]
             if newaxis.isVirtual():
-                if len(axis)!=len(newaxis):
-                    raise DuplicateAxisError(DuplicateAxis+newname)
-            elif unlimited==0 or (unlimited==1 and getNetcdfUseParallelFlag()!=0):
-                if len(axis)!=len(newaxis) or numpy.alltrue(numpy.less(numpy.absolute(newaxis[:]-axis[:]),1.e-5))==0:
-                    raise DuplicateAxisError(DuplicateAxis+newname)
+                if len(axis) != len(newaxis):
+                    raise DuplicateAxisError(DuplicateAxis + newname)
+            elif unlimited == 0 or (unlimited == 1 and getNetcdfUseParallelFlag() != 0):
+                if len(axis) != len(newaxis) or numpy.alltrue(numpy.less(numpy.absolute(newaxis[:] - axis[:]), 1.e-5)) == 0:
+                    raise DuplicateAxisError(DuplicateAxis + newname)
             else:
                 if index is None:
-                    isoverlap, index = isOverlapVector(axis[:],newaxis[:])
+                    isoverlap, index = isOverlapVector(axis[:], newaxis[:])
                 else:
                     isoverlap = 1
                 if isoverlap:
-                    newaxis[index:index+len(axis)] = axis[:]
+                    newaxis[index:index + len(axis)] = axis[:]
                     if extbounds is None:
                         axisBounds = axis.getBounds()
                     else:
@@ -1331,7 +1476,7 @@ class CdmsFile(CdmsObj, cuDataset):
                     if axisBounds is not None:
                         newaxis.setBounds(axisBounds)
                 else:
-                    raise DuplicateAxisError(DuplicateAxis+newname)
+                    raise DuplicateAxisError(DuplicateAxis + newname)
 
         elif axis.isVirtual():
             newaxis = self.createVirtualAxis(newname, len(axis))
@@ -1346,8 +1491,8 @@ class CdmsFile(CdmsObj, cuDataset):
                 else:
                     boundsid = None
                 newaxis.setBounds(bounds, persistent=1, boundsid=boundsid)
-            for attname,attval in axis.attributes.items():
-                if attname not in ["datatype", "id","length","isvar","name_in_file","partition"]:
+            for attname, attval in axis.attributes.items():
+                if attname not in ["datatype", "id", "length", "isvar", "name_in_file", "partition"]:
                     setattr(newaxis, attname, attval)
         return newaxis
 
@@ -1393,7 +1538,7 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         """
         if newname is None:
-            if hasattr(grid,'id'):
+            if hasattr(grid, 'id'):
                 newname = grid.id
             else:
                 newname = 'Grid'
@@ -1410,20 +1555,26 @@ class CdmsFile(CdmsObj, cuDataset):
         lon.designateLongitude(persistent=1)
 
         # If the grid name already exists, and is the same, just return it
-        if self.grids.has_key(newname):
+        if newname in self.grids:
             newgrid = self.grids[newname]
             newlat = newgrid.getLatitude()
             newlon = newgrid.getLongitude()
             if ((newlat is not lat) or
                 (newlon is not lon) or
                 (newgrid.getOrder() != grid.getOrder()) or
-                (newgrid.getType() != grid.getType())):
+                    (newgrid.getType() != grid.getType())):
                 raise DuplicateGrid(newname)
 
         # else create a new grid and copy metadata
         else:
             newmask = grid.getMask()    # Get the mask array
-            newgrid = self.createRectGrid(newname, lat, lon, grid.getOrder(), grid.getType(), None)
+            newgrid = self.createRectGrid(
+                newname,
+                lat,
+                lon,
+                grid.getOrder(),
+                grid.getType(),
+                None)
             newgrid.setMask(newmask)    # Set the mask array, non-persistently
             for attname in grid.attributes.keys():
                 setattr(newgrid, attname, getattr(grid, attname))
@@ -1436,7 +1587,7 @@ class CdmsFile(CdmsObj, cuDataset):
     # 'axesOrGrids' is a list of axes, grids. (Note: this should be
     #   generalized to allow subintervals of axes and/or grids)
     # Return a variable object.
-    def createVariable(self,name,datatype,axesOrGrids,fill_value=None):
+    def createVariable(self, name, datatype, axesOrGrids, fill_value=None):
         """
         Create a variable
         'name' is the string name of the Variable
@@ -1456,7 +1607,7 @@ class CdmsFile(CdmsObj, cuDataset):
         axis :: (cdms2.fvariable.FileVariable) (0) file variable
         :::
         """
-        if self._status_=="closed":
+        if self._status_ == "closed":
             raise CDMSError(FileWasClosed + self.id)
         cufile = self._file_
         if datatype in CdDatatypes:
@@ -1464,75 +1615,77 @@ class CdmsFile(CdmsObj, cuDataset):
         else:
             numericType = datatype
 
-        #dimensions = map(lambda x: x.id, axes)
+        # dimensions = map(lambda x: x.id, axes)
         # Make a list of names of axes for _Cdunif
         dimensions = []
         for obj in axesOrGrids:
             if isinstance(obj, FileAxis):
                 dimensions.append(obj.id)
             elif isinstance(obj, FileRectGrid):
-                dimensions = dimensions + [obj.getAxis(0).id, obj.getAxis(1).id]
+                dimensions = dimensions + \
+                    [obj.getAxis(0).id, obj.getAxis(1).id]
             else:
                 raise InvalidDomain
 
         try:
             # Compatibility: revert to old typecode for cdunif
             numericType = typeconv.oldtypecodes[numericType]
-            cuvar = cufile.createVariable(name,numericType,tuple(dimensions))
-        except Exception,err:
+            cuvar = cufile.createVariable(name, numericType, tuple(dimensions))
+        except Exception as err:
             print err
-            raise CDMSError("Creating variable "+name)
-        var = FileVariable(self,name,cuvar)
+            raise CDMSError("Creating variable " + name)
+        var = FileVariable(self, name, cuvar)
         var.initDomain(self.axes)
         self.variables[name] = var
-        if fill_value is not None: var.setMissing(fill_value)
+        if fill_value is not None:
+            var.setMissing(fill_value)
         return var
 
     # Create a variable from an existing variable, and copy the metadata
-##     def createVariableCopy(self, var, newname=None):
+# def createVariableCopy(self, var, newname=None):
 
-##         if newname is None: newname=var.id
-##         if self.variables.has_key(newname):
-##             raise DuplicateVariable(newname)
+# if newname is None: newname=var.id
+# if self.variables.has_key(newname):
+# raise DuplicateVariable(newname)
 
 
-##         # Create axes if necessary
-##         axislist = []
-##         for (axis,start,length,true_length) in var.getDomain():
-##             try:
-##                 newaxis = self.copyAxis(axis)
-##             except DuplicateAxisError:
+# Create axes if necessary
+# axislist = []
+# for (axis,start,length,true_length) in var.getDomain():
+# try:
+# newaxis = self.copyAxis(axis)
+# except DuplicateAxisError:
 
-##                 # Create a unique axis name
-##                 setit = 0
-##                 for i in range(97,123): # Lower-case letters
-##                     try:
-##                         newaxis = self.copyAxis(axis,axis.id+'_'+chr(i))
-##                         setit = 1
-##                         break
-##                     except DuplicateAxisError:
-##                         continue
+# Create a unique axis name
+# setit = 0
+# for i in range(97,123): # Lower-case letters
+# try:
+# newaxis = self.copyAxis(axis,axis.id+'_'+chr(i))
+# setit = 1
+# break
+# except DuplicateAxisError:
+# continue
 
-##                 if setit==0: raise DuplicateAxisError(DuplicateAxis+axis.id)
+# if setit==0: raise DuplicateAxisError(DuplicateAxis+axis.id)
 
-##             axislist.append(newaxis)
+# axislist.append(newaxis)
 
-##         # Create the new variable
-##         datatype = cdmsNode.NumericToCdType.get(var.dtype.char)
-##         newvar = self.createVariable(newname, datatype, axislist)
+# Create the new variable
+# datatype = cdmsNode.NumericToCdType.get(var.dtype.char)
+# newvar = self.createVariable(newname, datatype, axislist)
 
-##         # Copy variable metadata
-##         for attname in var.attributes.keys():
-##             if attname not in ["id", "datatype"]:
-##                 setattr(newvar, attname, getattr(var, attname))
+# Copy variable metadata
+# for attname in var.attributes.keys():
+# if attname not in ["id", "datatype"]:
+# setattr(newvar, attname, getattr(var, attname))
 
-##         return newvar
+# return newvar
 
     # Search for a pattern in a string-valued attribute. If attribute is None,
     # search all string attributes. If tag is 'cdmsFile', just check the dataset,
     # else check all nodes in the dataset of class type matching the tag. If tag
     # is None, search the dataset and all objects contained in it.
-    def searchPattern(self,pattern,attribute,tag):
+    def searchPattern(self, pattern, attribute, tag):
         """
         Search for a pattern in a string-valued attribute. If attribute is None, search all string attributes. If tag is not None, it must match the internal node tag.
         :::
@@ -1542,26 +1695,26 @@ class CdmsFile(CdmsObj, cuDataset):
         tag :: (str/None) (2) node tag
         :::
         Output:::
-        result :: (list) (0) 
+        result :: (list) (0)
         :::
         """
         resultlist = []
         if tag is not None:
             tag = tag.lower()
-        if tag in ('cdmsFile',None,'dataset'):
-            if self.searchone(pattern,attribute)==1:
+        if tag in ('cdmsFile', None, 'dataset'):
+            if self.searchone(pattern, attribute) == 1:
                 resultlist = [self]
             else:
                 resultlist = []
         if tag is None:
             for dict in self.dictdict.values():
                 for obj in dict.values():
-                    if obj.searchone(pattern,attribute):
+                    if obj.searchone(pattern, attribute):
                         resultlist.append(obj)
-        elif tag not in ('cdmsFile','dataset'):
+        elif tag not in ('cdmsFile', 'dataset'):
             dict = self.dictdict[tag]
             for obj in dict.values():
-                if obj.searchone(pattern,attribute):
+                if obj.searchone(pattern, attribute):
                     resultlist.append(obj)
         return resultlist
 
@@ -1569,7 +1722,7 @@ class CdmsFile(CdmsObj, cuDataset):
     # search all string attributes. If tag is 'cdmsFile', just check the dataset,
     # else check all nodes in the dataset of class type matching the tag. If tag
     # is None, search the dataset and all objects contained in it.
-    def matchPattern(self,pattern,attribute,tag):
+    def matchPattern(self, pattern, attribute, tag):
         """
         Match for a pattern in a string-valued attribute. If attribute is None, search all string attributes. If tag is not None, it must match the internal node tag.
         :::
@@ -1579,26 +1732,26 @@ class CdmsFile(CdmsObj, cuDataset):
         tag :: (str/None) (2) node tag
         :::
         Output:::
-        result :: (list) (0) 
+        result :: (list) (0)
         :::
         """
         resultlist = []
         if tag is not None:
             tag = tag.lower()
-        if tag in ('cdmsFile',None,'dataset'):
-            if self.matchone(pattern,attribute)==1:
+        if tag in ('cdmsFile', None, 'dataset'):
+            if self.matchone(pattern, attribute) == 1:
                 resultlist = [self]
             else:
                 resultlist = []
         if tag is None:
             for dict in self.dictdict.values():
                 for obj in dict.values():
-                    if obj.matchone(pattern,attribute):
+                    if obj.matchone(pattern, attribute):
                         resultlist.append(obj)
-        elif tag not in ('cdmsFile','dataset'):
+        elif tag not in ('cdmsFile', 'dataset'):
             dict = self.dictdict[tag]
             for obj in dict.values():
-                if obj.matchone(pattern,attribute):
+                if obj.matchone(pattern, attribute):
                     resultlist.append(obj)
         return resultlist
 
@@ -1609,7 +1762,7 @@ class CdmsFile(CdmsObj, cuDataset):
     # If 'variable', 'axis', etc., it is applied only to that type of object
     # in the dataset. If None, it is applied to all objects, including
     # the dataset itself.
-    def searchPredicate(self,predicate,tag):
+    def searchPredicate(self, predicate, tag):
         """
         Apply a truth-valued predicate. Return a list containing a single instance: [self] if the predicate is true and either tag is None or matches the object node tag. If the predicate returns false, return an empty list
         :::
@@ -1618,15 +1771,15 @@ class CdmsFile(CdmsObj, cuDataset):
         tag :: (str/None) (1) node tag
         :::
         Output:::
-        result :: (list) (0) 
+        result :: (list) (0)
         :::
         """
         resultlist = []
         if tag is not None:
             tag = tag.lower()
-        if tag in ('cdmsFile',None,'dataset'):
+        if tag in ('cdmsFile', None, 'dataset'):
             try:
-                if apply(predicate,(self,))==1:
+                if predicate(*(self,)) == 1:
                     resultlist.append(self)
             except AttributeError:
                 pass
@@ -1634,21 +1787,22 @@ class CdmsFile(CdmsObj, cuDataset):
             for dict in self.dictdict.values():
                 for obj in dict.values():
                     try:
-                        if apply(predicate,(obj,))==1:
+                        if predicate(*(obj,)) == 1:
                             resultlist.append(obj)
                     except AttributeError:
                         pass
-        elif tag not in ('dataset','cdmsFile'):
+        elif tag not in ('dataset', 'cdmsFile'):
             dict = self.dictdict[tag]
             for obj in dict.values():
                 try:
-                    if apply(predicate,(obj,))==1:
+                    if predicate(*(obj,)) == 1:
                         resultlist.append(obj)
                 except:
                     pass
         return resultlist
 
-    def createVariableCopy(self, var, id=None, attributes=None, axes=None, extbounds=None,
+    def createVariableCopy(
+        self, var, id=None, attributes=None, axes=None, extbounds=None,
                               extend=0, fill_value=None, index=None, newname=None, grid=None):
         """Define a new variable, with the same axes and attributes as in <var>.
         This does not copy the data itself.
@@ -1684,10 +1838,10 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         """
         if newname is None:
-            newname=var.id
+            newname = var.id
         if id is not None:
             newname = id
-        if self.variables.has_key(newname):
+        if newname in self.variables:
             raise DuplicateVariable(newname)
 
         # Determine the extended axis name if any
@@ -1696,12 +1850,12 @@ class CdmsFile(CdmsObj, cuDataset):
         else:
             sourceAxislist = axes
 
-        if var.rank()==0:      # scalars are not extensible
+        if var.rank() == 0:      # scalars are not extensible
             extend = 0
-            
-        if extend in (1,None):
+
+        if extend in (1, None):
             firstAxis = sourceAxislist[0]
-            if firstAxis is not None and (extend==1 or firstAxis.isTime()):
+            if firstAxis is not None and (extend == 1 or firstAxis.isTime()):
                 extendedAxis = firstAxis.id
             else:
                 extendedAxis = None
@@ -1711,24 +1865,31 @@ class CdmsFile(CdmsObj, cuDataset):
         # Create axes if necessary
         axislist = []
         for axis in sourceAxislist:
-            if extendedAxis is None or axis.id!=extendedAxis:
+            if extendedAxis is None or axis.id != extendedAxis:
                 try:
                     newaxis = self.copyAxis(axis)
                 except DuplicateAxisError:
 
                     # Create a unique axis name
                     setit = 0
-                    for i in range(97,123): # Lower-case letters
+                    for i in range(97, 123):  # Lower-case letters
                         try:
-                            newaxis = self.copyAxis(axis,axis.id+'_'+chr(i))
+                            newaxis = self.copyAxis(
+                                axis,
+                                axis.id + '_' + chr(i))
                             setit = 1
                             break
                         except DuplicateAxisError:
                             continue
 
-                    if setit==0: raise DuplicateAxisError(DuplicateAxis+axis.id)
+                    if setit == 0:
+                        raise DuplicateAxisError(DuplicateAxis + axis.id)
             else:
-                newaxis = self.copyAxis(axis, unlimited=1, index=index, extbounds=extbounds)
+                newaxis = self.copyAxis(
+                    axis,
+                    unlimited=1,
+                    index=index,
+                    extbounds=extbounds)
 
             axislist.append(newaxis)
 
@@ -1736,25 +1897,29 @@ class CdmsFile(CdmsObj, cuDataset):
         if attributes is None:
             attributes = var.attributes
             try:
-                attributes['missing_value']=var.missing_value
-            except Exception,err:
+                attributes['missing_value'] = var.missing_value
+            except Exception as err:
                 print err
                 pass
             try:
                 if fill_value is None:
-                    if( '_FillValue' in attributes.keys() ):
-                       attributes['_FillValue']=numpy.array(var._FillValue).astype(var.dtype)
-                       attributes['missing_value']=numpy.array(var._FillValue).astype(var.dtype)
-                    if( 'missing_value' in attributes.keys() ):
-                       attributes['_FillValue']=numpy.array(var.missing_value).astype(var.dtype)
-                       attributes['missing_value']=numpy.array(var.missing_value).astype(var.dtype)
+                    if('_FillValue' in attributes.keys()):
+                        attributes['_FillValue'] = numpy.array(
+                            var._FillValue).astype(var.dtype)
+                        attributes['missing_value'] = numpy.array(
+                            var._FillValue).astype(var.dtype)
+                    if('missing_value' in attributes.keys()):
+                        attributes['_FillValue'] = numpy.array(
+                            var.missing_value).astype(var.dtype)
+                        attributes['missing_value'] = numpy.array(
+                            var.missing_value).astype(var.dtype)
                 else:
-                    attributes['_FillValue']=fill_value
-                    attributes['missing_value']=fill_value
+                    attributes['_FillValue'] = fill_value
+                    attributes['missing_value'] = fill_value
             except:
                 pass
-            if attributes.has_key("name"):
-                if attributes['name']!=var.id:
+            if "name" in attributes:
+                if attributes['name'] != var.id:
                     del(attributes['name'])
 
         # Create grid as necessary
@@ -1763,7 +1928,7 @@ class CdmsFile(CdmsObj, cuDataset):
         if grid is not None:
             coords = grid.writeToFile(self)
             if coords is not None:
-                coordattr = "%s %s"%(coords[0].id, coords[1].id)
+                coordattr = "%s %s" % (coords[0].id, coords[1].id)
                 if attributes is None:
                     attributes = {'coordinates': coordattr}
                 else:
@@ -1772,19 +1937,19 @@ class CdmsFile(CdmsObj, cuDataset):
         # Create the new variable
         datatype = cdmsNode.NumericToCdType.get(var.typecode())
         newvar = self.createVariable(newname, datatype, axislist)
-        for attname,attval in attributes.items():
+        for attname, attval in attributes.items():
             if attname not in ["id", "datatype", "parent"]:
                 setattr(newvar, attname, attval)
                 if (attname == "_FillValue") or (attname == "missing_value"):
-                   setattr(newvar, "_FillValue", attval)
-                   setattr(newvar, "missing_value", attval)
+                    setattr(newvar, "_FillValue", attval)
+                    setattr(newvar, "missing_value", attval)
 
         if fill_value is not None:
             newvar.setMissing(fill_value)
 
         return newvar
 
-    def write(self, var, attributes=None, axes=None, extbounds=None, id=None, \
+    def write(self, var, attributes=None, axes=None, extbounds=None, id=None,
               extend=None, fill_value=None, index=None, typecode=None, dtype=None, pack=False):
         """Write var to the file. If the variable is not yet defined in the file,
         a definition is created. By default, the time dimension of the variable is defined as the
@@ -1825,15 +1990,17 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         """
         if _showCompressWarnings:
-            if  (Cdunif.CdunifGetNCFLAGS("shuffle")!=0) or (Cdunif.CdunifGetNCFLAGS("deflate")!=0) or (Cdunif.CdunifGetNCFLAGS("deflate_level")!=0):
+            if (Cdunif.CdunifGetNCFLAGS("shuffle") != 0) or (Cdunif.CdunifGetNCFLAGS("deflate") != 0) or (Cdunif.CdunifGetNCFLAGS("deflate_level") != 0):
                 import warnings
-                warnings.warn("Files are written with compression and shuffling\nYou can query different values of compression using the functions:\ncdms2.getNetcdfShuffleFlag() returning 1 if shuffling is enabled, 0 otherwise\ncdms2.getNetcdfDeflateFlag() returning 1 if deflate is used, 0 otherwise\ncdms2.getNetcdfDeflateLevelFlag() returning the level of compression for the deflate method\n\nIf you want to turn that off or set different values of compression use the functions:\nvalue = 0\ncdms2.setNetcdfShuffleFlag(value) ## where value is either 0 or 1\ncdms2.setNetcdfDeflateFlag(value) ## where value is either 0 or 1\ncdms2.setNetcdfDeflateLevelFlag(value) ## where value is a integer between 0 and 9 included\n\nTurning all values to 0 will produce NetCDF3 Classic files\nTo Force NetCDF4 output with classic format and no compressing use:\ncdms2.setNetcdf4Flag(1)\nNetCDF4 file with no shuffling or defalte and noclassic will be open for parallel i/o",Warning)
-                
+                warnings.warn(
+                    "Files are written with compression and shuffling\nYou can query different values of compression using the functions:\ncdms2.getNetcdfShuffleFlag() returning 1 if shuffling is enabled, 0 otherwise\ncdms2.getNetcdfDeflateFlag() returning 1 if deflate is used, 0 otherwise\ncdms2.getNetcdfDeflateLevelFlag() returning the level of compression for the deflate method\n\nIf you want to turn that off or set different values of compression use the functions:\nvalue = 0\ncdms2.setNetcdfShuffleFlag(value) ## where value is either 0 or 1\ncdms2.setNetcdfDeflateFlag(value) ## where value is either 0 or 1\ncdms2.setNetcdfDeflateLevelFlag(value) ## where value is a integer between 0 and 9 included\n\nTurning all values to 0 will produce NetCDF3 Classic files\nTo Force NetCDF4 output with classic format and no compressing use:\ncdms2.setNetcdf4Flag(1)\nNetCDF4 file with no shuffling or defalte and noclassic will be open for parallel i/o",
+                    Warning)
+
         # Make var an AbstractVariable
         if dtype is None and typecode is not None:
             dtype = typeconv.convtypecode2(typecode)
         typecode = dtype
-        if typecode is not None and var.dtype.char!=typecode:
+        if typecode is not None and var.dtype.char != typecode:
             var = var.astype(typecode)
         var = asVariable(var, writeable=0)
 
@@ -1842,25 +2009,26 @@ class CdmsFile(CdmsObj, cuDataset):
             varid = var.id
         else:
             varid = id
-        if self.variables.has_key(varid):
+        if varid in self.variables:
             if pack:
-              raise CDMSError("You cannot pack an existing variable %s " % varid)
+                raise CDMSError(
+                    "You cannot pack an existing variable %s " %
+                    varid)
             v = self.variables[varid]
         else:
-          if pack is not False:
-              typ = numpy.int16
-              n = 16
-          else:
-            typ = var.dtype
-          v = self.createVariableCopy(var.astype(typ), attributes=attributes, axes=axes, extbounds=extbounds,
-              id=varid, extend=extend, fill_value=fill_value, index=index)
-
-
+            if pack is not False:
+                typ = numpy.int16
+                n = 16
+            else:
+                typ = var.dtype
+            v = self.createVariableCopy(
+                var.astype(typ), attributes=attributes, axes=axes, extbounds=extbounds,
+                id=varid, extend=extend, fill_value=fill_value, index=index)
 
         # If var has typecode numpy.int, and v is created from var, then v will have
         # typecode numpy.int32. (This is a Cdunif 'feature'). This causes a downcast error
         # for numpy versions 23+, so make the downcast explicit.
-        if var.typecode()==numpy.int and v.typecode()==numpy.int32 and pack is False:
+        if var.typecode() == numpy.int and v.typecode() == numpy.int32 and pack is False:
             var = var.astype(numpy.int32)
 
         # Write
@@ -1870,21 +2038,22 @@ class CdmsFile(CdmsObj, cuDataset):
             sourceAxislist = axes
 
         vrank = var.rank()
-        if vrank==0:      # scalars are not extensible
+        if vrank == 0:      # scalars are not extensible
             extend = 0
         else:
             vec1 = sourceAxislist[0]
-            
-        if extend==0 or (extend is None and not vec1.isTime()):
-            if vrank>0:
+
+        if extend == 0 or (extend is None and not vec1.isTime()):
+            if vrank > 0:
                 if pack is not False:
-                  v[:] = numpy.zeros(var.shape,typ)
+                    v[:] = numpy.zeros(var.shape, typ)
                 else:
-                  v[:] = var.astype(v.dtype)
+                    v[:] = var.astype(v.dtype)
             else:
                 v.assignValue(var.getValue())
         else:
-            # Determine if the first dimension of var overlaps the first dimension of v
+            # Determine if the first dimension of var overlaps the first
+            # dimension of v
             vec2 = v.getAxis(0)
             if extbounds is None:
                 bounds1 = vec1.getBounds()
@@ -1894,47 +2063,51 @@ class CdmsFile(CdmsObj, cuDataset):
                 isoverlap, index = isOverlapVector(vec1[:], vec2[:])
             else:
                 isoverlap = 1
-            if isoverlap==1:
-                v[index:index+len(vec1)] = var.astype(v.dtype)
-                vec2[index:index+len(vec1)] = vec1[:].astype(vec2[:].dtype)
+            if isoverlap == 1:
+                v[index:index + len(vec1)] = var.astype(v.dtype)
+                vec2[index:index + len(vec1)] = vec1[:].astype(vec2[:].dtype)
                 if bounds1 is not None:
                     vec2.setBounds(bounds1, persistent=1, index=index)
             else:
-                raise CDMSError('Cannot write variable %s: the values of dimension %s=%s, do not overlap the extended dimension %s values: %s'%(varid, vec1.id,`vec1[:]`,vec2.id,`vec2[:]`))
+                raise CDMSError(
+                    'Cannot write variable %s: the values of dimension %s=%s, do not overlap the extended dimension %s values: %s' %
+                    (varid, vec1.id, repr(vec1[:]), vec2.id, repr(vec2[:])))
 
-        # pack implementation source: https://www.unidata.ucar.edu/software/netcdf/docs/BestPractices.html
+        # pack implementation source:
+        # https://www.unidata.ucar.edu/software/netcdf/docs/BestPractices.html
         if pack:
-          M = var.max()
-          m = var.min()
-          scale_factor = (M-m)/(pow(2,n)-2)
-          add_offset = (M+m)/2.
-          missing = -pow(2,n-1)
-          v.setMissing(-pow(2,n-1))
-          scale_factor = scale_factor.astype(var.dtype)
-          add_offset = add_offset.astype(var.dtype)
-          tmp = (var-add_offset)/scale_factor
-          tmp= numpy.round(tmp)
-          tmp=tmp.astype(typ)
-          v[:] = tmp.filled()
-          v.scale_factor = scale_factor.astype(var.dtype)
-          v.add_offset = add_offset.astype(var.dtype)
-          if not hasattr(var,"valid_min"):
-            v.valid_min = m.astype(var.dtype)
-          if not hasattr(var,"valid_max"):
-            v.valid_max = M.astype(var.dtype)
+            M = var.max()
+            m = var.min()
+            scale_factor = (M - m) / (pow(2, n) - 2)
+            add_offset = (M + m) / 2.
+            missing = -pow(2, n - 1)
+            v.setMissing(-pow(2, n - 1))
+            scale_factor = scale_factor.astype(var.dtype)
+            add_offset = add_offset.astype(var.dtype)
+            tmp = (var - add_offset) / scale_factor
+            tmp = numpy.round(tmp)
+            tmp = tmp.astype(typ)
+            v[:] = tmp.filled()
+            v.scale_factor = scale_factor.astype(var.dtype)
+            v.add_offset = add_offset.astype(var.dtype)
+            if not hasattr(var, "valid_min"):
+                v.valid_min = m.astype(var.dtype)
+            if not hasattr(var, "valid_max"):
+                v.valid_max = M.astype(var.dtype)
         return v
 
-    def write_it_yourself( self, obj ):
+    def write_it_yourself(self, obj):
         """Tell obj to write itself to self (already open for writing), using its
         writeg method (AbstractCurveGrid has such a method, for example).  If no
         such method be available, writeToFile will be used.  If that is not
         available, then self.write(obj) will be called to try to write obj as
         a variable."""
-        # This method was formerly called writeg and just wrote an AbstractCurveGrid.
-        if ( hasattr(obj,'writeg') and callable(getattr(obj,'writeg')) ):
-            obj.writeg( self )
-        elif ( hasattr(obj,'writeToFile') and callable(getattr(obj,'writeToFile')) ):
-            obj.writeToFile( self )
+        # This method was formerly called writeg and just wrote an
+        # AbstractCurveGrid.
+        if (hasattr(obj, 'writeg') and callable(getattr(obj, 'writeg'))):
+            obj.writeg(self)
+        elif (hasattr(obj, 'writeToFile') and callable(getattr(obj, 'writeToFile'))):
+            obj.writeToFile(self)
         else:
             self.write(obj)
 
@@ -1947,7 +2120,7 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         Output:::
         variable :: (cdms2.fvariable.FileVariable/None) (0) file variable
-        :::        
+        :::
         """
         return self.variables.get(id)
 
@@ -1960,11 +2133,12 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         Output:::
         variables :: ([cdms2.fvariable.FileVariable]) (0) file variables
-        :::        
+        :::
 """
         retval = self.variables.values()
         if spatial:
-            retval = filter(lambda x: x.id[0:7]!="bounds_" and x.id[0:8]!="weights_" and ((x.getLatitude() is not None) or (x.getLongitude() is not None) or (x.getLevel() is not None)), retval)
+            retval = filter(lambda x: x.id[0:7] != "bounds_" and x.id[0:8] != "weights_" and (
+                (x.getLatitude() is not None) or (x.getLongitude() is not None) or (x.getLevel() is not None)), retval)
         return retval
 
     def getAxis(self, id):
@@ -1975,7 +2149,7 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         Output:::
         axis :: (cdms2.axis.FileAxis/None) (0) file axis
-        :::        
+        :::
         """
         return self.axes.get(id)
 
@@ -1988,11 +2162,11 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         Output:::
         grid :: (cdms2.hgrid.FileCurveGrid/cdms2.gengrid.FileGenericGrid/cdms2.grid.FileRectGrid/None) (0) file axis
-        :::        
+        :::
         """
         return self.grids.get(id)
 
-    def getBoundsAxis(self, n,boundid=None):
+    def getBoundsAxis(self, n, boundid=None):
         """Get a bounds axis of length n. Create the bounds axis if necessary.
         :::
         Input:::
@@ -2003,30 +2177,31 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         """
         if boundid is None:
-            if n==2:
+            if n == 2:
                 boundid = "bound"
             else:
-                boundid = "bound_%d"%n
-            
-        if self.axes.has_key(boundid):
+                boundid = "bound_%d" % n
+
+        if boundid in self.axes:
             boundaxis = self.axes[boundid]
         else:
             boundaxis = self.createVirtualAxis(boundid, n)
         return boundaxis
 
     def __repr__(self):
-        filerep = `self._file_`
+        filerep = repr(self._file_)
         loc = filerep.find("file")
-        if loc==-1: loc=0
-        return "<CDMS "+filerep[loc:-1]+", status: %s>"%self._status_
-
-## internattr.add_internal_attribute (CdmsFile, 'datapath',
-##                                             'variables',
-##                                             'axes',
-##                                             'grids',
-##                                             'xlinks',
-##                                             'dictdict',
-##                                             'default_variable_name',
-##                                             'id',
-##                                             'parent',
-##                                             'mode')
+        if loc == -1:
+            loc = 0
+        return "<CDMS " + filerep[loc:-1] + ", status: %s>" % self._status_
+
+# internattr.add_internal_attribute (CdmsFile, 'datapath',
+# 'variables',
+# 'axes',
+# 'grids',
+# 'xlinks',
+# 'dictdict',
+# 'default_variable_name',
+# 'id',
+# 'parent',
+# 'mode')
diff --git a/Packages/cdms2/Lib/error.py b/Packages/cdms2/Lib/error.py
index df73aa688..a8e3091b7 100644
--- a/Packages/cdms2/Lib/error.py
+++ b/Packages/cdms2/Lib/error.py
@@ -1,5 +1,7 @@
 "Error object for cdms module"
 
+
 class CDMSError (Exception):
-    def __init__ (self, args="Unspecified error from package cdms"):
+
+    def __init__(self, args="Unspecified error from package cdms"):
         self.args = (args,)
diff --git a/Packages/cdms2/Lib/forecast.py b/Packages/cdms2/Lib/forecast.py
index 0c08ab922..f3a6a6cd6 100644
--- a/Packages/cdms2/Lib/forecast.py
+++ b/Packages/cdms2/Lib/forecast.py
@@ -7,54 +7,58 @@ import cdms2
 import copy
 from cdms2 import CDMSError
 
-def two_times_from_one( t ):
+
+def two_times_from_one(t):
     """Input is a time representation, either as the long int used in the cdscan
     script, or a string in the format "2010-08-25 15:26:00", or as a cdtime comptime
     (component time) object.
     Output is the same time, both as a long _and_ as a comptime."""
-    if t==0:
-        t = 0L
-    if isinstance(t,str):
+    if t == 0:
+        t = 0
+    if isinstance(t, str):
         t = cdtime.s2c(t)
-    if (isinstance(t,long) or isinstance(t,int)) and t>1000000000L :
+    if (isinstance(t, long) or isinstance(t, int)) and t > 1000000000:
         tl = t
-        year = tl / 1000000000L
-        rem =  tl % 1000000000L
-        month = rem / 10000000L
-        rem =   rem % 10000000L
-        day =     rem / 100000
+        year = tl / 1000000000
+        rem = tl % 1000000000
+        month = rem / 10000000
+        rem = rem % 10000000
+        day = rem / 100000
         allsecs = rem % 100000
-        sec =     allsecs%60
-        allmins = allsecs/60
-        min =  allmins%60
-        hour = allmins/60
-        tc = cdtime.comptime(year,month,day,hour,min,sec)
+        sec = allsecs % 60
+        allmins = allsecs / 60
+        min = allmins % 60
+        hour = allmins / 60
+        tc = cdtime.comptime(year, month, day, hour, min, sec)
     else:
         # I'd like to check that t is type comptime, but although Python
         # prints the type as <type 'comptime'> it won't recognize as a type
         # comptime or anything similar.  Note that cdtime.comptime is a C
         # function available from Python.
         tc = t
-        tl = tc.year * 1000000000L
-        tl += tc.month * 10000000L
-        tl += tc.day   * 100000
+        tl = tc.year * 1000000000
+        tl += tc.month * 10000000
+        tl += tc.day * 100000
         tl += tc.hour * 3600
-        tl += tc.minute *60
+        tl += tc.minute * 60
         tl += tc.second.__int__()
-    return tl,tc
+    return tl, tc
+
 
-def comptime( t ):
+def comptime(t):
     """Input is a time representation, either as the long int used in the cdscan
     script, or a string in the format "2010-08-25 15:26:00", or as a cdtime comptime
     (component time) object.
     Output is the same time a cdtime.comptime (component time)."""
-    tl,tc = two_times_from_one( t )
+    tl, tc = two_times_from_one(t)
     return tc
 
+
 class forecast():
+
     """represents a forecast starting at a single time"""
 
-    def __init__( self, tau0time, dataset_list, path="." ):
+    def __init__(self, tau0time, dataset_list, path="."):
         """tau0time is the first time of the forecast, i.e. the time at which tau=0.
         dataset_list is used to get the forecast file from the forecast time.
         Each list item should look like this example:
@@ -65,33 +69,33 @@ class forecast():
         N.B.  This is like a CdmsFile.  Creating a forecast means opening a file,
         so later on you should call forecast.close() to close it.
         """
-        self.fctl, self.fct = two_times_from_one( tau0time )
+        self.fctl, self.fct = two_times_from_one(tau0time)
 
-        filenames = [ l[5] for l in dataset_list if l[4]==self.fctl ]
-        if len(filenames)>0:
+        filenames = [l[5] for l in dataset_list if l[4] == self.fctl]
+        if len(filenames) > 0:
             filename = filenames[0]
         else:
-            raise CDMSError, "Cannot find filename for forecast %d"%self.fctl
+            raise CDMSError("Cannot find filename for forecast %d" % self.fctl)
         self.filename = path + '/' + filename
-        self.file = cdms2.open( self.filename )
+        self.file = cdms2.open(self.filename)
 
-    def close( self ):
+    def close(self):
         self.file.close()
 
-    def __call__( self, varname ):
+    def __call__(self, varname):
         """Reads the specified variable from this forecast's file."""
         return self.file(varname)
 
-    def __getitem__( self, varname ):
+    def __getitem__(self, varname):
         """Reads variable attributes from this forecast's file."""
         return self.file.__getitem__(varname)
 
     def __repr__(self):
-        return "<forecast from %s>"%(self.fct)
+        return "<forecast from %s>" % (self.fct)
     __str__ = __repr__
 
 
-def available_forecasts( dataset_file, path="." ):
+def available_forecasts(dataset_file, path="."):
     """Returns a list of forecasts (as their generating times) which are
     available through the specified cdscan-generated dataset xml file.
     The forecasts are given in 64-bit integer format, but can be converted
@@ -99,16 +103,18 @@ def available_forecasts( dataset_file, path="." ):
     This function may help in choosing the right arguments for initializing
     a "forecasts" (forecast set) object.
     """
-    dataset=cdms2.openDataset( dataset_file, dpath=path )
-    fm=cdms2.dataset.parseFileMap(dataset.cdms_filemap)
-    alltimesl =[ f[4] for f in fm[0][1] ]  # 64-bit (long) integers
+    dataset = cdms2.openDataset(dataset_file, dpath=path)
+    fm = cdms2.dataset.parseFileMap(dataset.cdms_filemap)
+    alltimesl = [f[4] for f in fm[0][1]]  # 64-bit (long) integers
     dataset.close()
     return alltimesl
 
+
 class forecasts():
+
     """represents a set of forecasts"""
 
-    def __init__( self, dataset_file, forecast_times, path="." ):
+    def __init__(self, dataset_file, forecast_times, path="."):
         """Creates a set of forecasts.  Normally you do it by something like
         f = forecasts( 'file.xml', (min_time, max_time) )
         or
@@ -117,9 +123,9 @@ class forecasts():
         f = forecasts( 'file.xml', [ time1, time2, time3, time4, time5 ] )
 
         where the two or three arguments are::
-        
+
         1. the name of a dataset xml file generated by "cdscan --forecast ..."
-        
+
         2. Times here are the times when the forecasts began (tau=0, aka reference time).
         (i) If you use a 2-item tuple, forecasts will be chosen which start at a time
         t between the min and max times, e.g. min_time <= t < max_time .
@@ -138,72 +144,78 @@ class forecasts():
         2006012300000 for the first second of January 23, 2006, or as
         component times (comptime) in the cdtime module, or as
         a string in the format "2010-08-25 15:26:00".
-        
+
         3. An optional path for the data files; use this if the xml file
         contains filenames without complete paths.
-         
+
         As for the forecast class, this opens files when initiated, so when you
         are finished with the forecasts, you should close the files by calling
         forecasts.close() .
         """
 
         # Create dataset_list to get a forecast file from each forecast time.
-        self.dataset=cdms2.openDataset( dataset_file, dpath=path )
-        fm=cdms2.dataset.parseFileMap(self.dataset.cdms_filemap)
-        self.alltimesl =[ f[4] for f in fm[0][1] ]  # 64-bit (long) integers
+        self.dataset = cdms2.openDataset(dataset_file, dpath=path)
+        fm = cdms2.dataset.parseFileMap(self.dataset.cdms_filemap)
+        self.alltimesl = [f[4] for f in fm[0][1]]  # 64-bit (long) integers
         dataset_list = fm[0][1]
         for f in fm[1:]:
             dataset_list.extend(f[1])
 
-        mytimesl = self.forecast_times_to_list( forecast_times )
+        mytimesl = self.forecast_times_to_list(forecast_times)
         if mytimesl == []:
-            raise CDMSError, "bad forecast_times argument to forecasts.__init__"
-        self.fcs = [ forecast( t, dataset_list, path ) for t in mytimesl ]
+            raise CDMSError(
+                "bad forecast_times argument to forecasts.__init__")
+        self.fcs = [forecast(t, dataset_list, path) for t in mytimesl]
 
-    def forecast_times_to_list( self, forecast_times ):
+    def forecast_times_to_list(self, forecast_times):
         """For internal list, translates a "forecast_times" argument of __init__ or
         other methods, into a list of times."""
-        if type(forecast_times) is tuple:
-            if len(forecast_times)<=2:
+        if isinstance(forecast_times, tuple):
+            if len(forecast_times) <= 2:
                 openclosed = 'co'
             else:
                 openclosed = forecast_times[2]
-            mytimesl = self.time_interval_to_list( forecast_times[0], forecast_times[1], openclosed )
+            mytimesl = self.time_interval_to_list(
+                forecast_times[
+                    0],
+                forecast_times[
+                    1],
+                openclosed)
             return mytimesl
-        elif type(forecast_times) is list:
+        elif isinstance(forecast_times, list):
             return forecast_times
         elif forecast_times == 'All':
             return self.alltimesl
         else:
             return []
 
-    def time_interval_to_list( self, tlo, thi, openclosed='co' ):
+    def time_interval_to_list(self, tlo, thi, openclosed='co'):
         """For internal use, translates a time interval to a list of times.
         """
-        if type(tlo) is not long:  # make tlo a long integer
-            tlo, tdummy = two_times_from_one( tlo )
-        if type(thi) is not long:  # make thi a long integer
-            thi, tdummy = two_times_from_one( thi )
+        if not isinstance(tlo, long):  # make tlo a long integer
+            tlo, tdummy = two_times_from_one(tlo)
+        if not isinstance(thi, long):  # make thi a long integer
+            thi, tdummy = two_times_from_one(thi)
         oclo = openclosed[0]
         ochi = openclosed[1]
-        if oclo=='c':
-            mytimesl = [ t for t in self.alltimesl if t>=tlo ]
+        if oclo == 'c':
+            mytimesl = [t for t in self.alltimesl if t >= tlo]
         else:
-            mytimesl = [ t for t in self.alltimesl if t>tlo ]
-        if ochi=='c':
-            mytimesl = [ t for t in mytimesl if t<=thi ]
+            mytimesl = [t for t in self.alltimesl if t > tlo]
+        if ochi == 'c':
+            mytimesl = [t for t in mytimesl if t <= thi]
         else:
-            mytimesl = [ t for t in mytimesl if t<thi ]
+            mytimesl = [t for t in mytimesl if t < thi]
         return mytimesl
 
-    def reduce_inplace( self, min_time, max_time, openclosed='co' ):
+    def reduce_inplace(self, min_time, max_time, openclosed='co'):
         """ For a forecasts object f, f( min_time, max_time ) will reduce the
         scope of f, to forecasts whose start time t has min_time<=t<max_time.
         This is done in place, i.e. any other forecasts in f will be discarded.
         If slice notation were possible for forecasts (it's not because we need
         too many bits to represent time), this function would do the same as
         f = f[min_time : max_time ]
-        
+
         The optional openclosed argument lets you specify the treatment of
         the endpoints min_time, max_time.  The first character should be 'c' if you want
         to include min_time in the new scope of f, or 'o' to exclude it.  Similarly,
@@ -215,15 +227,15 @@ class forecasts():
 
         Times can be the usual long integers, strings, or cdtime component times.
         """
-        mytimesl = self.time_interval_to_list( min_time, max_time, openclosed )
-        self.fcs = [ f for f in self.fcs if ( f.fctl in mytimesl ) ]
+        mytimesl = self.time_interval_to_list(min_time, max_time, openclosed)
+        self.fcs = [f for f in self.fcs if (f.fctl in mytimesl)]
 
-    def close( self ):
+    def close(self):
         self.dataset.close()
         for fc in self.fcs:
             fc.close()
 
-    def __call__( self, varname, forecast_times='All' ):
+    def __call__(self, varname, forecast_times='All'):
         """Reads the specified variable for all the specified forecasts.
         Creates and returns a new variable which is dimensioned by forecast
         as well as the original variable's dimensions.
@@ -247,55 +259,58 @@ class forecasts():
         if forecast_times == 'All':
             varfcs = self.fcs
         else:
-            mytimesl = self.forecast_times_to_list( forecast_times )
-            varfcs = [ f for f in self.fcs if ( f.fctl in mytimesl ) ]
-        vars = [ fc(varname) for fc in varfcs ]
+            mytimesl = self.forecast_times_to_list(forecast_times)
+            varfcs = [f for f in self.fcs if (f.fctl in mytimesl)]
+        vars = [fc(varname) for fc in varfcs]
 
         # Create the variable from the data, with mask:
         v0 = vars[0]
-        a = numpy.asarray([ v.data for v in vars ])
+        a = numpy.asarray([v.data for v in vars])
         if v0._mask == False:
             m = False
-            v = cdms2.tvariable.TransientVariable( a )
+            v = cdms2.tvariable.TransientVariable(a)
         else:
-            m = numpy.asarray([ v._mask for v in vars])
-            v = cdms2.tvariable.TransientVariable( a, mask=m, fill_value=v0._fill_value )
+            m = numpy.asarray([v._mask for v in vars])
+            v = cdms2.tvariable.TransientVariable(
+                a, mask=m, fill_value=v0._fill_value)
 
         # Domain-related attributes:
             # We get the tomain from __getitem__ to make sure that fcs[var] is consistent
             # with fcs(var)
-        fvd = self.__getitem__(varname,varfcs).domain
+        fvd = self.__getitem__(varname, varfcs).domain
         v._TransientVariable__domain = fvd
         # former domain code, not using __getitem:
         # ltvd = len(v0._TransientVariable__domain)
         # v._TransientVariable__domain[1:ltvd+1] = v0._TransientVariable__domain[0:ltvd]
-        # v._TransientVariable__domain[0] = self.forecast_axis( varname, varfcs )
-        if hasattr( v0, 'coordinates' ):
+        # v._TransientVariable__domain[0] = self.forecast_axis( varname, varfcs
+        # )
+        if hasattr(v0, 'coordinates'):
             v.coordinates = 'iforecast ' + v0.coordinates
 
         # Other attributes, all those for which I've seen nontrivial values in a
         # real example (btw, the _isfield one was wrong!) :
         # It would be better to do a list comprehension over v0.attribures.keys(),
-        # if I could be sure that that wouldn't transfer something inappropriate.
-        if hasattr( v0, 'id' ):
+        # if I could be sure that that wouldn't transfer something
+        # inappropriate.
+        if hasattr(v0, 'id'):
             v.id = v0.id
-        if hasattr( v0, 'long_name' ):
+        if hasattr(v0, 'long_name'):
             v.long_name = v0.long_name
-        if hasattr( v0, 'standard_name' ):
+        if hasattr(v0, 'standard_name'):
             v.standard_name = v0.standard_name
-        if hasattr( v0, 'base_name' ):
+        if hasattr(v0, 'base_name'):
             v.base_name = v0.base_name
-        if hasattr( v0, 'units' ):
+        if hasattr(v0, 'units'):
             v.units = v0.units
-        if hasattr( v0, '_isfield' ):
+        if hasattr(v0, '_isfield'):
             v._isfield = v0._isfield
         return v
 
-    def forecast_axis( self, varname, fcss=None ):
+    def forecast_axis(self, varname, fcss=None):
         """returns a tuple (axis,start,length,true_length) where axis is in the
         forecast direction.  If a list of forecasts be specified, the axis' data will
         be limited to them."""
-        if fcss==None:
+        if fcss is None:
             fcss = self.fcs
         axis = None
         domitem1 = None
@@ -304,7 +319,7 @@ class forecasts():
 
         var = self.dataset[varname]
         # ... var is a DatasetVariable, used here just for two of its domain's axes
-        dom = copy.deepcopy(getattr(var,'domain',[]))
+        dom = copy.deepcopy(getattr(var, 'domain', []))
         # ...this 'domain' attribute has an element with an axis, etc.
         # representing all forecasts; so we want to cut it down to match
         # those forecasts in fcss.
@@ -315,33 +330,33 @@ class forecasts():
             # axis is a axis.Axis and the rest of the tuple is int's.
             # I don't know what true_length is, but it doesn't seem to get used
             # anywhere, and is normally the same as length.
-            if getattr(domitem[0],'id',None)=='fctau0':
+            if getattr(domitem[0], 'id', None) == 'fctau0':
                 # Force the axis to match fcss :
                 # More precisely the long int times fcss[i].fctl should match
-                # the axis data. The axis partition and .length need changing too.
+                # the axis data. The axis partition and .length need changing
+                # too.
                 domitem1 = 0
                 domitem2 = len(fcss)
                 domitem3 = len(fcss)
                 axis = copy.copy(domitem[0])
-                axis._data_ = [ f.fctl for f in fcss ]
+                axis._data_ = [f.fctl for f in fcss]
                 axis.length = len(axis._data_)
                 axis.partition = axis.partition[0:axis.length]
                 axis.axis = 'F'
                 axis.standard_name = 'forecast_reference_time'
                 timeaxis = var.getTime()
-                if not hasattr(axis,'calendar') and timeaxis:
+                if not hasattr(axis, 'calendar') and timeaxis:
                     axis.calendar = timeaxis.calendar
-                
-        return ( axis, domitem1, domitem2, domitem3 )
 
+        return (axis, domitem1, domitem2, domitem3)
 
-    def __getitem__( self, varname, fccs=None ):
+    def __getitem__(self, varname, fccs=None):
         """returns whatever the forecast set has that matches the given
         attribute, normally a DatasetVariable.  The optional argument fccs
         is a list of forecasts to be passed on to forecast_axis().
         """
-        if type(varname) is not str :
-            raise CDMSError, "bad argument to forecasts[]"
+        if not isinstance(varname, str):
+            raise CDMSError("bad argument to forecasts[]")
 
         var = self.dataset[varname]
         # var is a DatasetVariable and consists of lots of attributes.
@@ -349,20 +364,19 @@ class forecasts():
         # The attribute which needs to be changed is 'domain' - it will normally
         # have an element with an axis, etc. representing all forecasts; so we
         # want to cut it down to match those forecasts in self.fcs.
-        dom = copy.deepcopy(getattr(var,'domain',[]))
+        dom = copy.deepcopy(getattr(var, 'domain', []))
         for i in range(len(dom)):
             domitem = dom[i]
-            if getattr(domitem[0],'id',None)=='fctau0':
-                dom[i] = self.forecast_axis(varname,fccs)
-        setattr(var,'domain',dom)
-                
+            if getattr(domitem[0], 'id', None) == 'fctau0':
+                dom[i] = self.forecast_axis(varname, fccs)
+        setattr(var, 'domain', dom)
+
         return var
 
     def __repr__(self):
         l = len(self.fcs)
-        if l==0:
+        if l == 0:
             return "<forecasts - None>"
         else:
-            return "<forecasts from %s,...,%s>"%(self.fcs[0].fct,self.fcs[l-1].fct)
+            return "<forecasts from %s,...,%s>" % (self.fcs[0].fct, self.fcs[l - 1].fct)
     __str__ = __repr__
-
diff --git a/Packages/cdms2/Lib/fvariable.py b/Packages/cdms2/Lib/fvariable.py
index 6ca32df15..2f41df8a6 100644
--- a/Packages/cdms2/Lib/fvariable.py
+++ b/Packages/cdms2/Lib/fvariable.py
@@ -1,32 +1,35 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 "CDMS File-based variables."
 import numpy
-import typeconv
+from . import typeconv
 import re
 
-from cdmsobj import Max32int
-from variable import DatasetVariable
-from error import CDMSError
-from sliceut import reverseSlice
-from avariable import AbstractVariable
+from .cdmsobj import Max32int
+from .variable import DatasetVariable
+from .error import CDMSError
+from .sliceut import reverseSlice
+from .avariable import AbstractVariable
 from cdms2 import Cdunif
 from Cdunif import CdunifError
 
 FileClosed = "Cannot read from closed file, variable: "
 FileClosedWrite = "Cannot write to a closed file, variable: "
 
+
 class FileVariable(DatasetVariable):
+
     "A variable in a single file."
-    def __init__(self,parent,varname,cdunifobj=None):
+
+    def __init__(self, parent, varname, cdunifobj=None):
         DatasetVariable.__init__(self, parent, varname)
         self._obj_ = cdunifobj
         if cdunifobj is not None:
             for attname, attval in cdunifobj.__dict__.items():
                 self.__dict__[attname] = attval
-                self.attributes[attname]=attval
-        val = self.__cdms_internals__+['name_in_file',]
+                self.attributes[attname] = attval
+        val = self.__cdms_internals__ + ['name_in_file', ]
         self.___cdms_internals__ = val
 
     # Initialize the domain
@@ -38,7 +41,7 @@ class FileVariable(DatasetVariable):
             start = 0
             length = len(axis)
             truelen = length
-            self.domain.append((axis,start,length,truelen))
+            self.domain.append((axis, start, length, truelen))
 
     def typecode(self):
         # Compatibility: convert to new typecode
@@ -46,43 +49,44 @@ class FileVariable(DatasetVariable):
         tc = typeconv.convtypecode2(tc).char
         return tc
 
-    def assignValue(self,data):
+    def assignValue(self, data):
         if self.parent is None:
-            raise CDMSError, FileClosedWrite+self.id
+            raise CDMSError(FileClosedWrite + self.id)
         if numpy.ma.isMaskedArray(data):
-          if data.mask is not numpy.ma.nomask and not numpy.ma.allclose(data.mask,0):
-            saveFill = data.fill_value
-            if self.getMissing() is None:
-                self.setMissing(saveFill)
-            else:
-                data.set_fill_value(self.getMissing())
+            if data.mask is not numpy.ma.nomask and not numpy.ma.allclose(data.mask, 0):
+                saveFill = data.fill_value
+                if self.getMissing() is None:
+                    self.setMissing(saveFill)
+                else:
+                    data.set_fill_value(self.getMissing())
         self._obj_.assignValue(numpy.ma.filled(data))
         if numpy.ma.isMaskedArray(data):
-          if data.mask is not numpy.ma.nomask and not numpy.ma.allclose(data.mask,0):
-            data.set_fill_value(saveFill)
+            if data.mask is not numpy.ma.nomask and not numpy.ma.allclose(data.mask, 0):
+                data.set_fill_value(saveFill)
 
-    def expertSlice (self, initslicelist):
+    def expertSlice(self, initslicelist):
         # Handle negative slices
-        revlist = []                    # Slices to apply to result if reversals needed
+        revlist = []
+            # Slices to apply to result if reversals needed
         slist = []                      # Slices with positive strides
         haveReversals = 0               # True iff result array needs reversing
-        i=0
+        i = 0
         for s in initslicelist:
-            if s.step<0:
+            if s.step < 0:
                 axislen = self.shape[i]
-                slist.append(reverseSlice(s,axislen))
-                revlist.append(slice(None,None,-1))
+                slist.append(reverseSlice(s, axislen))
+                revlist.append(slice(None, None, -1))
                 haveReversals = 1
             else:
                 slist.append(s)
-                revlist.append(slice(None,None,1))
+                revlist.append(slice(None, None, 1))
             i += 1
 
         if self.parent is None:
-            raise CDMSError, FileClosed+self.id
+            raise CDMSError(FileClosed + self.id)
         if self.rank() == 0:
             return self._obj_.getValue()
-        result = apply(self._obj_.getitem,slist)
+        result = self._obj_.getitem(*slist)
 
         # If slices with negative strides were input, apply the appropriate
         # reversals.
@@ -93,43 +97,43 @@ class FileVariable(DatasetVariable):
 
     def __setitem__(self, index, value):
         if self.parent is None:
-            raise CDMSError, FileClosedWrite+self.id
+            raise CDMSError(FileClosedWrite + self.id)
         if numpy.ma.isMaskedArray(value):
-          if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask,0):
-            saveFill = value.fill_value
-            if self.getMissing() is None:
-                self.setMissing(saveFill)
-            else:
-                value.set_fill_value(self.getMissing())
-        apply(self._obj_.setitem,(index,numpy.ma.filled(value)))
+            if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask, 0):
+                saveFill = value.fill_value
+                if self.getMissing() is None:
+                    self.setMissing(saveFill)
+                else:
+                    value.set_fill_value(self.getMissing())
+        self._obj_.setitem(*(index, numpy.ma.filled(value)))
         if numpy.ma.isMaskedArray(value):
-          if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask,0):
-            value.set_fill_value(saveFill)
+            if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask, 0):
+                value.set_fill_value(saveFill)
 
     def __setslice__(self, low, high, value):
         if self.parent is None:
-            raise CDMSError, FileClosedWrite+self.id
+            raise CDMSError(FileClosedWrite + self.id)
 
         # Hack to prevent netCDF overflow error on 64-bit architectures
         high = min(Max32int, high)
-        if high == Max32int and self.rank()==0:
-          high=1
-        
+        if high == Max32int and self.rank() == 0:
+            high = 1
+
         if numpy.ma.isMaskedArray(value):
-          if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask,0):
-            saveFill = value.fill_value
-            if self.getMissing() is None:
-                self.setMissing(saveFill)
-            else:
-                value.set_fill_value(self.getMissing())
-        apply(self._obj_.setslice,(low,high,numpy.ma.filled(value)))
+            if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask, 0):
+                saveFill = value.fill_value
+                if self.getMissing() is None:
+                    self.setMissing(saveFill)
+                else:
+                    value.set_fill_value(self.getMissing())
+        self._obj_.setslice(*(low, high, numpy.ma.filled(value)))
         if numpy.ma.isMaskedArray(value):
-          if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask,0):
-            value.set_fill_value(saveFill)
+            if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask, 0):
+                value.set_fill_value(saveFill)
 
-    def _getShape (self):
+    def _getShape(self):
         if self.parent is None:
-            raise CDMSError, FileClosed+self.id
+            raise CDMSError(FileClosed + self.id)
         return self._obj_.shape
 
     # Write external attributes to the file.
@@ -138,13 +142,15 @@ class FileVariable(DatasetVariable):
     # that the value is propagated to the external file.
     def __setattr__(self, name, value):
         if hasattr(self, "parent") and self.parent is None:
-            raise CDMSError, FileClosedWrite+self.id
+            raise CDMSError(FileClosedWrite + self.id)
         if (not name in self.__cdms_internals__) and (value is not None):
             try:
                 setattr(self._obj_, name, value)
             except CdunifError:
-                raise CDMSError, "Setting %s.%s=%s"%(self.id,name,`value`)
-            self.attributes[name]=value
+                raise CDMSError(
+                    "Setting %s.%s=%s" %
+                    (self.id, name, repr(value)))
+            self.attributes[name] = value
         self.__dict__[name] = value
 
     # Delete external file attributes.
@@ -156,23 +162,23 @@ class FileVariable(DatasetVariable):
             try:
                 delattr(self._obj_, name)
             except CdunifError:
-                raise CDMSError, "Deleting %s.%s"%(self.id,name)
+                raise CDMSError("Deleting %s.%s" % (self.id, name))
             del(self.attributes[name])
         del self.__dict__[name]
 
     def getValue(self, squeeze=1):
         """Return the entire set of values."""
         if self.parent is None:
-            raise CDMSError, FileClosed+self.id
-        if self.rank()>0:
+            raise CDMSError(FileClosed + self.id)
+        if self.rank() > 0:
             return self.getSlice(Ellipsis, squeeze=squeeze)
         else:
             return self._obj_.getValue()
-    
+
     def __len__(self):
         " Length of first dimension. "
         if self.parent is None:
-            raise CDMSError, FileClosed+self.id
+            raise CDMSError(FileClosed + self.id)
         return len(self._obj_)
 
 #    def __repr__(self):
@@ -181,5 +187,4 @@ class FileVariable(DatasetVariable):
 #        else:
 #            return "<Variable: %s, file: **CLOSED**>"%self.id
 
-
-    shape = property(_getShape,None)
+    shape = property(_getShape, None)
diff --git a/Packages/cdms2/Lib/gengrid.py b/Packages/cdms2/Lib/gengrid.py
index 95e6d1871..5fad339b7 100644
--- a/Packages/cdms2/Lib/gengrid.py
+++ b/Packages/cdms2/Lib/gengrid.py
@@ -1,15 +1,15 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 """CDMS Generic Grids"""
 
 import numpy
-## import PropertiedClasses
-import bindex
-from error import CDMSError
-from grid import LongitudeType, LatitudeType, VerticalType, TimeType, CoordTypeToLoc
-from hgrid import AbstractHorizontalGrid
-from axis import allclose
+# import PropertiedClasses
+from . import bindex
+from .error import CDMSError
+from .grid import LongitudeType, LatitudeType, VerticalType, TimeType, CoordTypeToLoc
+from .hgrid import AbstractHorizontalGrid
+from .axis import allclose
 
 MethodNotImplemented = "Method not yet implemented"
 
@@ -19,7 +19,7 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         """Create a generic grid.
         """
         if latAxis.shape != lonAxis.shape:
-            raise CDMSError, 'Latitude and longitude axes must have the same shape.'
+            raise CDMSError('Latitude and longitude axes must have the same shape.')
         AbstractHorizontalGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask, node)
         self._index_ = None
 
@@ -35,17 +35,17 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
     def getMesh(self, transpose=None):
         """Generate a mesh array for the meshfill graphics method.
         'transpose' is for compatibility with other grid types, is ignored."""
-        import MV2 as MV
+        from . import MV2 as MV
         if self._mesh_ is None:
-            LAT=0
-            LON=1
+            LAT = 0
+            LON = 1
             latbounds, lonbounds = self.getBounds()
             if latbounds is None or lonbounds is None:
-                raise CDMSError, 'No boundary data is available for grid %s'%self.id
+                raise CDMSError('No boundary data is available for grid %s'%self.id)
             nvert = latbounds.shape[-1]
-            mesh = numpy.zeros((self.size(),2,nvert),latbounds.dtype.char)
-            mesh[:,LAT,:] = MV.filled(latbounds)
-            mesh[:,LON,:] = MV.filled(lonbounds)
+            mesh = numpy.zeros((self.size(), 2, nvert), latbounds.dtype.char)
+            mesh[:, LAT,:] = MV.filled(latbounds)
+            mesh[:, LON,:] = MV.filled(lonbounds)
             self._mesh_ = mesh
         return self._mesh_
 
@@ -101,9 +101,9 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         gridcenterlon.units = "degrees"
         gridimask = cufile.createVariable("grid_imask", numpy.int, ("grid_size",))
         gridimask.units = "unitless"
-        gridcornerlat = cufile.createVariable("grid_corner_lat", numpy.float, ("grid_size","grid_corners"))
+        gridcornerlat = cufile.createVariable("grid_corner_lat", numpy.float, ("grid_size", "grid_corners"))
         gridcornerlat.units = "degrees"
-        gridcornerlon = cufile.createVariable("grid_corner_lon", numpy.float, ("grid_size","grid_corners"))
+        gridcornerlon = cufile.createVariable("grid_corner_lon", numpy.float, ("grid_size", "grid_corners"))
         gridcornerlon.units = "degrees"
 
         griddims[:] = numpy.array([ngrid], numpy.int32)
@@ -162,8 +162,8 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
                 i = k
             k += 1
 
-        if i==-1:
-            raise RuntimeError, 'Grid lat/lon domains do not match variable domain'
+        if i == -1:
+            raise RuntimeError('Grid lat/lon domains do not match variable domain')
 
         return ((islice, ), (inewaxis, ))
 
@@ -193,10 +193,10 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         lonspec = spec[CoordTypeToLoc[LongitudeType]]
         latlin = numpy.ma.filled(self._lataxis_)
         lonlin = numpy.ma.filled(self._lonaxis_)
-        lonlin = numpy.ma.where(numpy.ma.greater_equal(lonlin,360.0), lonlin-360.0, lonlin)
+        lonlin = numpy.ma.where(numpy.ma.greater_equal(lonlin, 360.0), lonlin-360.0, lonlin)
         points = bindex.intersectHorizontalGrid(latspec, lonspec, latlin, lonlin, index)
-        if len(points)==0:
-            raise CDMSError, 'No data in the specified region, longitude=%s, latitude=%s'%(`lonspec`, `latspec`)
+        if len(points) == 0:
+            raise CDMSError('No data in the specified region, longitude=%s, latitude=%s'%(repr(lonspec), repr(latspec)))
 
         fullmask = numpy.ones(ncell)
         numpy.put(fullmask, points, 0)
@@ -205,7 +205,7 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         submask = fullmask[imin:imax]
 
         cellid = self.getAxis(0).id
-        indexspecs = {cellid:slice(imin,imax)}
+        indexspecs = {cellid:slice(imin, imax)}
 
         return submask, indexspecs
 
@@ -253,9 +253,9 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
             result = self.clone()
             for i in missing:
                 for item in axes:
-                    if (len(selfaxes[i])==len(item)) and allclose(selfaxes[i], item):
-                        result._lataxis_.setAxis(i,item)
-                        result._lonaxis_.setAxis(i,item)
+                    if (len(selfaxes[i]) == len(item)) and allclose(selfaxes[i], item):
+                        result._lataxis_.setAxis(i, item)
+                        result._lonaxis_.setAxis(i, item)
                         break
                 else:
                     result = None
@@ -268,7 +268,7 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         having the same length as the number of cells in the grid, similarly
         for flatlon."""
         if self._flataxes_ is None:
-            import MV2 as MV
+            from . import MV2 as MV
             alat = MV.filled(self.getLatitude())
             alon = MV.filled(self.getLongitude())
             self._flataxes_ = (alat, alon)
@@ -280,11 +280,11 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         result = self.clone()
         result.id = gridid
         return result
-    shape = property(_getShape,None)
+    shape = property(_getShape, None)
     
-## PropertiedClasses.set_property (AbstractGenericGrid, 'shape', 
-##                                   AbstractGenericGrid._getShape, nowrite=1,
-##                                   nodelete=1)
+# PropertiedClasses.set_property (AbstractGenericGrid, 'shape',
+# AbstractGenericGrid._getShape, nowrite=1,
+# nodelete=1)
 
 class DatasetGenericGrid(AbstractGenericGrid):
 
@@ -295,7 +295,7 @@ class DatasetGenericGrid(AbstractGenericGrid):
         self.parent = parent
 
     def __repr__(self):
-        return "<DatasetGenericGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
+        return "<DatasetGenericGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
 
 class FileGenericGrid(AbstractGenericGrid):
 
@@ -306,7 +306,7 @@ class FileGenericGrid(AbstractGenericGrid):
         self.parent = parent
 
     def __repr__(self):
-        return "<FileGenericGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
+        return "<FileGenericGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
 
 class TransientGenericGrid(AbstractGenericGrid):
 
@@ -321,7 +321,7 @@ class TransientGenericGrid(AbstractGenericGrid):
         AbstractGenericGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask)
 
     def __repr__(self):
-        return "<TransientGenericGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
+        return "<TransientGenericGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
 
     def toGenericGrid(self, gridid=None):
         if gridid is None:
@@ -338,20 +338,20 @@ def readScripGenericGrid(fileobj, dims, whichType, whichGrid):
     whichType is the type of file, either "grid" or "mapping"
     if whichType is "mapping", whichGrid is the choice of grid, either "source" or "destination"
     """
-    from auxcoord import TransientAuxAxis1D
-    from coord import TransientVirtualAxis
+    from .auxcoord import TransientAuxAxis1D
+    from .coord import TransientVirtualAxis
 
     convention = 'SCRIP'
     if 'S' in fileobj.variables.keys():
         convention = 'NCAR'
-        if whichType=="grid":
+        if whichType == "grid":
             gridCornerLatName = 'grid_corner_lat'
             gridCornerLonName = 'grid_corner_lon'
             gridMaskName = 'grid_imask'
             gridCenterLatName = 'grid_center_lat'
             gridCenterLonName = 'grid_center_lon'
             titleName = 'title'
-        elif whichGrid=="destination":
+        elif whichGrid == "destination":
             gridCornerLatName = 'yv_b'
             gridCornerLonName = 'xv_b'
             gridMaskName = 'mask_b'
@@ -366,14 +366,14 @@ def readScripGenericGrid(fileobj, dims, whichType, whichGrid):
             gridCenterLonName = 'xc_a'
             titleName = 'source_grid'
     else:
-        if whichType=="grid":
+        if whichType == "grid":
             gridCornerLatName = 'grid_corner_lat'
             gridCornerLonName = 'grid_corner_lon'
             gridMaskName = 'grid_imask'
             gridCenterLatName = 'grid_center_lat'
             gridCenterLonName = 'grid_center_lon'
             titleName = 'title'
-        elif whichGrid=="destination":
+        elif whichGrid == "destination":
             gridCornerLatName = 'dst_grid_corner_lat'
             gridCornerLonName = 'dst_grid_corner_lon'
             gridMaskName = 'dst_grid_imask'
@@ -399,38 +399,38 @@ def readScripGenericGrid(fileobj, dims, whichType, whichGrid):
         ni = dims[0]
 
     boundsshape = (ni, ncorners)
-    if hasattr(cornerLat, 'units') and cornerLat.units.lower()[0:6]=='radian':
+    if hasattr(cornerLat, 'units') and cornerLat.units.lower()[0:6] == 'radian':
         cornerLat = (cornerLat*(180.0/numpy.pi)).reshape(boundsshape)
         cornerLon = (cornerLon*(180.0/numpy.pi)).reshape(boundsshape)
 
-    iaxis = TransientVirtualAxis("i",ni)
+    iaxis = TransientVirtualAxis("i", ni)
 
-    if vardict.has_key(gridMaskName):
+    if gridMaskName in vardict:
         # SCRIP convention: 0 for invalid data
         # numpy.ma convention: 1 for invalid data
         mask = 1 - fileobj(gridMaskName)
     else:
         mask = None
         
-    if vardict.has_key(gridCenterLatName):
+    if gridCenterLatName in vardict:
         centerLat = fileobj(gridCenterLatName)
-        if hasattr(centerLat, "units") and centerLat.units.lower()=='radians':
+        if hasattr(centerLat, "units") and centerLat.units.lower() == 'radians':
             centerLat *= (180.0/numpy.pi)
     else:
-        centerLat = cornerLat[:,:,0]
+        centerLat = cornerLat[:,:, 0]
 
-    if vardict.has_key(gridCenterLonName):
+    if gridCenterLonName in vardict:
         centerLon = fileobj(gridCenterLonName)
-        if hasattr(centerLon, "units") and centerLon.units.lower()=='radians':
+        if hasattr(centerLon, "units") and centerLon.units.lower() == 'radians':
             centerLon *= (180.0/numpy.pi)
     else:
-        centerLon = cornerLon[:,:,0]
+        centerLon = cornerLon[:,:, 0]
 
-    if hasattr(fileobj,titleName):
+    if hasattr(fileobj, titleName):
         gridid = getattr(fileobj, titleName)
+        gridid = gridid.strip().replace(' ', '_')
+        gridid = sgridid.strip().replace(' ', '_')
     else:
-        gridid="<None>"
+        gridid = "<None>"
 
     lataxis = TransientAuxAxis1D(centerLat, axes=(iaxis,), bounds=cornerLat,
                               attributes={'units':'degrees_north'}, id="latitude")
diff --git a/Packages/cdms2/Lib/grid.py b/Packages/cdms2/Lib/grid.py
index 05fdf9b7c..fef1505e7 100644
--- a/Packages/cdms2/Lib/grid.py
+++ b/Packages/cdms2/Lib/grid.py
@@ -1,14 +1,14 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 """CDMS Grid objects"""
 import re
-from error import CDMSError
+from .error import CDMSError
 import numpy #, PropertiedClasses, internattr
 # import regrid2._regrid
 import copy, string, sys
-from cdmsobj import CdmsObj
-from axis import TransientAxis, createAxis, createUniformLatitudeAxis, createUniformLongitudeAxis, getAutoBounds, createGaussianAxis, lookupArray, isSubsetVector
+from .cdmsobj import CdmsObj
+from .axis import TransientAxis, createAxis, createUniformLatitudeAxis, createUniformLongitudeAxis, getAutoBounds, createGaussianAxis, lookupArray, isSubsetVector
 import cdtime
 
 MethodNotImplemented = "Method not yet implemented"
@@ -20,10 +20,10 @@ _classifyGrids = 1                      # Determine the type of grid from the gr
 # (if any). If 'off', the value of .grid_type overrides the classification.
 def setClassifyGrids(mode):
     global _classifyGrids
-    if mode=='on':
-        _classifyGrids=1
-    elif mode=='off':
-        _classifyGrids=0
+    if mode == 'on':
+        _classifyGrids = 1
+    elif mode == 'off':
+        _classifyGrids = 0
 
 # Create a transient rectilinear grid
 def createRectGrid(lat, lon, order="yx", type="generic", mask=None):
@@ -33,7 +33,7 @@ def createRectGrid(lat, lon, order="yx", type="generic", mask=None):
 def createUniformGrid(startLat, nlat, deltaLat, startLon, nlon, deltaLon, order="yx", mask=None):
     lat = createUniformLatitudeAxis(startLat, nlat, deltaLat)
     lon = createUniformLongitudeAxis(startLon, nlon, deltaLon)
-    return createRectGrid(lat,lon,order,"uniform",mask)
+    return createRectGrid(lat, lon, order, "uniform", mask)
 
 # Generate a grid for calculating the global mean. The grid is a single
 # zone covering the range of the input grid
@@ -41,41 +41,41 @@ def createGlobalMeanGrid(grid):
     inlat = grid.getLatitude()
     inlatBounds, inlonBounds = grid.getBounds()
     outlatArray = numpy.array([(inlat[0] + inlat[-1])/2.0])
-    outlatBounds = numpy.array([[inlatBounds[0,0], inlatBounds[-1,1]]])
+    outlatBounds = numpy.array([[inlatBounds[0, 0], inlatBounds[-1, 1]]])
     outlat = createAxis(outlatArray, outlatBounds)
     outlat.units = inlat.units
 
     inlon = grid.getLongitude()
     outlonArray = numpy.array([(inlon[0] + inlon[-1])/2.0])
-    outlonBounds = numpy.array([[inlonBounds[0,0], inlonBounds[-1,1]]])
+    outlonBounds = numpy.array([[inlonBounds[0, 0], inlonBounds[-1, 1]]])
     outlon = createAxis(outlonArray, outlonBounds)
     outlon.units = inlon.units
 
-    return createRectGrid(outlat,outlon,grid.getOrder())
+    return createRectGrid(outlat, outlon, grid.getOrder())
 
 # Generate a grid for zonal averaging. The grid has the same latitudes
 # as the input grid, and a single longitude.
 def createZonalGrid(grid):
     inlat = grid.getLatitude()
     outlatBounds, inlonBounds = grid.getBounds()
-    outlat = createAxis(inlat[:],outlatBounds)
+    outlat = createAxis(inlat[:], outlatBounds)
     outlat.units = inlat.units
 
     inlon = grid.getLongitude()
     outlonArray = numpy.array([(inlon[0] + inlon[-1])/2.0])
-    outlonBounds = numpy.array([[inlonBounds[0,0], inlonBounds[-1,1]]])
+    outlonBounds = numpy.array([[inlonBounds[0, 0], inlonBounds[-1, 1]]])
     outlon = createAxis(outlonArray, outlonBounds)
     outlon.units = inlon.units
 
-    return createRectGrid(outlat,outlon,grid.getOrder())
+    return createRectGrid(outlat, outlon, grid.getOrder())
 
 # Generate a generic (untyped) grid from lat, lon vectors
 def createGenericGrid(latArray, lonArray, latBounds=None, lonBounds=None, order="yx", mask=None):
-    lat = createAxis(latArray,latBounds)
+    lat = createAxis(latArray, latBounds)
     lat.units = "degrees_north"
-    lon = createAxis(lonArray,lonBounds)
+    lon = createAxis(lonArray, lonBounds)
     lon.units = "degrees_east"
-    return createRectGrid(lat,lon,order,"generic",mask)
+    return createRectGrid(lat, lon, order, "generic", mask)
 
 def createGaussianGrid(nlats, xorigin=0.0, order="yx"):
     """ createGaussianGrid(nlats, xorigin=0.0)
@@ -96,7 +96,7 @@ TimeType = 'time'
 CoordinateTypes = [LongitudeType, LatitudeType, VerticalType, TimeType]
 
 # Note: no time dimensions in grids.
-CoordTypeToLoc = {LongitudeType:0, LatitudeType:1, VerticalType:2}
+CoordTypeToLoc = {LongitudeType: 0, LatitudeType: 1, VerticalType: 2}
 
 def defaultRegion():
     """Return a specification for a default (full) region."""
@@ -125,27 +125,27 @@ def setRegionSpecs(grid, coordSpec, coordType, resultSpec):
 
     Note that time coordinate types are not permitted.
     """
-    
-    if (coordSpec is None) or (coordSpec==':'):
+
+    if (coordSpec is None) or (coordSpec == ':'):
         canonSpec = None
     elif isinstance(coordSpec, tuple):
-        if len(coordSpec)==2:
-            canonSpec = (coordSpec[0],coordSpec[1],'cc',None)
-        elif len(coordSpec)==3:
-            canonSpec = (coordSpec[0],coordSpec[1],coordSpec[2],None)
-        elif len(coordSpec)!=4:
-            raise CDMSError, 'Invalid coordinate specification: %s'%`coordSpec`
+        if len(coordSpec) == 2:
+            canonSpec = (coordSpec[0], coordSpec[1], 'cc', None)
+        elif len(coordSpec) == 3:
+            canonSpec = (coordSpec[0], coordSpec[1], coordSpec[2], None)
+        elif len(coordSpec) != 4:
+            raise CDMSError('Invalid coordinate specification: %s'%repr(coordSpec))
     elif isinstance(coordSpec, (int, float)):
         canonSpec = (coordSpec, coordSpec, 'cc', None)
     else:
-        raise CDMSError, 'Invalid coordinate specification: %s'%`coordSpec`
+        raise CDMSError('Invalid coordinate specification: %s'%repr(coordSpec))
 
     coordLoc = CoordTypeToLoc[coordType]
     if coordLoc is None:
-        raise CDMSError, 'Invalid coordinate type: %s'%coordType
+        raise CDMSError('Invalid coordinate type: %s'%coordType)
 
     if resultSpec[coordLoc] is not None:
-        raise CDMSError, 'Multiple specifications for coordinate type %s'%coordType
+        raise CDMSError('Multiple specifications for coordinate type %s'%coordType)
     resultSpec[coordLoc] = canonSpec
 
 class AbstractGrid (CdmsObj):
@@ -153,13 +153,14 @@ class AbstractGrid (CdmsObj):
     def __init__ (self, node):
         CdmsObj.__init__ (self, node)
         self.id = '<None>' # String identifier
-        if node is not None and hasattr(node,'id'): self.id = node.id
+        if node is not None and hasattr(node, 'id'):
+            self.id = node.id
         self.parent = None #Dataset containing this grid
         self._flataxes_ = None
         self._mesh_ = None
 
     def listall (self, all=None):
-        result=[]
+        result = []
         result.append('Grid has Python id %s.' % hex(id(self)))
         return result
 
@@ -170,26 +171,27 @@ class AbstractGrid (CdmsObj):
 
     def info(self, flag=None, device=None):
         "Write info about slab; include dimension values and weights if flag"
-        if device is None: device = sys.stdout
+        if device is None:
+            device = sys.stdout
         device.write(str(self))
 
     def writeToFile(self, file):
         """Write self to a CdmsFile file, returning CF coordinates attribute, or None if not applicable"""
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def subSlice(self, *specs, **keys):
         """Get a subgrid based on an argument list <specs> of slices."""
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def hasCoordType(self, coordType):
         """Return 1 iff self has the coordinate type."""
         return 0
 
     def getAxisList(self):
-      axes =[]
-      for i in range(len(self._order_)):
-        axes.append(self.getAxis(i))
-      return axes
+        axes = []
+        for i in range(len(self._order_)):
+            axes.append(self.getAxis(i))
+        return axes
 
     def isClose(self, g):
         """Return 1 if g is 'close enough' to self to be considered equal, 0 if not."""
@@ -205,31 +207,31 @@ class AbstractGrid (CdmsObj):
 
     def clone(self, copyData=1):
         """Make a copy of self."""
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def flatAxes(self):
         """Return (flatlat, flatlon) where flatlat is a raveled NumPy array
         having the same length as the number of cells in the grid, similarly
         for flatlon."""
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def size(self):
         "Return number of cells in the grid"
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def writeScrip(self, cdunifFile):
         "Write a grid to a SCRIP file"
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
 class AbstractRectGrid(AbstractGrid):
     """AbstractRectGrid defines the interface for rectilinear grids:
        grids which can be decomposed into 1-D latitude and longitude axes
     """
-    gridtypes = ['gaussian','uniform','equalarea','generic']
+    gridtypes = ['gaussian', 'uniform', 'equalarea', 'generic']
 
     def __init__ (self, node):
         AbstractGrid.__init__ (self, node)
-        val = self.__cdms_internals__ + ['id',]
+        val = self.__cdms_internals__ + ['id', ]
         self.___cdms_internals__ = val
 
     def listall (self, all=None):
@@ -244,14 +246,14 @@ class AbstractRectGrid(AbstractGrid):
 
     def _getshape (self):
         if self._order_ == "yx":
-            return (len(self._lataxis_),len(self._lonaxis_))
+            return (len(self._lataxis_), len(self._lonaxis_))
         else:
-            return (len(self._lonaxis_),len(self._lataxis_))
+            return (len(self._lonaxis_), len(self._lataxis_))
 
     # Get the n-th axis. naxis is 0 or 1.
     def getAxis(self, naxis):
         ind = self._order_[naxis]
-        if ind=='x':
+        if ind == 'x':
             axis = self.getLongitude()
         else:
             axis = self.getLatitude()
@@ -259,7 +261,7 @@ class AbstractRectGrid(AbstractGrid):
 
     def getBounds(self):
         latbnds, lonbnds = (self._lataxis_.getExplicitBounds(), self._lonaxis_.getExplicitBounds())
-        if (latbnds is None or lonbnds is None) and getAutoBounds() in [1,2]:
+        if (latbnds is None or lonbnds is None) and getAutoBounds() in [1, 2]:
             nlatbnds, nlonbnds = self.genBounds()
             if latbnds is None:
                 latbnds = nlatbnds
@@ -275,10 +277,10 @@ class AbstractRectGrid(AbstractGrid):
         return self._lonaxis_
 
     def getMask(self):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
-    def setMask(self,mask,permanent=0):
-        raise CDMSError, MethodNotImplemented
+    def setMask(self, mask, permanent=0):
+        raise CDMSError(MethodNotImplemented)
 
     def getOrder(self):
         return self._order_
@@ -287,8 +289,10 @@ class AbstractRectGrid(AbstractGrid):
         return self._gridtype_
 
     def setType(self, gridtype):
-        if gridtype=='linear': gridtype='uniform'
-        if gridtype=='unknown': gridtype='generic'
+        if gridtype == 'linear':
+            gridtype = 'uniform'
+        if gridtype == 'unknown':
+            gridtype = 'generic'
         # assert gridtype in AbstractRectGrid.gridtypes, 'Grid type must be one of %s'%`AbstractRectGrid.gridtypes`
         self._gridtype_ = gridtype
 
@@ -300,22 +304,24 @@ class AbstractRectGrid(AbstractGrid):
 
         latBounds, lonBounds = self.getBounds()
         latBounds = (numpy.pi/180.0) * latBounds
-        latWeights = 0.5 * numpy.absolute(numpy.sin(latBounds[:,1]) - numpy.sin(latBounds[:,0]))
+        latWeights = 0.5 * numpy.absolute(numpy.sin(latBounds[:, 1]) - numpy.sin(latBounds[:, 0]))
 
-        lonWeights = numpy.absolute((lonBounds[:,1] - lonBounds[:,0]))/360.0
+        lonWeights = numpy.absolute((lonBounds[:, 1] - lonBounds[:, 0]))/360.0
 
         return latWeights, lonWeights
 
     # Create a transient grid for the index (tuple) intervals.
-    def subGrid(self,latinterval, loninterval):
-        if latinterval is None: latinterval = (0, len(self._lataxis_))
-        if loninterval is None: loninterval = (0, len(self._lonaxis_))
-            
-        latobj = self._lataxis_.subaxis(latinterval[0],latinterval[1])
-        lonobj = self._lonaxis_.subaxis(loninterval[0],loninterval[1])
+    def subGrid(self, latinterval, loninterval):
+        if latinterval is None:
+            latinterval = (0, len(self._lataxis_))
+        if loninterval is None:
+            loninterval = (0, len(self._lonaxis_))
+
+        latobj = self._lataxis_.subaxis(latinterval[0], latinterval[1])
+        lonobj = self._lonaxis_.subaxis(loninterval[0], loninterval[1])
         maskArray = self.getMask()
         if maskArray is not None:
-            if self._order_=="yx":
+            if self._order_ == "yx":
                 submask = maskArray[latinterval[0]:latinterval[1], loninterval[0]:loninterval[1]]
             else:
                 submask = maskArray[loninterval[0]:loninterval[1], latinterval[0]:latinterval[1]]
@@ -323,7 +329,7 @@ class AbstractRectGrid(AbstractGrid):
             submask = None
 
         return TransientRectGrid(latobj, lonobj, self._order_, self._gridtype_, submask)
-        
+
     # Same as subGrid, for coordinates
     def subGridRegion(self, latRegion, lonRegion):
         latInterval = self._lataxis_.mapInterval(latRegion)
@@ -332,7 +338,7 @@ class AbstractRectGrid(AbstractGrid):
 
     # Return a transient grid which is the transpose of this grid
     def transpose(self):
-        if self._order_=="yx":
+        if self._order_ == "yx":
             neworder = "xy"
         else:
             neworder = "yx"
@@ -358,65 +364,69 @@ class AbstractRectGrid(AbstractGrid):
 
         CLOSE_ENOUGH = 1.e-3
         lat = self.getLatitude()
-        if len(lat)==1:
-            return ('generic',1,0)
+        if len(lat) == 1:
+            return ('generic', 1, 0)
 
         latar = lat[:]
-        if lat[0]<lat[-1]:              # increasing?
-            hassouth = (abs(lat[0]+90.0)<1.e-2)
-            hasnorth = (abs(lat[-1]-90.0)<1.e-2)
-            if hassouth: latar = latar[1:]
-            if hasnorth: latar = latar[:-1]
+        if lat[0] < lat[-1]:              # increasing?
+            hassouth = (abs(lat[0]+90.0) < 1.e-2)
+            hasnorth = (abs(lat[-1]-90.0) < 1.e-2)
+            if hassouth:
+                latar = latar[1:]
+            if hasnorth:
+                latar = latar[:-1]
         else:                           # decreasing
-            hassouth = (abs(lat[-1]+90.0)<1.e-2)
-            hasnorth = (abs(lat[0]-90.0)<1.e-2)
-            if hassouth: latar = latar[:-1]
-            if hasnorth: latar = latar[1:]
+            hassouth = (abs(lat[-1]+90.0) < 1.e-2)
+            hasnorth = (abs(lat[0]-90.0) < 1.e-2)
+            if hassouth:
+                latar = latar[:-1]
+            if hasnorth:
+                latar = latar[1:]
         nlats = len(latar)
 
         # Get the related Gaussian latitude
-        gausslatns, wts, bnds = regrid2._regrid.gridattr(len(latar),'gaussian')
+        gausslatns, wts, bnds = regrid2._regrid.gridattr(len(latar), 'gaussian')
         gausslatsn = gausslatns[::-1]
         diffs = latar[1:]-latar[:-1]
-        equalareans, wts, bnds = regrid2._regrid.gridattr(len(latar),'equalarea')
+        equalareans, wts, bnds = regrid2._regrid.gridattr(len(latar), 'equalarea')
         equalareasn = equalareans[::-1]
 
         # Get the Gaussian lats for len+1, in case this is a boundary
-        dumlat, dumwt, bndsplusns = regrid2._regrid.gridattr(len(latar)+1,'gaussian')
+        dumlat, dumwt, bndsplusns = regrid2._regrid.gridattr(len(latar)+1, 'gaussian')
         bndsplussn = bndsplusns[::-1]
 
         # Look for N-S equality
         isoffset = 0
-        if numpy.alltrue(numpy.less(numpy.absolute(latar[:]-gausslatns),CLOSE_ENOUGH)):
+        if numpy.alltrue(numpy.less(numpy.absolute(latar[:]-gausslatns), CLOSE_ENOUGH)):
             actualType = 'gaussian'
 
-        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-gausslatsn),CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-gausslatsn), CLOSE_ENOUGH)):
             actualType = 'gaussian'
 
         # Check for zone (offset) variable
-        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-bndsplusns[1:-1]),CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-bndsplusns[1:-1]), CLOSE_ENOUGH)):
             actualType = 'gaussian'
             isoffset = 1
             nlats = nlats+1
 
-        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-bndsplussn[1:-1]),CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-bndsplussn[1:-1]), CLOSE_ENOUGH)):
             actualType = 'gaussian'
             isoffset = 1
             nlats = nlats+1
 
-        elif numpy.alltrue(numpy.less(numpy.absolute(diffs-diffs[0]),CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(diffs-diffs[0]), CLOSE_ENOUGH)):
             actualType = 'uniform'
 
-        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-equalareans),CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-equalareans), CLOSE_ENOUGH)):
             actualType = 'equalarea'
 
-        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-equalareasn),CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-equalareasn), CLOSE_ENOUGH)):
             actualType = 'equalarea'
 
         else:
             actualType = 'generic'
 
-        return (actualType,nlats,isoffset)
+        return (actualType, nlats, isoffset)
 
     # Generate a best guess at grid info within a family of grids (list of grids)
     # Return a tuple (type,coverage,nlats,isoffset, basegrid, latindex) where:
@@ -436,21 +446,23 @@ class AbstractRectGrid(AbstractGrid):
         coverage = 'global'
         basegrid = None
         latindex = None
-        if gridtype=='generic':
+        if gridtype == 'generic':
             # Look for truncated grids: such that grid is a subset of grid2
             found = 0
             for grid2 in gridlist:
-                if self.id==grid2.id: continue
+                if self.id == grid2.id:
+                    continue
                 lat = self.getLatitude()
                 lon = self.getLongitude()
                 lat2 = grid2.getLatitude()
                 lon2 = grid2.getLongitude()
-                if len(lat)>len(lat2) or len(lon)>len(lon2): continue
-                latIsSubset, latindex = isSubsetVector(lat[:],lat2[:],1.e-2)
-                lonIsSubset, lonindex = isSubsetVector(lon[:],lon2[:],1.e-2)
+                if len(lat) > len(lat2) or len(lon) > len(lon2):
+                    continue
+                latIsSubset, latindex = isSubsetVector(lat[:], lat2[:], 1.e-2)
+                lonIsSubset, lonindex = isSubsetVector(lon[:], lon2[:], 1.e-2)
                 if latIsSubset and lonIsSubset:
                     found = 1
-                    if len(lat2)>nlats:
+                    if len(lat2) > nlats:
                         coverage = 'regional'
                     nlats = len(lat2)
                     basegrid = grid2.id
@@ -462,13 +474,13 @@ class AbstractRectGrid(AbstractGrid):
     def genBounds(self):
         import regrid2._regrid
 
-        if hasattr(self,"parent") and self.parent is not None:
+        if hasattr(self, "parent") and self.parent is not None:
             gridfamily = self.parent.grids.values()
         else:
             gridfamily = []
 
         gridtype, coverage, nlats, isoffset, basegrid, latindex = self.classifyInFamily(gridfamily)
-        if _classifyGrids==0:
+        if _classifyGrids == 0:
             gridtypenew = self.getType()
             if gridtypenew in AbstractRectGrid.gridtypes:
                 gridtype = gridtypenew
@@ -476,39 +488,41 @@ class AbstractRectGrid(AbstractGrid):
         # Get latitude bounds
         lat = self.getLatitude()
         ascending = (lat[0] < lat[-1])
-        if gridtype=='gaussian':
+        if gridtype == 'gaussian':
             pts, wts, bnds = regrid2._regrid.gridattr(nlats, 'gaussian')
-            if ascending: bnds = bnds[::-1]
-            latbnds = numpy.zeros((len(lat),2),numpy.float)
-            latbnds[:,0] = bnds[:-1]
-            latbnds[:,1] = bnds[1:]
-            latbnds[0,:] = numpy.maximum(-90.0, numpy.minimum(90.0,latbnds[0,:]))
-            latbnds[-1,:] = numpy.maximum(-90.0, numpy.minimum(90.0,latbnds[-1,:]))
-        elif gridtype=='equalarea':
+            if ascending:
+                bnds = bnds[::-1]
+            latbnds = numpy.zeros((len(lat), 2), numpy.float)
+            latbnds[:, 0] = bnds[:-1]
+            latbnds[:, 1] = bnds[1:]
+            latbnds[0, :] = numpy.maximum(-90.0, numpy.minimum(90.0, latbnds[0, :]))
+            latbnds[-1, :] = numpy.maximum(-90.0, numpy.minimum(90.0, latbnds[-1, :]))
+        elif gridtype == 'equalarea':
             pts, wts, bnds = regrid2._regrid.gridattr(nlats, 'equalarea')
-            if ascending: bnds = bnds[::-1]
-            latbnds = numpy.zeros((len(lat),2),numpy.float)
-            latbnds[:,0] = bnds[:-1]
-            latbnds[:,1] = bnds[1:]
-            latbnds[0,:] = numpy.maximum(-90.0, numpy.minimum(90.0,latbnds[0,:]))
-            latbnds[-1,:] = numpy.maximum(-90.0, numpy.minimum(90.0,latbnds[-1,:]))
+            if ascending:
+                bnds = bnds[::-1]
+            latbnds = numpy.zeros((len(lat), 2), numpy.float)
+            latbnds[:, 0] = bnds[:-1]
+            latbnds[:, 1] = bnds[1:]
+            latbnds[0, :] = numpy.maximum(-90.0, numpy.minimum(90.0, latbnds[0, :]))
+            latbnds[-1, :] = numpy.maximum(-90.0, numpy.minimum(90.0, latbnds[-1, :]))
         else:
             latbnds = lat.genGenericBounds()
 
         # Stretch latitude bounds to +/- 90.0
         if ascending:
-            latbnds[0,0] = min(latbnds[0,0],-90.0)
-            latbnds[-1,1] = max(latbnds[-1,1],90.0)
+            latbnds[0, 0] = min(latbnds[0, 0], -90.0)
+            latbnds[-1, 1] = max(latbnds[-1, 1], 90.0)
         else:
-            latbnds[0,0] = max(latbnds[0,0],+90.0)
-            latbnds[-1,1] = min(latbnds[-1,1],-90.0)
+            latbnds[0, 0] = max(latbnds[0, 0], +90.0)
+            latbnds[-1, 1] = min(latbnds[-1, 1], -90.0)
 
         # Get longitude bounds
         lon = self.getLongitude()
-        if len(lon)>1:
+        if len(lon) > 1:
             lonbnds = lon.genGenericBounds()
         else:
-            lonbnds = numpy.array([[lon[0]-180.0, lon[0]+180.0]],numpy.float)
+            lonbnds = numpy.array([[lon[0]-180.0, lon[0]+180.0]], numpy.float)
 
         return (latbnds, lonbnds)
 
@@ -518,27 +532,27 @@ class AbstractRectGrid(AbstractGrid):
     def getMesh(self):
         """Generate a mesh array for the meshfill graphics method."""
         if self._mesh_ is None:
-            LAT=0
-            LON=1
+            LAT = 0
+            LON = 1
             latbounds, lonbounds = self.getBounds()
             if latbounds is None or lonbounds is None:
-                raise CDMSError, 'No boundary data is available for grid %s'%self.id
+                raise CDMSError('No boundary data is available for grid %s'%self.id)
             ny = len(self._lataxis_)
             nx = len(self._lonaxis_)
             lenmesh = ny*nx
-            mesh = numpy.zeros((lenmesh,2,4),latbounds.dtype.char)
-            broadlat = numpy.repeat(latbounds[:,numpy.newaxis,:],nx,axis=1)
-            broadlat.shape = (lenmesh,2)
-            broadlon = numpy.repeat(lonbounds[numpy.newaxis,:,:],ny,axis=0)
-            broadlon.shape=(lenmesh,2)
-            mesh[:,LAT,0] = broadlat[:,0]
-            mesh[:,LAT,1] = broadlat[:,0]
-            mesh[:,LAT,2] = broadlat[:,1]
-            mesh[:,LAT,3] = broadlat[:,1]
-            mesh[:,LON,0] = broadlon[:,0]
-            mesh[:,LON,1] = broadlon[:,1]
-            mesh[:,LON,2] = broadlon[:,1]
-            mesh[:,LON,3] = broadlon[:,0]
+            mesh = numpy.zeros((lenmesh, 2, 4), latbounds.dtype.char)
+            broadlat = numpy.repeat(latbounds[:, numpy.newaxis, :], nx, axis=1)
+            broadlat.shape = (lenmesh, 2)
+            broadlon = numpy.repeat(lonbounds[numpy.newaxis, :, :], ny, axis=0)
+            broadlon.shape = (lenmesh, 2)
+            mesh[:, LAT, 0] = broadlat[:, 0]
+            mesh[:, LAT, 1] = broadlat[:, 0]
+            mesh[:, LAT, 2] = broadlat[:, 1]
+            mesh[:, LAT, 3] = broadlat[:, 1]
+            mesh[:, LON, 0] = broadlon[:, 0]
+            mesh[:, LON, 1] = broadlon[:, 1]
+            mesh[:, LON, 2] = broadlon[:, 1]
+            mesh[:, LON, 3] = broadlon[:, 0]
             self._mesh_ = mesh
         return self._mesh_
 
@@ -550,7 +564,7 @@ class AbstractRectGrid(AbstractGrid):
         if self._flataxes_ is None:
             alat = self.getLatitude()[:]
             alon = self.getLongitude()[:]
-            alatflat = numpy.repeat(alat[:,numpy.newaxis], len(alon), axis=1)
+            alatflat = numpy.repeat(alat[:, numpy.newaxis], len(alon), axis=1)
             alonflat = numpy.repeat(alon[numpy.newaxis,:], len(alat), axis=0)
             self._flataxes_ = (numpy.ravel(alatflat), numpy.ravel(alonflat))
         return self._flataxes_
@@ -572,8 +586,8 @@ class AbstractRectGrid(AbstractGrid):
         'gridid' is the string identifier of the resulting curvilinear grid object.
         """
 
-        from coord import TransientVirtualAxis, TransientAxis2D
-        from hgrid import TransientCurveGrid
+        from .coord import TransientVirtualAxis, TransientAxis2D
+        from .hgrid import TransientCurveGrid
 
         lat = self._lataxis_[:]
         lon = self._lonaxis_[:]
@@ -606,37 +620,37 @@ class AbstractRectGrid(AbstractGrid):
             ax, ay = lon, lat
             bx, by = blon, blat
             nx, ny = nlon, nlat
-            
+
         centerX = numpy.outer(numpy.ones(ny), ax)
         centerY = numpy.outer(ay, numpy.ones(nx))
 
         # Create corner latitudes (in yx order), ensuring counterclockwise direction
         cy = numpy.zeros((ny, 4), numpy.float)
-        if (by[0,0]<= by[0,1]):
+        if (by[0, 0] <= by[0, 1]):
             incr = 1
         else:
             incr = 0
-        cy[:,0] = by[:,1-incr]
-        cy[:,1] = by[:,1-incr]
-        cy[:,2] = by[:,incr]
-        cy[:,3] = by[:,incr]
-        cornerY = numpy.repeat(cy[:,numpy.newaxis,:], nx, axis=1)
-        
+        cy[:, 0] = by[:, 1-incr]
+        cy[:, 1] = by[:, 1-incr]
+        cy[:, 2] = by[:, incr]
+        cy[:, 3] = by[:, incr]
+        cornerY = numpy.repeat(cy[:, numpy.newaxis,:], nx, axis=1)
+
         # Create corner longitudes (in yx order), ensuring counterclockwise direction
         cx = numpy.zeros((nx, 4), numpy.float)
-        if (bx[0,0]<= bx[0,1]):
+        if (bx[0, 0] <= bx[0, 1]):
             incr = 1
         else:
             incr = 0
-        cx[:,0] = bx[:,1-incr]
-        cx[:,1] = bx[:,incr]
-        cx[:,2] = bx[:,incr]
-        cx[:,3] = bx[:,1-incr]
+        cx[:, 0] = bx[:, 1-incr]
+        cx[:, 1] = bx[:, incr]
+        cx[:, 2] = bx[:, incr]
+        cx[:, 3] = bx[:, 1-incr]
         cornerX = numpy.repeat(cx[numpy.newaxis,:,:], ny, axis=0)
 
-        iaxis = TransientVirtualAxis("i",ny) # First axis
-        jaxis = TransientVirtualAxis("j",nx) # Second axis
-        
+        iaxis = TransientVirtualAxis("i", ny) # First axis
+        jaxis = TransientVirtualAxis("j", nx) # Second axis
+
         centerLat = centerY
         centerLon = centerX
         cornerLat = cornerY
@@ -646,7 +660,7 @@ class AbstractRectGrid(AbstractGrid):
             centerLon = centerY
             cornerLat = cornerX
             cornerLon = cornerY
-            
+
 
         lataxis = TransientAxis2D(centerLat, axes=(iaxis, jaxis), bounds=cornerLat,
                                   attributes={'units':latunits}, id="latitude")
@@ -661,39 +675,40 @@ class AbstractRectGrid(AbstractGrid):
         gengrid = curvegrid.toGenericGrid(gridid=gridid)
         return gengrid
 
-    shape = property(_getshape,None)
-   
-## PropertiedClasses.set_property (AbstractRectGrid, 'shape', 
-##                                 AbstractRectGrid._getshape, 
-##                                 nowrite=1,
-##                                 nodelete=1)
+    shape = property(_getshape, None)
+
+# PropertiedClasses.set_property (AbstractRectGrid, 'shape',
+# AbstractRectGrid._getshape,
+# nowrite=1,
+# nodelete=1)
 
-## internattr.add_internal_attribute (AbstractRectGrid, 'id', 'parent')
+# internattr.add_internal_attribute (AbstractRectGrid, 'id', 'parent')
 
 class RectGrid(AbstractRectGrid):
 
     def __init__(self,parent,rectgridNode=None):
         if rectgridNode is not None and rectgridNode.tag != 'rectGrid':
-            raise CDMSError, 'Node is not a grid node'
-        AbstractRectGrid.__init__(self,rectgridNode)
+            raise CDMSError('Node is not a grid node')
+        AbstractRectGrid.__init__(self, rectgridNode)
         self.parent = parent
 
     # Set pointers to related structural elements: lon, lat axes, order, mask
     def initDomain(self, axisdict, vardict):
-        if not axisdict.has_key(self.latitude):
-            raise CDMSError, 'No such latitude: %s'%`self.latitude`
-        if not axisdict.has_key(self.longitude):
-            raise CDMSError, 'No such longitude: %s'%`self.longitude`
+        if self.latitude not in axisdict:
+            raise CDMSError('No such latitude: %s'%repr(self.latitude))
+        if self.longitude not in axisdict:
+            raise CDMSError('No such longitude: %s'%repr(self.longitude))
         self._lataxis_ = axisdict[self.latitude]
         self._lonaxis_ = axisdict[self.longitude]
         self._order_ = self.order
         self._gridtype_ = self.attributes.get('type')
-        if self._gridtype_ is None: self._gridtype_ = "generic"
-        if hasattr(self,"mask"):
+        if self._gridtype_ is None:
+            self._gridtype_ = "generic"
+        if hasattr(self, "mask"):
             self._maskVar_ = vardict.get(self.mask)
         else:
             self._maskVar_ = None
-    
+
     def getMask(self):
         if self._maskVar_ is None:
             # return numpy.ones(self.shape)
@@ -704,7 +719,7 @@ class RectGrid(AbstractRectGrid):
     def getMaskVar(self):
         return self._maskVar_
 
-## internattr.add_internal_attribute(RectGrid)
+# internattr.add_internal_attribute(RectGrid)
 
 class FileRectGrid(AbstractRectGrid):
 
@@ -714,8 +729,8 @@ class FileRectGrid(AbstractRectGrid):
         self.parent = parent
         self._lataxis_ = latobj
         self._lonaxis_ = lonobj
-        if not order in ["yx","xy"]:
-            raise CDMSError, 'Grid order must be "yx" or "xy"'
+        if not order in ["yx", "xy"]:
+            raise CDMSError('Grid order must be "yx" or "xy"')
         self._order_ = order
         self.setType(gridtype)
         self._maskVar_ = maskobj        # FileVariable of mask
@@ -739,24 +754,25 @@ class FileRectGrid(AbstractRectGrid):
     # Set the mask to array 'mask'. If persistent == 1, modify permanently
     # in the file, else set as a temporary mask.
     def setMask(self,mask,persistent=0):
-        if persistent!=0: raise CDMSError, MethodNotImplemented
+        if persistent != 0:
+            raise CDMSError(MethodNotImplemented)
         if mask is None:
             self._tempMask_ = None
         else:
-            assert type(mask)==numpy.ndarray, 'Mask must be a numpy array'
-            assert mask.shape==self.shape,'Mask must have shape %s'%`self.shape`
+            assert isinstance(mask, numpy.ndarray), 'Mask must be a numpy array'
+            assert mask.shape == self.shape, 'Mask must have shape %s'%repr(self.shape)
             self._tempMask_ = copy.copy(mask)
 
     def getMaskVar(self):
         return self._maskVar_
 
-## internattr.add_internal_attribute(FileRectGrid)
+# internattr.add_internal_attribute(FileRectGrid)
 
 # In-memory rectilinear grid
 class TransientRectGrid(AbstractRectGrid):
     "Grids that live in memory only."
     def __init__(self, latobj, lonobj, order, gridtype, maskarray=None):
-        AbstractRectGrid.__init__(self,None)
+        AbstractRectGrid.__init__(self, None)
         if latobj.__class__ != TransientAxis:
             latobj = TransientAxis(latobj[:], latobj.getBounds())
         if lonobj.__class__ != TransientAxis:
@@ -765,8 +781,8 @@ class TransientRectGrid(AbstractRectGrid):
         self._lataxis_.designateLatitude()
         self._lonaxis_ = lonobj
         self._lonaxis_.designateLongitude()
-        if not order in ["yx","xy"]:
-            raise CDMSError, 'Grid order must be "yx" or "xy"'
+        if not order in ["yx", "xy"]:
+            raise CDMSError('Grid order must be "yx" or "xy"')
         self._order_ = order
         self.setType(gridtype)
         self.setMask(maskarray)        # numpy mask array
@@ -782,17 +798,17 @@ class TransientRectGrid(AbstractRectGrid):
     # with persistent versions, is ignored.
     def setMask(self,mask, persistent=0):
         if mask is not None:
-            if type(mask)!=numpy.ndarray:
-               raise CDMSError, 'Mask must be a numpy array'
+            if not isinstance(mask, numpy.ndarray):
+                raise CDMSError('Mask must be a numpy array')
             if mask.shape != self.shape:
-               raise CDMSError, 'Mask must have shape %s'%`self.shape`
+                raise CDMSError('Mask must have shape %s'%repr(self.shape))
         self._maskArray_ = copy.copy(mask)
 
     def setBounds(self, latBounds, lonBounds):
         self._lataxis_.setBounds(latBounds)
         self._lonaxis_.setBounds(lonBounds)
 
-## internattr.add_internal_attribute(TransientRectGrid)
+# internattr.add_internal_attribute(TransientRectGrid)
 
 def isGrid(grid):
     """
@@ -808,9 +824,8 @@ def writeScripGrid(path, grid, gridTitle=None):
     grid is a CDMS grid object.
     gridTitle is a string ID for the grid.
     """
-    
+
     import Cdunif
-    f = Cdunif.CdunifFile(path,'w')
+    f = Cdunif.CdunifFile(path, 'w')
     grid.writeScrip(f, gridTitle)
     f.close()
-
diff --git a/Packages/cdms2/Lib/gsHost.py b/Packages/cdms2/Lib/gsHost.py
index 343a3c842..4933e7f49 100644
--- a/Packages/cdms2/Lib/gsHost.py
+++ b/Packages/cdms2/Lib/gsHost.py
@@ -5,7 +5,7 @@ A file-like object to access a host file, the single entry point
 to an entire gridspec data file layout.
 
 Dave Kindig and Alex Pletzer, Tech-X (2011)
-This code is provided with the hope that it will be useful. 
+This code is provided with the hope that it will be useful.
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
@@ -24,7 +24,8 @@ except:
     # raise ImportError, 'Error: could not import pycf'
     print 'Error: could not import pycf'
 
-def open(hostfile, mode = 'r'):
+
+def open(hostfile, mode='r'):
     """
     Open host file
     @param hostfile host file
@@ -34,17 +35,19 @@ def open(hostfile, mode = 'r'):
     outHostFile = Host(hostfile, mode)
     return outHostFile
 
+
 class Host:
+
     """
     A LibCF/GRIDSPEC host file object. This acts as the single point of entry to
     a GRIDSPEC aggregation. Variables and grids can be requested solely through
-    the Host object, which is a hybrid between a variable and file object. 
+    the Host object, which is a hybrid between a variable and file object.
     Host relies on the libcf shared object. As such, if there is a problem
     consult http://www.unidata.ucar.edu/software/libcf/docs/libcf/ for details
     on building host files and all related GRIDSPEC files.
     """
 
-    def __init__(self, hostfile, mode = 'r'):
+    def __init__(self, hostfile, mode='r'):
         """
         Constructor
         @param hostfile path to the host
@@ -54,42 +57,41 @@ class Host:
         self.__initialize()
         self.uri = hostfile
         self.mode = mode
-        
+
         # Data dir based on location of hostfile
         if mode != 'r':
-            raise CDMSError, 'Only read mode is supported for host file'
+            raise CDMSError('Only read mode is supported for host file')
 
         for sosuffix in '.so', '.dylib', '.dll', '.a':
             self.libcfdll = CDLL(LIBCF + sosuffix)
             if self.libcfdll:
                 break
 
-        if self.libcfdll == None: 
-            raise CDMSError, 'libcf not installed or incorrect path\n  '
+        if self.libcfdll is None:
+            raise CDMSError('libcf not installed or incorrect path\n  ')
 
         libcfdll = self.libcfdll
 
         status = libcfdll.nccf_def_host_from_file(hostfile,
-                                               byref(self.hostId_ct))
+                                                  byref(self.hostId_ct))
         if status != 0:
-            raise CDMSError, \
-                "ERROR: not a valid host file %s (status=%d)" % \
-                (hostfile, status)
+            raise CDMSError("ERROR: not a valid host file %s (status=%d)" %
+                            (hostfile, status))
 
         # Attach global attrs
-        libcfdll.nccf_def_global_from_file( hostfile, \
-                                            byref(self.globalId_ct))
+        libcfdll.nccf_def_global_from_file(hostfile,
+                                           byref(self.globalId_ct))
 
         # get the global attributes from the file
         natts = c_int(-1)
-        attName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME+1))
-        attValu_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME+1))
-        self.libcfdll.nccf_inq_global_natts( self.globalId_ct, byref(natts))
+        attName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME + 1))
+        attValu_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME + 1))
+        self.libcfdll.nccf_inq_global_natts(self.globalId_ct, byref(natts))
         for i in range(natts.value):
-            self.libcfdll.nccf_inq_global_attval(self.globalId_ct, \
-                                                     i, attName_ct, \
-                                                     attValu_ct)
-            if not self.attributes.has_key( attName_ct.value ):
+            self.libcfdll.nccf_inq_global_attval(self.globalId_ct,
+                                                 i, attName_ct,
+                                                 attValu_ct)
+            if attName_ct.value not in self.attributes:
                 self.attributes[attName_ct.value] = attValu_ct.value
 
         self.id = hostfile
@@ -97,28 +99,28 @@ class Host:
         i_ct = c_int()
         status = libcfdll.nccf_inq_host_ngrids(self.hostId_ct, byref(i_ct))
         self.nGrids = i_ct.value
-        status = libcfdll.nccf_inq_host_nstatdatafiles(self.hostId_ct, \
-                                                           byref(i_ct))
+        status = libcfdll.nccf_inq_host_nstatdatafiles(self.hostId_ct,
+                                                       byref(i_ct))
         self.nStatDataFiles = i_ct.value
-        status = libcfdll.nccf_inq_host_ntimedatafiles(self.hostId_ct, \
-                                                           byref(i_ct))
+        status = libcfdll.nccf_inq_host_ntimedatafiles(self.hostId_ct,
+                                                       byref(i_ct))
 
         self.nTimeDataFiles = i_ct.value
-        status = libcfdll.nccf_inq_host_ntimeslices(self.hostId_ct, \
-                                                        byref(i_ct))
+        status = libcfdll.nccf_inq_host_ntimeslices(self.hostId_ct,
+                                                    byref(i_ct))
         self.nTimeSliceFiles = i_ct.value
 
-        fName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME+1))
-        gName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME+1))
+        fName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME + 1))
+        gName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME + 1))
 
-        self.dimensions = {"nGrids": self.nGrids, 
+        self.dimensions = {"nGrids": self.nGrids,
                            "nStatDataFiles": self.nStatDataFiles,
                            "nTimeDataFiles": self.nTimeDataFiles,
-                           "nTimeSliceFiles":self.nTimeSliceFiles }
+                           "nTimeSliceFiles": self.nTimeSliceFiles}
 
         # Mosaic filename (use getMosaic to return the connectivity)
         mosaicFilename = c_char_p(" " * (libCFConfig.NC_MAX_NAME + 1))
-        status = libcfdll.nccf_inq_host_mosaicfilename(self.hostId_ct, 
+        status = libcfdll.nccf_inq_host_mosaicfilename(self.hostId_ct,
                                                        mosaicFilename)
         self.mosaicFilename = mosaicFilename.value
 
@@ -131,9 +133,9 @@ class Host:
         # static data
         for vfindx in range(self.nStatDataFiles):
             for gfindx in range(self.nGrids):
-                status = libcfdll.nccf_inq_host_statfilename(self.hostId_ct, 
-                                                          vfindx, gfindx, 
-                                                          fName_ct)
+                status = libcfdll.nccf_inq_host_statfilename(self.hostId_ct,
+                                                             vfindx, gfindx,
+                                                             fName_ct)
                 statFilenames.append(fName_ct.value)
                 f = cdms2.open(fName_ct.value, 'r')
                 varNames = f.listvariable()
@@ -142,12 +144,12 @@ class Host:
                     # Add coordinate names a local list of coordinates
                     if 'coordinates' in dir(f[vn]):
                         for coord in f[vn].coordinates.split():
-                            if not coord in coordinates: 
+                            if not coord in coordinates:
                                 coordinates.append(coord)
-                    if not self.statVars.has_key(vn):
+                    if vn not in self.statVars:
                         # allocate
-                        self.statVars[vn] = ["" for ig in \
-                                                 range(self.nGrids)] 
+                        self.statVars[vn] = ["" for ig in
+                                             range(self.nGrids)]
 
                     # set file name
                     self.statVars[vn][gfindx] = fName_ct.value
@@ -158,11 +160,11 @@ class Host:
             for tfindx in range(self.nTimeSliceFiles):
                 for gfindx in range(self.nGrids):
                     status = \
-                        libcfdll.nccf_inq_host_timefilename(self.hostId_ct, 
-                                                            vfindx, \
-                                                                tfindx, \
-                                                                gfindx, \
-                                                              fName_ct)
+                        libcfdll.nccf_inq_host_timefilename(self.hostId_ct,
+                                                            vfindx,
+                                                            tfindx,
+                                                            gfindx,
+                                                            fName_ct)
                     timeFilenames.append(fName_ct.value)
                     f = cdms2.open(fName_ct.value, 'r')
                     varNames = f.listvariable()
@@ -170,13 +172,13 @@ class Host:
                         # Add coordinate names a local list of coordinates
                         if 'coordinates' in dir(f[vn]):
                             for coord in f[vn].coordinates.split():
-                                if not coord in coordinates: 
+                                if not coord in coordinates:
                                     coordinates.append(coord)
-                        if not self.timeVars.has_key(vn):
+                        if vn not in self.timeVars:
                             # allocate
                             self.timeVars[vn] = \
-                                [["" for it in range(self.nTimeSliceFiles)] \
-                                     for ig in range(self.nGrids)]
+                                [["" for it in range(self.nTimeSliceFiles)]
+                                 for ig in range(self.nGrids)]
                         # set file name
                         self.timeVars[vn][gfindx][tfindx] = fName_ct.value
                     f.close()
@@ -184,12 +186,12 @@ class Host:
         # Grid names and data. Must come after time and static file dictionaries
         # because they define the coordinates.
         for gfindx in range(self.nGrids):
-            status = libcfdll.nccf_inq_host_gridfilename(self.hostId_ct, 
-                                                      gfindx, 
-                                                      fName_ct)
-            status = libcfdll.nccf_inq_host_gridname(self.hostId_ct, 
-                                                      gfindx, 
-                                                      gName_ct)
+            status = libcfdll.nccf_inq_host_gridfilename(self.hostId_ct,
+                                                         gfindx,
+                                                         fName_ct)
+            status = libcfdll.nccf_inq_host_gridname(self.hostId_ct,
+                                                     gfindx,
+                                                     gName_ct)
 
             varNames = cdms2.open(fName_ct.value, 'r').listvariable()
             for vn in varNames:
@@ -204,23 +206,22 @@ class Host:
         # Populate the variables dictionary, avoid the grids
         self.variables = {}
         for item in self.statVars.keys():
-            self.variables[item] = StaticFileVariable(self, item) 
+            self.variables[item] = StaticFileVariable(self, item)
         for item in self.timeVars.keys():
             self.variables[item] = TimeFileVariable(self, item)
 
-
     def __initialize(self):
         """
-        private method to inititialze the hostObj and for use in reseting 
+        private method to initialize the hostObj and for use in resetting
         the hostObj on close
         """
 
-        self.mode     = ''
+        self.mode = ''
         self.libcfdll = None
-        self.uri      = ''
-        self.id       = ''
+        self.uri = ''
+        self.id = ''
         self._status_ = ''
-        
+
         # ctypes variables
         self.hostId_ct = c_int(-1)
         self.globalId_ct = c_int(-1)
@@ -228,10 +229,10 @@ class Host:
         # number of grid files
         self.nGrids = 0
 
-        # number of static var files 
+        # number of static var files
         self.nStatDataFiles = 0
 
-        # number of time dependent var files 
+        # number of time dependent var files
         self.nTimeDataFiles = 0
 
         # number of time files
@@ -251,14 +252,14 @@ class Host:
         self.statVars = {}
 
         # global attributes
-        self.attributes = {}   
+        self.attributes = {}
 
     def getMosaic(self):
         """
         Get the mosaic filename
         @return mfn Mosaic filename
         """
-        from gsMosaic import Mosaic
+        from .gsMosaic import Mosaic
         mfn = Mosaic(self.mosaicFilename, "r")
 
         return mfn
@@ -282,10 +283,10 @@ class Host:
         """
         return self.gridName.values()
 
-    def getStatFilenames(self, varName = None):
+    def getStatFilenames(self, varName=None):
         """
         Return a list of static variable filenames
-        @param varName variable name (or None if all the static file names are to 
+        @param varName variable name (or None if all the static file names are to
                        be returned)
         @return list the file names corresponding to varName
         """
@@ -294,7 +295,7 @@ class Host:
         # return all the static var filenames
         return self.statVars.values()
 
-    def getTimeFilenames(self, varName = None):
+    def getTimeFilenames(self, varName=None):
         """
         Return a list of time dependent variable filenames
         @param varName variable name. None for all variables
@@ -311,7 +312,7 @@ class Host:
         @return list of coordinate names
         """
         return self.gridVars.keys()
-    
+
     def getNumGrids(self):
         """
         Get number of grids (tiles)
@@ -322,7 +323,7 @@ class Host:
 
     def getNumStatDataFiles(self):
         """
-        Get number of static data files 
+        Get number of static data files
         @return number static files
         """
         return self.nStatDataFiles
@@ -334,7 +335,7 @@ class Host:
         """
         return self.nTimeDataFiles
 
-    def listvariable(self, gstype = None):
+    def listvariable(self, gstype=None):
         """
         @param type Grid, Static, Time Dependent or None
         @return list of all variables, including static and time dependent, Default = None
@@ -356,9 +357,9 @@ class Host:
         # Raise error
         else:
             text = 'type must be "Static", "Time", None or empty'
-            raise CDMSError, text
+            raise CDMSError(text)
 
-    def listvariables(self, type = None):
+    def listvariables(self, type=None):
         """
         Synonymous to listvariable
         @param type Grid, Static, Time Dependent or None
@@ -373,9 +374,9 @@ class Host:
         @return attributes list
         """
         fName = ""
-        if self.statVars.has_key(varName):
+        if varName in self.statVars:
             fName = self.statVars[varName][0]
-        elif self.timeVars.has_key(varName):
+        elif varName in self.timeVars:
             fName = self.timeVars[varName][0][0]
         if fName:
             var = cdms2.open(fName, 'r')(varName)
@@ -397,12 +398,12 @@ class Host:
         @return [nGrids, (n0, n1, ...)]
         """
         return self.dimensions.keys()
-        
+
     def listglobal(self):
         """
         List global attributes of host file
         @return a list of the global attributes in the file
-        """ 
+        """
         return self.attributes.keys()
 
     def getglobal(self, attName):
@@ -410,10 +411,10 @@ class Host:
         Get the value of the global attribute
         @param [attName] - global attribute name
         @return attribute value
-        """        
+        """
         return self.attributes[attName]
 
-    def listall(self, varName = None, all = None):
+    def listall(self, varName=None, all=None):
         """
         Get info about data from the file.
         @param varName variable name
@@ -421,11 +422,12 @@ class Host:
         @return information about file.
         """
 
-        if varName is None: return None 
+        if varName is None:
+            return None
         var = self.getVariable(varName)
-        return var.listall(all = all)
+        return var.listall(all=all)
 
-    def showall(self, varName = None, all = None, device = None):
+    def showall(self, varName=None, all=None, device=None):
         """
         Get info about data from the file.
         @param varName variable name
@@ -433,9 +435,12 @@ class Host:
         @param device output device
         @return information about file.
         """
-        import sys, string
-        if device is None: device=sys.stdout
-        if varName is None: return None 
+        import sys
+        import string
+        if device is None:
+            device = sys.stdout
+        if varName is None:
+            return None
         var = self.getVariable(varName)
         alist = var.listall(all=all)
         device.write(string.join(alist, "\n"))
@@ -448,22 +453,22 @@ class Host:
         self.__initialize()
         self._status_ = 'closed'
 
-    def __repr__(self): 
+    def __repr__(self):
         """
         Python repr()
         @return res Print statement
         """
         res = "< '%s',  URI: '%s', MODE: '%s', STATUS: '%s',\n libcf: %s >" % \
-            ( self.__class__, self.uri, self.mode, 
+            (self.__class__, self.uri, self.mode,
               self._status_, self.libcfdll)
-        return res 
+        return res
 
     def __del__(self):
         """
         Free the host file from memory
         """
-        if self.hostId_ct.value >= 0: 
-            self.libcfdll.nccf_free_host( self.hostId_ct )
+        if self.hostId_ct.value >= 0:
+            self.libcfdll.nccf_free_host(self.hostId_ct)
         self.hostId_ct.value = -1
 
 # NOTE: There is no __call__ method for host files.
@@ -475,12 +480,12 @@ class Host:
         @return list of cdms2 file variables, one for each grid
         """
         # Static variables
-        if self.statVars.has_key(varName):
+        if varName in self.statVars:
             staticFV = StaticFileVariable(self, varName)
             return staticFV
 
         # Time variables
-        elif self.timeVars.has_key(varName):
+        elif varName in self.timeVars:
             timeVariables = TimeFileVariable(self, varName)
             return timeVariables
 
@@ -508,8 +513,9 @@ class Host:
         @return value
         """
         return self.attributes[name]
-    
-##############################################################################
+
+#
+
 
 def test():
     import sys
@@ -520,20 +526,20 @@ def test():
     from optparse import OptionParser
     parser = OptionParser()
     parser.add_option("-f", "--file", dest="hostFilename",
-                  help="host file name")
+                      help="host file name")
 
     options, args = parser.parse_args()
     if not options.hostFilename:
-        print """need to provide a host file, use -h 
+        print """need to provide a host file, use -h
 to get a full list of options"""
         sys.exit(1)
 
     print 'open file..., create grdspec file object...'
     gf = cdms2.open(options.hostFilename)
-    if gf._status_ == 'closed': 
+    if gf._status_ == 'closed':
         print "File not opened"
         sys.exit(1)
-    print 
+    print
     print "type=", type(gf)
     print 'listvariable...'
     print gf.listvariable()
@@ -552,15 +558,15 @@ to get a full list of options"""
     print 'acess time dependent data...', "V" in gf.listvariables()
     print gf['V'][0].size
 
-
     # Test the mosaic
     print 'getMosaic...', 'getMosaic' in dir(gf)
     mosaic = gf.getMosaic()
-    for c in mosaic.coordinate_names: 
+    for c in mosaic.coordinate_names:
         print c
-    for t in mosaic.tile_contacts: 
+    for t in mosaic.tile_contacts:
         print "%s -> %s" % (t, mosaic.tile_contacts[t])
 
-##############################################################################
+#
 
-if __name__ == "__main__": test()
+if __name__ == "__main__":
+    test()
diff --git a/Packages/cdms2/Lib/gsMosaic.py b/Packages/cdms2/Lib/gsMosaic.py
index 2592ada8c..983030222 100644
--- a/Packages/cdms2/Lib/gsMosaic.py
+++ b/Packages/cdms2/Lib/gsMosaic.py
@@ -3,7 +3,7 @@
 """
 A file-like object to access mosaic.
 Dave Kindig and Alex Pletzer, Tech-X (2011)
-This code is provided with the hope that it will be useful. 
+This code is provided with the hope that it will be useful.
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
@@ -11,7 +11,7 @@ No guarantee is provided whatsoever. Use at your own risk.
 from re import search, sub
 from ctypes import c_char_p, c_int, CDLL, byref
 
-# numpy 
+# numpy
 from numpy import zeros, reshape
 
 # CDAT
@@ -24,12 +24,13 @@ from cdms2.error import CDMSError
 try:
     from pycf import libCFConfig, __path__
 except:
-    raise ImportError, 'Error: could not import pycf'
+    raise ImportError('Error: could not import pycf')
 
-LIBCFDIR  = __path__[0] + "/pylibcf"
-libCF  = libCFConfig
+LIBCFDIR = __path__[0] + "/pylibcf"
+libCF = libCFConfig
 
-def open(uri, mode = 'r'):
+
+def open(uri, mode='r'):
     """
     Open mosaic file
     @param mosaicfile mosaic file
@@ -40,6 +41,7 @@ def open(uri, mode = 'r'):
     outMosaicFile = Mosaic(uri, mode)
     return outMosaicFile
 
+
 def getSlab(strg):
     """
     From a string return a tuple of slice objects
@@ -58,26 +60,29 @@ def getSlab(strg):
             step = 1
             startIndex = int(m.group(1))
             endIndex = int(m.group(2))
-            if endIndex < startIndex: step = -1
+            if endIndex < startIndex:
+                step = -1
             slc = slice(startIndex, endIndex, step)
             res.append(slc)
     return tuple(res)
 
+
 class Mosaic:
+
     """
     Define a mosaic.
     """
 
-    def __init__(self, uri, mode = 'r'):
+    def __init__(self, uri, mode='r'):
         """
         Constructor
         @param uri Filename with path
         @param mode read/write. Currently only read is supported
         """
 
-        self.id      = uri
-        self.mode    = mode
-        self.uri     = uri
+        self.id = uri
+        self.mode = mode
+        self.uri = uri
         self._status = 'Open'
 
         self.mosaicId_ct = c_int(-1)
@@ -89,41 +94,44 @@ class Mosaic:
 
         libcfdll = self.lib
 
-        self.file_type           = ""
-        self.contact_map         = {}
-        self.tile_contacts       = {}
+        self.file_type = ""
+        self.contact_map = {}
+        self.tile_contacts = {}
         self.tile_contacts_compl = {}
-        self.coordinate_names    = []
-        self.tile_names          = []
+        self.coordinate_names = []
+        self.tile_names = []
 
-        status = libcfdll.nccf_def_mosaic_from_file(uri, "", 
+        status = libcfdll.nccf_def_mosaic_from_file(uri, "",
                                                     byref(self.mosaicId_ct))
 
         if status != 0:
-            raise CDMSError, "ERROR: %s is not a valid mosaic file (status = %d)" % \
-                (uri, status)
+            raise CDMSError("ERROR: %s is not a valid mosaic file (status = %d)" %
+                            (uri, status))
 
         # Get some sizes
-        nGrids         = c_int(-1)
-        ndims          = c_int(-1)
-        ncontacts      = c_int(-1)
+        nGrids = c_int(-1)
+        ndims = c_int(-1)
+        ncontacts = c_int(-1)
         libcfdll.nccf_inq_mosaic_ndims(self.mosaicId_ct, byref(ndims))
         libcfdll.nccf_inq_mosaic_ngrids(self.mosaicId_ct, byref(nGrids))
         libcfdll.nccf_inq_mosaic_ncontacts(self.mosaicId_ct, byref(ncontacts))
 
         # Build the character arrays
         separator_ct = libCF.CF_TILE_SEPARATOR
-        contact_map_ct  = c_char_p(" " * (libCF.NC_MAX_NAME+1))
-        tile_contact_ct = c_char_p(" " * (libCF.NC_MAX_NAME+1))
-        tile_name_ct    = c_char_p(" " * (libCF.NC_MAX_NAME+1))
+        contact_map_ct = c_char_p(" " * (libCF.NC_MAX_NAME + 1))
+        tile_contact_ct = c_char_p(" " * (libCF.NC_MAX_NAME + 1))
+        tile_name_ct = c_char_p(" " * (libCF.NC_MAX_NAME + 1))
         coord_ct = (c_char_p * ndims.value)()
 
         for iDim in range(ndims.value):
-            coord_ct[iDim] = " " * (libCF.NC_MAX_NAME+1)
+            coord_ct[iDim] = " " * (libCF.NC_MAX_NAME + 1)
 
         # Get the grid names
         for igrid in range(nGrids.value):
-            libcfdll.nccf_inq_mosaic_gridname(self.mosaicId_ct, igrid, tile_name_ct)
+            libcfdll.nccf_inq_mosaic_gridname(
+                self.mosaicId_ct,
+                igrid,
+                tile_name_ct)
             tname = str(tile_name_ct)
             self.tile_names.append(tname)
 
@@ -135,24 +143,24 @@ class Mosaic:
 
         # Get the contact map information
         for iContact in range(ncontacts.value):
-            status = libcfdll.nccf_inq_mosaic_contactmap(self.mosaicId_ct, \
-                                                       iContact, contact_map_ct)
-            status = libcfdll.nccf_inq_mosaic_tilecontact(self.mosaicId_ct, \
-                                                        iContact, tile_contact_ct)
+            status = libcfdll.nccf_inq_mosaic_contactmap(self.mosaicId_ct,
+                                                         iContact, contact_map_ct)
+            status = libcfdll.nccf_inq_mosaic_tilecontact(self.mosaicId_ct,
+                                                          iContact, tile_contact_ct)
 
-            tN1, tN2             = tile_contact_ct.value.split(separator_ct)
+            tN1, tN2 = tile_contact_ct.value.split(separator_ct)
             tileName1, tileName2 = tN1.strip(), tN2.strip()
-            s1, s2               = contact_map_ct.value.split(separator_ct)
+            s1, s2 = contact_map_ct.value.split(separator_ct)
 
             # slice objects
             slab1 = getSlab(s1.strip())
             slab2 = getSlab(s2.strip())
 
             # Create the tile contact dictionary. Non symmetric.
-            if not self.tile_contacts.has_key(tileName1):
+            if tileName1 not in self.tile_contacts:
                 self.tile_contacts[tileName1] = {}
             # The complement to tile_contacts
-            if not self.tile_contacts_compl.has_key(tileName2):
+            if tileName2 not in self.tile_contacts_compl:
                 self.tile_contacts_compl[tileName2] = {}
 
             # Attach the contact map (slab) between the tiles
@@ -184,13 +192,13 @@ class Mosaic:
             for sl in slab:
                 b = sl.start
                 e = sl.stop + 1
-                newsl = slice(max(b-1, 0), max(e-1, -1), sl.step)
+                newsl = slice(max(b - 1, 0), max(e - 1, -1), sl.step)
                 newslab.append(newsl)
             newslabs.append(tuple(newslab))
         slab1, slab2 = newslabs
-    
+
         return (slab1, slab2)
-    
+
     def getSeamGrids(self, coordData):
         """
         Retrieve the seem grids between two cell centered tiles
@@ -203,32 +211,33 @@ class Mosaic:
                 # Get the seam data
                 result.append(self.getSeamData(tn1, tn2, coordData))
 
-                # Get the triangle data. Need to find the three cells 
+                # Get the triangle data. Need to find the three cells
                 # comprising a corner.
                 if tn2 in self.tile_contacts.keys():
-                    t1n = self.tile_contacts[tn1].keys() 
-                    t2n = self.tile_contacts[tn2].keys() 
+                    t1n = self.tile_contacts[tn1].keys()
+                    t2n = self.tile_contacts[tn2].keys()
 
                     # Look for a tile in the main list. Now compare the adjacent
                     # tiles to 1 and 2 until there is match. Now we have tile 3
                     for tn3 in t1n:
                         if tn3 in t1n and tn3 in t2n:
                             cornerIndex = self.getCornerData(tn1, tn2, tn3)
+
                             def getCornerInfo(data, cornerindex):
                                 lon = data.getLongitude()
                                 lat = data.getLatitude()
                                 c1 = data[cornerindex]
                                 n1 = lon[cornerindex]
                                 t1 = lat[cornerindex]
-                                lonatts = {'units':lon.units, 
-                                           'standard_name':lon.standard_name}
-                                latatts = {'units':lat.units, 
-                                           'standard_name':lat.standard_name}
+                                lonatts = {'units': lon.units,
+                                           'standard_name': lon.standard_name}
+                                latatts = {'units': lat.units,
+                                           'standard_name': lat.standard_name}
 
                                 return c1, n1, t1, lonatts, latatts
 
                             def popCorner(d1, d2, d3, dtype):
-                                corner = zeros((2, 2), dtype = dtype)
+                                corner = zeros((2, 2), dtype=dtype)
                                 if 'data' in dir(d1):
                                     corner[0, 0] = d1.data
                                     corner[0, 1] = d2.data
@@ -242,7 +251,9 @@ class Mosaic:
                                 return corner
 
                             c1, n1, t1, lonAtts, latAtts = \
-                                        getCornerInfo(coordData[tn1], cornerIndex[0])
+                                getCornerInfo(
+                                    coordData[tn1],
+                                    cornerIndex[0])
                             c2, n2, t2, lonAtts, latAtts = \
                                 getCornerInfo(coordData[tn2], cornerIndex[1])
                             c3, n3, t3, lonAtts, latAtts = \
@@ -253,21 +264,27 @@ class Mosaic:
                             lon_dtype = coordData[tn1].getLongitude().dtype
                             lat_dtype = coordData[tn1].getLatitude().dtype
                             corner = popCorner(c1, c2, c3, dtype)
-                            lon    = popCorner(n1, n2, n3, lon_dtype)
-                            lat    = popCorner(t1, t2, t3, lat_dtype)
-                            gridid = 'corner_%d_%d_%d' % (coordData[tn1].gridIndex, \
-                                                           coordData[tn2].gridIndex, 
-                                                           coordData[tn3].gridIndex)
-                            gridAtts = {'lon':lonAtts, 'lat':latAtts, 'gridid':gridid}
-                            cornerGrid = self.createSeamGrid(lon, lat, gridAtts)
-
-                            cornerTV = cdms2.createVariable(corner, 
-                                             axes = cornerGrid.getAxisList(), 
-                                             grid = cornerGrid, 
-                                             attributes = coordData[tn1].attributes, 
-                                             id = gridid)
-                    
-                                
+                            lon = popCorner(n1, n2, n3, lon_dtype)
+                            lat = popCorner(t1, t2, t3, lat_dtype)
+                            gridid = 'corner_%d_%d_%d' % (
+                                coordData[tn1].gridIndex,
+                                coordData[tn2].gridIndex,
+                                coordData[tn3].gridIndex)
+                            gridAtts = {
+                                'lon': lonAtts,
+                                'lat': latAtts,
+                                'gridid': gridid}
+                            cornerGrid = self.createSeamGrid(
+                                lon, lat, gridAtts)
+
+                            cornerTV = cdms2.createVariable(
+                                corner,
+                                axes=cornerGrid.getAxisList(),
+                                grid=cornerGrid,
+                                attributes=coordData[tn1].attributes,
+                                id=gridid,
+                            )
+
         return (result, cornerTV)
 
     def getCornerData(self, tileName1, tileName2, tileName3):
@@ -276,9 +293,9 @@ class Mosaic:
         @tileName1 Tile name of first grid (tile)
         @tileName2 Tile name of second grid (tile)
         @tileName3 Tile name of third grid (tile)
-        @return tuple of data marking the corners of the corner grid        
+        @return tuple of data marking the corners of the corner grid
         """
-        
+
         # Get the slabs and account for cell centers
         s1, s2 = self.tile_contacts[tileName1][tileName2]
         s3, s4 = self.tile_contacts[tileName1][tileName3]
@@ -293,12 +310,18 @@ class Mosaic:
         c5, c6 = self.getContactCornerIndex(s5, s6)
 
         # Set the tuple containing the corner indices in j, i order.
-        if c1 == 0 and c3 == 1: pair1 = (s1[c1].start, s3[c3].start)
-        if c1 == 1 and c3 == 0: pair1 = (s3[c3].start, s1[c1].start)
-        if c2 == 0 and c5 == 1: pair2 = (s2[c2].start, s5[c5].start)
-        if c2 == 1 and c5 == 0: pair2 = (s5[c5].start, s2[c2].start)
-        if c4 == 0 and c6 == 1: pair3 = (s4[c4].start, s6[c6].start)
-        if c4 == 1 and c6 == 0: pair3 = (s6[c6].start, s4[c4].start)
+        if c1 == 0 and c3 == 1:
+            pair1 = (s1[c1].start, s3[c3].start)
+        if c1 == 1 and c3 == 0:
+            pair1 = (s3[c3].start, s1[c1].start)
+        if c2 == 0 and c5 == 1:
+            pair2 = (s2[c2].start, s5[c5].start)
+        if c2 == 1 and c5 == 0:
+            pair2 = (s5[c5].start, s2[c2].start)
+        if c4 == 0 and c6 == 1:
+            pair3 = (s4[c4].start, s6[c6].start)
+        if c4 == 1 and c6 == 0:
+            pair3 = (s6[c6].start, s4[c4].start)
 
         return (pair1, pair2, pair3)
 
@@ -326,7 +349,7 @@ class Mosaic:
         @return attrs Attributes for eash plus the gridid
         """
         pass
-        
+
     def createSeamGrid(self, x, y, attrs):
         """
         Return the coordinate data associated with variable.
@@ -337,34 +360,37 @@ class Mosaic:
         LONSTR = 'lon'
         LATSTR = 'lat'
 
-
         # Get the dimensions
         xdim = x.shape
         ydim = y.shape
 
-        if xdim != ydim: 
-            raise CDMSError, "Dimension of coordinates grids don't match"
+        if xdim != ydim:
+            raise CDMSError("Dimension of coordinates grids don't match")
 
         nj = xdim[0]
         ni = xdim[1]
 
         # Define the axes, verifying the lon and lat grids
-        jaxis = TransientVirtualAxis("j",nj)
-        iaxis = TransientVirtualAxis("i",ni)
-
-        if search(LONSTR, attrs['lon']['standard_name']): lon = x
-        if search(LONSTR, attrs['lat']['standard_name']): lon = y
-        if search(LATSTR, attrs['lon']['standard_name']): lat = x
-        if search(LATSTR, attrs['lat']['standard_name']): lat = y
-
-        lataxis = TransientAxis2D(lat, 
-                       axes=(jaxis, iaxis), 
-                       attributes=attrs['lat'], 
-                       id=attrs['lat']['standard_name'])
-        lonaxis = TransientAxis2D(lon, 
-                       axes=(jaxis, iaxis), 
-                       attributes=attrs['lon'], 
-                       id=attrs['lon']['standard_name'])
+        jaxis = TransientVirtualAxis("j", nj)
+        iaxis = TransientVirtualAxis("i", ni)
+
+        if search(LONSTR, attrs['lon']['standard_name']):
+            lon = x
+        if search(LONSTR, attrs['lat']['standard_name']):
+            lon = y
+        if search(LATSTR, attrs['lon']['standard_name']):
+            lat = x
+        if search(LATSTR, attrs['lat']['standard_name']):
+            lat = y
+
+        lataxis = TransientAxis2D(lat,
+                                  axes=(jaxis, iaxis),
+                                  attributes=attrs['lat'],
+                                  id=attrs['lat']['standard_name'])
+        lonaxis = TransientAxis2D(lon,
+                                  axes=(jaxis, iaxis),
+                                  attributes=attrs['lon'],
+                                  id=attrs['lon']['standard_name'])
 
         # Define the combined grid
         grid = TransientCurveGrid(lataxis, lonaxis, id=attrs['gridid'])
@@ -380,7 +406,7 @@ class Mosaic:
         """
 
         slab1, slab2 = self.tile_contacts[tileName][otherTileName]
-    
+
         # Convert to cell centered slabs
         slab1, slab2 = self.getCellCenteredSlab(slab1, slab2)
         d1 = inputData[tileName]
@@ -397,11 +423,12 @@ class Mosaic:
 
         data1, lon1, lat1 = createNewVar(inputData[tileName], slab1)
         data2, lon2, lat2 = createNewVar(inputData[otherTileName], slab2)
-        
+
         # Remove dimensions of size 1.
         shape = []
         for d in data1.shape:
-            if d != 1: shape.append(d)
+            if d != 1:
+                shape.append(d)
 
         newshape = tuple(shape + [2])
         shape = tuple(shape)
@@ -415,21 +442,22 @@ class Mosaic:
         newLat[:, 0] = reshape(lat1[:], shape)
         newLat[:, 1] = reshape(lat2, shape)
         gridid = 'seam_tile%d_tile%d' % (data1.gridIndex, data2.gridIndex)
-        gridAtts = {'lon':{'units':l1.units, 'standard_name':l1.standard_name}, \
-                    'lat':{'units':t1.units, 'standard_name':t1.standard_name}, \
-                    'gridid':gridid}
+        gridAtts = {
+            'lon': {'units': l1.units, 'standard_name': l1.standard_name},
+            'lat': {'units': t1.units,
+                    'standard_name': t1.standard_name},
+            'gridid': gridid}
         seamGrid = self.createSeamGrid(newLon, newLat, gridAtts)
 
         dataAtts = {'gridid': gridid}
-        newData = cdms2.createVariable(newVar, 
-                         axes = seamGrid.getAxisList(), 
-                         grid = seamGrid, 
-                         attributes = d1.attributes, 
-                         id = dataAtts['gridid'])
+        newData = cdms2.createVariable(newVar,
+                                       axes=seamGrid.getAxisList(),
+                                       grid=seamGrid,
+                                       attributes=d1.attributes,
+                                       id=dataAtts['gridid'])
 
         return newData
 
-    
     def getCoordinateNames(self):
         """
         Get the coordinate names for a mosaic
@@ -439,7 +467,7 @@ class Mosaic:
 
     def __repr__(self):
         res = "<Mosaic: '%s',  URI: '%s', mode: '%s', status: '%s' >" % \
-            ( self.id, self.uri, self.mode, self._status)
+            (self.id, self.uri, self.mode, self._status)
         return res
 
     def __call__(self):
@@ -448,7 +476,8 @@ class Mosaic:
     def __del__(self):
         self.lib.nccf_free_mosaic(self.mosaicId_ct)
 
-#############################################################################
+#
+
 
 def test():
     import os.path
@@ -460,7 +489,7 @@ def test():
     """
     parser = OptionParser()
     parser.add_option("-f", "--file", dest="mfile",
-                  help="full path to mosaic file")
+                      help="full path to mosaic file")
 
     options, args = parser.parse_args()
     if not options.mfile:
@@ -474,12 +503,16 @@ def test():
     m = open(options.mfile)
 
     print "\nCoordinate Names"
-    for c in m.coordinate_names: print c
+    for c in m.coordinate_names:
+        print c
 
     print "\nTile Contacts"
-    for t in m.tile_contacts: print "%s -> %s" % (t, m.tile_contacts[t])
+    for t in m.tile_contacts:
+        print "%s -> %s" % (t, m.tile_contacts[t])
     print "\nTile Contacts Complement"
-    for t in m.tile_contacts_compl: print "%s -> %s" % (t, m.tile_contacts_compl[t])
+    for t in m.tile_contacts_compl:
+        print "%s -> %s" % (t, m.tile_contacts_compl[t])
     print
 
-if __name__ == "__main__": test()
+if __name__ == "__main__":
+    test()
diff --git a/Packages/cdms2/Lib/gsStaticVariable.py b/Packages/cdms2/Lib/gsStaticVariable.py
index fdd740c86..5a1910fe2 100644
--- a/Packages/cdms2/Lib/gsStaticVariable.py
+++ b/Packages/cdms2/Lib/gsStaticVariable.py
@@ -3,7 +3,7 @@
 """
 A variable-like object extending over multiple tiles
 Dave Kindig and Alex Pletzer, Tech-X Corp. (2011)
-This code is provided with the hope that it will be useful. 
+This code is provided with the hope that it will be useful.
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
@@ -17,6 +17,7 @@ from cdms2.coord import FileAxis2D
 from cdms2.gengrid import FileGenericGrid
 from cdms2.fvariable import FileVariable
 from cdms2.axis import FileAxis
+from functools import reduce
 
 try:
     from pycf import libCFConfig as libcf
@@ -24,6 +25,7 @@ except:
 #    raise ImportError, 'Error: could not import pycf'
     pass
 
+
 def createTransientGrid(gFName, coordinates):
     """
     Return the coordinate data associated with variable.
@@ -35,7 +37,7 @@ def createTransientGrid(gFName, coordinates):
 
     fh = cdms2.open(gFName)
     gridid = None
-    if libcf.CF_GRIDNAME in fh.attributes.keys(): 
+    if libcf.CF_GRIDNAME in fh.attributes.keys():
         gridid = getattr(fh, libcf.CF_GRIDNAME)
     xn, yn = coordinates.split()
 
@@ -46,8 +48,8 @@ def createTransientGrid(gFName, coordinates):
     xdim = x.shape
     ydim = y.shape
 
-    if xdim != ydim: 
-        raise CDMSError, "Dimension of coordinates grids don't match"
+    if xdim != ydim:
+        raise CDMSError("Dimension of coordinates grids don't match")
 
     ni = xdim[1]
     nj = xdim[0]
@@ -55,32 +57,39 @@ def createTransientGrid(gFName, coordinates):
     lonstr = 'lon'
     latstr = 'lat'
 
-    if re.search(lonstr, x.standard_name): lon = x
-    if re.search(lonstr, y.standard_name): lon = y
-    if re.search(latstr, x.standard_name): lat = x
-    if re.search(latstr, y.standard_name): lat = y
+    if re.search(lonstr, x.standard_name):
+        lon = x
+    if re.search(lonstr, y.standard_name):
+        lon = y
+    if re.search(latstr, x.standard_name):
+        lat = x
+    if re.search(latstr, y.standard_name):
+        lat = y
 
     # Define the axes, verifying the lon and lat grids
     iaxis = TransientVirtualAxis("i", ni)
     jaxis = TransientVirtualAxis("j", nj)
 
-    lataxis = TransientAxis2D(lat, 
-                   axes=(iaxis, jaxis), 
-                   attributes={'units': lat.units}, 
-                   id=lat.standard_name)
-    lonaxis = TransientAxis2D(lon, 
-                   axes=(iaxis, jaxis), 
-                   attributes={'units': lon.units}, 
-                   id=lon.standard_name)
+    lataxis = TransientAxis2D(lat,
+                              axes=(iaxis, jaxis),
+                              attributes={'units': lat.units},
+                              id=lat.standard_name)
+    lonaxis = TransientAxis2D(lon,
+                              axes=(iaxis, jaxis),
+                              attributes={'units': lon.units},
+                              id=lon.standard_name)
 
     # Define the combined grid
     grid = TransientCurveGrid(lataxis, lonaxis, id=gridid)
     return grid
 
+
 class StaticVariable:
+
     """
     Constructor
     """
+
     def __init__(self, StaticVariable, hostObj, varName):
         """
         Constructor - Contains methods applicable to both file and transient static variables
@@ -88,12 +97,12 @@ class StaticVariable:
         @param hostObj The host file object
         @param varName for the id
         """
-        StaticVariable.id     = varName
+        StaticVariable.id = varName
         StaticVariable.nGrids = hostObj.nGrids
 
         StaticVariable.vars = []
         if StaticVariable.nGrids > 0:
-            StaticVariable.vars = [None]*StaticVariable.nGrids
+            StaticVariable.vars = [None] * StaticVariable.nGrids
 
     def __getitem__(self, gridIndex):
         """
@@ -151,10 +160,13 @@ class StaticVariable:
             return self.vars[0].typecode()
         return None
 
+
 class StaticFileVariable(StaticVariable):
+
     """
     Static variable extending over multiple grid files
     """
+
     def __init__(self, hostObj, varName):
         """
         Create a list of file variable with grid attached
@@ -173,8 +185,11 @@ class StaticFileVariable(StaticVariable):
             gn = gridFilenames[gridIndex]
 
             # Open the files
-            f = cdms2.open(fn, mode)   # Need f and u because they serve slightly different purposes
-            u = CdunifFile(fn, mode)   # f.axes exists while axes is not a part of u
+            f = cdms2.open(fn, mode)
+                           # Need f and u because they serve slightly different
+                           # purposes
+            u = CdunifFile(fn, mode)
+                           # f.axes exists while axes is not a part of u
 #            u.variables[varName].gridIndex = gridIndex
             g = CdunifFile(gn, mode)
 
@@ -187,20 +202,22 @@ class StaticFileVariable(StaticVariable):
             coordsaux = f._convention_.getAxisAuxIds(u.variables, coords1d)
 
             # Convert the variable into a FileVariable
-            f.variables[varName] = FileVariable(f, varName, u.variables[varName])
+            f.variables[varName] = FileVariable(
+                f, varName, u.variables[varName])
 
             # Add the coordinates to the file
             for coord in coords:
                 f.variables[coord] = g.variables[coord]
                 f.variables[coord] = FileAxis2D(f, coord, g.variables[coord])
-            
+
             # Build the axes
             for key in f.axes.keys():
                 f.axes[key] = FileAxis(f, key, None)
 
             # Set the boundaries
             for coord in coords:
-                bounds = f._convention_.getVariableBounds(f, f.variables[coord])
+                bounds = f._convention_.getVariableBounds(
+                    f, f.variables[coord])
                 f.variables[coord].setBounds(bounds)
 
             # Initialize the domain
@@ -208,26 +225,35 @@ class StaticFileVariable(StaticVariable):
                 var.initDomain(f.axes)
 
             # Add the grid
-            gridkey, lat, lon = f.variables[varName].generateGridkey(f._convention_, f.variables)
+            gridkey, lat, lon = f.variables[
+                varName].generateGridkey(f._convention_, f.variables)
             gridname = "grid_%dx%d" % lat.shape
-#            grid = FileGenericGrid(lat, lon, gridname, parent = f, maskvar = None)
-            grid = FileCurveGrid(lat, lon, gridname, parent = f, maskvar = None)
+# grid = FileGenericGrid(lat, lon, gridname, parent = f, maskvar = None)
+            grid = FileCurveGrid(
+                lat,
+                lon,
+                gridname,
+                parent=f,
+                maskvar=None)
             f.variables[varName]._grid_ = grid
             self.vars[gridIndex] = f.variables[varName]
         self._repr_string = "StaticFileVariable"
 
-    def listall(self, all = None):
+    def listall(self, all=None):
         """
         Gain access to cdms2 listall method. Requires a StaticFileVariable
         @param all
         @returns list
         """
-        return self[0].listall(all = all)
+        return self[0].listall(all=all)
+
 
 class StaticTransientVariable(StaticVariable):
+
     """
     Static variable extending over multiple grid files
     """
+
     def __init__(self, hostObj, varName):
         """
         Constructor
@@ -247,15 +273,17 @@ class StaticTransientVariable(StaticVariable):
             # name of the file containing coordinate data
             gFName = gridFilenames[gridIndex]
 
-            fh = cdms2.open(fName, hostObj = hostObj)
+            fh = cdms2.open(fName, hostObj=hostObj)
             gh = cdms2.open(gFName)
 
             vr = fh(varName)
-            vr.gridIndex    = gridIndex
+            vr.gridIndex = gridIndex
 
             grid = None
             if 'coordinates' in vr.attributes.keys():
-                grid = createTransientGrid(gFName, vr.attributes['coordinates'])
+                grid = createTransientGrid(
+                    gFName,
+                    vr.attributes['coordinates'])
             atts = dict(vr.attributes)
             atts.update(gh.attributes)
             if libcf.CF_GRIDNAME in fh.attributes.keys():
@@ -263,18 +291,19 @@ class StaticTransientVariable(StaticVariable):
 
             # Create the variable
             if grid:
-                var = cdms2.createVariable(vr, 
-                                axes = grid.getAxisList(), 
-                                grid = grid, 
-                                attributes = atts, 
-                                id = vr.standard_name)
-            else: 
+                var = cdms2.createVariable(vr,
+                                           axes=grid.getAxisList(),
+                                           grid=grid,
+                                           attributes=atts,
+                                           id=vr.standard_name)
+            else:
                 var = vr
             self.vars[gridIndex] = var
         self._repr_string = "StaticTransientVariable"
 
+
 def test():
     pass
 
-if __name__ == '__main__': test()
- 
+if __name__ == '__main__':
+    test()
diff --git a/Packages/cdms2/Lib/gsTimeVariable.py b/Packages/cdms2/Lib/gsTimeVariable.py
index 1f4268fb7..0a0c4d571 100644
--- a/Packages/cdms2/Lib/gsTimeVariable.py
+++ b/Packages/cdms2/Lib/gsTimeVariable.py
@@ -23,10 +23,13 @@ from cdms2.fvariable import FileVariable
 from cdms2.axis import FileAxis, TransientAxis
 from cdms2.axis import concatenate as axisConcatenate
 
+
 class TimeAggregatedFileVariable:
+
     """
     Constructor Class for aggregating a time dependant variable across files.
     """
+
     def __init__(self, gridIndex, listOfFVs, hostObj):
         """
         @param gridIndex Index of requested grid
@@ -36,10 +39,12 @@ class TimeAggregatedFileVariable:
         self.fvs = listOfFVs
         self.gridIndex = gridIndex
         self.hostObj = hostObj
-        self.nTimeStepFiles = hostObj.nTimeSliceFiles * hostObj.nTimeDataFiles * hostObj.nGrids
+        self.nTimeStepFiles = hostObj.nTimeSliceFiles * \
+            hostObj.nTimeDataFiles * hostObj.nGrids
         it = self.getTimeAxisIndex(self.fvs[0].getAxisList())
         self.nTimeStepsPerFile = (self.fvs[0].shape)[it]
-        self.nTimeStepsPerVariable = hostObj.nTimeSliceFiles * self.nTimeStepsPerFile
+        self.nTimeStepsPerVariable = hostObj.nTimeSliceFiles * \
+            self.nTimeStepsPerFile
 
     def __call__(self, *args, **kwargs):
         """
@@ -65,7 +70,7 @@ class TimeAggregatedFileVariable:
         @param slc Integer, slice or tuple of slices. If tuple 0 is time
         @return sliced variable
         """
-        
+
         if isinstance(slc, int):
             # return FileVariable
             return self.fvs[slc]
@@ -76,10 +81,12 @@ class TimeAggregatedFileVariable:
             axes = self.fvs[0].getAxisList()
             timeAxisIndex = self.getTimeAxisIndex(axes)
             if timeAxisIndex is None:
-                CDMSError, "No time axis in :\n"  + axes
+                CDMSError, "No time axis in :\n" + axes
             if isinstance(slc[timeAxisIndex], slice):
-                (fileInds, timeStepInds) = self.getTimeFileIndex(slc[timeAxisIndex])
-                tv = self.createTransientVariableFromIndices(fileInds, timeStepInds)
+                (fileInds, timeStepInds) = self.getTimeFileIndex(
+                    slc[timeAxisIndex])
+                tv = self.createTransientVariableFromIndices(
+                    fileInds, timeStepInds)
                 newslc = self.buildSlice(slc, tv.getAxisList())
                 return tv[newslc]
             elif isinstance(slc[timeAxisIndex], int):
@@ -87,13 +94,15 @@ class TimeAggregatedFileVariable:
                 timeIndex = slc[timeAxisIndex] % nTSF
 
                 # Get just the file needed for the index slice requested.
-                tv = self.createTransientVariableFromIndices(fileIndex, timeIndex)
+                tv = self.createTransientVariableFromIndices(
+                    fileIndex, timeIndex)
                 newslc = self.buildSlice(slc, axes)
                 return tv[newslc]
 
         elif isinstance(slc, slice):
             (fileInds, timeStepInds) = self.getTimeFileIndex(slc)
-            tv = self.createTransientVariableFromIndices(fileInds, timeStepInds)
+            tv = self.createTransientVariableFromIndices(
+                fileInds, timeStepInds)
             return tv
 
     def __len__(self):
@@ -115,8 +124,10 @@ class TimeAggregatedFileVariable:
         timI2 = []
         filI2 = []
 
-        if timeslc.step is None: step = 1
-        else: step = timeslc.step
+        if timeslc.step is None:
+            step = 1
+        else:
+            step = timeslc.step
         stop = timeslc.stop
         if timeslc.stop >= nTSV:
             stop = nTSV
@@ -128,7 +139,7 @@ class TimeAggregatedFileVariable:
                 timI1.append(tt[indx])
                 filI1.append(ii[indx])
             else:
-                if ii[indx] == ii[indx-1]:
+                if ii[indx] == ii[indx - 1]:
                     timI1.append(tt[indx])
                     filI1.append(ii[indx])
                 else:
@@ -152,7 +163,8 @@ class TimeAggregatedFileVariable:
         @return the index - None if time not found
         """
         for indx, axis in enumerate(inAxes):
-            if axis.isTime(): return indx
+            if axis.isTime():
+                return indx
             return None
 
     def buildSlice(self, inslc, inAxes):
@@ -168,8 +180,9 @@ class TimeAggregatedFileVariable:
         newslc = []
         for cslc, axis in zip(inslc, inAxes):
             if axis.isTime():
-                if type(cslc) is int:
-                    # Omit slice - the new variable has only the shape of the grid.
+                if isinstance(cslc, int):
+                    # Omit slice - the new variable has only the shape of the
+                    # grid.
                     continue
                 else:
                     newslc.append(slice(None, None, None))
@@ -213,13 +226,13 @@ class TimeAggregatedFileVariable:
         Aggregate a time file variable. Start and End Indices use slice notation.
         @param fileIndices the file indices to aggregate across
         @param timeIndices which time steps with in each file
-        @return aggregated time dep. variable. Has shape of full grid. 
+        @return aggregated time dep. variable. Has shape of full grid.
                 Subset the grid after exiting.
         """
         from numpy import reshape
         firsttime = True
         nTSF = self.nTimeStepsPerFile
-        if type(fileIndices) is not int:
+        if not isinstance(fileIndices, int):
             for files, times in zip(fileIndices, timeIndices):
                 for indx, file in enumerate(files):
                     # Should make these slices.
@@ -231,19 +244,21 @@ class TimeAggregatedFileVariable:
                     # Insert the new time axis.
                     axisTime = self.fvs[file].getTime()
                     timeAxis = TransientAxis([file * nTSF + times[indx]],
-                                              attributes = axisTime.attributes,
-                                              id = axisTime.id)
-                    axes = self.buildAxes(timeAxis, self.fvs[file].getAxisList())
+                                             attributes=axisTime.attributes,
+                                             id=axisTime.id)
+                    axes = self.buildAxes(
+                        timeAxis,
+                        self.fvs[file].getAxisList())
 
                     # shape --> tm1.shape = (1, :, :)
                     tm1 = reshape(cvar, tuple([1] + list(cvar.shape)))
 
                     # Attach needed items
                     var = cdms2.createVariable(tm1,
-                            axes = axes,
-                            grid = grid,
-                            attributes = atts,
-                            id = cvar.standard_name)
+                                               axes=axes,
+                                               grid=grid,
+                                               attributes=atts,
+                                               id=cvar.standard_name)
 
                     # Create cdms2 transient variable
                     if firsttime:
@@ -253,26 +268,31 @@ class TimeAggregatedFileVariable:
                         # insert the new time axis.
                         taA = new.getTime()
                         newTime = axisConcatenate((taA, timeAxis),
-                                                  attributes = axisTime.attributes,
-                                                  id = axisTime.id)
-                        axes = self.buildAxes(newTime, self.fvs[file].getAxisList())
+                                                  attributes=axisTime.attributes,
+                                                  id=axisTime.id)
+                        axes = self.buildAxes(
+                            newTime,
+                            self.fvs[file].getAxisList())
 
                         tmp = MV2concatenate((new, var))
                         new = cdms2.createVariable(tmp,
-                                axes = axes,
-                                grid = grid,
-                                attributes = atts,
-                                id = cvar.standard_name)
+                                                   axes=axes,
+                                                   grid=grid,
+                                                   attributes=atts,
+                                                   id=cvar.standard_name)
 
         else:
             new = self.fvs[fileIndices][timeIndices]
 
         return new
 
+
 class TimeFileVariable:
+
     """
     Construct an aggregated time dependant variable.
     """
+
     def __init__(self, hostObj, varName):
         """
         Create a list of file variable with grid attached
@@ -297,8 +317,12 @@ class TimeFileVariable:
 
                 # Open the files
                 fn = hostObj.timeVars[varName][gridIndex][timeFileIndex]
-                f = cdms2.open(fn, mode)   # Need f and u because they serve slightly different purposes
-                u = CdunifFile(fn, mode)   # f.axes exists while axes is not a part of u
+                f = cdms2.open(
+                    fn,
+                    mode)   # Need f and u because they serve slightly different purposes
+                u = CdunifFile(
+                    fn,
+                    mode)   # f.axes exists while axes is not a part of u
 #                u.variables[varName].gridIndex = gridIndex
 
                 # Turn the coordinates into a list
@@ -310,12 +334,14 @@ class TimeFileVariable:
                 coordsaux = f._convention_.getAxisAuxIds(u.variables, coords1d)
 
                 # Convert the variable into a FileVariable
-                f.variables[varName] = FileVariable(f, varName, u.variables[varName])
+                f.variables[varName] = FileVariable(
+                    f, varName, u.variables[varName])
 
                 # Add the coordinates to the file
                 for coord in coords:
                     f.variables[coord] = g.variables[coord]
-                    f.variables[coord] = FileAxis2D(f, coord, g.variables[coord])
+                    f.variables[coord] = FileAxis2D(
+                        f, coord, g.variables[coord])
 
                 # Build the axes
                 for key in f.axes.keys():
@@ -323,7 +349,8 @@ class TimeFileVariable:
 
                 # Set the boundaries
                 for coord in coords:
-                    bounds = f._convention_.getVariableBounds(f, f.variables[coord])
+                    bounds = f._convention_.getVariableBounds(
+                        f, f.variables[coord])
                     f.variables[coord].setBounds(bounds)
 
                 # Initialize the domain
@@ -331,10 +358,16 @@ class TimeFileVariable:
                     var.initDomain(f.axes)
 
                 # Add the grid
-                gridkey, lat, lon = f.variables[varName].generateGridkey(f._convention_, f.variables)
+                gridkey, lat, lon = f.variables[
+                    varName].generateGridkey(f._convention_, f.variables)
                 gridname = ("grid%d_" % gridIndex) + "%dx%d" % lat.shape
-#                grid = FileGenericGrid(lat, lon, gridname, parent = f, maskvar = None)
-                grid = FileCurveGrid(lat, lon, gridname, parent = f, maskvar = None)
+# grid = FileGenericGrid(lat, lon, gridname, parent = f, maskvar = None)
+                grid = FileCurveGrid(
+                    lat,
+                    lon,
+                    gridname,
+                    parent=f,
+                    maskvar=None)
                 f.variables[varName]._grid_ = grid
                 vars.append(f.variables[varName])
 
@@ -343,23 +376,23 @@ class TimeFileVariable:
 
         self._repr_string = "TimeFileVariable"
 
-    def listall(self, all = None):
+    def listall(self, all=None):
         """
         Gain access to cdms2 listall method. Requires a TimeFileVariable
         @param all
         @returns list
         """
-        return self[0][0].listall(all = all)
+        return self[0][0].listall(all=all)
 
-    def showall(self, all = None, device = None):
+    def showall(self, all=None, device=None):
         """
         Gain access to cdms2 showall method
         Requires a TimeFileVariable
         @param all
-        @param device 
+        @param device
         @returns list
         """
-        return self[0][0][:].showall(all = all, device = device)
+        return self[0][0][:].showall(all=all, device=device)
 
     def __getitem__(self, gridIndex):
         """
@@ -367,11 +400,13 @@ class TimeFileVariable:
         """
         return self.vars[gridIndex]
 
-###############################################################################
-############## DEPRECIATED - Testing required to fully remove #################
-###############################################################################
+#
+# DEPRECATED - Testing required to fully remove #################
+#
+
 
 class TimeTransientVariable:
+
     def __init__(self, hostObj, varName, **slicekwargs):
         """
         Constructor
@@ -402,15 +437,18 @@ class TimeTransientVariable:
 
                 for timeFileIndex in range(hostObj.nTimeDataFiles):
 
-                    fName = hostObj.timeDepVars[varName][gridIndex][timeFileIndex]
+                    fName = hostObj.timeDepVars[
+                        varName][
+                            gridIndex][
+                                timeFileIndex]
                     fh = cdms2.open(fName, hostObj=hostObj)
 
                     # TransientVariable
                     var = fh(varName, **slicekwargs)
 
                     # Attach the grid to the variable
-                    grid = cdms2.gsStaticVariable.createTransientGrid(gFName, \
-                                         var.attributes['coordinates'])
+                    grid = cdms2.gsStaticVariable.createTransientGrid(gFName,
+                                                                      var.attributes['coordinates'])
                     axis0 = var.getAxis(0)
                     gridaxes = grid.getAxisList()
                     axes = [axis0] + list(gridaxes)
@@ -420,21 +458,21 @@ class TimeTransientVariable:
                     # Create cdms2 transient variable
                     if timeFileIndex == 0:
                         new = cdms2.createVariable(var,
-                                axes = axes,
-                                grid = grid,
-                                attributes = atts,
-                                id = var.standard_name)
+                                                   axes=axes,
+                                                   grid=grid,
+                                                   attributes=atts,
+                                                   id=var.standard_name)
                     else:
-                        tmp =MV2concatenate((new, var))
+                        tmp = MV2concatenate((new, var))
                         axis0 = tmp.getAxis(0)
                         gridaxes = grid.getAxisList()
                         axes = [axis0, gridaxes[0], gridaxes[1]]
 #                        new.append(tmp)
                         new = cdms2.createVariable(tmp,
-                                axes = axes,
-                                grid = grid,
-                                attributes = atts,
-                                id = var.standard_name)
+                                                   axes=axes,
+                                                   grid=grid,
+                                                   attributes=atts,
+                                                   id=var.standard_name)
                     fh.close()
 
                 # Add the variable to the index
@@ -443,10 +481,10 @@ class TimeTransientVariable:
         self._repr_string = "TimeTransientVariable"
 
 
-###################################################################
+#
 
 def test():
     pass
 
-if __name__ == '__main__': test()
-
+if __name__ == '__main__':
+    test()
diff --git a/Packages/cdms2/Lib/gui.py b/Packages/cdms2/Lib/gui.py
index 65e13cd8c..44fab4cb6 100644
--- a/Packages/cdms2/Lib/gui.py
+++ b/Packages/cdms2/Lib/gui.py
@@ -5,6 +5,7 @@ from cdms import cache
 
 _progressParent = None                  # Parent frame of progress gui
 
+
 def setProgressParent(parent):
     """
     Enable the FTP progress GUI, and set the parent frame.
@@ -12,7 +13,9 @@ def setProgressParent(parent):
     """
     global _progressParent
     _progressParent = parent
-    cache.useWindow()                   # Notify cache module that window dialogs should be used.
+    cache.useWindow()
+                    # Notify cache module that window dialogs should be used.
+
 
 def getProgressParent():
     """
@@ -21,6 +24,7 @@ def getProgressParent():
     """
     return _progressParent
 
+
 def updateProgressGui(blocknum, blocksize, size, prog):
     """
     Callback function for the FTP progress dialog.
@@ -31,25 +35,28 @@ def updateProgressGui(blocknum, blocksize, size, prog):
 
     Return: 0 to signal that a cancel has been received, 1 to continue reading.
     """
-    sizekb = size/1024L
-    percent = min(100,int(100.0*float(blocknum*blocksize)/float(size)))
-    if percent<100:
-        noInterrupt = prog.Update(percent,"Read: %3d%% of %dK"%(percent,sizekb))
+    sizekb = size / 1024
+    percent = min(100, int(100.0 * float(blocknum * blocksize) / float(size)))
+    if percent < 100:
+        noInterrupt = prog.Update(
+            percent, "Read: %3d%% of %dK" %
+            (percent, sizekb))
     else:
         noInterrupt = 1                    # Don't interrupt - finish up cleanly
         prog.Destroy()
-    if noInterrupt==0:
+    if noInterrupt == 0:
         prog.Destroy()
     return noInterrupt
 
+
 class CdProgressDialog(wxProgressDialog):
 
     # <frame> is the parent frame.
     # filename is the file being read.
-    def __init__(self, frame, filename):
-        wxProgressDialog.__init__(self,"FTP: %s"%filename,
-                         "Connecting ...",
-                         100,
-                         frame,
-                         wxPD_CAN_ABORT | wxPD_APP_MODAL | wxPD_REMAINING_TIME)
 
+    def __init__(self, frame, filename):
+        wxProgressDialog.__init__(self, "FTP: %s" % filename,
+                                  "Connecting ...",
+                                  100,
+                                  frame,
+                                  wxPD_CAN_ABORT | wxPD_APP_MODAL | wxPD_REMAINING_TIME)
diff --git a/Packages/cdms2/Lib/hgrid.py b/Packages/cdms2/Lib/hgrid.py
index 1f53d3702..aa09ca8c5 100644
--- a/Packages/cdms2/Lib/hgrid.py
+++ b/Packages/cdms2/Lib/hgrid.py
@@ -1,5 +1,5 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 """CDMS HorizontalGrid objects"""
 
@@ -7,19 +7,20 @@ import numpy
 import cdms2
 import os
 import os.path
-## import PropertiedClasses
-from error import CDMSError
-from grid import AbstractGrid, LongitudeType, LatitudeType, VerticalType, TimeType, CoordTypeToLoc
-from coord import TransientVirtualAxis
-from axis import getAutoBounds, allclose
-import bindex,_bindex
+# import PropertiedClasses
+from .error import CDMSError
+from .grid import AbstractGrid, LongitudeType, LatitudeType, VerticalType, TimeType, CoordTypeToLoc
+from .coord import TransientVirtualAxis
+from .axis import getAutoBounds, allclose
+import bindex, _bindex
+from functools import reduce
 
 MethodNotImplemented = "Method not yet implemented"
 
 def _flatten(boundsar):
     boundsshape = boundsar.shape
-    if len(boundsshape)>2:
-        newshape = (reduce((lambda x,y: x*y), boundsshape[:-1], 1), boundsshape[-1])
+    if len(boundsshape) > 2:
+        newshape = (reduce((lambda x, y: x*y), boundsshape[:-1], 1), boundsshape[-1])
         boundsar.shape = newshape
     return boundsar
 
@@ -40,17 +41,17 @@ class AbstractHorizontalGrid(AbstractGrid):
 
     # Generate default bounds
     def genBounds(self):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     # Get the n-th axis. naxis is 0 or 1.
     def getAxis(self, naxis):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def getBounds(self):
         """Get the grid cell boundaries, as a tuple (latitudeBounds, longitudeBounds)
         """
         latbnds, lonbnds = (self._lataxis_.getExplicitBounds(), self._lonaxis_.getExplicitBounds())
-        if (latbnds is None or lonbnds is None) and getAutoBounds() in [1,2]:
+        if (latbnds is None or lonbnds is None) and getAutoBounds() in [1, 2]:
             nlatbnds, nlonbnds = self.genBounds()
             if latbnds is None:
                 latbnds = nlatbnds
@@ -76,27 +77,27 @@ class AbstractHorizontalGrid(AbstractGrid):
 
     def getMesh(self):
         """Get the mesh array used by the meshfill plot."""
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def getWeightsArray(self):
         """Return normalized area weights, as an array of the same
         shape as the grid.
         """
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def listall (self, all=None):
-        result=[]
+        result = []
         result.append('Grid has Python id %s.' % hex(id(self)))
         return result
 
-    def setMask(self,mask,permanent=0):
+    def setMask(self, mask, permanent=0):
         self._maskVar_ = mask
 
     def subGridRegion(self, latRegion, lonRegion):
-        raise CDMSError, MethodNotImplemented
+        raise CDMSError(MethodNotImplemented)
 
     def hasCoordType(self, coordType):
-        return ((coordType==LatitudeType) or (coordType==LongitudeType))
+        return ((coordType == LatitudeType) or (coordType == LongitudeType))
 
     def checkConvex(self):
         """Check that each cell of the grid is convex in lon-lat space, with nodes defined counter-clockwise.
@@ -116,10 +117,10 @@ class AbstractHorizontalGrid(AbstractGrid):
         for n0 in range(nnode):
             n1 = (n0+1)%nnode
             n2 = (n1+1)%nnode
-            vec0lon = lonb[:,n1] - lonb[:,n0]
-            vec0lat = latb[:,n1] - latb[:,n0]
-            vec1lon = lonb[:,n2] - lonb[:,n1]
-            vec1lat = latb[:,n2] - latb[:,n1]
+            vec0lon = lonb[:, n1] - lonb[:, n0]
+            vec0lat = latb[:, n1] - latb[:, n0]
+            vec1lon = lonb[:, n2] - lonb[:, n1]
+            vec1lat = latb[:, n2] - latb[:, n1]
             cross = vec0lon*vec1lat - vec0lat*vec1lon
 
             mask = where(less(cross, 0.0), 1, 0)
@@ -170,24 +171,24 @@ class AbstractHorizontalGrid(AbstractGrid):
             for node in range(2*nnode):
                 n0 = node%nnode
                 n1 = (n0+1)%nnode
-                vec0lon = lonb2[k,n1]-lonb2[k,n0]
-                if vec0lon>threshold:
-                    lonb2[k,n1] -= 360.0
-                elif vec0lon<-threshold:
-                    lonb2[k,n1] += 360.0
+                vec0lon = lonb2[k, n1]-lonb2[k, n0]
+                if vec0lon > threshold:
+                    lonb2[k, n1] -= 360.0
+                elif vec0lon < -threshold:
+                    lonb2[k, n1] += 360.0
 
             # If the cross-product test still fails, restore
             # the original values and add to the nonConvexCells list
             for n0 in range(nnode):
                 n1 = (n0+1)%nnode
                 n2 = (n1+1)%nnode
-                vec0lon = lonb2[k,n1] - lonb2[k,n0]
-                vec0lat = latb2[k,n1] - latb2[k,n0]
-                vec1lon = lonb2[k,n2] - lonb2[k,n1]
-                vec1lat = latb2[k,n2] - latb2[k,n1]
+                vec0lon = lonb2[k, n1] - lonb2[k, n0]
+                vec0lat = latb2[k, n1] - latb2[k, n0]
+                vec1lon = lonb2[k, n2] - lonb2[k, n1]
+                vec1lat = latb2[k, n2] - latb2[k, n1]
                 cross = vec0lon*vec1lat - vec0lat*vec1lon
 
-                if cross<0:
+                if cross < 0:
                     lonb2[k] = savelons
                     newbadcells.append(nonConvexCells[k])
                     break
@@ -207,7 +208,7 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         """Create a curvilinear grid.
         """
         if latAxis.shape != lonAxis.shape:
-            raise CDMSError, 'Latitude and longitude axes must have the same shape.'
+            raise CDMSError('Latitude and longitude axes must have the same shape.')
         AbstractHorizontalGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask, node)
         self._index_ = None
 
@@ -217,7 +218,7 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         return TransientCurveGrid(newlat, newlon, id=self.id)
 
     def __repr__(self):
-        return "<CurveGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
+        return "<CurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
     __str__ = __repr__
 
     def getMesh(self, transpose=None):
@@ -226,20 +227,20 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         latbounds and lonbounds according to the tuple, (1,0,2) in this case.
         """
         if self._mesh_ is None:
-            LAT=0
-            LON=1
+            LAT = 0
+            LON = 1
             latbounds, lonbounds = self.getBounds()
-##             ## following work aronud a numpy.ma bug
-##             latbounds=latbounds.filled()
-##             lonbounds=lonbounds.filled()
+# following workaround for a numpy.ma bug
+# latbounds=latbounds.filled()
+# lonbounds=lonbounds.filled()
             if latbounds is None or lonbounds is None:
-                raise CDMSError, 'No boundary data is available for grid %s'%self.id
-            if (transpose is not None) and (transpose[1]==0):
-                latbounds = numpy.transpose(latbounds, (1,0,2))
-                lonbounds = numpy.transpose(lonbounds, (1,0,2))
-            mesh = numpy.zeros((self.size(),2,latbounds.shape[-1]),latbounds.dtype.char)
-            mesh[:,LAT,:] = numpy.reshape(latbounds,(self.size(),latbounds.shape[-1]))
-            mesh[:,LON,:]  = numpy.reshape(lonbounds,(self.size(),latbounds.shape[-1]))
+                raise CDMSError('No boundary data is available for grid %s'%self.id)
+            if (transpose is not None) and (transpose[1] == 0):
+                latbounds = numpy.transpose(latbounds, (1, 0, 2))
+                lonbounds = numpy.transpose(lonbounds, (1, 0, 2))
+            mesh = numpy.zeros((self.size(), 2, latbounds.shape[-1]), latbounds.dtype.char)
+            mesh[:, LAT,:] = numpy.reshape(latbounds, (self.size(), latbounds.shape[-1]))
+            mesh[:, LON,:]  = numpy.reshape(lonbounds, (self.size(), latbounds.shape[-1]))
             self._mesh_ = mesh
         return self._mesh_
 
@@ -291,9 +292,9 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         mask.shape = (ngrid,)
 
         clat = numpy.ma.filled(copy.copy(blat))
-        clat.shape = (ngrid,4)
+        clat.shape = (ngrid, 4)
         clon = numpy.ma.filled(copy.copy(blon))
-        clon.shape = (ngrid,4)
+        clon.shape = (ngrid, 4)
 
         # Write the file
         if gridTitle is None:
@@ -309,12 +310,12 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         gridcenterlon.units = "degrees"
         gridimask = cufile.createVariable("grid_imask", 'i', ("grid_size",))
         gridimask.units = "unitless"
-        gridcornerlat = cufile.createVariable("grid_corner_lat", 'd', ("grid_size","grid_corners"))
+        gridcornerlat = cufile.createVariable("grid_corner_lat", 'd', ("grid_size", "grid_corners"))
         gridcornerlat.units = "degrees"
-        gridcornerlon = cufile.createVariable("grid_corner_lon", 'd', ("grid_size","grid_corners"))
+        gridcornerlon = cufile.createVariable("grid_corner_lon", 'd', ("grid_size", "grid_corners"))
         gridcornerlon.units = "degrees"
 
-        griddims[:] = numpy.array([nj,ni], numpy.int32)
+        griddims[:] = numpy.array([nj, ni], numpy.int32)
         gridcenterlat[:] = centerLat
         gridcenterlon[:] = centerLon
         gridimask[:] = mask
@@ -324,9 +325,9 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
     def toGenericGrid(self, gridid=None):
 
         import copy
-        from auxcoord import TransientAuxAxis1D
-        from coord import TransientVirtualAxis
-        from gengrid import TransientGenericGrid
+        from .auxcoord import TransientAuxAxis1D
+        from .coord import TransientVirtualAxis
+        from .gengrid import TransientGenericGrid
 
         lat = numpy.ma.filled(self._lataxis_)
         latunits = self._lataxis_.units
@@ -345,11 +346,11 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
             mask.shape = (ngrid,)
 
         cornerLat = numpy.ma.filled(copy.copy(blat))
-        cornerLat.shape = (ngrid,4)
+        cornerLat.shape = (ngrid, 4)
         cornerLon = numpy.ma.filled(copy.copy(blon))
-        cornerLon.shape = (ngrid,4)
+        cornerLon.shape = (ngrid, 4)
 
-        iaxis = TransientVirtualAxis("cell",ngrid)
+        iaxis = TransientVirtualAxis("cell", ngrid)
 
         lataxis = TransientAuxAxis1D(centerLat, axes=(iaxis,), bounds=cornerLat,
                                   attributes={'units':latunits}, id="latitude")
@@ -381,21 +382,21 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         The file, normally a CdmsFile, should already be open for writing
         and will be closed."""
         import time
-        from tvariable import TransientVariable
+        from .tvariable import TransientVariable
         
         # Set attributes
-        if ( hasattr(file,'Conventions') ):
-            if ( file.Conventions.find('Gridspec')<0 ):
+        if ( hasattr(file, 'Conventions') ):
+            if ( file.Conventions.find('Gridspec') < 0 ):
                 file.Conventions = file.Conventions + ' Gridspec-0.0'
         else:
             file.Conventions = 'Gridspec-0.0'
-        if ( hasattr(file,'gs_filetypes') ):
-            if ( file.gs_filetypes.find('Curvilinear_Tile')<0 ):
+        if ( hasattr(file, 'gs_filetypes') ):
+            if ( file.gs_filetypes.find('Curvilinear_Tile') < 0 ):
                 file.gs_filetypes = file.gs_filetypes + ' Curvilinear_Tile'
         else:
             file.gs_filetypes = 'Curvilinear_Tile'
-        t=time.time()
-        id=int((t-int(t))*1.0e9)
+        t = time.time()
+        id = int((t-int(t))*1.0e9)
         file.gs_id = id
         file.gs_originalfilename = os.path.basename( file.id )
 
@@ -411,19 +412,19 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
                        getattr( file, 'history', '' ) + newhistory
 
         # former tile variable and attributes
-        if ( hasattr(self,'long_name') and self.long_name!=None ):
+        if ( hasattr(self, 'long_name') and self.long_name != None ):
             file.long_name = self.long_name
         else:
             file.long_name = 'gridspec_tile'
         # gs_geometryType is no longer required of Gridspec files, but as yet
         # there is no other proposal for describing the geometry (July 2010)
-        if ( hasattr(self,'gs_geometryType') and self.gs_geometryType!=None):
+        if ( hasattr(self, 'gs_geometryType') and self.gs_geometryType != None):
             file.gs_geometryType = self.gs_geometryType
         else:
             file.gs_geometryType = 'spherical'
         # gs_discretizationType is no longer required of Gridspec files, but it's
         # harmless and may come in useful
-        if ( hasattr(self,'gs_discretizationType') and self.gs_discretizationType!=None ):
+        if ( hasattr(self, 'gs_discretizationType') and self.gs_discretizationType != None ):
             file.gs_discretizationType = self.gs_discretizationType
         else:
             file.gs_discretizationType = 'logically_rectangular'
@@ -434,11 +435,11 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         # Set up and write variables.  When written, cdms writes not only the arrays
         # but also their coordinates, e.g. gs_nip.
         
-        x=self._lonaxis_
-        if ( not hasattr(x,'units') ):
+        x = self._lonaxis_
+        if ( not hasattr(x, 'units') ):
             print "Warning, no units found for longitude"
             x.units = 'degree_east'
-        if ( not hasattr(x,'standard_name') ):
+        if ( not hasattr(x, 'standard_name') ):
             print "Warning, no standard_name found for longitude axis"
             x.standard_name = 'longitude'
         if ( x.standard_name == 'geographic_longitude'):
@@ -448,11 +449,11 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         # _lonaxis_ is a TransientAxis2D, hence a TransientVariable
         # But I don't know where the attribute _TransientVariable__domain comes from
         
-        y=self._lataxis_
-        if ( not hasattr(y,'units') ):
+        y = self._lataxis_
+        if ( not hasattr(y, 'units') ):
             print "Warning, no units found for latitude"
             y.units = 'degree_north'
-        if ( not hasattr(y,'standard_name') ):
+        if ( not hasattr(y, 'standard_name') ):
             print "Warning, no standard_name found for latitude axis"
             y.standard_name = 'latitude'
         if ( y.standard_name == 'geographic_latitude'):
@@ -460,18 +461,18 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
             y.standard_name = 'latitude'
         y.id = file.gs_latv
 
-        if( not hasattr(x,'_TransientVariable__domain') ):
+        if( not hasattr(x, '_TransientVariable__domain') ):
             # There probably doesn't exist enough information to write a correct
             # grid, but this will help.
             x._TransientVariable__domain = [ (x,), (y,) ]
-        x._TransientVariable__domain[0][0].id='gs_njp'
-        x._TransientVariable__domain[1][0].id='gs_nip'
-        if ( not hasattr(y,'_TransientVariable__domain') ) :
+        x._TransientVariable__domain[0][0].id = 'gs_njp'
+        x._TransientVariable__domain[1][0].id = 'gs_nip'
+        if ( not hasattr(y, '_TransientVariable__domain') ) :
             # There probably doesn't exist enough information to write a correct
             # grid, but this will help.
             y._TransientVariable__domain = [ (x,), (y,) ]
-        y._TransientVariable__domain[0][0].id='gs_njp'
-        y._TransientVariable__domain[1][0].id='gs_nip'
+        y._TransientVariable__domain[0][0].id = 'gs_njp'
+        y._TransientVariable__domain[1][0].id = 'gs_nip'
 
         file.write(x)
         file.write(y)
@@ -486,12 +487,12 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         # The functionality (other than checking gsfile) is now done by the writeg
         # method above.
         if ( not hasattr( self, "gsfile" ) ):
-            self.gsfile=None
-            self.gspath=None
-        if ( self.gsfile!=None ):
+            self.gsfile = None
+            self.gspath = None
+        if ( self.gsfile != None ):
             return ( tcg.gsfile, tcg.gspath )
         else:
-            raise RuntimeError, 'The libCF/Gridspec API does not provide for writing CurveGrids<<<'
+            raise RuntimeError('The libCF/Gridspec API does not provide for writing CurveGrids<<<')
 
     def init_from_gridspec( self, filename ):
         """reads to grid from a Gridspec-compliant file.  The filename should be a
@@ -575,8 +576,8 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
                 j = k
             k += 1
 
-        if i==-1 or j==-1:
-            raise RuntimeError, 'Grid lat/lon domains do not match variable domain'
+        if i == -1 or j == -1:
+            raise RuntimeError('Grid lat/lon domains do not match variable domain')
 
         return ((islice, jslice), (inewaxis, jnewaxis))
 
@@ -584,14 +585,14 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         """Get the grid index"""
         if self._index_ is None:
             # Trying to stick in Stephane Raynaud's patch for autodetection
-            nj,ni = self._lataxis_.shape
+            nj, ni = self._lataxis_.shape
             dlon = numpy.max(self._lonaxis_)-numpy.min(self._lonaxis_)
-            dx = max(dlon/ni,dlon/nj)
+            dx = max(dlon/ni, dlon/nj)
             dlat = numpy.max(self._lataxis_)-numpy.min(self._lataxis_)
-            dy = max(dlat/ni,dlat/nj)
+            dy = max(dlat/ni, dlat/nj)
             latlin = numpy.ravel(numpy.ma.filled(self._lataxis_))
             lonlin = numpy.ravel(numpy.ma.filled(self._lonaxis_))
-            _bindex.setDeltas(dx,dy)
+            _bindex.setDeltas(dx, dy)
             self._index_ = bindex.bindexHorizontalGrid(latlin, lonlin)
 
         return self._index_
@@ -614,12 +615,12 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         latlin = numpy.ravel(numpy.ma.filled(self._lataxis_))
         lonlin = numpy.ravel(numpy.ma.filled(self._lonaxis_))
         points = bindex.intersectHorizontalGrid(latspec, lonspec, latlin, lonlin, index)
-        if len(points)==0:
-            raise CDMSError, 'No data in the specified region, longitude=%s, latitude=%s'%(`lonspec`, `latspec`)
+        if len(points) == 0:
+            raise CDMSError('No data in the specified region, longitude=%s, latitude=%s'%(repr(lonspec), repr(latspec)))
 
         fullmask = numpy.ones(ni*nj)
         numpy.put(fullmask, points, 0)
-        fullmask = numpy.reshape(fullmask, (ni,nj))
+        fullmask = numpy.reshape(fullmask, (ni, nj))
         
         iind = points/nj
         jind = points - iind*nj
@@ -628,7 +629,7 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
 
         yid = self.getAxis(0).id
         xid = self.getAxis(1).id
-        indexspecs = {yid:slice(imin,imax), xid:slice(jmin,jmax)}
+        indexspecs = {yid:slice(imin, imax), xid:slice(jmin, jmax)}
 
         return submask, indexspecs
 
@@ -677,9 +678,9 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
             used = []                   # axes already matched
             for i in missing:
                 for item in axes:
-                    if (item not in used) and len(selfaxes[i])==len(item) and allclose(selfaxes[i], item):
-                        result._lataxis_.setAxis(i,item)
-                        result._lonaxis_.setAxis(i,item)
+                    if (item not in used) and len(selfaxes[i]) == len(item) and allclose(selfaxes[i], item):
+                        result._lataxis_.setAxis(i, item)
+                        result._lonaxis_.setAxis(i, item)
                         used.append(item)
                         break
                 else:
@@ -693,18 +694,18 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         having the same length as the number of cells in the grid, similarly
         for flatlon."""
         if self._flataxes_ is None:
-            import MV2 as MV
+            from . import MV2 as MV
             alat = MV.filled(self.getLatitude())
             alon = MV.filled(self.getLongitude())
             alatflat = numpy.ravel(alat)
             alonflat = numpy.ravel(alon)
             self._flataxes_ = (alatflat, alonflat)
         return self._flataxes_
-    shape = property(_getShape,None)
+    shape = property(_getShape, None)
     
-## PropertiedClasses.set_property (AbstractCurveGrid, 'shape', 
-##                                   AbstractCurveGrid._getShape, nowrite=1,
-##                                   nodelete=1)
+# PropertiedClasses.set_property (AbstractCurveGrid, 'shape',
+# AbstractCurveGrid._getShape, nowrite=1,
+# nodelete=1)
 
 class DatasetCurveGrid(AbstractCurveGrid):
 
@@ -715,7 +716,7 @@ class DatasetCurveGrid(AbstractCurveGrid):
         self.parent = parent
 
     def __repr__(self):
-        return "<DatasetCurveGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
+        return "<DatasetCurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
 
 class FileCurveGrid(AbstractCurveGrid):
 
@@ -726,7 +727,7 @@ class FileCurveGrid(AbstractCurveGrid):
         self.parent = parent
 
     def __repr__(self):
-        return "<FileCurveGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
+        return "<FileCurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
 
 class TransientCurveGrid(AbstractCurveGrid):
 
@@ -741,7 +742,7 @@ class TransientCurveGrid(AbstractCurveGrid):
         AbstractCurveGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask)
 
     def __repr__(self):
-        return "<TransientCurveGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
+        return "<TransientCurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
 
     def toCurveGrid(self, gridid=None):
         if gridid is None:
@@ -758,17 +759,17 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
     whichType is the type of file, either "grid" or "mapping"
     if whichType is "mapping", whichGrid is the choice of grid, either "source" or "destination"
     """
-    from coord import TransientAxis2D
+    from .coord import TransientAxis2D
 
     if 'S' in fileobj.variables.keys():
-        if whichType=="grid":
+        if whichType == "grid":
             gridCornerLatName = 'grid_corner_lat'
             gridCornerLonName = 'grid_corner_lon'
             gridMaskName = 'grid_imask'
             gridCenterLatName = 'grid_center_lat'
             gridCenterLonName = 'grid_center_lon'
             titleName = 'title'
-        elif whichGrid=="destination":
+        elif whichGrid == "destination":
             gridCornerLatName = 'yv_b'
             gridCornerLonName = 'xv_b'
             gridMaskName = 'mask_b'
@@ -783,14 +784,14 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
             gridCenterLonName = 'xc_a'
             titleName = 'source_grid'
     else:
-        if whichType=="grid":
+        if whichType == "grid":
             gridCornerLatName = 'grid_corner_lat'
             gridCornerLonName = 'grid_corner_lon'
             gridMaskName = 'grid_imask'
             gridCenterLatName = 'grid_center_lat'
             gridCenterLonName = 'grid_center_lon'
             titleName = 'title'
-        elif whichGrid=="destination":
+        elif whichGrid == "destination":
             gridCornerLatName = 'dst_grid_corner_lat'
             gridCornerLonName = 'dst_grid_corner_lon'
             gridMaskName = 'dst_grid_imask'
@@ -813,17 +814,17 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
     nj = dims[0]
     gridshape = (ni, nj)
     boundsshape = (ni, nj, ncorners)
-    if hasattr(cornerLat, 'units') and cornerLat.units.lower()[0:6]=='radian':
+    if hasattr(cornerLat, 'units') and cornerLat.units.lower()[0:6] == 'radian':
         cornerLat = (cornerLat*(180.0/numpy.pi)).reshape(boundsshape)
         cornerLon = (cornerLon*(180.0/numpy.pi)).reshape(boundsshape)
     else:
         cornerLat = cornerLat.reshape(boundsshape)
         cornerLon = cornerLon.reshape(boundsshape)
 
-    iaxis = TransientVirtualAxis("i",ni)
-    jaxis = TransientVirtualAxis("j",nj)
+    iaxis = TransientVirtualAxis("i", ni)
+    jaxis = TransientVirtualAxis("j", nj)
 
-    if vardict.has_key(gridMaskName):
+    if gridMaskName in vardict:
         # SCRIP convention: 0 for invalid data
         # numpy.ma convention: 1 for invalid data
         mask = 1 - fileobj(gridMaskName)
@@ -831,27 +832,27 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
     else:
         mask = None
         
-    if vardict.has_key(gridCenterLatName):
+    if gridCenterLatName in vardict:
         centerLat = fileobj(gridCenterLatName).reshape(gridshape)
         gclat = fileobj[gridCenterLatName]
-        if hasattr(gclat, "units") and gclat.units.lower()=='radians':
+        if hasattr(gclat, "units") and gclat.units.lower() == 'radians':
             centerLat *= (180.0/numpy.pi)
     else:
-        centerLat = cornerLat[:,:,0]
+        centerLat = cornerLat[:,:, 0]
 
-    if vardict.has_key(gridCenterLonName):
+    if gridCenterLonName in vardict:
         centerLon = fileobj(gridCenterLonName).reshape(gridshape)
         gclon = fileobj[gridCenterLonName]
-        if hasattr(gclon, "units") and gclon.units.lower()=='radians':
+        if hasattr(gclon, "units") and gclon.units.lower() == 'radians':
             centerLon *= (180.0/numpy.pi)
     else:
-        centerLon = cornerLon[:,:,0]
+        centerLon = cornerLon[:,:, 0]
 
-    if hasattr(fileobj,titleName):
+    if hasattr(fileobj, titleName):
         gridid = getattr(fileobj, titleName)
-        gridid = gridid.strip().replace(' ','_')
+        gridid = gridid.strip().replace(' ', '_')
     else:
-        gridid="<None>"
+        gridid = "<None>"
 
     lataxis = TransientAxis2D(centerLat, axes=(iaxis, jaxis), bounds=cornerLat,
                               attributes={'units':'degrees_north'}, id="latitude")
diff --git a/Packages/cdms2/Lib/mvBaseWriter.py b/Packages/cdms2/Lib/mvBaseWriter.py
index 5f099c704..db80b843e 100644
--- a/Packages/cdms2/Lib/mvBaseWriter.py
+++ b/Packages/cdms2/Lib/mvBaseWriter.py
@@ -3,13 +3,14 @@
 """
 Abstract class for writing data into file
 
-This code is provided with the hope that it will be useful. 
+This code is provided with the hope that it will be useful.
 No guarantee is provided whatsoever. Use at your own risk.
 
 Alex Pletzer, Tech-X Corp. (2011)
 """
 
-import mvSphereMesh
+from . import mvSphereMesh
+
 
 class BaseWriter:
 
@@ -17,7 +18,7 @@ class BaseWriter:
         """
         Constructor
         @param var a cdms2 variable
-        @param sphereRadius radius of the sphere upon which the grid will 
+        @param sphereRadius radius of the sphere upon which the grid will
                             be projected
         @param maxElev max elevation/depth normalized to the sphere radius
         """
@@ -26,12 +27,12 @@ class BaseWriter:
 
         self.shape = sphere_mesh.shape
 
-        # there is currently a bug in vizSchema which causes 
-        # visit to crash if the leading index is 1, this is 
+        # there is currently a bug in vizSchema which causes
+        # visit to crash if the leading index is 1, this is
         # a workaround the problem
         if self.shape[0] == 1:
-            self.shape = list(sphere_mesh.shape[1:]) + [1,]
-        
+            self.shape = list(sphere_mesh.shape[1:]) + [1, ]
+
         self.mesh = sphere_mesh.getXYZCoords(sphereRadius)
 
     def write(self, filename):
@@ -39,5 +40,5 @@ class BaseWriter:
         Write data to file. This method is overloaded.
         @param filename file name
         """
-        raise NotImplementedError, \
-            'write method not implemented in derived class'
+        raise NotImplementedError(
+            'write method not implemented in derived class')
diff --git a/Packages/cdms2/Lib/mvCdmsRegrid.py b/Packages/cdms2/Lib/mvCdmsRegrid.py
index 5a1f34361..24c169acf 100644
--- a/Packages/cdms2/Lib/mvCdmsRegrid.py
+++ b/Packages/cdms2/Lib/mvCdmsRegrid.py
@@ -9,8 +9,9 @@ import operator
 import re
 import numpy
 import cdms2
-from error import CDMSError
+from .error import CDMSError
 import regrid2
+from functools import reduce
 
 def _areCellsOk(cornerCoords, mask=None):
     """
@@ -43,8 +44,8 @@ def _areCellsOk(cornerCoords, mask=None):
     # compute area elements in Cartesian space
     lat0 = numpy.array(cornerCoords[0][ :-1,  :-1], numpy.float64)
     lat1 = numpy.array(cornerCoords[0][ :-1, 1:  ], numpy.float64)
-    lat2 = numpy.array(cornerCoords[0][1:  , 1:  ], numpy.float64)
-    lat3 = numpy.array(cornerCoords[0][1:  ,  :-1], numpy.float64)
+    lat2 = numpy.array(cornerCoords[0][1:, 1:  ], numpy.float64)
+    lat3 = numpy.array(cornerCoords[0][1:,  :-1], numpy.float64)
 
     the0 = lat0*numpy.pi/180.
     the1 = lat1*numpy.pi/180.
@@ -52,8 +53,8 @@ def _areCellsOk(cornerCoords, mask=None):
     the3 = lat3*numpy.pi/180.
     lam0 = numpy.array(cornerCoords[1][ :-1,  :-1], numpy.float64)*numpy.pi/180.
     lam1 = numpy.array(cornerCoords[1][ :-1, 1:  ], numpy.float64)*numpy.pi/180.
-    lam2 = numpy.array(cornerCoords[1][1:  , 1:  ], numpy.float64)*numpy.pi/180.
-    lam3 = numpy.array(cornerCoords[1][1:  ,  :-1], numpy.float64)*numpy.pi/180.
+    lam2 = numpy.array(cornerCoords[1][1:, 1:  ], numpy.float64)*numpy.pi/180.
+    lam3 = numpy.array(cornerCoords[1][1:,  :-1], numpy.float64)*numpy.pi/180.
     
     x0, y0, z0 = projectToSphere(the0, lam0)
     x1, y1, z1 = projectToSphere(the1, lam1)
@@ -128,7 +129,7 @@ def _areCellsOk(cornerCoords, mask=None):
     if len(inds[0]) > 0:
         # package the result
         badCellIndices = [(inds[0][i], inds[1][i]) for i in range(len(inds[0]))]
-        bcis1 = [(inds[0][i]  , inds[1][i]+1) for i in range(len(inds[0]))]
+        bcis1 = [(inds[0][i], inds[1][i]+1) for i in range(len(inds[0]))]
         bcis2 = [(inds[0][i]+1, inds[1][i]+1) for i in range(len(inds[0]))]
         bcis3 = [(inds[0][i]+1, inds[1][i]  ) for i in range(len(inds[0]))]
         badCellCoords = [[(cornerCoords[0][badCellIndices[i]], cornerCoords[1][badCellIndices[i]]),
@@ -159,10 +160,10 @@ def _buildBounds(bounds):
         bnd[:-1] = bounds[..., 0]
         bnd[ -1] = bounds[ -1, 1]
     elif len(bndShape) > 1:
-        bnd[:-1, :-1] = bounds[  :,  :, 0]
-        bnd[:-1,  -1] = bounds[  :, -1, 1]
+        bnd[:-1, :-1] = bounds[:,:, 0]
+        bnd[:-1,  -1] = bounds[:, -1, 1]
         bnd[ -1,  -1] = bounds[ -1, -1, 2]
-        bnd[ -1, :-1] = bounds[ -1,  :, 3]
+        bnd[ -1, :-1] = bounds[ -1,:, 3]
 
     return bnd
 
@@ -381,8 +382,9 @@ coordMin = %7.2f, boundMin = %7.2f, coordMax = %7.2f, boundMax = %7.2f
 
         # If LibCF handleCut is True, the bounds are needed to extend the grid
         # close the cut at the top
-        if re.search('LibCF', regridTool, re.I) and args.has_key('handleCut'):
-            if args['handleCut']: srcBounds = getBoundList(srcCoords)
+        if re.search('LibCF', regridTool, re.I) and 'handleCut' in args:
+            if args['handleCut']:
+                srcBounds = getBoundList(srcCoords)
 
         srcCoordsArrays = [numpy.array(sc) for sc in srcCoords]
         dstCoordsArrays = [numpy.array(dc) for dc in dstCoords]
@@ -439,7 +441,7 @@ coordMin = %7.2f, boundMin = %7.2f, coordMax = %7.2f, boundMax = %7.2f
                              **args)
 
         # fill in diagnostic data
-        if args.has_key('diag'):
+        if 'diag' in args:
             self.regridObj.fillInDiagnosticData(diag = args['diag'], rootPe = 0)
 
         # construct the axis list for dstVar
diff --git a/Packages/cdms2/Lib/mvSphereMesh.py b/Packages/cdms2/Lib/mvSphereMesh.py
index 7b95922e2..1a5469c9c 100644
--- a/Packages/cdms2/Lib/mvSphereMesh.py
+++ b/Packages/cdms2/Lib/mvSphereMesh.py
@@ -3,22 +3,24 @@
 """
 Class for representing grids on the sphere
 Alex Pletzer, Tech-X Corp. (2011)
-This code is provided with the hope that it will be useful. 
+This code is provided with the hope that it will be useful.
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
 import numpy
+from functools import reduce
+
 
 class SphereMesh:
-    
+
     def __init__(self, var, sphereThickness=0.1):
         """
         Constructor
         @param var cdms2 variable
-        @param sphereThickness thickness of the shell in normalized 
+        @param sphereThickness thickness of the shell in normalized
                                sphere radius
         """
-       
+
         self.isRectilinear = True
         self.ndims = 0
         self.elvPositiveDown = False
@@ -40,7 +42,7 @@ class SphereMesh:
                 if getattr(elvs, 'positive') == 'down':
                     self.elvPositiveDown = True
 
-        # determine the dimensionality and 
+        # determine the dimensionality and
         # whether the grid is rectilinear
         for axis in lons, lats, elvs:
             if axis is not None:
@@ -53,25 +55,31 @@ class SphereMesh:
             self.shape = []
             for axis in lons, lats, elvs:
                 if axis is not None:
-                    self.shape.append( len(axis) )
+                    self.shape.append(len(axis))
             self.shape.reverse()
 
         while len(self.shape) < 3:
-            self.shape = [1,] + list(self.shape)
+            self.shape = [1, ] + list(self.shape)
 
         # store lon, lat, elv as a curvilinear grid
         if self.isRectilinear:
             # apply tensore product of axes to generat curvilinear coordinates
             if elvs is not None:
-                self.elvs = numpy.outer(numpy.outer( numpy.ones(self.shape[:0], numpy.float32), elvs),
-                                        numpy.ones(self.shape[0+1:], numpy.float32)).reshape(self.shape)
+                self.elvs = numpy.outer(
+                    numpy.outer(
+                        numpy.ones(self.shape[:0],
+                                   numpy.float32),
+                        elvs),
+                                        numpy.ones(self.shape[0 + 1:], numpy.float32)).reshape(self.shape)
             else:
-                self.elvs = numpy.zeros( self.shape, numpy.float32 )
-            self.lats = numpy.outer(numpy.outer( numpy.ones(self.shape[:1], numpy.float32), lats),
-                                    numpy.ones(self.shape[1+1:], numpy.float32)).reshape(self.shape)
-            self.lons = numpy.outer(numpy.outer( numpy.ones(self.shape[:2], numpy.float32), lons),
-                                    numpy.ones(self.shape[2+1:], numpy.float32)).reshape(self.shape)
-    
+                self.elvs = numpy.zeros(self.shape, numpy.float32)
+            self.lats = numpy.outer(
+                numpy.outer(numpy.ones(self.shape[:1], numpy.float32), lats),
+                                    numpy.ones(self.shape[1 + 1:], numpy.float32)).reshape(self.shape)
+            self.lons = numpy.outer(
+                numpy.outer(numpy.ones(self.shape[:2], numpy.float32), lons),
+                                    numpy.ones(self.shape[2 + 1:], numpy.float32)).reshape(self.shape)
+
         else:
             # already in curvilinear form
             self.lons = lons[:]
@@ -79,10 +87,10 @@ class SphereMesh:
             if elvs is not None:
                 self.elvs = elvs[:]
             else:
-                self.elvs = numpy.zeros( self.shape, numpy.float32 )
+                self.elvs = numpy.zeros(self.shape, numpy.float32)
 
         # reshape as flat arrays
-        sz = reduce(lambda x, y: x*y, self.shape)
+        sz = reduce(lambda x, y: x * y, self.shape)
         self.lons = numpy.reshape(self.lons, (sz,))
         self.lats = numpy.reshape(self.lats, (sz,))
         self.elvs = numpy.reshape(self.elvs, (sz,))
@@ -90,32 +98,33 @@ class SphereMesh:
     def getXYZCoords(self, sphereRadius=1.0):
         """
         Get the curvilinear cartesian coordinates
-        @param sphereRadius radius of sphere 
+        @param sphereRadius radius of sphere
         @return mesh
         """
-        sz = reduce(lambda x, y: x*y, self.shape)
-        rr = sphereRadius*(1.0 + self.elvs)
+        sz = reduce(lambda x, y: x * y, self.shape)
+        rr = sphereRadius * (1.0 + self.elvs)
         diffElv = self.maxElv - self.minElv
-        rr = sphereRadius*numpy.ones(self.lons.shape, numpy.float32 )
+        rr = sphereRadius * numpy.ones(self.lons.shape, numpy.float32)
         if diffElv != 0:
-            coeff = sphereRadius*self.sphereThickness/diffElv
+            coeff = sphereRadius * self.sphereThickness / diffElv
             if self.elvPositiveDown:
                 # depth
-                rr += coeff*(self.maxElv - self.elvs)
+                rr += coeff * (self.maxElv - self.elvs)
             else:
                 # height
-                rr += coeff*(self.elvs - self.minElv)
+                rr += coeff * (self.elvs - self.minElv)
 
-        mesh = numpy.zeros( (sz, 3), numpy.float32 )
-        cosLats = numpy.cos( self.lats*numpy.pi/180. )
-        mesh[:, 0] = rr*numpy.cos(self.lons*numpy.pi/180.)*cosLats
-        mesh[:, 1] = rr*numpy.sin(self.lons*numpy.pi/180.)*cosLats
-        mesh[:, 2] = rr*numpy.sin(self.lats*numpy.pi/180.)
+        mesh = numpy.zeros((sz, 3), numpy.float32)
+        cosLats = numpy.cos(self.lats * numpy.pi / 180.)
+        mesh[:, 0] = rr * numpy.cos(self.lons * numpy.pi / 180.) * cosLats
+        mesh[:, 1] = rr * numpy.sin(self.lons * numpy.pi / 180.) * cosLats
+        mesh[:, 2] = rr * numpy.sin(self.lats * numpy.pi / 180.)
         return mesh
 
-#####################################################################
+#
 # Tests
 
+
 def test2DRect():
     """
     Test data on 2D rectilinear grid
@@ -123,17 +132,18 @@ def test2DRect():
     import cdms2
     from numpy import pi, cos, sin
     nlat, nlon = 12, 15
-    grid = cdms2.createUniformGrid(-0.0, nlat, 60./(nlat-1), 
-                                    0., nlon, 30./nlon)
+    grid = cdms2.createUniformGrid(-0.0, nlat, 60. / (nlat - 1),
+                                   0., nlon, 30. / nlon)
     lons = grid.getLongitude()
     lats = grid.getLatitude()
-    data = numpy.outer(cos(3*pi*lats[:]/180.0), 
-                       sin(5*pi*lons[:]/180.0))
-    var = cdms2.createVariable(data, id='fake_data_2d_rect', 
+    data = numpy.outer(cos(3 * pi * lats[:] / 180.0),
+                       sin(5 * pi * lons[:] / 180.0))
+    var = cdms2.createVariable(data, id='fake_data_2d_rect',
                                axes=(lats, lons))
     sphere_mesh = SphereMesh(var, 0.1)
     print sphere_mesh.getXYZCoords()
 
+
 def test2D():
     """
     Test data on 2D curvilinear grid
@@ -143,33 +153,34 @@ def test2D():
     from cdms2.hgrid import TransientCurveGrid
     from numpy import pi, cos, sin
     nlat, nlon = 3, 4
-    dlon, dlat = 60.0/float(nlon - 1), 30.0/float(nlat - 1)
-    lons1D = numpy.array([0.0 + i*dlon for i in range(nlon)])
-    lats1D = numpy.array([0.0 + j*dlat for j in range(nlat)])
+    dlon, dlat = 60.0 / float(nlon - 1), 30.0 / float(nlat - 1)
+    lons1D = numpy.array([0.0 + i * dlon for i in range(nlon)])
+    lats1D = numpy.array([0.0 + j * dlat for j in range(nlat)])
     lons = numpy.outer(numpy.ones((nlat,)), lons1D)
     lats = numpy.outer(lats1D, numpy.ones((nlon,)))
-    data = cos(3*pi*lats/180.0) * sin(5*pi*lons/180.0)
+    data = cos(3 * pi * lats / 180.0) * sin(5 * pi * lons / 180.0)
     # create grid
     iaxis = TransientVirtualAxis("i", nlon)
     jaxis = TransientVirtualAxis("j", nlat)
-    lataxis = TransientAxis2D(lats, 
-                       axes=(jaxis, iaxis), 
-                       attributes={'units': 'degree_north'}, 
-                       id='lats')
-    lonaxis = TransientAxis2D(lons, 
-                       axes=(jaxis, iaxis), 
-                       attributes={'units': 'degree_east'}, 
-                       id='lons')
-    grid =  TransientCurveGrid(lataxis, lonaxis, id='lats_lons')
-
-    var = cdms2.createVariable(data, id='fake_data_2d', 
-                               axes = grid.getAxisList(),
-                               grid = grid,
-                               attributes = {'coordinates': 'lats lons'},
+    lataxis = TransientAxis2D(lats,
+                              axes=(jaxis, iaxis),
+                              attributes={'units': 'degree_north'},
+                              id='lats')
+    lonaxis = TransientAxis2D(lons,
+                              axes=(jaxis, iaxis),
+                              attributes={'units': 'degree_east'},
+                              id='lons')
+    grid = TransientCurveGrid(lataxis, lonaxis, id='lats_lons')
+
+    var = cdms2.createVariable(data, id='fake_data_2d',
+                               axes=grid.getAxisList(),
+                               grid=grid,
+                               attributes={'coordinates': 'lats lons'},
                                )
     sphere_mesh = SphereMesh(var)
     print sphere_mesh.getXYZCoords()
 
+
 def test3DRect():
     """
     Test data on 3d rectilinear grid
@@ -177,29 +188,30 @@ def test3DRect():
     import cdms2
     from numpy import pi, cos, sin, exp
     nelv, nlat, nlon = 3, 4, 5
-    delv, dlon, dlat = 90000./float(nelv-1), \
-        60.0/float(nlon-1), 30.0/float(nlat-1)
-    elvs1D = numpy.array([100000 - i*delv for i in range(nelv)])
-    lons1D = numpy.array([0.0 + i*dlon for i in range(nlon)])
-    lats1D = numpy.array([0.0 + i*dlat for i in range(nlat)])
+    delv, dlon, dlat = 90000. / float(nelv - 1), \
+        60.0 / float(nlon - 1), 30.0 / float(nlat - 1)
+    elvs1D = numpy.array([100000 - i * delv for i in range(nelv)])
+    lons1D = numpy.array([0.0 + i * dlon for i in range(nlon)])
+    lats1D = numpy.array([0.0 + i * dlat for i in range(nlat)])
     # any order should work
-    lons = numpy.zeros( (nlon, nlat, nelv), numpy.float32 )
-    lats = numpy.zeros( (nlon, nlat, nelv), numpy.float32 )
-    elvs = numpy.zeros( (nlon, nlat, nelv), numpy.float32 )
-    data = numpy.zeros( (nlon, nlat, nelv), numpy.float32 )
+    lons = numpy.zeros((nlon, nlat, nelv), numpy.float32)
+    lats = numpy.zeros((nlon, nlat, nelv), numpy.float32)
+    elvs = numpy.zeros((nlon, nlat, nelv), numpy.float32)
+    data = numpy.zeros((nlon, nlat, nelv), numpy.float32)
     for i in range(nlon):
         for j in range(nlat):
             for k in range(nelv):
                 elvs[i, j, k] = elvs1D[k]
                 lats[i, j, k] = lats1D[j]
                 lons[i, j, k] = lons1D[i]
-                data[i, j, k] = cos(3*pi*lats[i, j, k]/180.) * \
-                    sin(5*pi*lons[i, j, k]/180.) * exp(-elvs[i, j, k])
-    var = cdms2.createVariable(data, id='fake_data_3d_rect', 
+                data[i, j, k] = cos(3 * pi * lats[i, j, k] / 180.) * \
+                    sin(5 * pi * lons[i, j, k] / 180.) * exp(-elvs[i, j, k])
+    var = cdms2.createVariable(data, id='fake_data_3d_rect',
                                axes=(elvs, lats, lons))
     sphere_mesh = SphereMesh(var)
     print sphereMesh.getXYZCoords()
 
+
 def test3DposDown():
     """
     Test 3d data with elev positive down. Need to work with 1D axes.
@@ -208,31 +220,33 @@ def test3DposDown():
     import cdms2
     import numpy
     nlev, nlat, nlon = 4, 5, 6
-    dlev, dlat, dlon = 5000./float(nlev-1), 180./float(nlat-1), 360./float(nlon-1)
+    dlev, dlat, dlon = 5000. / \
+        float(nlev - 1), 180. / float(nlat - 1), 360. / float(nlon - 1)
     levs1d = numpy.arange(0., 5001., dlev)
-    lats1d = numpy.array([0. - i*dlat for i in range(nlat)])
-    lons1d = numpy.array([0. - i*dlon for i in range(nlon)])
+    lats1d = numpy.array([0. - i * dlat for i in range(nlat)])
+    lons1d = numpy.array([0. - i * dlon for i in range(nlon)])
     data = numpy.zeros((nlev, nlat, nlon), numpy.float32)
 
     for k in range(nlev):
         for j in range(nlat):
             for i in range(nlon):
-                data[k, j, i] = numpy.cos(3*numpy.pi*lats1d[j]/180.) * \
-                                numpy.sin(5*numpy.pi*lons1d[i]/180.) * \
+                data[k, j, i] = numpy.cos(3 * numpy.pi * lats1d[j] / 180.) * \
+                    numpy.sin(5 * numpy.pi * lons1d[i] / 180.) * \
                                 numpy.exp(-levs1d[k])
 
-    a1 = cdms2.axis.TransientAxis(levs1d, id = 'levels', 
-                                  attributes = {'positive':'down'})
-    a2 = cdms2.axis.TransientAxis(lats1d, id = 'latitude')
-    a3 = cdms2.axis.TransientAxis(lons1d, id = 'longitude')
-    var = cdms2.createVariable(data, id = 'pos_down_3d_data',
-                               axes = (a1, a2, a3))
+    a1 = cdms2.axis.TransientAxis(levs1d, id='levels',
+                                  attributes={'positive': 'down'})
+    a2 = cdms2.axis.TransientAxis(lats1d, id='latitude')
+    a3 = cdms2.axis.TransientAxis(lons1d, id='longitude')
+    var = cdms2.createVariable(data, id='pos_down_3d_data',
+                               axes=(a1, a2, a3))
     sphereMesh = SphereMesh(var)
     aa = sphereMesh.getXYZCoords()
     bb = aa.reshape((4, 5, 6, 3))
-    for i in range(nlev): print levs1d[i], bb[i, 0, 0, :]
+    for i in range(nlev):
+        print levs1d[i], bb[i, 0, 0, :]
 
-if __name__ == '__main__': 
+if __name__ == '__main__':
 #    test2DRect()
 #    test2D()
 #    test3DRect()
diff --git a/Packages/cdms2/Lib/mvVTKSGWriter.py b/Packages/cdms2/Lib/mvVTKSGWriter.py
index 353d8b19b..fd2327245 100644
--- a/Packages/cdms2/Lib/mvVTKSGWriter.py
+++ b/Packages/cdms2/Lib/mvVTKSGWriter.py
@@ -3,13 +3,14 @@
 """
 Write data to VTK file format using the structured grid format
 Alex Pletzer, Tech-X Corp. (2011)
-This code is provided with the hope that it will be useful. 
+This code is provided with the hope that it will be useful.
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
 import numpy
 import time
-import mvBaseWriter
+from . import mvBaseWriter
+
 
 class VTKSGWriter(mvBaseWriter.BaseWriter):
 
@@ -29,10 +30,10 @@ class VTKSGWriter(mvBaseWriter.BaseWriter):
         npts = self.mesh.shape[0]
         print >> f, 'POINTS %d float' % npts
         for i in range(npts):
-            print >> f, '%f %f %f' % tuple(self.mesh[i,:])
+            print >> f, '%f %f %f' % tuple(self.mesh[i, :])
         n0, n1, n2 = self.shape
         # nodal data
-        print >> f, 'POINT_DATA %d' % (n0*n1*n2)
+        print >> f, 'POINT_DATA %d' % (n0 * n1 * n2)
         print >> f, 'SCALARS %s float' % (self.var.id)
         print >> f, 'LOOKUP_TABLE default'
         if n0 > 1:
@@ -43,34 +44,34 @@ class VTKSGWriter(mvBaseWriter.BaseWriter):
         else:
             for j in range(n1):
                 for i in range(n2):
-                    print >> f, '%f' % self.var[j, i]            
+                    print >> f, '%f' % self.var[j, i]
         f.close()
 
 
-######################################################################
+#
 
 def test2DRect():
     import cdms2
     from numpy import pi, cos, sin
     nlat, nlon = 6, 10
-    grid = cdms2.createUniformGrid(-0.0, nlat, 60./(nlat-1), 
-                                    0., nlon, 30./nlon)
+    grid = cdms2.createUniformGrid(-0.0, nlat, 60. / (nlat - 1),
+                                   0., nlon, 30. / nlon)
     lons = grid.getLongitude()
     lats = grid.getLatitude()
-    data = numpy.outer(cos(3*pi*lats[:]/180.0), 
-                       sin(5*pi*lons[:]/180.0))
-    var = cdms2.createVariable(data, id='fake_data_2d_rect', 
+    data = numpy.outer(cos(3 * pi * lats[:] / 180.0),
+                       sin(5 * pi * lons[:] / 180.0))
+    var = cdms2.createVariable(data, id='fake_data_2d_rect',
                                axes=(lats, lons))
     vw = VTKSGWriter(var)
     vw.write('test2DRect_SG.vtk')
 
+
 def test3D():
     import cdms2
     var = cdms2.open('sample_data/ta_ncep_87-6-88-4.nc', 'r')('ta')
-    vw = VTKSGWriter(var[0,0:10,0:20,0:30])
+    vw = VTKSGWriter(var[0, 0:10, 0:20, 0:30])
     vw.write('test3D_SG.vtk')
 
-if __name__ == '__main__': 
+if __name__ == '__main__':
     test2DRect()
     test3D()
-    
diff --git a/Packages/cdms2/Lib/mvVTKUGWriter.py b/Packages/cdms2/Lib/mvVTKUGWriter.py
index 50dcb1195..e712efaf7 100644
--- a/Packages/cdms2/Lib/mvVTKUGWriter.py
+++ b/Packages/cdms2/Lib/mvVTKUGWriter.py
@@ -3,13 +3,14 @@
 """
 Write data to VTK file format using the unstructured grid format
 Alex Pletzer, Tech-X Corp. (2011)
-This code is provided with the hope that it will be useful. 
+This code is provided with the hope that it will be useful.
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
 import numpy
 import time
-import mvBaseWriter
+from . import mvBaseWriter
+
 
 class VTKUGWriter(mvBaseWriter.BaseWriter):
 
@@ -26,9 +27,9 @@ class VTKUGWriter(mvBaseWriter.BaseWriter):
         npts = self.mesh.shape[0]
         print >> f, 'POINTS %d float' % npts
         for i in range(npts):
-            print >> f, '%f %f %f' % tuple(self.mesh[i,:])
+            print >> f, '%f %f %f' % tuple(self.mesh[i, :])
         n0, n1, n2 = self.shape
-        ncells = (n0 - 1)*(n1 - 1)*(n2 - 1)
+        ncells = (n0 - 1) * (n1 - 1) * (n2 - 1)
         if ncells != 0:
             # 3d
             ntot = ncells * (8 + 1)
@@ -36,17 +37,17 @@ class VTKUGWriter(mvBaseWriter.BaseWriter):
             for k in range(n0 - 1):
                 for j in range(n1 - 1):
                     for i in range(n2 - 1):
-                        index = i + n2*(j + n1*k)
+                        index = i + n2 * (j + n1 * k)
                         print >> f, '8 %d %d %d %d %d %d %d %d' % \
-                            (index, index+1, index+1+n2, index+n2, 
-                             index+n1*n2, index+n1*n2+1, 
-                             index+n1*n2+1+n2, index+n1*n2+n2)
+                            (index, index + 1, index + 1 + n2, index + n2,
+                             index + n1 * n2, index + n1 * n2 + 1,
+                             index + n1 * n2 + 1 + n2, index + n1 * n2 + n2)
             print >> f, 'CELL_TYPES %d' % ncells
             for i in range(ncells):
                 # hexahedron
                 print >> f, 12
             # nodal data
-            print >> f, 'POINT_DATA %d' % (n0*n1*n2)
+            print >> f, 'POINT_DATA %d' % (n0 * n1 * n2)
             print >> f, 'SCALARS %s float' % (self.var.id)
             print >> f, 'LOOKUP_TABLE default'
             for k in range(n0):
@@ -55,52 +56,52 @@ class VTKUGWriter(mvBaseWriter.BaseWriter):
                         print >> f, '%f' % self.var[k, j, i]
         else:
             # 2d
-            ncells = (n1 - 1)*(n2 - 1)
+            ncells = (n1 - 1) * (n2 - 1)
             ntot = ncells * (4 + 1)
             print >> f, 'CELLS %d %d' % (ncells, ntot)
             for j in range(n1 - 1):
                 for i in range(n2 - 1):
-                    index = i + n2*j
+                    index = i + n2 * j
                     print >> f, '4 %d %d %d %d' % \
-                        (index, index+1, index+1+n2, index+n2)
+                        (index, index + 1, index + 1 + n2, index + n2)
             print >> f, 'CELL_TYPES %d' % ncells
             for i in range(ncells):
                 # quad
                 print >> f, 9
             # nodal data
-            print >> f, 'POINT_DATA %d' % (n0*n1*n2)
+            print >> f, 'POINT_DATA %d' % (n0 * n1 * n2)
             print >> f, 'SCALARS %s float' % (self.var.id)
             print >> f, 'LOOKUP_TABLE default'
             for j in range(n1):
                 for i in range(n2):
-                    print >> f, '%f' % self.var[j, i]   
+                    print >> f, '%f' % self.var[j, i]
         f.close()
 
 
-######################################################################
+#
 
 def test2DRect():
     import cdms2
     from numpy import pi, cos, sin
     nlat, nlon = 6, 10
-    grid = cdms2.createUniformGrid(-0.0, nlat, 60./(nlat-1), 
-                                    0., nlon, 30./nlon)
+    grid = cdms2.createUniformGrid(-0.0, nlat, 60. / (nlat - 1),
+                                   0., nlon, 30. / nlon)
     lons = grid.getLongitude()
     lats = grid.getLatitude()
-    data = numpy.outer(cos(3*pi*lats[:]/180.0), 
-                       sin(5*pi*lons[:]/180.0))
-    var = cdms2.createVariable(data, id='fake_data_2d_rect', 
+    data = numpy.outer(cos(3 * pi * lats[:] / 180.0),
+                       sin(5 * pi * lons[:] / 180.0))
+    var = cdms2.createVariable(data, id='fake_data_2d_rect',
                                axes=(lats, lons))
     vw = VTKUGWriter(var)
     vw.write('test2DRect.vtk')
 
+
 def test3D():
     import cdms2
     var = cdms2.open('sample_data/ta_ncep_87-6-88-4.nc', 'r')('ta')
-    vw = VTKUGWriter(var[0,0:10,0:20,0:30])
+    vw = VTKUGWriter(var[0, 0:10, 0:20, 0:30])
     vw.write('test3D.vtk')
 
-if __name__ == '__main__': 
+if __name__ == '__main__':
     test2DRect()
     test3D()
-    
diff --git a/Packages/cdms2/Lib/mvVsWriter.py b/Packages/cdms2/Lib/mvVsWriter.py
index aed81a79f..72bb31526 100644
--- a/Packages/cdms2/Lib/mvVsWriter.py
+++ b/Packages/cdms2/Lib/mvVsWriter.py
@@ -3,14 +3,15 @@
 """
 Write data to VizSchema compliant file
 Alex Pletzer, Tech-X Corp. (2011)
-This code is provided with the hope that it will be useful. 
+This code is provided with the hope that it will be useful.
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
 import numpy
-import mvBaseWriter
+from . import mvBaseWriter
 import re
 
+
 class VsWriter(mvBaseWriter.BaseWriter):
 
     def write(self, filename):
@@ -21,58 +22,59 @@ class VsWriter(mvBaseWriter.BaseWriter):
         try:
             import tables
         except:
-            raise ImportError, 'You must have pytables installed'
-        
+            raise ImportError('You must have pytables installed')
+
         if filename.find('.vsh5') < 0 and filename.find('.h5') < 0:
-            filename += '.vsh5' # VizSchema hdf5 format
+            filename += '.vsh5'  # VizSchema hdf5 format
 
         # open file
         h5file = tables.openFile(filename, 'w')
         # put mesh
         meshid = 'mesh_' + self.var.id
-        mdata = numpy.reshape(self.mesh, self.shape + [3,])
+        mdata = numpy.reshape(self.mesh, self.shape + [3, ])
         mset = h5file.createArray("/", meshid, mdata)
         mset.attrs.vsType = "mesh"
         mset.attrs.vsKind = "structured"
         mset.attrs.vsIndexOrder = "compMinorC"
         # data
-        dset = h5file.createArray("/", self.var.id, 
+        dset = h5file.createArray("/", self.var.id,
                                   numpy.reshape(self.var, self.shape))
         dset.attrs.vsType = "variable"
         dset.attrs.vsMesh = meshid
         # additional attributes
         for a in self.var.attributes:
             # Skip mpi objects
-            if re.match('mpi',a.lower()):
+            if re.match('mpi', a.lower()):
                 continue
             setattr(dset.attrs, a, getattr(self.var, a))
         # close file
         h5file.close()
 
-######################################################################
+#
+
 
 def test2DRect():
     import cdms2
     from numpy import pi, cos, sin
     nlat, nlon = 3, 4
-    grid = cdms2.createUniformGrid(-0.0, nlat, 60./(nlat-1), 
-                                    0., nlon, 30./nlon)
+    grid = cdms2.createUniformGrid(-0.0, nlat, 60. / (nlat - 1),
+                                   0., nlon, 30. / nlon)
     lons = grid.getLongitude()
     lats = grid.getLatitude()
-    data = numpy.outer(cos(3*pi*lats[:]/180.0), 
-                       sin(5*pi*lons[:]/180.0))
-    var = cdms2.createVariable(data, id='fake_data_2d_rect', 
+    data = numpy.outer(cos(3 * pi * lats[:] / 180.0),
+                       sin(5 * pi * lons[:] / 180.0))
+    var = cdms2.createVariable(data, id='fake_data_2d_rect',
                                axes=(lats, lons))
     vw = VsWriter(var)
     vw.write('test2DRect.vsh5')
 
+
 def test3D():
     import cdms2
     var = cdms2.open('sample_data/ta_ncep_87-6-88-4.nc', 'r')('ta')
-    vw = VsWriter(var[0,0:10,0:20,0:30])
+    vw = VsWriter(var[0, 0:10, 0:20, 0:30])
     vw.write('test3D.vsh5')
 
-if __name__ == '__main__': 
+if __name__ == '__main__':
     test2DRect()
     test3D()
-    
diff --git a/Packages/cdms2/Lib/restApi.py b/Packages/cdms2/Lib/restApi.py
index 2b5ef1059..0ee17424d 100644
--- a/Packages/cdms2/Lib/restApi.py
+++ b/Packages/cdms2/Lib/restApi.py
@@ -6,116 +6,136 @@ try:
 except:
     pass
 import os
-#import bz2
+# import bz2
+
 
 class esgfConnectionException(Exception):
     pass
+
+
 class esgfDatasetException(Exception):
     pass
+
+
 class esgfFilesException(Exception):
     pass
-    ## def __init__(self,value):
-    ##     self.value=value
-    ## def __repr__(self):
-    ##     msg =  "rest API error: %s" % repr(value)
-    ##     print msg
-    ##     return msg
+    # def __init__(self,value):
+    # self.value=value
+    # def __repr__(self):
+    # msg =  "rest API error: %s" % repr(value)
+    # print msg
+    # return msg
+
 
 class FacetConnection(object):
-    def __init__(self,host='pcmdi9.llnl.gov'):
-        self.rqst="http://%s/esg-search/search?facets=*&type=Dataset&limit=1&latest=true" % host
-        self.rqst_count="http://%s/esg-search/search?facets=*&type=File&limit=0&latest=true" % host
+
+    def __init__(self, host='pcmdi9.llnl.gov'):
+        self.rqst = "http://%s/esg-search/search?facets=*&type=Dataset&limit=1&latest=true" % host
+        self.rqst_count = "http://%s/esg-search/search?facets=*&type=File&limit=0&latest=true" % host
         self.EsgfObjectException = esgfConnectionException
-    def get_xmlelement(self,facet_param=None):
+
+    def get_xmlelement(self, facet_param=None):
         try:
-            rqst=self.rqst
+            rqst = self.rqst
             if facet_param:
-                rqst=rqst+'&%s'%facet_param        
-            #print rqst
+                rqst = rqst + '&%s' % facet_param
+            # print rqst
             url = urllib2.urlopen(rqst)
-        except Exception,msg:
-             raise self.EsgfObjectException(msg)
+        except Exception as msg:
+            raise self.EsgfObjectException(msg)
         r = url.read()
         try:
             e = xml.etree.ElementTree.fromstring(r)
             return e
-        except Exception,err:
-            raise self.EsgfObjectException("Could not interpret server's results: %s" % err)
-    def make_facet_dict(self,xmlelement):
-        facet_dict={}
+        except Exception as err:
+            raise self.EsgfObjectException(
+                "Could not interpret server's results: %s" %
+                err)
+
+    def make_facet_dict(self, xmlelement):
+        facet_dict = {}
         for lst in xmlelement.findall('lst'):
-            if lst.get('name')=='facet_counts':
-                myelement=lst
+            if lst.get('name') == 'facet_counts':
+                myelement = lst
                 for node in myelement.findall('lst'):
-                    if node.get('name')=='facet_fields':
+                    if node.get('name') == 'facet_fields':
                         for child in node.getchildren():
-                            facet_name=child.get('name')
-                            facet_dict[facet_name]=[]
+                            facet_name = child.get('name')
+                            facet_dict[facet_name] = []
                             for grandchild in child.getchildren():
-                                facet_dict[facet_name].append("%s (%s)"%(str(grandchild.get('name')),str(grandchild.text)))
+                                facet_dict[facet_name].append(
+                                    "%s (%s)" %
+                                    (str(grandchild.get('name')), str(grandchild.text)))
         return facet_dict
-    def get_xmlelement_count(self,facet_param=None):
+
+    def get_xmlelement_count(self, facet_param=None):
         try:
-            rqst=self.rqst_count
+            rqst = self.rqst_count
             if facet_param:
-                rqst=rqst+'&%s'%facet_param        
-                #print rqst
+                rqst = rqst + '&%s' % facet_param
+                # print rqst
             url = urllib2.urlopen(rqst)
-        except Exception,msg:
-             raise self.EsgfObjectException(msg)
+        except Exception as msg:
+            raise self.EsgfObjectException(msg)
         r = url.read()
         try:
             e = xml.etree.ElementTree.fromstring(r)
             return e
-        except Exception,err:
-            raise self.EsgfObjectException("Could not interpret server's results: %s" % err)
+        except Exception as err:
+            raise self.EsgfObjectException(
+                "Could not interpret server's results: %s" %
+                err)
 
-    def make_facet_dict_count(self,xmlelement):
-        myelementlist=xmlelement.findall('result')
-        count=None
+    def make_facet_dict_count(self, xmlelement):
+        myelementlist = xmlelement.findall('result')
+        count = None
         if len(myelementlist) > 0:
-            myelement=myelementlist[0]
-            count=int(myelement.get('numFound'))
+            myelement = myelementlist[0]
+            count = int(myelement.get('numFound'))
         return count
 
-validSearchTypes =  ["Dataset","File"]#"ById","ByTimeStamp"]
+validSearchTypes = ["Dataset", "File"]  # "ById","ByTimeStamp"]
+
+
 class esgfConnection(object):
-    def __init__(self,host,port=80,timeout=15,limit=None,offset=0,mapping=None,datasetids=None,fileids=None,restPath=None):
-        self.port=port
-        url=str(host).replace("://","^^^---^^^")
-        sp= url.split("/")
-        host = sp[0].replace("^^^---^^^","://")
+
+    def __init__(self, host, port=80, timeout=15, limit=None, offset=0,
+                 mapping=None, datasetids=None, fileids=None, restPath=None):
+        self.port = port
+        url = str(host).replace("://", "^^^---^^^")
+        sp = url.split("/")
+        host = sp[0].replace("^^^---^^^", "://")
         if restPath is None:
             restPath = "/".join(sp[1:])
-            if len(restPath)==0:
-                self.restPath="/esg-search/search"
+            if len(restPath) == 0:
+                self.restPath = "/esg-search/search"
             else:
-                self.restPath=restPath
+                self.restPath = restPath
         else:
-            self.restPath=restPath
-        self.host=host
-        #self.host="esg-datanode.jpl.nasa.gov"
+            self.restPath = restPath
+        self.host = host
+        # self.host="esg-datanode.jpl.nasa.gov"
         self.defaultSearchType = "Dataset"
         self.EsgfObjectException = esgfConnectionException
-        self.validSearchTypes=validSearchTypes
-        self.validSearchTypes=["Dataset",]
-        all = self._search("facets=*",searchType=None)
-        ## Now figure out the facet fields
+        self.validSearchTypes = validSearchTypes
+        self.validSearchTypes = ["Dataset", ]
+        all = self._search("facets=*", searchType=None)
+        # Now figure out the facet fields
         self.serverOrder = []
         for e in all:
-            if e.tag=="lst" and "name" in e.keys() and e.get("name")=="responseHeader":
-                ## ok found the Header
+            if e.tag == "lst" and "name" in e.keys() and e.get("name") == "responseHeader":
+                # ok found the Header
                 for s in e:
-                    if s.get("name")=="params":
-                        params=s
+                    if s.get("name") == "params":
+                        params = s
                         break
-                self.params={"text":None,"limit":limit,"offset":offset}
-                self.searchableKeys=set(["text","limit","offset"])
+                self.params = {"text": None, "limit": limit, "offset": offset}
+                self.searchableKeys = set(["text", "limit", "offset"])
                 for p in params:
-                    if p.get("name")=="facet.field":
+                    if p.get("name") == "facet.field":
                         for f in p:
                             self.serverOrder.append(f.text)
-                            self.params[f.text]=None
+                            self.params[f.text] = None
                             self.searchableKeys.add(f.text)
 
         self.keys = self.params.keys
@@ -123,61 +143,68 @@ class esgfConnection(object):
         self.values = self.params.values
 
         if datasetids is not None:
-            self.datasetids=genutil.StringConstructor(datasetids)
+            self.datasetids = genutil.StringConstructor(datasetids)
         else:
-            self.datasetids=None
+            self.datasetids = None
         if fileids is not None:
-            self.fileids=genutil.StringConstructor(fileids)
+            self.fileids = genutil.StringConstructor(fileids)
             if datasetids is not None:
-                self.fileids.template=self.fileids.template.replace("%(datasetid)",self.datasetids.template)
+                self.fileids.template = self.fileids.template.replace(
+                    "%(datasetid)", self.datasetids.template)
         elif self.datasetids is not None:
-            self.fileids=genutil.StringConstructor("%s.%%(filename)" % self.datasetids.template)
+            self.fileids = genutil.StringConstructor(
+                "%s.%%(filename)" %
+                self.datasetids.template)
         else:
-            self.fileids=None
-        #self.setMapping(mapping)
-        self.mapping=mapping                    
-            
-    ## def setUserOrder(self,value):
-    ##     self.userOrder=value
-    ## def getUserOrder(self):
-    ##     return self.userOrder
-    ## order=property(getUserOrder,setUserOrder)
-    def __getitem__(self,key):
+            self.fileids = None
+        # self.setMapping(mapping)
+        self.mapping = mapping
+
+    # def setUserOrder(self,value):
+    # self.userOrder=value
+    # def getUserOrder(self):
+    # return self.userOrder
+    # order=property(getUserOrder,setUserOrder)
+    def __getitem__(self, key):
         try:
             val = self.params[key]
         except:
             raise self.EsgfObjectException("Invalid key: %s" % repr(key))
         return val
-    def __setitem__(self,key,value):
+
+    def __setitem__(self, key, value):
         if not key in self.params.keys():
-            raise self.EsgfObjectException("Invalid key: %s, valid keys are: %s" % (repr(key),repr(self.params.keys())))
-        self.params[key]=value
+            raise self.EsgfObjectException(
+                "Invalid key: %s, valid keys are: %s" %
+                (repr(key), repr(self.params.keys())))
+        self.params[key] = value
         return
 
-                            
-    def _search(self,search="",searchType=None,stringType=False):
+    def _search(self, search="", searchType=None, stringType=False):
         if searchType is None:
-            searchType=self.defaultSearchType
+            searchType = self.defaultSearchType
         if not searchType in self.validSearchTypes:
-            raise self.EsgfObjectException("Valid Search types are: %s" % repr(self.validSearchTypes))
-        while search[0]=="&":
-            search=search[1:]
-        rqst = "%s/?type=%s&%s" % (self.restPath,searchType,search)
-        #print "REQUEST: %s%s" % (self.host,rqst)
-        myhost=str(self.host)
-        myport=str(self.port)
-        if myhost.find("://")>-1:
-            urltype=""
+            raise self.EsgfObjectException(
+                "Valid Search types are: %s" %
+                repr(self.validSearchTypes))
+        while search[0] == "&":
+            search = search[1:]
+        rqst = "%s/?type=%s&%s" % (self.restPath, searchType, search)
+        # print "REQUEST: %s%s" % (self.host,rqst)
+        myhost = str(self.host)
+        myport = str(self.port)
+        if myhost.find("://") > -1:
+            urltype = ""
         else:
-            urltype="http://"
+            urltype = "http://"
         try:
-            rqst="%s%s:%s/%s" % (urltype,myhost,myport,rqst)
-            tmp=rqst[6:].replace("//","/")
-            rqst=rqst[:6]+tmp
-            #print "Request:%s"%rqst
+            rqst = "%s%s:%s/%s" % (urltype, myhost, myport, rqst)
+            tmp = rqst[6:].replace("//", "/")
+            rqst = rqst[:6] + tmp
+            # print "Request:%s"%rqst
             url = urllib2.urlopen(rqst)
-        except Exception,msg:
-             raise self.EsgfObjectException(msg)
+        except Exception as msg:
+            raise self.EsgfObjectException(msg)
         r = url.read()
         if stringType:
             return r
@@ -185,539 +212,591 @@ class esgfConnection(object):
             try:
                 e = xml.etree.ElementTree.fromstring(r)
                 return e
-            except Exception,err:
-                raise self.EsgfObjectException("Could not interpret server's results: %s" % err)
+            except Exception as err:
+                raise self.EsgfObjectException(
+                    "Could not interpret server's results: %s" %
+                    err)
 
-    def generateRequest(self,stringType=False,**keys):
+    def generateRequest(self, stringType=False, **keys):
         search = ""
-        params={"limit":self["limit"],"offset":self["offset"]}
+        params = {"limit": self["limit"], "offset": self["offset"]}
 
-        ## for k in self.keys():
-        ##     if self[k] is not None and k in self.searchableKeys and k!="type":
-        ##         params[k]=self[k]
+        # for k in self.keys():
+        # if self[k] is not None and k in self.searchableKeys and k!="type":
+        # params[k]=self[k]
 
-        
         for k in keys.keys():
             if k == "stringType":
-                stringType=keys[k]
+                stringType = keys[k]
                 continue
             elif k == "type":
                 continue
-            ## elif not k in self.searchableKeys:
-            ##     raise self.EsgfObjectException("Invalid key: %s, valid keys are: %s" % (repr(k),repr(self.params.keys())))
+            # elif not k in self.searchableKeys:
+            # raise self.EsgfObjectException("Invalid key: %s, valid keys are:
+            # %s" % (repr(k),repr(self.params.keys())))
             if keys[k] is not None:
-                params[k]=keys[k]
+                params[k] = keys[k]
 
         search = ""
         for k in params.keys():
-            if isinstance(params[k],list):
+            if isinstance(params[k], list):
                 for v in params[k]:
-                    if isinstance(v,str):
-                        v=v.strip()
-                    search+="&%s=%s" % (k,v)
+                    if isinstance(v, str):
+                        v = v.strip()
+                    search += "&%s=%s" % (k, v)
             else:
                 v = params[k]
-                if isinstance(v,str):
-                    v=v.strip()
-                search+="&%s=%s" % (k,v)
+                if isinstance(v, str):
+                    v = v.strip()
+                search += "&%s=%s" % (k, v)
 
-#        search = "&".join(map(lambda x : "%s=%s" % (x[0],x[1]), params.items()))
-        search=search.replace(" ","%20")
+# search = "&".join(map(lambda x : "%s=%s" % (x[0],x[1]), params.items()))
+        search = search.replace(" ", "%20")
         return search
-    
-    def request(self,**keys):
-        numFound=0
+
+    def request(self, **keys):
+        numFound = 0
         cont = True
-        r=[]
+        r = []
         limit = self["limit"]
         while cont:
-            #print "Continuing",limit
-            self["offset"]=numFound
-            if limit is None or limit>1000:
-                self["limit"]=1000
+            # print "Continuing",limit
+            self["offset"] = numFound
+            if limit is None or limit > 1000:
+                self["limit"] = 1000
             search = self.generateRequest(**keys)
-            stringType=keys.get("stringType",False)
-            r.append(self._search(search,stringType=stringType))
-            if numFound==0:
+            stringType = keys.get("stringType", False)
+            r.append(self._search(search, stringType=stringType))
+            if numFound == 0:
                 for s in r[0][:]:
-                    if s.tag=="result":
+                    if s.tag == "result":
                         n = int(s.get("numFound"))
-            numFound+=self["limit"]
+            numFound += self["limit"]
             if limit is None:
-                if numFound>=n:
+                if numFound >= n:
                     cont = False
             else:
-                if numFound>=limit:
-                    cont=False
-            #print "N is:",numFound,n
-        self["limit"]=limit
-        self["offset"]=0
+                if numFound >= limit:
+                    cont = False
+            # print "N is:",numFound,n
+        self["limit"] = limit
+        self["offset"] = 0
         return r
-    
-    def extractTag(self,f):
-        out=None
-        if f.tag=="str":
-            out=f.text
-        elif f.tag=="arr":
-            out=[]
+
+    def extractTag(self, f):
+        out = None
+        if f.tag == "str":
+            out = f.text
+        elif f.tag == "arr":
+            out = []
             for sub in f[:]:
                 out.append(self.extractTag(sub))
-            
-        elif f.tag=="float":
+
+        elif f.tag == "float":
             out = float(f.text)
-        elif f.tag=="int":
+        elif f.tag == "int":
             out = int(f.text)
-        elif f.tag=="date":
-            ## Convert to cdtime?
-            out =f.text
+        elif f.tag == "date":
+            # Convert to cdtime?
+            out = f.text
         else:
-            out=f
-        if isinstance(out,list) and len(out)==1:
-            out=out[0]
+            out = f
+        if isinstance(out, list) and len(out) == 1:
+            out = out[0]
         return out
-        
-    def searchDatasets(self,**keys):
+
+    def searchDatasets(self, **keys):
         resps = self.request(**keys)
-        stringType=keys.get("stringType",False)
+        stringType = keys.get("stringType", False)
         if stringType:
             return resp
         datasets = []
         for resp in resps:
             for r in resp[:]:
-                if r.tag=="result":
-                    ##Ok let's go thru these datasets
+                if r.tag == "result":
+                    # Ok let's go thru these datasets
                     for d in r[:]:
-                        #print "************************************************"
-                        tmpkeys={}
+                        # print
+                        # "************************************************"
+                        tmpkeys = {}
                         for f in d[:]:
                             k = f.get("name")
-                            tmpkeys[k]=self.extractTag(f)
-                        if tmpkeys["type"]=="Dataset":
+                            tmpkeys[k] = self.extractTag(f)
+                        if tmpkeys["type"] == "Dataset":
                             datasetid = tmpkeys["id"]
-                            #print datasetid,self.restPath
-                            #print "KEYS FOR DATASET",keys.keys()
-                            datasets.append(esgfDataset(host=self.host,port=self.port,limit=1000,offset=0,mapping=self.mapping,datasetids=self.datasetids,fileids=self.fileids,keys=tmpkeys,originalKeys=keys,restPath=self.restPath))
+                            # print datasetid,self.restPath
+                            # print "KEYS FOR DATASET",keys.keys()
+                            datasets.append(
+                                esgfDataset(host=self.host,
+                                            port=self.port,
+                                            limit=1000,
+                                            offset=0,
+                                            mapping=self.mapping,
+                                            datasetids=self.datasetids,
+                                            fileids=self.fileids,
+                                            keys=tmpkeys,
+                                            originalKeys=keys,
+                                            restPath=self.restPath))
         return datasets
 
+
 class esgfDataset(esgfConnection):
-    def __init__(self,host=None,port=80,limit=1000,offset=0,mapping=None,datasetids=None,fileids=None,_http=None,restPath=None,keys={},originalKeys={}):
+
+    def __init__(self, host=None, port=80, limit=1000, offset=0, mapping=None,
+                 datasetids=None, fileids=None, _http=None, restPath=None, keys={}, originalKeys={}):
         if host is None:
             raise esgfDatasetException("You need to pass url")
-        self.host=host
-        #self.host="esg-datanode.jpl.nasa.gov"
-        self.port=port
-        self.defaultSearchType="File"
+        self.host = host
+        # self.host="esg-datanode.jpl.nasa.gov"
+        self.port = port
+        self.defaultSearchType = "File"
         if restPath is None:
-            self.restPath="/esg-search/search"
+            self.restPath = "/esg-search/search"
         else:
-            self.restPath=restPath
+            self.restPath = restPath
         if datasetids is None:
             if "dataset_id_template_" in keys:
-                tmp=keys["dataset_id_template_"]
-                if tmp[:5]=="cmip5":
-                    tmp = tmp.replace("valid_institute","institute")
-                    tmp="%(project)"+tmp[5:]
-                self.datasetids = genutil.StringConstructor(tmp.replace(")s",")"))
-            elif "project" in keys and keys["project"]=="cmip5":
-                self.datasetids = genutil.StringConstructor("%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)")
+                tmp = keys["dataset_id_template_"]
+                if tmp[:5] == "cmip5":
+                    tmp = tmp.replace("valid_institute", "institute")
+                    tmp = "%(project)" + tmp[5:]
+                self.datasetids = genutil.StringConstructor(
+                    tmp.replace(")s", ")"))
+            elif "project" in keys and keys["project"] == "cmip5":
+                self.datasetids = genutil.StringConstructor(
+                    "%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)")
             else:
-                self.datasetids=None
-        if isinstance(datasetids,genutil.StringConstructor):
-            self.datasetids=datasetids
-        elif isinstance(datasetids,str):
-            self.datasetids=genutil.StringConstructor(datasetids)
+                self.datasetids = None
+        if isinstance(datasetids, genutil.StringConstructor):
+            self.datasetids = datasetids
+        elif isinstance(datasetids, str):
+            self.datasetids = genutil.StringConstructor(datasetids)
         if fileids is not None:
-            if isinstance(fileids,genutil.StringConstructor):
-                self.fileids=fileids
+            if isinstance(fileids, genutil.StringConstructor):
+                self.fileids = fileids
             else:
-                self.fileids=genutil.StringConstructor(fileids)
+                self.fileids = genutil.StringConstructor(fileids)
             if self.datasetids is not None:
-                self.fileids.template=self.fileids.template.replace("%(datasetid)",self.datasetids.template)
+                self.fileids.template = self.fileids.template.replace(
+                    "%(datasetid)", self.datasetids.template)
         elif self.datasetids is not None:
-            self.fileids=genutil.StringConstructor("%s.%%(filename)" % self.datasetids.template)
+            self.fileids = genutil.StringConstructor(
+                "%s.%%(filename)" %
+                self.datasetids.template)
         else:
-            self.fileids=None
-        self.originalKeys=originalKeys
-        self.validSearchTypes=validSearchTypes
-        self.validSearchTypes=["File",]
+            self.fileids = None
+        self.originalKeys = originalKeys
+        self.validSearchTypes = validSearchTypes
+        self.validSearchTypes = ["File", ]
         self.EsgfObjectException = esgfDatasetException
-        self.params=keys
+        self.params = keys
         self.keys = self.params.keys
         self.items = self.params.items
         self.values = self.params.values
-        #self.id=self["id"]
-        self.params["limit"]=limit
-        self.params["offset"]=offset
-        self.mapping=mapping
-        #print "SEARCHING DS:",originalKeys
-        self.resp=None
+        # self.id=self["id"]
+        self.params["limit"] = limit
+        self.params["offset"] = offset
+        self.mapping = mapping
+        # print "SEARCHING DS:",originalKeys
+        self.resp = None
         self.cacheTime = None
 #        self.search()
 #        self.remap()
-        
-        ## Ok now we need to "map" this according to the user wishes
-
-           
-
-    ## def mappedItems():
-    ##     mapped=[]
-    ##     mapppoint=self.mapped
-    ##     for k in self.mapping.keys():
-    ##         keys=[]
-    ##         level=[k,mappoint.keys()]
-    ##         mappoint
-    def _extractFiles(self,resp,**inKeys):
-        ## We need to stick in there the bit from Luca to fill in the matching key from facet for now it's empty
-        files=[]
-        skipped = ["type","title","timestamp","service","id","score","file_url","service_type"]
+
+        # Ok now we need to "map" this according to the user wishes
+
+    # def mappedItems():
+    # mapped=[]
+    # mapppoint=self.mapped
+    # for k in self.mapping.keys():
+    # keys=[]
+    # level=[k,mappoint.keys()]
+    # mappoint
+    def _extractFiles(self, resp, **inKeys):
+        # We need to stick in there the bit from Luca to fill in the matching
+        # key from facet for now it's empty
+        files = []
+        skipped = [
+            "type",
+            "title",
+            "timestamp",
+            "service",
+            "id",
+            "score",
+            "file_url",
+            "service_type"]
         for r in resp[:]:
-            if r.tag=="result":
+            if r.tag == "result":
                 for d in r[:][:]:
-                    keys={}
+                    keys = {}
                     for f in d[:]:
                         k = f.get("name")
-                        keys[k]=self.extractTag(f)
-                    if keys["type"]=="File":
-                        ## if self["id"]=="obs4MIPs.NASA-JPL.AIRS.mon":
-                        ##     verbose=True
-                        ## else:
-                        ##     verbose=False
-                        ## #verbose=True
-                        ## if verbose: print "OK",keys["variable"],keys["file_id"],self["id"]
-                        ## if verbose: print "FILEIDS:",self.fileids
-                        ## if verbose: print "Fileids:",self.fileids.template
-                        ## if verbose: print "keys:",keys
-                        ## if self.fileids is not None:
-                        ##     try:
-                        ##         if verbose: print "file:",keys["file_id"],self.fileids.template
-                        ##         k2 = self.fileids.reverse(keys["file_id"])
-                        ##         if verbose: print "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@",k2
-                        ##         for k in k2.keys():
-                        ##             keys[k]=k2[k]
-                        ##     except:
-                        ##         if verbose: print "Failed:",ids[i].text,self.fileids.template
-                        ##         pass
-                        ## if verbose: print "KEYS FOR FILE:",keys.keys()
-                        ## if verbose: print "INKEYS:",inKeys.keys()
-                        ## matched = True
-                        ## matchWithKeys = {}
-                        ## for k in self.keys():
-                        ##     if k in self.originalKeys.keys():
-                        ##         matchWithKeys[k]=self.originalKeys[k]
-                        ##     else:
-                        ##         matchWithKeys[k]=self[k]
-                        ## for s in skipped:
-                        ##     try:
-                        ##         matchWithKeys.pop(s)
-                        ##     except:
-                        ##         pass
-                        ## for k in inKeys.keys():
-                        ##     matchWithKeys[k]=inKeys[k]
-                        ## if verbose: print "matching:",matchWithKeys.keys()
-                        ## for k in keys.keys():
-                        ##     if k in matchWithKeys.keys():
-                        ##         if verbose: print "Testing:",k,keys[k]
-                        ##         v = matchWithKeys[k]
-                        ##         if isinstance(v,(str,int,float)):
-                        ##             if verbose: print "\tComparing with:",v
-                        ##             if v != keys[k]:
-                        ##                 matched = False
-                        ##                 if verbose: print "\t\tNOPE"
-                        ##                 break
-                        ##         elif isinstance(v,list):
-                        ##             if verbose: print "\tComparing with (and %i more):%s"%(len(v),v[0]),v
-                        ##             if not keys[k] in v:
-                        ##                 matched = False
-                        ##                 if verbose: print "\t\tNOPE"
-                        ##                 break
-                        ##         else:
-                        ##             print "\twould compare %s with type: %s if I knew how to" % (str(v),type(v))
-                        ## if verbose: print keys["file_id"],matched
-                        ## if matched :
-                        ##     for k in self.keys():
-                        ##         if not k in keys.keys():
-                        ##             keys[k]=self[k]
-                        ##     print "KEYS:",keys
-                            files.append(esgfFile(**keys))
+                        keys[k] = self.extractTag(f)
+                    if keys["type"] == "File":
+                        # if self["id"]=="obs4MIPs.NASA-JPL.AIRS.mon":
+                        # verbose=True
+                        # else:
+                        # verbose=False
+                        # verbose=True
+                        # if verbose: print "OK",keys["variable"],keys["file_id"],self["id"]
+                        # if verbose: print "FILEIDS:",self.fileids
+                        # if verbose: print "Fileids:",self.fileids.template
+                        # if verbose: print "keys:",keys
+                        # if self.fileids is not None:
+                        # try:
+                        # if verbose: print "file:",keys["file_id"],self.fileids.template
+                        # k2 = self.fileids.reverse(keys["file_id"])
+                        # if verbose: print "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@",k2
+                        # for k in k2.keys():
+                        # keys[k]=k2[k]
+                        # except:
+                        # if verbose: print "Failed:",ids[i].text,self.fileids.template
+                        # pass
+                        # if verbose: print "KEYS FOR FILE:",keys.keys()
+                        # if verbose: print "INKEYS:",inKeys.keys()
+                        # matched = True
+                        # matchWithKeys = {}
+                        # for k in self.keys():
+                        # if k in self.originalKeys.keys():
+                        # matchWithKeys[k]=self.originalKeys[k]
+                        # else:
+                        # matchWithKeys[k]=self[k]
+                        # for s in skipped:
+                        # try:
+                        # matchWithKeys.pop(s)
+                        # except:
+                        # pass
+                        # for k in inKeys.keys():
+                        # matchWithKeys[k]=inKeys[k]
+                        # if verbose: print "matching:",matchWithKeys.keys()
+                        # for k in keys.keys():
+                        # if k in matchWithKeys.keys():
+                        # if verbose: print "Testing:",k,keys[k]
+                        # v = matchWithKeys[k]
+                        # if isinstance(v,(str,int,float)):
+                        # if verbose: print "\tComparing with:",v
+                        # if v != keys[k]:
+                        # matched = False
+                        # if verbose: print "\t\tNOPE"
+                        # break
+                        # elif isinstance(v,list):
+                        # if verbose: print "\tComparing with (and %i more):%s"%(len(v),v[0]),v
+                        # if not keys[k] in v:
+                        # matched = False
+                        # if verbose: print "\t\tNOPE"
+                        # break
+                        # else:
+                        # print "\twould compare %s with type: %s if I knew how to" % (str(v),type(v))
+                        # if verbose: print keys["file_id"],matched
+                        # if matched :
+                        # for k in self.keys():
+                        # if not k in keys.keys():
+                        # keys[k]=self[k]
+                        # print "KEYS:",keys
+                        files.append(esgfFile(**keys))
         return files
-            
+
     def info(self):
         print self
 
     def __str__(self):
         st = "Dataset Information\nid: %s\nKeys:\n" % self.id
         for k in self.keys():
-            st+="\t%s : %s\n" % (k,self[k])
+            st += "\t%s : %s\n" % (k, self[k])
         return st
-        
+
     def clearWebCache(self):
         self.resp = None
 
-    def saveCache(self,target="."):
+    def saveCache(self, target="."):
         if self.resp is None:
             return
         if os.path.isdir(target):
-            target = os.path.join(target,"esgfDatasetsCache.pckl")
+            target = os.path.join(target, "esgfDatasetsCache.pckl")
         if os.path.exists(target):
-            f=open(source)
-            #dict=eval(bz2.decompress(f.read()))
-	    dict=eval(f.read())
+            f = open(source)
+            # dict=eval(bz2.decompress(f.read()))
+            dict = eval(f.read())
             f.close()
         else:
-            dict={}
-        dict[self.id]=[self["timestamp"],xml.etree.ElementTree.tostring(self.resp),self.originalKeys]
-        f=open(target,"w")
-        #f.write(bz2.compress(repr(self.cache)))
+            dict = {}
+        dict[self.id] = [self["timestamp"],
+                         xml.etree.ElementTree.tostring(
+                         self.resp),
+                         self.originalKeys]
+        f = open(target, "w")
+        # f.write(bz2.compress(repr(self.cache)))
         f.write(repr(self.cache))
         f.close()
-        
-    def loadCache(self,source):
-        if isinstance(source,dict):
-            dict=source
+
+    def loadCache(self, source):
+        if isinstance(source, dict):
+            dict = source
         else:
             if os.path.isdir(source):
-                source = os.path.join(source,"esgfDatasetsCache.pckl")
+                source = os.path.join(source, "esgfDatasetsCache.pckl")
             if os.path.exists(source):
-                f=open(source)
-                #dict=eval(bz2.decompress(f.read()))
-                dict=eval(f.read())
+                f = open(source)
+                # dict=eval(bz2.decompress(f.read()))
+                dict = eval(f.read())
                 f.close()
             else:
-                dict={}
-        vals = dict.get(self.id,["",None,{}])
+                dict = {}
+        vals = dict.get(self.id, ["", None, {}])
         if vals[1] is not None:
-            self.cacheTime=vals[0]
-            self.resp=xml.etree.ElementTree.fromstring(vals[0])
-            self.originalKeys=vals[1]
-        
+            self.cacheTime = vals[0]
+            self.resp = xml.etree.ElementTree.fromstring(vals[0])
+            self.originalKeys = vals[1]
+
     def clearOriginalQueryCache(self):
-        self.originalKeys={}
+        self.originalKeys = {}
 
     def clear(self):
         self.clearWebCache()
         self.clearOriginalQueryCache()
-    
-    def search(self,**keys):
-        #search = self.generateRequest(**keys)
-        stringType=keys.get("stringType",False)
+
+    def search(self, **keys):
+        # search = self.generateRequest(**keys)
+        stringType = keys.get("stringType", False)
         keys.update(self.originalKeys)
-        st=""
+        st = ""
         if not "limit" in keys:
-            keys["limit"]=[self["limit"]]
+            keys["limit"] = [self["limit"]]
         if not "offset" in keys:
-            keys["offset"]=[self["offset"]]
+            keys["offset"] = [self["offset"]]
         for k in keys:
-            if k in ["searchString","stringType",]:
+            if k in ["searchString", "stringType", ]:
                 continue
             for v in keys[k]:
-                st+="&%s=%s"%(k,v)    
-            #st+="&%s=%s" % (k,keys[k])
-        #if self.resp is None:
-            #self.resp = self._search("dataset_id=%s%s" % (self["id"],st),stringType=stringType)
-        self.resp = self._search(st,stringType=stringType)
+                st += "&%s=%s" % (k, v)
+            # st+="&%s=%s" % (k,keys[k])
+        # if self.resp is None:
+            # self.resp = self._search("dataset_id=%s%s" %
+            # (self["id"],st),stringType=stringType)
+        self.resp = self._search(st, stringType=stringType)
         if stringType:
             return self.resp
-        return esgfFiles(self._extractFiles(self.resp,**keys),self)
+        return esgfFiles(self._extractFiles(self.resp, **keys), self)
 
 
 class esgfFiles(object):
-    def __init__(self,files,parent,mapping=None,datasetids=None,fileids=None):
-        self._files=files
-        if not isinstance(parent,esgfDataset):
+
+    def __init__(self, files, parent,
+                 mapping=None, datasetids=None, fileids=None):
+        self._files = files
+        if not isinstance(parent, esgfDataset):
             raise esgfFilesException("parent must be an esgfDataset instance")
-        self.parent=parent
+        self.parent = parent
         self.EsgfObjectException = esgfFilesException
         if datasetids is None:
-            datasetids=parent.datasetids
-        if isinstance(datasetids,genutil.StringConstructor):
-            self.datasetids=datasetids
-        elif isinstance(datasetids,str):
-            self.datasetids=genutil.StringConstructor(datasetids)
+            datasetids = parent.datasetids
+        if isinstance(datasetids, genutil.StringConstructor):
+            self.datasetids = datasetids
+        elif isinstance(datasetids, str):
+            self.datasetids = genutil.StringConstructor(datasetids)
         else:
-            self.datasetids=None
+            self.datasetids = None
         if fileids is not None:
-            if isinstance(fileids,genutil.StringConstructor):
-                self.fileids=fileids
+            if isinstance(fileids, genutil.StringConstructor):
+                self.fileids = fileids
             else:
-                self.fileids=genutil.StringConstructor(fileids)
+                self.fileids = genutil.StringConstructor(fileids)
             if self.datasetids is not None:
-                self.fileids.template=self.fileids.template.replace("%(datasetid)",self.datasetids.template)
+                self.fileids.template = self.fileids.template.replace(
+                    "%(datasetid)", self.datasetids.template)
         elif self.datasetids is not None:
-            self.fileids=genutil.StringConstructor("%s.%%(filename)" % self.datasetids.template)
+            self.fileids = genutil.StringConstructor(
+                "%s.%%(filename)" %
+                self.datasetids.template)
         else:
-            self.fileids=parent.fileids
+            self.fileids = parent.fileids
         if mapping is None:
-            mapping=parent.mapping
+            mapping = parent.mapping
         self.setMapping(mapping)
         self.remap()
-        self.projects_dict = {"CMIP5": "%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)" }
-        
-    def __getitem__(self,item):
-        if isinstance(item,int):
+        self.projects_dict = {
+            "CMIP5": "%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)"}
+
+    def __getitem__(self, item):
+        if isinstance(item, int):
             return self._files[item]
-        elif isinstance(item,str):
+        elif isinstance(item, str):
             for f in self._files:
-                if f["id"]==item:
+                if f["id"] == item:
                     return f
-        elif isinstance(item,slice):
+        elif isinstance(item, slice):
             return self._files[item]
         else:
             raise esgfFilesException("unknown item type: %s" % type(item))
-    def __setitem__(self,item):
+
+    def __setitem__(self, item):
         raise esgfFilesException("You cannot set items")
+
     def __len__(self):
         return len(self._files)
+
     def getMapping(self):
-        if isinstance(self.mapping,genutil.StringConstructor):
+        if isinstance(self.mapping, genutil.StringConstructor):
             return self.mapping.template
         else:
             return self.mapping
+
     def getMappingKeys(self):
-        if isinstance(self.mapping,genutil.StringConstructor):
+        if isinstance(self.mapping, genutil.StringConstructor):
             return self.mapping.keys()
         else:
             return None
-    def setMapping(self,mapping):
+
+    def setMapping(self, mapping):
         if mapping is None:
-            self.mapping=""
+            self.mapping = ""
             if self.datasetids is not None:
-                self.mapping=self.datasetids
+                self.mapping = self.datasetids
             else:
                 for k in self.parent.keys():
-                    if not k in ["limit","offset","text"]:
-                        self.mapping+="%%(%s)" % k
+                    if not k in ["limit", "offset", "text"]:
+                        self.mapping += "%%(%s)" % k
         else:
-            self.mapping=mapping
-        #print "Stage 1 mapping:",self.mapping
-        if not isinstance(self.mapping,genutil.StringConstructor):
+            self.mapping = mapping
+        # print "Stage 1 mapping:",self.mapping
+        if not isinstance(self.mapping, genutil.StringConstructor):
             if self.datasetids is not None:
-                self.mapping=self.mapping.replace("%(datasetid)",self.datasetids.template)
+                self.mapping = self.mapping.replace(
+                    "%(datasetid)",
+                    self.datasetids.template)
             self.mapping = genutil.StringConstructor(self.mapping)
-        #print "Stage 2:",self.mapping.template
-
-        ## vk = self.parent.keys()
-        ## for k in self.mapping.keys():
-        ##     ok = False
-        ##     if self.datasetids is not None:
-        ##         vk += self.datasetids.keys()
-        ##         if k in self.datasetids.keys():
-        ##             ok = True
-        ##     if self.fileids is not None:
-        ##         vk+=self.fileids.keys()
-        ##         if k in self.fileids.keys():
-        ##             ok = True
-        ##     if k in self.parent.keys():
-        ##         ok=True
-        ##     ## Ok second to last hope... Matching to datasetids
-        ##     if isinstance(self.datasetids,genutil.StringConstructor) and ok is False:
-        ##         try:
-        ##             mapid = self.datasetids.reverse(self.parent.id)
-        ##             vk+=mapid.keys()
-        ##             if k in mapid.keys():
-        ##                 ok = True
-                        
-        ##         except:
-        ##             #print "Couldn't map: %s to %s" % (self.parent.id,self.datasetids.template)
-        ##             pass
-        ##     if ok is False:
-        ##         vk = set(vk)
-        ##         raise self.EsgfObjectException("Invalid mapping key: %s, valid keys are: %s" % (k,sorted(vk)))
-            
-    def remap(self,mapping=None,verbose=False):
+        # print "Stage 2:",self.mapping.template
+
+        # vk = self.parent.keys()
+        # for k in self.mapping.keys():
+        # ok = False
+        # if self.datasetids is not None:
+        # vk += self.datasetids.keys()
+        # if k in self.datasetids.keys():
+        # ok = True
+        # if self.fileids is not None:
+        # vk+=self.fileids.keys()
+        # if k in self.fileids.keys():
+        # ok = True
+        # if k in self.parent.keys():
+        # ok=True
+        # Ok second to last hope... Matching to datasetids
+        # if isinstance(self.datasetids,genutil.StringConstructor) and ok is False:
+        # try:
+        # mapid = self.datasetids.reverse(self.parent.id)
+        # vk+=mapid.keys()
+        # if k in mapid.keys():
+        # ok = True
+
+        # except:
+        # print "Couldn't map: %s to %s" % (self.parent.id,self.datasetids.template)
+        # pass
+        # if ok is False:
+        # vk = set(vk)
+        # raise self.EsgfObjectException("Invalid mapping key: %s, valid keys
+        # are: %s" % (k,sorted(vk)))
+
+    def remap(self, mapping=None, verbose=False):
         if mapping is None:
             thismapping = self.mapping
         else:
             thismapping = mapping
-        self.mapped={}
+        self.mapped = {}
         savedmapping = thismapping
-        #print "Remap:",self.mapping.template
-        ## if verbose: print "################ REMAPPING: %s: %s #############################" % (thismapping.template,repr(thismapping.keys()))
+        # print "Remap:",self.mapping.template
+        # if verbose: print "################ REMAPPING: %s: %s
+        # #############################" %
+        # (thismapping.template,repr(thismapping.keys()))
         for f in self._files:
-            mappoint=self.mapped
-            tabs=""
-            nok=0
+            mappoint = self.mapped
+            tabs = ""
+            nok = 0
             nlevels = len(thismapping.keys())
-            #print "This mapping",thismapping.template,nlevels
+            # print "This mapping",thismapping.template,nlevels
             if nlevels == 0:
-                ## ok no mapping, let's try to figure this one out
+                # ok no mapping, let's try to figure this one out
                 if 'dataset_id_template_' in f.keys():
-                    #print "We are good to go"
-                    ds = f['dataset_id_template_'].replace(")s",")")
+                    # print "We are good to go"
+                    ds = f['dataset_id_template_'].replace(")s", ")")
                     thismapping = genutil.StringConstructor(ds)
             for k in thismapping.keys():
-                ## if verbose: print tabs,"keys:",k,"File keys:",f.keys()
-                ## if k == self.mapping.keys()[0]:
-                ##     f.matched.keys()
-                ## else:
-                ##     ## if verbose: print
+                # if verbose: print tabs,"keys:",k,"File keys:",f.keys()
+                # if k == self.mapping.keys()[0]:
+                # f.matched.keys()
+                # else:
+                # if verbose: print
                 if k in f.keys():
-                    ## if verbose: print tabs,k,f[k]
-                    nok+=1
+                    # if verbose: print tabs,k,f[k]
+                    nok += 1
                     cont = f[k]
-                    if not isinstance(cont,(str,int,float)):
+                    if not isinstance(cont, (str, int, float)):
                         break
                     if not cont in mappoint.keys():
-                        mappoint[cont]={}
+                        mappoint[cont] = {}
                 elif k in self.parent.keys():
-                    ## if verbose: print tabs,k,f[k]
-                    nok+=1
+                    # if verbose: print tabs,k,f[k]
+                    nok += 1
                     cont = self[k]
                     if not cont in mappoint.keys():
-                        mappoint[cont]={}
-                elif isinstance(self.fileids,genutil.StringConstructor):
+                        mappoint[cont] = {}
+                elif isinstance(self.fileids, genutil.StringConstructor):
                     try:
                         mapid = self.fileids.reverse(self.parent.id)
-                        ## if verbose:
-                            ## print "MAPID:",k,mapid
+                        # if verbose:
+                        # print "MAPID:",k,mapid
                         if k in mapid.keys():
-                            ## if verbose: print tabs,k,mapid[k]
-                            nok+=1
+                        # if verbose: print tabs,k,mapid[k]
+                            nok += 1
                             cont = mapid[k]
                             if not cont in mappoint.keys():
-                                mappoint[cont]={}
+                                mappoint[cont] = {}
                     except:
                         break
                 else:
                     break
-                mappoint=mappoint[cont]
-                tabs+="\t"
-            tmp = mappoint.get("files",[])
+                mappoint = mappoint[cont]
+                tabs += "\t"
+            tmp = mappoint.get("files", [])
             tmp.append(f)
             mappoint["files"] = tmp
             thismapping = savedmapping
-        ## if verbose: print "################ REMAPPED: %s #############################" % (thismapping,)
+        # if verbose: print "################ REMAPPED: %s
+        # #############################" % (thismapping,)
+
 
 class esgfFile(object):
-    def __init__(self,**keys):
-        self.__items__=keys
+
+    def __init__(self, **keys):
+        self.__items__ = keys
         self.keys = self.__items__.keys
         self.items = self.__items__.items
         self.values = self.__items__.values
 
-        services=[]
-        #print "Keys:",self.keys()
-        #print self["url"]
-        S=self["url"]
-        if isinstance(S,str):
-            S=[S,]
+        services = []
+        # print "Keys:",self.keys()
+        # print self["url"]
+        S = self["url"]
+        if isinstance(S, str):
+            S = [S, ]
         for service in S:
-            url,s2,s1 = service.split("|")
-            setattr(self,s1,url)
+            url, s2, s1 = service.split("|")
+            setattr(self, s1, url)
             services.append(s1)
-        self.services=services
-        self.id=self["id"]
+        self.services = services
+        self.id = self["id"]
 
-    def __getitem__(self,key):
+    def __getitem__(self, key):
         val = self.__items__[key]
         return val
-    
-    def __setitem__(self,key,value):
-        self.__items__[key]=value
+
+    def __setitem__(self, key, value):
+        self.__items__[key] = value
         return
 
     def __str__(self):
-        st = "File Information\nid: %s\nParent Dataset: %s" % (self["id"],self["dataset_id"])
-        st+="Matched keys: %s\n" % (repr(self.__items__))
+        st = "File Information\nid: %s\nParent Dataset: %s" % (
+            self["id"], self["dataset_id"])
+        st += "Matched keys: %s\n" % (repr(self.__items__))
         for service in self.services:
-            st+="service: %s @ %s\n" % (service,getattr(self,service))
+            st += "service: %s @ %s\n" % (service, getattr(self, service))
         return st[:-1]
diff --git a/Packages/cdms2/Lib/selectors.py b/Packages/cdms2/Lib/selectors.py
index f22443976..2a35705d1 100644
--- a/Packages/cdms2/Lib/selectors.py
+++ b/Packages/cdms2/Lib/selectors.py
@@ -1,42 +1,48 @@
 
 """Classes to support easy selection of climate data"""
 import cdtime
-from axis import axisMatches
-from error import CDMSError
-from grid import AbstractRectGrid, defaultRegion, setRegionSpecs, LongitudeType, LatitudeType, TimeType, VerticalType
+from .axis import axisMatches
+from .error import CDMSError
+from .grid import AbstractRectGrid, defaultRegion, setRegionSpecs, LongitudeType, LatitudeType, TimeType, VerticalType
 
 _debug = 0
+
+
 class SelectorError (CDMSError):
+
     "The exception type for errors in the selector packages"
-    def __init__ (self, args):
+
+    def __init__(self, args):
         self.args = args
 
+
 class Selector:
+
     """Selector class"""
-    def __init__ (self, *args, **kwargs):
+
+    def __init__(self, *args, **kwargs):
         """Positional args are SelectorComponents or Selectors
          Keyword args and their value are passed to kwselect to create
-         selectors. All the selector components are put into the 
+         selectors. All the selector components are put into the
          components list of this Selector, along with all the components
          of any Selector arguments.
         """
         self.__components = []
         self.refine(*args, **kwargs)
         for a in args:
-            if isinstance(a,SelectorComponent):
+            if isinstance(a, SelectorComponent):
                 try:
-                    self.__str__=a.__str__
+                    self.__str__ = a.__str__
                 except:
                     pass
 
-
-    def components (self):
+    def components(self):
         "List of selector components, each an instance of SelectorComponent."
         return self.__components[:]
-    
-    def refine (self, *args, **kwargs):        
-        """Add components to this selector using the same syntax as the 
-         constructor. Ignores non-keyword arguments that are not 
+
+    def refine(self, *args, **kwargs):
+        """Add components to this selector using the same syntax as the
+         constructor. Ignores non-keyword arguments that are not
          SelectorComponents or Selectors.
         """
         for a in args:
@@ -46,27 +52,27 @@ class Selector:
                 self.__components.append(a)
             elif isinstance(a, Selector):
                 for x in a.components():
-                    self.refine(x)  
+                    self.refine(x)
             else:
                 self.refine(positionalComponent(a))
 
         for k, v in kwargs.items():
-               self.refine(kwselect(k, v))
- 
-    def __repr__ (self):
+            self.refine(kwselect(k, v))
+
+    def __repr__(self):
         s = 'Selector('
         sep = ''
         for c in self.__components:
             s = s + sep + repr(c)
             sep = ', '
         return s + ')'
-    
-    def __and__ (self, other):
-        """Implements the & operator, which returns 
+
+    def __and__(self, other):
+        """Implements the & operator, which returns
            self.clone() refined by other
         """
         if not isinstance(other, Selector):
-            raise SelectorError, 'Cannot combine Selector with non-selector'
+            raise SelectorError('Cannot combine Selector with non-selector')
         s = self.clone()
         s.refine(other)
         return s
@@ -75,7 +81,7 @@ class Selector:
         "Makes a copy of this Selector."
         return Selector(*self.__components)
 
-    def __call__ (self, *args, **kwargs):
+    def __call__(self, *args, **kwargs):
         """Return a new selector consisting of this one refined by the given arguments.
            Arguments are as per the constructor or method refine.
         """
@@ -92,31 +98,31 @@ class Selector:
            Options modify the result of the selection. The options and
            their default values are:
                -- raw = 0: if 1, return an numpy.ma only
-               -- squeeze = 0:  If 1, eliminate any dimensions of length 1 
+               -- squeeze = 0:  If 1, eliminate any dimensions of length 1
                                 from the result.
-               -- order = None: If given, is a string such as 
+               -- order = None: If given, is a string such as
                                 variable.getOrder()
                                 returns. Result is permuted into this order.
-               -- grid = None:  If given, is a grid object; result is 
+               -- grid = None:  If given, is a grid object; result is
                                 regridded onto this grid.
            Each of the components contributes arguments suitable for the
-           subRegion call in class cdms.AbstractVariable. If a component 
+           subRegion call in class cdms.AbstractVariable. If a component
            is to modify the same axis as a previous component, its application
            is postponed. subRegion is called and the result is then fed
-           to each of the components' "post" method. This returns a 
+           to each of the components' "post" method. This returns a
            possibly modified result, which becomes the input to the next
-           component's post method. 
+           component's post method.
 
            This procedure is repeated until no more components are postponed.
            Then the options are applied to the result in the order
-           listed above, and the result is returned. 
+           listed above, and the result is returned.
 
            Execption SelectorError is thrown if the selection is
            impossible.
 
            The result is a TransientVariable and id(variable) <> id(result)
            even if there are no components.
-        """  
+        """
         d = kwargs.copy()
         raw = d.setdefault('raw', 0)
         squeeze = d.setdefault('squeeze', 0)
@@ -125,13 +131,14 @@ class Selector:
         del d['squeeze'], d['grid'], d['order'], d['raw']
         # make the selector
         s = self(*args, **d)
-        return s.unmodified_select(variable, 
-                                   squeeze=squeeze, 
-                                   order=order, 
-                                   grid=grid, 
+        return s.unmodified_select(variable,
+                                   squeeze=squeeze,
+                                   order=order,
+                                   grid=grid,
                                    raw=raw)
 
-    def unmodified_select(self, variable, raw=0, squeeze=0, order=None, grid=None):
+    def unmodified_select(
+            self, variable, raw=0, squeeze=0, order=None, grid=None):
         "Select using this selector without further modification"
         result = variable
         components = self.components()
@@ -143,71 +150,80 @@ class Selector:
             newcomponents = []
             specs = defaultRegion()
             for c in components:
-                if c.specifyGrid(variable, vargrid, specs): # specs is modified
+                if c.specifyGrid(variable, vargrid, specs):  # specs is modified
                     newcomponents.append(c)
             components = newcomponents
             if specs != defaultRegion():
                 vgindices = result.getGridIndices()
                 mask, indexspecs = vargrid.intersect(specs)
                 result = result(**indexspecs)
-                result = result.setMaskFromGridMask(mask, vgindices) # Propagate the grid mask to result
+                result = result.setMaskFromGridMask(
+                    mask,
+                    vgindices)  # Propagate the grid mask to result
 
         # Now select on non-coordinate components.
         while(components):
             axes = result.getAxisList()
-            if _debug: print "Axes:", axes
-            specifications = [':']*len(axes)
-            confined_by = [None]*len(axes)
-            aux = {} # for extra state 
+            if _debug:
+                print "Axes:", axes
+            specifications = [':'] * len(axes)
+            confined_by = [None] * len(axes)
+            aux = {}  # for extra state
             overflow = []
-            if _debug: print "Component list:", components
+            if _debug:
+                print "Component list:", components
             for c in components:
                 if c.specify(result, axes, specifications, confined_by, aux):
-                    if _debug: print 'Defer ' + repr(c)
+                    if _debug:
+                        print 'Defer ' + repr(c)
                     overflow.append(c)
                 elif _debug:
                     print "After applying", c, ":"
-                    print  "specifications=", specifications
+                    print "specifications=", specifications
                     print "Confined_by", confined_by
                     print "aux", aux
                     print "-----------------"
-            if _debug: 
+            if _debug:
                 print 'About to call subRegion:', specifications
             fetched = result.subRegion(*specifications)
             axismap = range(len(axes))
             for c in components:
-                if c in overflow: continue
-                fetched = c.post(fetched, result, axes, specifications, 
+                if c in overflow:
+                    continue
+                fetched = c.post(fetched, result, axes, specifications,
                                  confined_by, aux, axismap)
             if not len(overflow) < len(components):
-                raise SelectorError, \
-                  'Internal selector error, infinite loop detected.'
+                raise SelectorError(
+                    'Internal selector error, infinite loop detected.')
             components = overflow
             result = fetched
 
         if squeeze != 0 or \
            order is not None or \
            grid is not None or \
-           raw !=0 or \
-           result is variable: 
+           raw != 0 or \
+           result is variable:
      # result is variable when there are no components, for example.
             return result.subRegion(squeeze=squeeze, order=order, grid=grid,
                                     raw=raw)
         else:
             return result
-             
+
+
 class SelectorComponent:
+
     """Base class representing selection for a given set of axes.
     """
-    def specify (self, slab, axes, specifications, confined_by, aux):
-        """Refine the specification suitable for slab.subRegion 
+
+    def specify(self, slab, axes, specifications, confined_by, aux):
+        """Refine the specification suitable for slab.subRegion
            Set confined_by to yourself for each axis you confine.
-           If you would normally confine an axis to ':', don't, 
+           If you would normally confine an axis to ':', don't,
            unless you *require* that axis not be confined by other
            components.
-           
+
            Returning:
-              Return 1 if you wish to skip your turn. You'll be called 
+              Return 1 if you wish to skip your turn. You'll be called
               later with the results of the other selectors.
 
               Raise a SelectorError exception if you can't do your job.
@@ -218,10 +234,10 @@ class SelectorComponent:
               Store any info you want in dictionary aux[id(self)]
         """
         return 0
-    
+
     def specifyGrid(self, var, grid, specs):
         """Refine the specification suitable for grid.intersect().
-        
+
         'var' is a variable.
         'grid' is the grid associated with the variable.
         'specs' is the result set of specifications, of the form defined in the grid module.
@@ -235,47 +251,53 @@ class SelectorComponent:
         """
         return 1
 
-    def post (self, fetched, slab, axes, specifications, confined_by, aux, axismap):
+    def post(self, fetched, slab, axes,
+             specifications, confined_by, aux, axismap):
         """Post-process fetched if desired, return new value.
-           Arguments slab, axes, specifications, confined_by, and aux are 
-           pre-subRegion call. 
-           
-           axismap gives the indices of fetched's axes in axes and should 
+           Arguments slab, axes, specifications, confined_by, and aux are
+           pre-subRegion call.
+
+           axismap gives the indices of fetched's axes in axes and should
            be modified as required by this method.  Set axismap[i] to None to
            indicate that you have eliminated an axis.
         """
         return fetched
-    
+
+
 class axisComponent (SelectorComponent):
+
     "A SelectorComponent that confines exactly one axis or coordinate dimension (e.g. latitude)."
-    def __init__ (self, id, spec):
+
+    def __init__(self, id, spec):
         self.id = id
         self.spec = spec
 
-    def specify (self, slab, axes, specifications, confined_by, aux):
+    def specify(self, slab, axes, specifications, confined_by, aux):
         "Do specification for axis self.id; skip if axis not present."
         for i in range(len(axes)):
             if axisMatches(axes[i], self.id):
-               if confined_by[i] is None:
-                   specifications[i] = self.spec
-                   confined_by[i] = self
-                   return 0
-               else:
-                   return 1
+                if confined_by[i] is None:
+                    specifications[i] = self.spec
+                    confined_by[i] = self
+                    return 0
+                else:
+                    return 1
         return 0
 
-    def __repr__ (self):
-        s = repr(self.__class__)+'("'+self.id+'", '+repr(self.spec) + ')'
+    def __repr__(self):
+        s = repr(self.__class__) + \
+            '("' + self.id + '", ' + repr(self.spec) + ')'
         return s
-        
+
+
 class coordinateComponent(axisComponent):
+
     "A SelectorComponent that confines exactly one coordinate dimension (e.g., latitude)"
 
     def __init__(self, id, spec):
         axisComponent.__init__(self, id, spec)
 
     def specifyGrid(self, var, grid, specs):
-
         "Determine if this component confines the grid, and if so set the specs and return 1"
         if grid.hasCoordType(self.id):
             setRegionSpecs(grid, self.spec, self.id, specs)
@@ -283,42 +305,52 @@ class coordinateComponent(axisComponent):
         else:
             return 1
 
+
 class requiredComponent (SelectorComponent):
+
     """Checks to see that a specific id axis must be present."""
-    def __init__ (self, ids):
+
+    def __init__(self, ids):
         """Checks to see that a specific axis or axes must be present.
            Initialize with a sequence of ids.
         """
         self.ids = ids
-        
-    def specify (self, slab, axes, specifications, confined_by, aux):
+
+    def specify(self, slab, axes, specifications, confined_by, aux):
         """Doesn't confine but checks for existance."""
         for id in self.ids:
             for i in range(len(axes)):
                 if axisMatches(axes[i], id):
                     break
             else:
-                raise SelectorError, \
-                      'Required axis %s not present in this variable.' % (id,)
+                raise SelectorError(
+                    'Required axis %s not present in this variable.' %
+                    (id,))
         return 0
 
+
 class indexComponent (axisComponent):
-    """An axisComponent that confines exactly one axis by 
-       specifying indices. 
+
+    """An axisComponent that confines exactly one axis by
+       specifying indices.
     """
-    def __init__ (self, id, start=None, stop=None, stride=None):
+
+    def __init__(self, id, start=None, stop=None, stride=None):
         self.id = id
-        self.spec = slice(start,stop, stride)
+        self.spec = slice(start, stop, stride)
+
 
 class indexedComponent (SelectorComponent):
-    """A SelectorComponent that confines exactly one axis  
-       whose index is given. 
+
+    """A SelectorComponent that confines exactly one axis
+       whose index is given.
     """
-    def __init__ (self, index, value):
+
+    def __init__(self, index, value):
         self.index = index
         self.spec = value
 
-    def specify (self, slab, axes, specifications, confined_by, aux):
+    def specify(self, slab, axes, specifications, confined_by, aux):
         "Do the specification for axis whose index is self.index."
         i = self.index
         if confined_by[i] is None:
@@ -328,13 +360,16 @@ class indexedComponent (SelectorComponent):
         else:
             return 1
 
+
 class positionalComponent (SelectorComponent):
+
     """A SelectorComponent that confines the next axis available.
     """
-    def __init__ (self, v):
+
+    def __init__(self, v):
         self.v = v
 
-    def specify (self, slab, axes, specifications, confined_by, aux):
+    def specify(self, slab, axes, specifications, confined_by, aux):
         "Find the next unconfined axis and confine it."
         n = 0
         for i in range(len(axes)):
@@ -344,51 +379,59 @@ class positionalComponent (SelectorComponent):
                 aux[id(self)] = i
                 return 0
         else:
-            raise SelectorError, \
-            'positional component cannot be applied, insufficent rank:' +\
-             repr(self)
+            raise SelectorError('positional component cannot be applied, insufficent rank:' +
+                                repr(self))
 
-    def __repr__ (self):
+    def __repr__(self):
         s = repr(self.__class__) + '(' + repr(self.v) + ')'
         return s
-    
-def longitude (*value):
+
+
+def longitude(*value):
     "Creates default selector corresponding to keyword longitude = value"
     if not value:
         return all
     if len(value) == 1:
         value = value[0]
-    if value == ':': return all
+    if value == ':':
+        return all
     return Selector(coordinateComponent(LongitudeType, value))
-    
-def latitude (*value):
+
+
+def latitude(*value):
     "Creates default selector corresponding to keyword latitude = value"
     if not value:
         return all
     if len(value) == 1:
         value = value[0]
-    if value == ':': return all
+    if value == ':':
+        return all
     return Selector(coordinateComponent(LatitudeType, value))
-    
-def time (*value):
+
+
+def time(*value):
     """Creates a default selector corresponding to keyword time=value
     """
     if not value:
         return all
     if len(value) == 1:
         value = value[0]
-    if value == ':': return all
+    if value == ':':
+        return all
     return Selector(coordinateComponent(TimeType, value))
 
-def level (*value):
+
+def level(*value):
     "Creates default selector corresponding to keyword level = value"
     if not value:
         return all
     if len(value) == 1:
         value = value[0]
-    if value == ':': return all
+    if value == ':':
+        return all
     return Selector(coordinateComponent(VerticalType, value))
 
+
 def required(values):
     """Creates a selector that requires a certain axis to be present."""
     if values is None:
@@ -397,12 +440,13 @@ def required(values):
         values = (values,)
     return Selector(requiredComponent(values))
 
-def kwselect (k, value):
+
+def kwselect(k, value):
     """Turn a keyword/value pair into a SelectorComponent
        The words latitude, longitude, time, and level are
        used to pass value to the routine of the same name.
        Otherise, axis is called using k as the id.
-    """ 
+    """
     kx = k[0:3].lower()
     if kx == 'lat':
         return latitude(value)
@@ -416,17 +460,25 @@ def kwselect (k, value):
         return required(value)
     else:
         return Selector(requiredComponent((k,)), axisComponent(k, value))
-    
+
 all = Selector()
 
-def timeslice (start=None,stop=None,stride=None):
+
+def timeslice(start=None, stop=None, stride=None):
     return Selector(indexComponent('time', start, stop, stride))
-def latitudeslice (start=None,stop=None,stride=None):
+
+
+def latitudeslice(start=None, stop=None, stride=None):
     return Selector(indexComponent('latitude', start, stop, stride))
-def longitudeslice (start=None,stop=None,stride=None):
+
+
+def longitudeslice(start=None, stop=None, stride=None):
     return Selector(indexComponent('longitude', start, stop, stride))
-def levelslice (start=None,stop=None,stride=None):
+
+
+def levelslice(start=None, stop=None, stride=None):
     return Selector(indexComponent('level', start, stop, stride))
-def setslice (id, start=None,stop=None,stride=None):
-    return Selector(indexComponent(id, start, stop, stride))
 
+
+def setslice(id, start=None, stop=None, stride=None):
+    return Selector(indexComponent(id, start, stop, stride))
diff --git a/Packages/cdms2/Lib/slabinterface.py b/Packages/cdms2/Lib/slabinterface.py
index 642aea131..f12738ee7 100644
--- a/Packages/cdms2/Lib/slabinterface.py
+++ b/Packages/cdms2/Lib/slabinterface.py
@@ -1,64 +1,67 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
-## Further modified to be pure new numpy June 24th 2008
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Further modified to be pure new numpy June 24th 2008
 
 "Read part of the old cu slab interface implemented over CDMS"
 import numpy
 import sys
-from error import CDMSError
-from axis import std_axis_attributes
+from .error import CDMSError
+from .axis import std_axis_attributes
 import cdms2 as cdms
 
+
 class Slab:
+
     """Slab is the cu api
        This is an abstract class to inherit in AbstractVariable
        About axes:
        weight and bounds attributes always set but may be None
-       if bounds are None, getdimattribute returns result of querying the 
+       if bounds are None, getdimattribute returns result of querying the
        axis.
     """
     std_slab_atts = ['filename',
-                'missing_value',
-                'comments',
-                'grid_name',
-                'grid_type',
-                'time_statistic',
-                'long_name',
-                'units']
-    def __init__ (self):
+                     'missing_value',
+                     'comments',
+                     'grid_name',
+                     'grid_type',
+                     'time_statistic',
+                     'long_name',
+                     'units']
+
+    def __init__(self):
         pass
 
-    def getattribute (self, name):
+    def getattribute(self, name):
         "Get the attribute name."
-        defaultdict = {'filename':'N/A',
-                       'comments':'',
-                       'grid_name':'N/A',
-                       'grid_type':'N/A',
-                       'time_statistic':'',
-                       'long_name':'',
-                       'units':''}
+        defaultdict = {'filename': 'N/A',
+                       'comments': '',
+                       'grid_name': 'N/A',
+                       'grid_type': 'N/A',
+                       'time_statistic': '',
+                       'long_name': '',
+                       'units': ''}
         result = None
-        if name in defaultdict.keys() and not hasattr(self,name):
-            if name=='filename':
-                if (not hasattr(self,'parent')) or self.parent is None:
+        if name in defaultdict.keys() and not hasattr(self, name):
+            if name == 'filename':
+                if (not hasattr(self, 'parent')) or self.parent is None:
                     result = ''
                 else:
                     result = self.parent.id
 
-            elif name=='grid_name':
+            elif name == 'grid_name':
                 grid = self.getGrid()
                 if grid is None:
                     result = defaultdict[name]
                 else:
                     result = grid.id
-            elif name=='grid_type':
+            elif name == 'grid_type':
                 grid = self.getGrid()
                 if grid is None:
                     result = defaultdict[name]
-                elif isinstance(grid,cdms.grid.TransientRectGrid):
+                elif isinstance(grid, cdms.grid.TransientRectGrid):
                     result = grid.getType()
-                elif isinstance(grid,cdms.gengrid.AbstractGenericGrid):
+                elif isinstance(grid, cdms.gengrid.AbstractGenericGrid):
                     result = 'GenericGrid'
-                elif isinstance(grid,cdms.hgrid.AbstractCurveGrid):
+                elif isinstance(grid, cdms.hgrid.AbstractCurveGrid):
                     result = 'CurvilinearGrid'
             else:
                 result = defaultdict[name]
@@ -67,23 +70,23 @@ class Slab:
                 result = getattr(self, name)
             except AttributeError:
                 result = None
-            
+
         return result
 
-    def setattribute (self, name, value):
+    def setattribute(self, name, value):
         "Set the attribute name to value."
         setattr(self, name, value)
 
-    def createattribute (self, name, value):
+    def createattribute(self, name, value):
         "Create an attribute and set its name to value."
         setattr(self, name, value)
 
-    def deleteattribute (self, name):
+    def deleteattribute(self, name):
         "Delete the named attribute."
         if hasattr(self, name):
             delattr(self, name)
 
-    def listattributes (self):
+    def listattributes(self):
         "Return a list of attribute names."
         return self.attributes.keys()
 
@@ -92,10 +95,11 @@ class Slab:
         a = self.getAxis(dim)
         result = []
         for x in std_axis_attributes + a.attributes.keys():
-            if not x in result: result.append(x)
+            if not x in result:
+                result.append(x)
         return result
 
-    def getdimattribute (self, dim, field):
+    def getdimattribute(self, dim, field):
         """Get the attribute named field from the dim'th dimension.
          For bounds returns the old cu one-dimensional version.
         """
@@ -121,38 +125,37 @@ class Slab:
                 return g.getWeights()[0]
             elif d.isLongitude():
                 return g.getWeights()[1]
-            else: #should be impossible, actually
+            else:  # should be impossible, actually
                 return numpy.ones(len(d))
 
         elif field == "bounds":
             b = d.getBounds()
             n = b.shape[0]
-            result = numpy.zeros(n+1, b.dtype.char)
-            result[0:-1] = b[:,0]
-            result[-1] = b[-1,1]
+            result = numpy.zeros(n + 1, b.dtype.char)
+            result[0:-1] = b[:, 0]
+            result[-1] = b[-1, 1]
             return result
-        elif d.attributes.has_key(field):
+        elif field in d.attributes:
             return d.attributes[field]
         else:
-            raise CDMSError, "No %s attribute on given axis." % field
-            
-          
+            raise CDMSError("No %s attribute on given axis." % field)
+
     def showdim(self):
-        "Show the dimension attributes and values." 
+        "Show the dimension attributes and values."
         result = []
         for nd in range(self.rank()):
-            result.append('** Dimension ' + str(nd+1) + ' **')
+            result.append('** Dimension ' + str(nd + 1) + ' **')
             result = result + self.getAxis(nd).listall(1)
         print '\n'.join(result)
 
     def listdimnames(self):
         "Return a list of the names of the dimensions."
-        result=[]
+        result = []
         for nd in range(self.rank()):
             result.append(self.getdimattribute(nd, 'name'))
         return result
 
-    def listall (self, all=None):
+    def listall(self, all=None):
         "Get list of info about this slab."
         vname = self.id
         result = []
@@ -162,8 +165,10 @@ class Slab:
         for x in Slab.std_slab_atts:
             result.append(x + ": " + str(self.getattribute(x)))
         for x in self.attributes.keys():
-            if x in Slab.std_slab_atts: continue
-            if x == 'name': continue
+            if x in Slab.std_slab_atts:
+                continue
+            if x == 'name':
+                continue
             result.append(x + ": " + str(self.attributes[x]))
         g = self.getGrid()
         if g is None:
@@ -171,29 +176,30 @@ class Slab:
         else:
             result = result + g.listall(all)
         for nd in range(self.rank()):
-            result.append('** Dimension ' + str(nd+1) + ' **')
+            result.append('** Dimension ' + str(nd + 1) + ' **')
             result = result + self.getAxis(nd).listall(all)
         result.append('*** End of description for %s ***' % vname)
         return result
 
     def info(self, flag=None, device=None):
         "Write info about slab; include dimension values and weights if flag"
-        if device is None: device = sys.stdout
+        if device is None:
+            device = sys.stdout
         device.write('\n'.join(self.listall(all=flag)))
         device.write("\n")
 
-def cdms_bounds2cu_bounds (b):
+
+def cdms_bounds2cu_bounds(b):
     "Bounds are  len(v) by 2 in cdms but len(v)+1 in cu"
-    cub = numpy.ma.zeros(len(b)+1, numpy.float32)
+    cub = numpy.ma.zeros(len(b) + 1, numpy.float32)
     b1 = b.astype(numpy.float32)
-    if len(b)>1:
-        if (b[0,0]<b[0,1]) == (b[0,0]<b[-1,0]):
-            cub[0] = b[0,0]
-            cub[1:] = b[:,1]
+    if len(b) > 1:
+        if (b[0, 0] < b[0, 1]) == (b[0, 0] < b[-1, 0]):
+            cub[0] = b[0, 0]
+            cub[1:] = b[:, 1]
         else:
-            cub[0] = b[0,1]
-            cub[1:] = b[:,0]
+            cub[0] = b[0, 1]
+            cub[1:] = b[:, 0]
     else:
         cub[:] = b[0]
-    return numpy.array( cub )
-    
+    return numpy.array(cub)
diff --git a/Packages/cdms2/Lib/sliceut.py b/Packages/cdms2/Lib/sliceut.py
index e69e83262..4b8b55821 100644
--- a/Packages/cdms2/Lib/sliceut.py
+++ b/Packages/cdms2/Lib/sliceut.py
@@ -6,41 +6,41 @@
 # Returns a slice, or None if the intersection is empty.
 
 
-def sliceIntersect(aSlice,interval):
-    p0,p1 = interval
+def sliceIntersect(aSlice, interval):
+    p0, p1 = interval
     i = aSlice.start
     j = aSlice.stop
     k = aSlice.step
     if k is None:
-        k=1
+        k = 1
 
     # If the slice has a negative step, generate the
     # equivalent slice with positive step
-    irev=0
-    if k<0:
+    irev = 0
+    if k < 0:
         k = -k
-        pk = ((j-i+k)/k)*k+i
-        j = i+1
+        pk = ((j - i + k) / k) * k + i
+        j = i + 1
         i = pk
         irev = 1
 
     # Calculate the intersection for an increasing slice
-    px = ((p0-i+k-1)/k)*k+i
-    a = max(px,i)
-    b = min(j,p1)
-    if a<b:
-        if k==1:
-            newSlice = slice(a,b)
+    px = ((p0 - i + k - 1) / k) * k + i
+    a = max(px, i)
+    b = min(j, p1)
+    if a < b:
+        if k == 1:
+            newSlice = slice(a, b)
         else:
-            newSlice = slice(a,b,k)
+            newSlice = slice(a, b, k)
     else:
         newSlice = None
 
     # Reverse the slice if necessary
-    if irev==1 and newSlice is not None:
-        px = -((-b+a+k)/k*k-a)
-        newSlice = slice(px,a-1,-k)
-    
+    if irev == 1 and newSlice is not None:
+        px = -((-b + a + k) / k * k - a)
+        newSlice = slice(px, a - 1, -k)
+
     return newSlice
 
 # Intersect a slice with a partition. The partition is a list of
@@ -50,21 +50,23 @@ def sliceIntersect(aSlice,interval):
 # in the same order as in the partition. If the intersection is empty,
 # the result is an empty list.
 
-def slicePartition(aSlice,partition):
+
+def slicePartition(aSlice, partition):
     result = []
     for interval in partition:
-        intslice = sliceIntersect(aSlice,interval)
+        intslice = sliceIntersect(aSlice, interval)
         if intslice is not None:
-            result.append((interval,intslice))
+            result.append((interval, intslice))
     return result
 
+
 def lenSlice(aSlice):
     "Return the number of values associated with a slice"
 
     step = aSlice.step
     if step is None:
         step = 1
-    if step>0:
+    if step > 0:
         start = aSlice.start
         stop = aSlice.stop
     else:
@@ -72,121 +74,131 @@ def lenSlice(aSlice):
         stop = aSlice.start
         step = -step
 
-    return ((stop-start-1)/step + 1)
+    return ((stop - start - 1) / step + 1)
+
 
-def reverseSlice(s,size):
+def reverseSlice(s, size):
     """For 'reversed' slices (slices with negative stride),
     return an equivalent slice with positive step. For positive
     strides, just return the slice unchanged.
     """
-    if s.step>0 or s.step is None:
+    if s.step > 0 or s.step is None:
         return s
 
     i = s.start
     j = s.stop
     k = s.step
     if i is None:
-        i=size-1
-    elif i<0:
-        i = i%size
+        i = size - 1
+    elif i < 0:
+        i = i % size
     if j is None:
-        j=-1
-    elif -size-1<j<0:
-        j = j%size
-    
-    if i<-size or j<-size-1:
-        raise 'Invalid slice',`s`
+        j = -1
+    elif -size - 1 < j < 0:
+        j = j % size
+
+    if i < -size or j < -size - 1:
+        raise 'Invalid slice', repr(s)
 
     k = -k
-    pk = ((j-i+k)/k)*k+i
-    j = i+1
-    i = pk%size
+    pk = ((j - i + k) / k) * k + i
+    j = i + 1
+    i = pk % size
 
-##     if j==size:
-##         j = None
+# if j==size:
+# j = None
 
-    return slice(i,j,k)
+    return slice(i, j, k)
 
 
-def splitSlice(s,size):
+def splitSlice(s, size):
     """For a 'wraparound' slice, return two equivalent slices
     within the range 0..size-1."""
-    i,j,k = s.start,s.stop,s.step
-    if k>0:
-        wrap1 = slice(i,size,k)
-        wrap2 = slice((i-size)%k, j-size, k)
+    i, j, k = s.start, s.stop, s.step
+    if k > 0:
+        wrap1 = slice(i, size, k)
+        wrap2 = slice((i - size) % k, j - size, k)
     else:
-        wrap1 = slice(i-size, None, k)
-        wrap2 = slice(size+(i-size)%k, j, k)
-    return (wrap1,wrap2)
+        wrap1 = slice(i - size, None, k)
+        wrap2 = slice(size + (i - size) % k, j, k)
+    return (wrap1, wrap2)
 
 
-def splitSliceExt(s,size):
+def splitSliceExt(s, size):
     """
     mf 20010330 --
     For a 'wraparound' slice, return N equivalent slices
     within the range 0...(N*size) N = anything"""
-    i,j,k = s.start,s.stop,s.step
+    i, j, k = s.start, s.stop, s.step
 
     # slice of form [i:] sets j to large int
-    if j>2000000000L:
+    if j > 2000000000:
         j = size
 
-    _debug=0
-    if(_debug): print "SSSS0: ",i,j,k
-
-    wrap=[]
-
-    if k>0:
-
-        iter=0
-        if(_debug): print "SSSS1: iter ",iter,j,size,k
-        while(j>0):
-            if(_debug): print " "
-            if(_debug): print "SSSS2: iter",iter,j,size,k
-            jo=size
-            if(iter>0): jo=size+1
-            if(_debug): print "SSSS3: iter",iter,j,jo
-            if(j<size): jo=j
-            if(_debug): print "SSSS4: iter",iter,j,jo
-            wrap.append(slice(i,jo,k))
-            j=j-size
-            i=0
-            iter=iter+1
-            
+    _debug = 0
+    if(_debug):
+        print "SSSS0: ", i, j, k
+
+    wrap = []
+
+    if k > 0:
+
+        iter = 0
+        if(_debug):
+            print "SSSS1: iter ", iter, j, size, k
+        while(j > 0):
+            if(_debug):
+                print " "
+            if(_debug):
+                print "SSSS2: iter", iter, j, size, k
+            jo = size
+            if(iter > 0):
+                jo = size + 1
+            if(_debug):
+                print "SSSS3: iter", iter, j, jo
+            if(j < size):
+                jo = j
+            if(_debug):
+                print "SSSS4: iter", iter, j, jo
+            wrap.append(slice(i, jo, k))
+            j = j - size
+            i = 0
+            iter = iter + 1
+
     else:
 
-        wraprev=[]
-        iter=0
-        if(_debug): print "SSSS1 neg: iter ",iter,i,j,size,k
-        while(i>=0):
-            if(_debug): print " "
-            if(_debug): print "SSSS2 neg: iter",iter,i,j,size,k
-            io=size-1
-            if(_debug): print "SSSS3 neg: iter",iter,i,j,io
-            if(i<size): io=i
-            if(_debug): print "SSSS4 neg: iter",iter,i,j,io
-            
-            # mf 20010405 python does not return nothing for slice(size-1,size-1,-1); force it
-            if( not ( io==size-1 and j==size-1 ) ):
-                wraprev.append(slice(io,j,k))
-            
-            i=i-size
-            j=None
-            iter=iter+1
+        wraprev = []
+        iter = 0
+        if(_debug):
+            print "SSSS1 neg: iter ", iter, i, j, size, k
+        while(i >= 0):
+            if(_debug):
+                print " "
+            if(_debug):
+                print "SSSS2 neg: iter", iter, i, j, size, k
+            io = size - 1
+            if(_debug):
+                print "SSSS3 neg: iter", iter, i, j, io
+            if(i < size):
+                io = i
+            if(_debug):
+                print "SSSS4 neg: iter", iter, i, j, io
+
+            # mf 20010405 python does not return nothing for
+            # slice(size-1,size-1,-1); force it
+            if(not (io == size - 1 and j == size - 1)):
+                wraprev.append(slice(io, j, k))
+
+            i = i - size
+            j = None
+            iter = iter + 1
         #
         # reverse
         #
-        for k in range(0,len(wraprev)):
-            kk=len(wraprev)-k-1
+        for k in range(0, len(wraprev)):
+            kk = len(wraprev) - k - 1
             wrap.append(wraprev[kk])
-            if(_debug): print "SSSS5 neg: ",kk,wraprev[kk]
+            if(_debug):
+                print "SSSS5 neg: ", kk, wraprev[kk]
 
     return (wrap)
-
-
-
-
-
-
-
diff --git a/Packages/cdms2/Lib/tvariable.py b/Packages/cdms2/Lib/tvariable.py
index 7d9db8b9d..9ebcca9c6 100644
--- a/Packages/cdms2/Lib/tvariable.py
+++ b/Packages/cdms2/Lib/tvariable.py
@@ -1,4 +1,4 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
 # Further modified to be pure new numpy June 24th 2008
 
 """
@@ -8,18 +8,18 @@ Contains also the write part of the old cu interface.
 """
 import json
 import re
-import typeconv
+from . import typeconv
 import numpy
 from numpy import sctype2char
-from error import CDMSError
-from avariable import AbstractVariable
+from .error import CDMSError
+from .avariable import AbstractVariable
 
-from axis import createAxis, AbstractAxis
-from grid import createRectGrid, AbstractRectGrid
-from hgrid import AbstractCurveGrid
-from gengrid import AbstractGenericGrid
+from .axis import createAxis, AbstractAxis
+from .grid import createRectGrid, AbstractRectGrid
+from .hgrid import AbstractCurveGrid
+from .gengrid import AbstractGenericGrid
 
-# dist array support 
+# dist array support
 HAVE_MPI = False
 try:
     from mpi4py import MPI
@@ -30,38 +30,43 @@ except:
 
 id_builtin = id  # built_in gets clobbered by keyword
 
+
 def fromJSON(jsn):
     """ Recreate a TV from a dumped jsn object"""
     D = json.loads(jsn)
 
-    ## First recreates the axes
-    axes=[]
+    # First recreates the axes
+    axes = []
     for a in D["_axes"]:
-        ax = createAxis(numpy.array(a["_values"],dtype=a["_dtype"]),id=a["id"])
-        for k,v in a.iteritems():
-            if not k in ["_values","id","_dtype"]:
-                setattr(ax,k,v)
+        ax = createAxis(
+            numpy.array(a["_values"],
+                        dtype=a["_dtype"]),
+            id=a["id"])
+        for k, v in a.iteritems():
+            if not k in ["_values", "id", "_dtype"]:
+                setattr(ax, k, v)
         axes.append(ax)
-    ## Now prep the variable
-    V= createVariable(D["_values"],id=D["id"],typecode=D["_dtype"])
+    # Now prep the variable
+    V = createVariable(D["_values"], id=D["id"], typecode=D["_dtype"])
     V.setAxisList(axes)
-    for k,v in D.iteritems():
-        if not k in ["id","_values","_axes","_grid","_fill_value","_dtype",]:
-            setattr(V,k,v)
+    for k, v in D.iteritems():
+        if not k in ["id", "_values", "_axes", "_grid", "_fill_value", "_dtype", ]:
+            setattr(V, k, v)
     V.set_fill_value(D["_fill_value"])
     return V
 
 
-class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
+class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
+
     "An in-memory variable."
     variable_count = 0
     _missing = numpy.ma.MaskedArray.fill_value
 
-
     def _getShape(self):
         return self._data.shape
 
-    shape = property(_getShape,None)
+    shape = property(_getShape, None)
+
     def iscontiguous(self):
         return self.flags['CONTIGUOUS']
 
@@ -70,79 +75,84 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         out = numpy.ascontiguousarray(d)
         m = numpy.ma.getmask(self)
         if m is not numpy.ma.nomask:
-            m= numpy.ascontiguousarray(m)
-        out = TransientVariable(out,mask=m,attributes=self.attributes)
+            m = numpy.ascontiguousarray(m)
+        out = TransientVariable(out, mask=m, attributes=self.attributes)
         out.setAxisList(self.getAxisList())
         out.setMissing(self.getMissing())
         return out
-    
+
     ascontiguous = ascontiguousarray
-    
+
     def asma(self):
-        return numpy.ma.array(self._data,mask=self._mask)
-    
-    def _update_from(self,obj):
-        numpy.ma.MaskedArray._update_from(self,obj)
-        if not hasattr(self,'___cdms_internals__'):
-            self.__dict__['___cdms_internals__']=['__cdms_internals__','___cdms_internals__','_node_','parent','attributes','shape']
-        if not hasattr(self,'attributes'):
-            self.attributes={}
-        self._grid_ = getattr(obj,'_grid_',None)
+        return numpy.ma.array(self._data, mask=self._mask)
+
+    def _update_from(self, obj):
+        numpy.ma.MaskedArray._update_from(self, obj)
+        if not hasattr(self, '___cdms_internals__'):
+            self.__dict__[
+                '___cdms_internals__'] = [
+                    '__cdms_internals__',
+                    '___cdms_internals__',
+                    '_node_',
+                    'parent',
+                    'attributes',
+                    'shape']
+        if not hasattr(self, 'attributes'):
+            self.attributes = {}
+        self._grid_ = getattr(obj, '_grid_', None)
         try:
-            for nm,val in obj.__dict__.items():
-                if nm[0]=='_':
-##                     print nm
+            for nm, val in obj.__dict__.items():
+                if nm[0] == '_':
+# print nm
                     pass
-##                     self.__dict__[nm]=val
+# self.__dict__[nm]=val
                 else:
-                    setattr(self,nm,val)
-        except Exception,err:
+                    setattr(self, nm, val)
+        except Exception as err:
             pass
-        id = getattr(self,'id',None)
+        id = getattr(self, 'id', None)
         if id is None:
-            TransientVariable.variable_count+=1
-            id = 'variable_'+str(TransientVariable.variable_count)
-            self.id=id
-        self.name = getattr(obj,'name',id)
-        if not hasattr(self,'__domain'):
+            TransientVariable.variable_count += 1
+            id = 'variable_' + str(TransientVariable.variable_count)
+            self.id = id
+        self.name = getattr(obj, 'name', id)
+        if not hasattr(self, '__domain'):
             self.initDomain(axes=None)
 
-
-    def __array_finalize__(self,obj):
-        numpy.ma.MaskedArray.__array_finalize__(self,obj)
+    def __array_finalize__(self, obj):
+        numpy.ma.MaskedArray.__array_finalize__(self, obj)
         return
 
-    
-    __mul__    = AbstractVariable.__mul__
-    __rmul__   = AbstractVariable.__rmul__
-    __imul__   = AbstractVariable.__imul__
-    __abs__    = AbstractVariable.__abs__
-    __neg__    = AbstractVariable.__neg__
-    __add__    = AbstractVariable.__add__
-    __iadd__   = AbstractVariable.__iadd__
-    __radd__   = AbstractVariable.__radd__
+    __mul__ = AbstractVariable.__mul__
+    __rmul__ = AbstractVariable.__rmul__
+    __imul__ = AbstractVariable.__imul__
+    __abs__ = AbstractVariable.__abs__
+    __neg__ = AbstractVariable.__neg__
+    __add__ = AbstractVariable.__add__
+    __iadd__ = AbstractVariable.__iadd__
+    __radd__ = AbstractVariable.__radd__
     __lshift__ = AbstractVariable.__lshift__
     __rshift__ = AbstractVariable.__rshift__
-    __sub__    = AbstractVariable.__sub__
-    __rsub__   = AbstractVariable.__rsub__    
-    __isub__   = AbstractVariable.__isub__    
-    __div__    = AbstractVariable.__div__
-    __rdiv__   = AbstractVariable.__rdiv__
-    __idiv__   = AbstractVariable.__idiv__
-    __pow__   = AbstractVariable.__pow__
-    __eq__    = AbstractVariable.__eq__
-    __ne__    = AbstractVariable.__ne__
-    __lt__    = AbstractVariable.__lt__
-    __le__    = AbstractVariable.__le__
-    __gt__    = AbstractVariable.__gt__
-    __ge__    = AbstractVariable.__ge__
-    __sqrt__    = AbstractVariable.__sqrt__
-
-    def __init__(self,data, typecode=None, copy=1, savespace=0, 
+    __sub__ = AbstractVariable.__sub__
+    __rsub__ = AbstractVariable.__rsub__
+    __isub__ = AbstractVariable.__isub__
+    __div__ = AbstractVariable.__div__
+    __rdiv__ = AbstractVariable.__rdiv__
+    __idiv__ = AbstractVariable.__idiv__
+    __pow__ = AbstractVariable.__pow__
+    __eq__ = AbstractVariable.__eq__
+    __ne__ = AbstractVariable.__ne__
+    __lt__ = AbstractVariable.__lt__
+    __le__ = AbstractVariable.__le__
+    __gt__ = AbstractVariable.__gt__
+    __ge__ = AbstractVariable.__ge__
+    __sqrt__ = AbstractVariable.__sqrt__
+
+    def __init__(self, data, typecode=None, copy=1, savespace=0,
                  mask=numpy.ma.nomask, fill_value=None, grid=None,
-                 axes=None, attributes=None, id=None, copyaxes=1, dtype=None, 
-                 order=False, no_update_from=False,**kargs):
-        """createVariable (self, data, typecode=None, copy=0, savespace=0, 
+                 axes=None, attributes=None, id=None, copyaxes=1, dtype=None,
+                 order=False, no_update_from=False, **kargs):
+        """createVariable (self, data, typecode=None, copy=0, savespace=0,
                  mask=None, fill_value=None, grid=None,
                  axes=None, attributes=None, id=None, dtype=None, order=False)
            The savespace argument is ignored, for backward compatibility only.
@@ -150,69 +160,70 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         try:
             if data.fill_value is not None:
                 self._setmissing(data.fill_value)
-                fill_value=data.fill_value
+                fill_value = data.fill_value
         except:
             pass
         if fill_value is not None:
-           self._setmissing(fill_value)
-        if attributes is not None  and "_FillValue" in attributes.keys():
-           self._setmissing(attributes["_FillValue"])
+            self._setmissing(fill_value)
+        if attributes is not None and "_FillValue" in attributes.keys():
+            self._setmissing(attributes["_FillValue"])
 
-        # tile index, None means no mosaic 
+        # tile index, None means no mosaic
         self.tileIndex = None
-        
+
         # Compatibility: assuming old typecode, map to new
         if dtype is None and typecode is not None:
             dtype = typeconv.convtypecode2(typecode)
         typecode = sctype2char(dtype)
         if isinstance(data, tuple):
             data = list(data)
-        
-        AbstractVariable.__init__ (self)
+
+        AbstractVariable.__init__(self)
 
         if isinstance(data, AbstractVariable):
             if not isinstance(data, TransientVariable):
                 data = data.subSlice()
-##             if attributes is None: attributes = data.attributes
+# if attributes is None: attributes = data.attributes
             if axes is None and not no_update_from:
                 axes = map(lambda x: x[0], data.getDomain())
             if grid is None and not no_update_from:
                 grid = data.getGrid()
                 if (grid is not None) and (not isinstance(grid, AbstractRectGrid)) \
-                                      and (not grid.checkAxes(axes)):
-                    grid = grid.reconcile(axes) # Make sure grid and axes are consistent
-
-        ncopy = (copy!=0)
+                        and (not grid.checkAxes(axes)):
+                    grid = grid.reconcile(
+                        axes)  # Make sure grid and axes are consistent
 
+        ncopy = (copy != 0)
 
         # Initialize the geometry
         if grid is not None:
-            copyaxes=0                  # Otherwise grid axes won't match domain.
+            copyaxes = 0                  # Otherwise grid axes won't match domain.
         if axes is not None:
-            self.initDomain(axes, copyaxes=copyaxes)           # Note: clobbers the grid, so set the grid after.
+            self.initDomain(axes, copyaxes=copyaxes)
+                            # Note: clobbers the grid, so set the grid after.
         if grid is not None:
             self.setGrid(grid)
- 
+
         # Initialize the attributes
         if attributes is not None:
             for key, value in attributes.items():
-                if (key in ['shape','flat','imaginary','real'] or key[0]=='_') and key not in ['_FillValue']:
-                    raise CDMSError, 'Bad key in attributes: ' + key
+                if (key in ['shape', 'flat', 'imaginary', 'real'] or key[0] == '_') and key not in ['_FillValue']:
+                    raise CDMSError('Bad key in attributes: ' + key)
                 elif key == 'missing_value':
-                    #ignore if fill value given explicitly
+                    # ignore if fill value given explicitly
                     if fill_value is None:
                         self._setmissing(value)
-                elif key not in ['scale_factor','add_offset']:
+                elif key not in ['scale_factor', 'add_offset']:
                     setattr(self, key, value)
 
         # Sync up missing_value attribute and the fill value.
         self.missing_value = self._getmissing()
         self._FillValue = self._getmissing()
         if id is not None:
-            if not isinstance(id,(unicode,str)): 
-                raise CDMSError, 'id must be a string'
+            if not isinstance(id, (unicode, str)):
+                raise CDMSError('id must be a string')
             self.id = id
-        elif hasattr(data,'id'):
+        elif hasattr(data, 'id'):
             self.id = data.id
 
         if self.id is None:
@@ -227,22 +238,21 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         self.__mpiWindows = {}
         self.__mpiType = self.__getMPIType()
 
-
     def _getmissing(self):
         return self._missing
 
-    def _setmissing(self,value):
-        self._missing=numpy.array(value).astype(self.dtype)
+    def _setmissing(self, value):
+        self._missing = numpy.array(value).astype(self.dtype)
 
-    missing       = property(_getmissing,_setmissing)
-    fill_value    = property(_getmissing,_setmissing)
-    _FillValue    = property(_getmissing,_setmissing)
-    missing_value = property(_getmissing,_setmissing)
+    missing = property(_getmissing, _setmissing)
+    fill_value = property(_getmissing, _setmissing)
+    _FillValue = property(_getmissing, _setmissing)
+    missing_value = property(_getmissing, _setmissing)
 
-    def __new__(cls, data, typecode=None, copy=0, savespace=0, 
-                 mask=numpy.ma.nomask, fill_value=None, grid=None,
-                 axes=None, attributes=None, id=None, copyaxes=1, dtype=None, order=False,**kargs):
-        """createVariable (self, data, typecode=None, copy=0, savespace=0, 
+    def __new__(cls, data, typecode=None, copy=0, savespace=0,
+                mask=numpy.ma.nomask, fill_value=None, grid=None,
+                axes=None, attributes=None, id=None, copyaxes=1, dtype=None, order=False, **kargs):
+        """createVariable (self, data, typecode=None, copy=0, savespace=0,
                  mask=None, fill_value=None, grid=None,
                  axes=None, attributes=None, id=None, dtype=None, order=False)
            The savespace argument is ignored, for backward compatibility only.
@@ -258,61 +268,60 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
                 data = data.subSlice()
         if isinstance(data, numpy.ma.MaskedArray):
             try:
-                if fill_value is None: fill_value = data.fill_value
+                if fill_value is None:
+                    fill_value = data.fill_value
             except:
                 pass
 
-        ncopy = (copy!=0)
+        ncopy = (copy != 0)
         if mask is None:
             try:
                 mask = data.mask
-            except Exception,err:
+            except Exception as err:
                 mask = numpy.ma.nomask
 
         # Handle the case where ar[i:j] returns a single masked value
         if data is numpy.ma.masked:
-            #shape = tuple(len(axes)*[1])
+            # shape = tuple(len(axes)*[1])
             data = numpy.ma.masked.data
-            #data.shape = shape
+            # data.shape = shape
             mask = numpy.ma.masked.mask
-            #mask.shape = shape
-##         if data.getattr('mask',None) is not numpy.ma.nomask:
-##             mask = data.mask
-##         print 'passing:',mask.shape,data.shape,numpy.shape(cls)
+            # mask.shape = shape
+# if data.getattr('mask',None) is not numpy.ma.nomask:
+# mask = data.mask
+# print 'passing:',mask.shape,data.shape,numpy.shape(cls)
         if fill_value is not None:
             fill_value = numpy.array(fill_value).astype(dtype)
         else:
             fill_value = numpy.ma.MaskedArray(1).astype(dtype).item()
 
+        self = numpy.ma.MaskedArray.__new__(cls, data, dtype=dtype,
+                                            copy=ncopy,
+                                            mask=mask,
+                                            fill_value=fill_value,
+                                            subok=False,
+                                            order=order)
 
-        self = numpy.ma.MaskedArray.__new__(cls, data, dtype = dtype,
-                                      copy = ncopy,
-                                      mask = mask,
-                                      fill_value = fill_value,
-                                      subok = False,
-                                      order = order)
-
-            
-        
         return self
 
     # typecode = numpy.ma.array.typecode
     def typecode(self):
         return self.dtype.char
 
-    def assignValue(self,data):
+    def assignValue(self, data):
         self[...] = data
 
     def getValue(self, squeeze=1):
         return self.filled()
 
-    def expertSlice (self, slicelist):
+    def expertSlice(self, slicelist):
         return numpy.ma.MaskedArray.__getitem__(self, slicelist)
 
-    def initDomain (self, axes, copyaxes=1):
-        # lazy evaluation via getAxis to avoid creating axes that aren't ever used.
+    def initDomain(self, axes, copyaxes=1):
+        # lazy evaluation via getAxis to avoid creating axes that aren't ever
+        # used.
         newgrid = None
-        self.__domain = [None]*self.rank()
+        self.__domain = [None] * self.rank()
         if axes is not None:
             flataxes = []
             try:
@@ -325,22 +334,25 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
                 elif isinstance(item, AbstractRectGrid) or isinstance(item, AbstractCurveGrid):
                     flataxes.append(item.getAxis(0))
                     flataxes.append(item.getAxis(1))
-                    copyaxes=0
+                    copyaxes = 0
                     newgrid = item
                 elif isinstance(item, AbstractGenericGrid):
                     flataxes.append(item.getAxis(0))
-                    copyaxes=0
+                    copyaxes = 0
                     newgrid = item
                 else:
-                    raise CDMSError, "Invalid item in axis list:\n"+`item`
+                    raise CDMSError(
+                        "Invalid item in axis list:\n" + repr(item))
             if len(flataxes) != self.rank():
-                raise CDMSError, "Wrong number of axes to initialize domain."
+                raise CDMSError("Wrong number of axes to initialize domain.")
             for i in range(len(flataxes)):
                 if flataxes[i] is not None:
-                    if (not flataxes[i].isVirtual()) and copyaxes==1:
+                    if (not flataxes[i].isVirtual()) and copyaxes == 1:
                         self.copyAxis(i, flataxes[i])
                     else:
-                        self.setAxis(i, flataxes[i]) # No sense copying a virtual axis.
+                        self.setAxis(
+                            i,
+                            flataxes[i])  # No sense copying a virtual axis.
             if newgrid is not None:     # Do this after setting the axes, so the grid is consistent
                 self.setGrid(newgrid)
 
@@ -350,25 +362,31 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
                 junk = self.getAxis(i)  # will force a fill in
         return self.__domain
 
-    def getAxis (self, n):
-        if n < 0: n = n + self.rank()
+    def getAxis(self, n):
+        if n < 0:
+            n = n + self.rank()
         if self.__domain[n] is None:
             length = numpy.ma.size(self, n)
-            # axis = createAxis(numpy.ma.arange(numpy.ma.size(self, n), typecode=numpy.Float))
-            axis = createAxis(numpy.ma.arange(numpy.ma.size(self, n), dtype=numpy.float_))
+            # axis = createAxis(numpy.ma.arange(numpy.ma.size(self, n),
+            # typecode=numpy.Float))
+            axis = createAxis(
+                numpy.ma.arange(numpy.ma.size(self, n), dtype=numpy.float_))
             axis.id = "axis_" + str(n)
             self.__domain[n] = (axis, 0, length, length)
         return self.__domain[n][0]
-        
-    def setAxis (self, n, axis, savegrid=0):
+
+    def setAxis(self, n, axis, savegrid=0):
         """Set n axis of self to a copy of axis. (0-based index)
         """
-        if n < 0: n = n + self.rank()
+        if n < 0:
+            n = n + self.rank()
         axislen = self.shape[n]
-        if len(axis)!=axislen:
-            raise CDMSError,"axis length %d does not match corresponding dimension %d"%(len(axis),axislen)
+        if len(axis) != axislen:
+            raise CDMSError(
+                "axis length %d does not match corresponding dimension %d" %
+                (len(axis), axislen))
         if not isinstance(axis, AbstractAxis):
-            raise CDMSError,"copydimension, other not a slab."
+            raise CDMSError("copydimension, other not a slab.")
         self.__domain[n] = (axis, 0, len(axis), len(axis))
 
     def setAxisList(self, axislist):
@@ -376,26 +394,27 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         for i in range(len(axislist)):
             self.setAxis(i, axislist[i])
 
-    def copyAxis (self, n, axis):
+    def copyAxis(self, n, axis):
         """Set n axis of self to a copy of axis. (0-based index)
            Invalidates grid.
         """
-        if n < 0: n = n + self.rank()
+        if n < 0:
+            n = n + self.rank()
         if not isinstance(axis, AbstractAxis):
-            raise CDMSError,"copydimension, other not an axis."
+            raise CDMSError("copydimension, other not an axis.")
         b = axis.getBounds()
         mycopy = createAxis(axis[:], b)
         mycopy.id = axis.id
         for k, v in axis.attributes.items():
-           setattr(mycopy, k, v)
-        self.setAxis (n, mycopy)
-   
-    def copyDomain (self, other):
+            setattr(mycopy, k, v)
+        self.setAxis(n, mycopy)
+
+    def copyDomain(self, other):
         "Set the axes and grid by copying variable other."
         if not isinstance(other, AbstractVariable):
-            raise CDMSError,"copyDomain, other not a variable."
+            raise CDMSError("copyDomain, other not a variable.")
         if self.rank() != other.rank():
-            raise CDMSError, "copyDomain, ranks do not match."
+            raise CDMSError("copyDomain, ranks do not match.")
         for i in range(self.rank()):
             self.copyAxis(i, other.getAxis(i))
         self.setGrid(other.getGrid())
@@ -406,21 +425,23 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
             for i in range(self.rank()):
                 ax = self.getAxis(i)
                 if ax.isLatitude():
-                    order = order+'y'
+                    order = order + 'y'
                     lat = ax
                 elif ax.isLongitude():
-                    order = order+'x'
+                    order = order + 'x'
                     lon = ax
-                if len(order)==2: break
+                if len(order) == 2:
+                    break
 
-            if order in ['yx','xy']:
-                self._grid_ = createRectGrid(lat,lon,order)
+            if order in ['yx', 'xy']:
+                self._grid_ = createRectGrid(lat, lon, order)
         return self._grid_
 
-    def astype (self, tc):
+    def astype(self, tc):
         "return self as array of given type."
-        maresult = numpy.ma.MaskedArray.astype(self,tc)
-        return TransientVariable(maresult, copy=0, axes=self.getAxisList(), fill_value=self.fill_value,
+        maresult = numpy.ma.MaskedArray.astype(self, tc)
+        return TransientVariable(
+            maresult, copy=0, axes=self.getAxisList(), fill_value=self.fill_value,
                                  attributes=self.attributes, id=self.id, grid=self.getGrid())
 
     def setMaskFromGridMask(self, mask, gridindices):
@@ -437,18 +458,18 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
                 shapeprep.append(self.shape[i])
 
         # Broadcast mask
-        if tprep!=[]:
+        if tprep != []:
             newshape = tuple(shapeprep + list(mask.shape))
             bigmask = numpy.resize(mask, newshape)
 
             # Generate the tranpose vector
             t = tuple(tprep + list(gridindices))
-            tinv = [0]*len(t)
+            tinv = [0] * len(t)
             for i in range(len(t)):
                 tinv[t[i]] = i
 
             # And reshape to fit the variable
-            if tinv!=range(len(tinv)):
+            if tinv != range(len(tinv)):
                 bigmask = numpy.transpose(bigmask, tuple(tinv))
 
         else:
@@ -463,25 +484,25 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         return result
 
 # Old cu interface
-    def copydimension (self, idim, other, jdim):
-        """Set idim dimension of self to variable other's jdim'th 
+    def copydimension(self, idim, other, jdim):
+        """Set idim dimension of self to variable other's jdim'th
            This is for old cu compatibility. Use copyAxis for new code.
         """
         if not isinstance(other, AbstractVariable):
-            raise CDMSError,"copydimension, other not a variable."
+            raise CDMSError("copydimension, other not a variable.")
         a = other.getAxis(jdim)
         self.copyAxis(idim, a)
 
     def setdimattribute(self, dim, field, value):
         "Set the attribute named field from the dim'th dimension."
         if dim < 0 or dim >= self.rank():
-            raise CDMSError, "setdimattribute, dim out of bounds."
+            raise CDMSError("setdimattribute, dim out of bounds.")
         d = self.getAxis(dim)
         if field == "name":
             if not isinstance(value, basestring):
-               raise CDMSError, "setdimattribute: name not a string"
+                raise CDMSError("setdimattribute: name not a string")
             d.id = value
-            
+
         elif field == "values":
             # note -- invalidates grid, may break old code.
             a = createAxis(numpy.ma.filled(value[:]))
@@ -492,28 +513,29 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
 
         elif field == "units":
             if not isinstance(value, basestring):
-               raise CDMSError, "setdimattribute: units not a string"
+                raise CDMSError("setdimattribute: units not a string")
             d.units = value
 
         elif field == "weights":
             # Well, you can't really do this without modifying the grid
-            raise CDMSError, "setdimattribute weights not implemented."
+            raise CDMSError("setdimattribute weights not implemented.")
 
         elif field == "bounds":
             if value is None:
-               d.setBounds(None)
+                d.setBounds(None)
             else:
-               b = numpy.ma.filled(value)
-               if numpy.ma.rank(b) == 2:
-                   d.setBounds(b)
-               elif numpy.ma.rank(b) == 1:
-                   b1 = numpy.zeros((len(b)-1,2), b.dtype.char)
-                   b1[:,0] = b[:-1]
-                   b1[:,1] = b[1:]
-                   d.setBounds(b1)
-               else:
-                   raise CDMSError, \
-                   "setdimattribute, bounds improper shape: " + b.shape
+                b = numpy.ma.filled(value)
+                if numpy.ma.rank(b) == 2:
+                    d.setBounds(b)
+                elif numpy.ma.rank(b) == 1:
+                    b1 = numpy.zeros((len(b) - 1, 2), b.dtype.char)
+                    b1[:, 0] = b[:-1]
+                    b1[:, 1] = b[1:]
+                    d.setBounds(b1)
+                else:
+                    raise CDMSError(
+                        "setdimattribute, bounds improper shape: " +
+                        b.shape)
         else:
             setattr(d, field, value)
 
@@ -524,57 +546,57 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         result = createVariable(self, copy=copyData)
         return result
 
-    def dumps(self,*args,**kargs):
-        ## Probably need something for curv/gen grids
+    def dumps(self, *args, **kargs):
+        # Probably need something for curv/gen grids
+        """ Dumps Variable to a JSON object, args are passed directly to json.dump"""
-        J={}
-        for k,v in self.attributes.iteritems():
-            if k=="autoApiInfo":
+        J = {}
+        for k, v in self.attributes.iteritems():
+            if k == "autoApiInfo":
                 continue
-            J[k]=v
-        J['id']=self.id
-        axes=[]
+            J[k] = v
+        J['id'] = self.id
+        axes = []
         for a in self.getAxisList():
-            ax={}
-            for A,v in a.attributes.iteritems():
-                ax[A]=v
-            ax['id']=a.id
-            ax["_values"]=a[:].tolist()
-            ax["_dtype"]=a[:].dtype.char
+            ax = {}
+            for A, v in a.attributes.iteritems():
+                ax[A] = v
+            ax['id'] = a.id
+            ax["_values"] = a[:].tolist()
+            ax["_dtype"] = a[:].dtype.char
             axes.append(ax)
-        J["_axes"]=axes
-        J["_values"]=self[:].filled(self.fill_value).tolist()
-        J["_fill_value"]=float(self.fill_value)
-        J["_dtype"]=self.typecode()
-        J["_grid"]=None #self.getGrid()
-        return json.dumps(J,*args,**kargs)
+        J["_axes"] = axes
+        J["_values"] = self[:].filled(self.fill_value).tolist()
+        J["_fill_value"] = float(self.fill_value)
+        J["_dtype"] = self.typecode()
+        J["_grid"] = None  # self.getGrid()
+        return json.dumps(J, *args, **kargs)
 
     def isEncoded(self):
         "Transient variables are not encoded"
         return 0
 
-    def __len__ (self):
+    def __len__(self):
         "Length of first dimension"
-        if self.rank()>0:
-            (axis,start,length,true_length) = self.getDomain()[0]
+        if self.rank() > 0:
+            (axis, start, length, true_length) = self.getDomain()[0]
         else:
             length = 0
         return length
 
-    def __str__ (self):
+    def __str__(self):
         return numpy.ma.MaskedArray.__str__(self)
 
-    def __repr__ (self):
+    def __repr__(self):
         return self.id + '\n' + numpy.ma.MaskedArray.__repr__(self) + '\n'
 
     def set_fill_value(self, value):
         "Set missing value attribute and fill value"
         AbstractVariable.setMissing(self, value)
-        #self.__dict__['_fill_value'] = self.missing_value
-        ## Fix submitted by Ghislain Picard, this was broken with numpy 1.5
-        numpy.ma.MaskedArray.set_fill_value(self,value)
+        # self.__dict__['_fill_value'] = self.missing_value
+        # Fix submitted by Ghislain Picard, this was broken with numpy 1.5
+        numpy.ma.MaskedArray.set_fill_value(self, value)
 
-    def setMissing (self, value):
+    def setMissing(self, value):
         "Set missing value attribute and fill value"
         self.set_fill_value(value)
 
@@ -595,7 +617,7 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         """
         return self.tileIndex
 
-    def toVisit(self, filename, format='Vs', sphereRadius=1.0, 
+    def toVisit(self, filename, format='Vs', sphereRadius=1.0,
                 maxElev=0.1):
         """
         Save data to file for postprocessing by the VisIt visualization tool
@@ -604,21 +626,21 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         sphereRadius: radius of the earth
         maxElev: maximum elevation for representation on the sphere
         """
-        import mvSphereMesh
-        import mvVTKSGWriter
-        import mvVsWriter
+        from . import mvSphereMesh
+        from . import mvVTKSGWriter
+        from . import mvVsWriter
         try:
             # required by mvVsWriter
             import tables
         except:
-            # fall back 
+            # fall back
             format = 'VTK'
 
         def generateTimeFileName(filename, tIndex, tIndexMax, suffix):
-            ndigits = len('%d'%tIndexMax)
-            itdigits = len('%d'%tIndex)
-            tiStr = '0'*(ndigits-itdigits) + ('%d'%tIndex)
-            return re.sub(r'\.' + suffix, '_%s.%s' % (tiStr, suffix), 
+            ndigits = len('%d' % tIndexMax)
+            itdigits = len('%d' % tIndex)
+            tiStr = '0' * (ndigits - itdigits) + ('%d' % tIndex)
+            return re.sub(r'\.' + suffix, '_%s.%s' % (tiStr, suffix),
                           filename)
 
         # determine whether data are time dependent
@@ -632,46 +654,48 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
                 counter += 1
                 if axis == 'time':
                     timeIndex = counter
-        
-        if timeAxis == None or timeIndex == -1:
+
+        if timeAxis is None or timeIndex == -1:
             # static data
             if format == 'VTK':
                 vw = mvVTKSGWriter.VTKSGWriter(self, maxElev)
-                if filename.find('.vtk') == -1: 
+                if filename.find('.vtk') == -1:
                     filename += '.vtk'
                 vw.write(filename)
             else:
                 vw = mvVsWriter.VsWriter(self, maxElev)
-                if filename.find('.vsh5') == -1: 
+                if filename.find('.vsh5') == -1:
                     filename += '.vsh5'
                 vw.write(filename)
         else:
             # time dependent data
             tIndexMax = len(timeAxis)
             for tIndex in range(tIndexMax):
-                sliceOp = 'self[' + (':,'*timeIndex) + ('%d,'%tIndex) + '...]'
+                sliceOp = 'self[' + (
+                    ':,' * timeIndex) + ('%d,' %
+                                         tIndex) + '...]'
                 var = eval(sliceOp)
                 if format == 'VTK':
                     if filename.find('.vtk') == -1:
                         filename += '.vtk'
-                    tFilename = generateTimeFileName(filename, 
+                    tFilename = generateTimeFileName(filename,
                                                      tIndex, tIndexMax, 'vtk')
                     vw = mvVTKSGWriter.VTKSGWriter(var, maxElev)
                     vw.write(tFilename)
                 else:
                     if filename.find('.h5') == -1:
                         filename += '.h5'
-                    tFilename = generateTimeFileName(filename, 
+                    tFilename = generateTimeFileName(filename,
                                                      tIndex, tIndexMax, 'h5')
                     vw = mvVsWriter.VsWriter(var, maxElev)
                     vw.write(tFilename)
-       
-    # Following are distributed array methods, they require mpi4py 
+
+    # Following are distributed array methods, they require mpi4py
     # to be installed
 
     def setMPIComm(self, comm):
         """
-        Set the MPI communicator. This is a no-op if MPI 
+        Set the MPI communicator. This is a no-op if MPI
         is not available.
         """
         if HAVE_MPI:
@@ -698,7 +722,7 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
     def exposeHalo(self, ghostWidth=1):
         """
         Expose the halo to other processors. The halo is the region
-        within the local MPI data domain that is accessible to other 
+        within the local MPI data domain that is accessible to other
         processors. The halo encompasses the edge of the data region
         and has thickness ghostWidth.
 
@@ -715,9 +739,9 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
                     # given direction, a 1 represents a layer of
                     # thickness ghostWidth on the high index side,
                     # -1 on the low index side.
-                    winId = tuple( [0 for i in range(dim) ] \
-                                   + [drect] + \
-                                   [0 for i in range(dim+1, ndims) ] )
+                    winId = tuple([0 for i in range(dim)]
+                                  + [drect] +
+                                  [0 for i in range(dim + 1, ndims)])
 
                     slce = slice(0, ghostWidth)
                     if drect == 1:
@@ -726,19 +750,19 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
                     slab = self.__getSlab(dim, slce)
 
                     # create the MPI window
-                    dataSrc = numpy.zeros(self[slab].shape, self.dtype) 
-                    dataDst = numpy.zeros(self[slab].shape, self.dtype) 
+                    dataSrc = numpy.zeros(self[slab].shape, self.dtype)
+                    dataDst = numpy.zeros(self[slab].shape, self.dtype)
                     self.__mpiWindows[winId] = {
                         'slab': slab,
                         'dataSrc': dataSrc,
                         'dataDst': dataDst,
                         'window': MPI.Win.Create(dataSrc, comm=self.__mpiComm),
-                        }
-                
+                    }
+
     def getHaloEllipsis(self, side):
         """
-        Get the ellipsis for a given halo side. 
-        
+        Get the ellipsis for a given halo side.
+
         side - a tuple of zeros and one +1 or -1.  To access
                the "north" side for instance, set side=(1, 0),
                (-1, 0) to access the south side, (0, 1) the east
@@ -746,7 +770,7 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
 
         Return none if halo was not exposed (see exposeHalo)
         """
-        if HAVE_MPI and self.__mpiWindows.has_key(side):
+        if HAVE_MPI and side in self.__mpiWindows:
             return self.__mpiWindows[side]['slab']
         else:
             return None
@@ -754,20 +778,20 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
     def fetchHaloData(self, pe, side):
         """
         Fetch the halo data from another processor. The halo side
-        is a subdomain of the halo that is exposed to other 
+        is a subdomain of the halo that is exposed to other
         processors. It is an error to call this method when
         MPI is not enabled. This is a collective method (must
         be called by all processes), which involves synchronization
         of data among all processors.
 
         pe       -  processor owning the halo data. This is a no
-                    operation when pe is None. 
+                    operation when pe is None.
         side     -  a tuple of zeros and one +1 or -1.  To access
                     the "north" side for instance, set side=(1, 0),
                     (-1, 0) to access the south side, (0, 1) the east
-                    side, etc. 
+                    side, etc.
 
-        Note: collective, all procs must invoke this method. If some 
+        Note: collective, all procs must invoke this method. If some
         processors should not fetch then pass None for pe.
         """
         if HAVE_MPI:
@@ -780,37 +804,37 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
             dataSrc[...] = self[slab]
 
             win = iw['window']
-            win.Fence() # get the data ready
+            win.Fence()  # get the data ready
             if pe is not None:
-                win.Get( [dataDst, self.__mpiType], pe )
-            win.Fence() # make sure the communication completed
+                win.Get([dataDst, self.__mpiType], pe)
+            win.Fence()  # make sure the communication completed
             return dataDst
         else:
-            raise CDMSError, 'Must have MPI to invoke fetchHaloData'
+            raise CDMSError('Must have MPI to invoke fetchHaloData')
 
     def freeHalo(self):
         """
-        Free the MPI windows attached to the halo. This must be 
+        Free the MPI windows attached to the halo. This must be
         called before MPI_Finalize.
         """
         for iw in self.__mpiWindows:
-            self.__mpiWindows[iw]['window'].Free()        
+            self.__mpiWindows[iw]['window'].Free()
 
     def __getSlab(self, dim, slce):
         """
         Get slab. A slab is a multi-dimensional slice extending in
         all directions except along dim where slce applies
-        
+
         dim      - dimension (0=first index, 1=2nd index...)
         slce     - python slice object along dimension dim
-        
+
         return slab
         """
         ndims = len(self.shape)
-        
+
         slab = [ slice(0, None) for i in range(dim) ] \
-                    + [slce] + \
-                  [ slice(0, None) for i in range(dim+1, ndims) ]
+            + [slce] + \
+            [slice(0, None) for i in range(dim + 1, ndims)]
         return tuple(slab)
 
     def __getMPIType(self):
@@ -834,50 +858,54 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
             elif dtyp == numpy.int8:
                 typ = MPI.INT8_T
             else:
-                return None          
+                return None
         else:
             return typ
 
-## PropertiedClasses.set_property(TransientVariable, 'shape', 
-##                                nowrite=1, nodelete=1)
+# PropertiedClasses.set_property(TransientVariable, 'shape',
+# nowrite=1, nodelete=1)
+
 
-def createVariable(*args,**kargs):
-    if kargs.get("fromJSON",False):
+def createVariable(*args, **kargs):
+    if kargs.get("fromJSON", False):
         return fromJSON(*args)
     else:
-        return TransientVariable(*args,**kargs)
+        return TransientVariable(*args, **kargs)
 
-def isVariable (s):
+
+def isVariable(s):
     "Is s a variable?"
     return isinstance(s, AbstractVariable)
 
+
 def asVariable(s, writeable=1):
-    """Returns s if s is a Variable; if writeable is 1, return 
-       s if s is a TransientVariable. If s is not a variable of 
+    """Returns s if s is a Variable; if writeable is 1, return
+       s if s is a TransientVariable. If s is not a variable of
        the desired type, attempt to make it so and return that.
        If we fail raise CDMSError
     """
     target_class = AbstractVariable
-    if writeable: target_class = TransientVariable
+    if writeable:
+        target_class = TransientVariable
     if isinstance(s, target_class):
         return s
     elif isinstance(s, AbstractVariable):
         return s.subSlice()
-    
+
     try:
         result = createVariable(s)
     except CDMSError:
-        result =  None
-    
+        result = None
+
     # if result.dtype.char == numpy.ma.PyObject:
     if issubclass(result.dtype.type, numpy.object_):
         result = None
     if result is None:
-        raise CDMSError, "asVariable could not make a Variable from the input."
+        raise CDMSError("asVariable could not make a Variable from the input.")
     return result
 
 if __name__ == '__main__':
-    for s in [(20,), (4,5)]:
+    for s in [(20,), (4, 5)]:
         x = numpy.arange(20)
         x.shape = s
         t = createVariable(x)
@@ -886,21 +914,24 @@ if __name__ == '__main__':
         assert numpy.ma.allclose(x, t)
         assert t.dtype.char == numpy.int
         assert numpy.ma.size(t) == numpy.ma.size(x)
-        assert numpy.ma.size(t,0) == len(t)
-        assert numpy.ma.allclose(t.getAxis(0)[:], numpy.ma.arange(numpy.ma.size(t,0)))
+        assert numpy.ma.size(t, 0) == len(t)
+        assert numpy.ma.allclose(
+            t.getAxis(0)[:], numpy.ma.arange(numpy.ma.size(t, 0)))
         t.missing_value = -99
         assert t.missing_value == -99
         assert t.fill_value == -99
-    t = createVariable(numpy.ma.arange(5), mask=[0,0,0,1,0])
-    t.set_fill_value (1000)
+    t = createVariable(numpy.ma.arange(5), mask=[0, 0, 0, 1, 0])
+    t.set_fill_value(1000)
     assert t.fill_value == 1000
     assert t.missing_value == 1000
     t.missing_value = -99
     assert t[2] == 2
     t[3] = numpy.ma.masked
     assert t[3] is numpy.ma.masked
-    f = createVariable(numpy.ma.arange(5, typecode=numpy.float32), mask=[0,0,0,1,0])
-    f2 = createVariable(numpy.ma.arange(5, typecode=numpy.float32), mask=[0,0,0,1,0])
+    f = createVariable(
+        numpy.ma.arange(5, typecode=numpy.float32), mask=[0, 0, 0, 1, 0])
+    f2 = createVariable(
+        numpy.ma.arange(5, typecode=numpy.float32), mask=[0, 0, 0, 1, 0])
     f[3] = numpy.ma.masked
     assert f[3] is numpy.ma.masked
     assert numpy.ma.allclose(2.0, f[2])
@@ -910,6 +941,6 @@ if __name__ == '__main__':
     assert t.getdimattribute(0, 'name') == 'fudge'
     f2b = f2.getdimattribute(0, 'bounds')
     t.setdimattribute(0, 'bounds', f2b)
-    assert numpy.ma.allclose(f.getdimattribute(0,'bounds'), f2.getdimattribute(0,'bounds'))
+    assert numpy.ma.allclose(
+        f.getdimattribute(0, 'bounds'), f2.getdimattribute(0, 'bounds'))
     print "Transient Variable test passed ok."
-
diff --git a/Packages/cdms2/Lib/typeconv.py b/Packages/cdms2/Lib/typeconv.py
index 0f2bd2c18..541785877 100644
--- a/Packages/cdms2/Lib/typeconv.py
+++ b/Packages/cdms2/Lib/typeconv.py
@@ -4,21 +4,23 @@ import numpy as np
 
 oldtype2dtype = {'1': np.dtype(np.byte),
                  's': np.dtype(np.short),
-#                 'i': np.dtype(np.intc),
-#                 'l': np.dtype(int),
-#                 'b': np.dtype(np.ubyte),
+                 #                 'i': np.dtype(np.intc),
+                 #                 'l': np.dtype(int),
+                 #                 'b': np.dtype(np.ubyte),
                  'w': np.dtype(np.ushort),
                  'u': np.dtype(np.uintc),
-#                 'f': np.dtype(np.single),
-#                 'd': np.dtype(float),
-#                 'F': np.dtype(np.csingle),
-#                 'D': np.dtype(complex),
-#                 'O': np.dtype(object),
-#                 'c': np.dtype('c'),
+                 #                 'f': np.dtype(np.single),
+                 #                 'd': np.dtype(float),
+                 #                 'F': np.dtype(np.csingle),
+                 #                 'D': np.dtype(complex),
+                 #                 'O': np.dtype(object),
+                 #                 'c': np.dtype('c'),
                  None: np.dtype(int)
-    }
+                 }
 
 # converts typecode=None to int
+
+
 def convtypecode(typecode, dtype=None):
     if dtype is None:
         try:
@@ -28,8 +30,10 @@ def convtypecode(typecode, dtype=None):
     else:
         return dtype
 
-#if both typecode and dtype are None
+# if both typecode and dtype are None
 #  return None
+
+
 def convtypecode2(typecode, dtype=None):
     if dtype is None:
         if typecode is None:
@@ -45,10 +49,12 @@ def convtypecode2(typecode, dtype=None):
 _changedtypes = {'B': 'b',
                  'b': '1',
                  'h': 's',
-##                  'H': 'w',
+                 # 'H': 'w',
                  'I': 'u'}
 
+
 class _oldtypecodes(dict):
+
     def __getitem__(self, obj):
         char = np.dtype(obj).char
         try:
diff --git a/Packages/cdms2/Lib/variable.py b/Packages/cdms2/Lib/variable.py
index 23650e3d8..f271acc1d 100644
--- a/Packages/cdms2/Lib/variable.py
+++ b/Packages/cdms2/Lib/variable.py
@@ -1,20 +1,20 @@
-## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
 
 """
 DatasetVariable: Dataset-based variables
 """
 from cdms2 import Cdunif
 import numpy
-import cdmsNode
+from . import cdmsNode
 import cdtime
 import copy
 import os
 import sys
-import cdmsobj
-from cdmsobj import CdmsObj, getPathFromTemplate, Max32int
-from avariable import AbstractVariable
-from sliceut import *
-from error import CDMSError
+from . import cdmsobj
+from .cdmsobj import CdmsObj, getPathFromTemplate, Max32int
+from .avariable import AbstractVariable
+from .sliceut import *
+from .error import CDMSError
 
 InvalidGridElement = "Grid domain elements are not yet implemented: "
 InvalidRegion = "Invalid region: "
@@ -24,44 +24,47 @@ TooManyPartitions = "Variable has too many partitioned axes, max is two: "
 WriteNotImplemented = "Dataset write operation not implemented"
 FileClosed = "Cannot read from closed file or dataset, variable: "
 
+
 def timeindex(value, units, basetime, delta, delunits, calendar):
     """ Calculate (t - basetime)/delu
     where t = reltime(value, units)
     and delu is the time interval (delta, delunits) (e.g., 1 month).
     """
     tval = cdtime.reltime(value, units)
-    tounits = "%s since %s"%(delunits, basetime)
+    tounits = "%s since %s" % (delunits, basetime)
     newval = tval.torel(tounits, calendar)
-    return int(newval.value/delta)
+    return int(newval.value / delta)
+
 
 class DatasetVariable(AbstractVariable):
 
-    def __init__(self,parent,id, variableNode=None):
+    def __init__(self, parent, id, variableNode=None):
         """ "Variable (parent, variableNode=None)"
            variableNode is the variable tree node, if any.
            parent is the containing dataset instance.
         """
-        AbstractVariable.__init__ (self, parent, variableNode)
-        val = self.__cdms_internals__ + ['domain','name_in_file']
+        AbstractVariable.__init__(self, parent, variableNode)
+        val = self.__cdms_internals__ + ['domain', 'name_in_file']
         self.___cdms_internals__ = val
         self.id = id
         self.domain = []
         # Get self.name_in_file from the .xml file if present
         if not hasattr(self, 'name_in_file'):
             self.name_in_file = id
-            
+
         # if self.attributes.has_key('name_in_file'):
         #     self.name_in_file = self.attributes['name_in_file']
-        if variableNode is not None:          
-            self._numericType_ = cdmsNode.CdToNumericType.get(variableNode.datatype)
+        if variableNode is not None:
+            self._numericType_ = cdmsNode.CdToNumericType.get(
+                variableNode.datatype)
         else:
             self._numericType_ = numpy.float
         assert self.id is not None
-        
-    def __len__ (self):
+
+    def __len__(self):
         "Length of first dimension"
-        if len(self.domain)>0:
-            (axis,start,length,true_length) = self.domain[0]
+        if len(self.domain) > 0:
+            (axis, start, length, true_length) = self.domain[0]
         else:
             length = 0
 
@@ -72,33 +75,34 @@ class DatasetVariable(AbstractVariable):
 #            parentid = self.parent.id
 #        else:
 #            parentid = "**CLOSED**"
-#        return "<Variable: %s, dataset: %s, shape: %s>"%(self.id, parentid, `self.shape`)
+# return "<Variable: %s, dataset: %s, shape: %s>"%(self.id, parentid,
+# `self.shape`)
 
     def __getitem__(self, key):
         if self.parent is None:
-            raise CDMSError, FileClosed+str(self.id)
+            raise CDMSError(FileClosed + str(self.id))
         return AbstractVariable.__getitem__(self, key)
-        
+
     def getValue(self, squeeze=1):
         """Return the entire set of values."""
         if self.parent is None:
-            raise CDMSError, FileClosed+self.id
+            raise CDMSError(FileClosed + self.id)
         return self.getSlice(Ellipsis, squeeze=squeeze)
-    
+
     def __getslice__(self, low, high):
         if self.parent is None:
-            raise CDMSError, FileClosed+self.id
+            raise CDMSError(FileClosed + self.id)
 
         # Hack to prevent netCDF overflow error on 64-bit architectures
         high = min(Max32int, high)
-        
+
         return AbstractVariable.__getslice__(self, low, high)
 
     def __setitem__(self, index, value):
-        raise CDMSError, WriteNotImplemented
+        raise CDMSError(WriteNotImplemented)
 
     def __setslice__(self, low, high, value):
-        raise CDMSError, WriteNotImplemented
+        raise CDMSError(WriteNotImplemented)
 
     def _getShape(self):
         return self.getShape()
@@ -108,19 +112,19 @@ class DatasetVariable(AbstractVariable):
         return numpy.dtype(tc)
 
     def getShape(self):
-        shape=[]
-        for (axis,start,length,true_length) in self.domain:
+        shape = []
+        for (axis, start, length, true_length) in self.domain:
             shape.append(length)
         return tuple(shape)
 
-    def typecode (self):
-      return numpy.dtype(self._numericType_).char
+    def typecode(self):
+        return numpy.dtype(self._numericType_).char
 
     def size(self):
         "Number of elements."
         n = 1
         for k in self.shape:
-            n = k*n
+            n = k * n
         return n
 
     def initDomain(self, axisdict, griddict):
@@ -133,9 +137,9 @@ class DatasetVariable(AbstractVariable):
             if domelem is None:
                 domelem = griddict.get(dename)
                 if grid is None:
-                    raise CDMSError, NoSuchAxisOrGrid + dename
+                    raise CDMSError(NoSuchAxisOrGrid + dename)
                 else:
-                    raise CDMSError, InvalidGridElement + dename
+                    raise CDMSError(InvalidGridElement + dename)
             partlenstr = denode.getExternalAttr('partition_length')
             if partlenstr is not None:
                 truelen = int(partlenstr)
@@ -145,26 +149,27 @@ class DatasetVariable(AbstractVariable):
 
     # Get the template
     def getTemplate(self):
-        if hasattr(self,'template'):
+        if hasattr(self, 'template'):
             template = self.template
-        elif hasattr(self.parent,'template'): 
+        elif hasattr(self.parent, 'template'):
             template = self.parent.template
         else:
             template = None
         return template
 
-    def getAxis (self, n):
-        if n < 0: n = n + self.rank()
+    def getAxis(self, n):
+        if n < 0:
+            n = n + self.rank()
         return self.domain[n][0]
 
-    def getDomain (self):
+    def getDomain(self):
         return self.domain
 
     # Get the paths associated with the interval region specified
     # by 'intervals'. This incorporates most of the logic of __getitem__,
     # without actually reading the data.
-    # 
-    # 'specs' is a list of interval range specifications as defined 
+    #
+    # 'specs' is a list of interval range specifications as defined
     # for getSlice.
     #
     # The function returns a list of tuples of the form (path,slicelist),
@@ -176,11 +181,11 @@ class DatasetVariable(AbstractVariable):
     #   f = Cdunif.CdunifFile(path,'r')
     #   var = f.variables[self.name_in_file]
     #   data = apply(var.getitem,slicelist)
-    #   
+    #
     def getPaths(self, *specs, **keys):
 
         # Create an equivalent list of slices
-        speclist = self._process_specs (specs, keys)
+        speclist = self._process_specs(specs, keys)
         slicelist = self.specs2slices(speclist)
 
         # Generate the filelist
@@ -190,20 +195,20 @@ class DatasetVariable(AbstractVariable):
         result = []
         if partitionSlices is None:
             pass
-        elif npart==0:
+        elif npart == 0:
             filename, slicelist = partitionSlices
             if filename is not None:
                 result.append((filename, tuple(slicelist)))
-        elif npart==1:
+        elif npart == 1:
             for filename, slicelist in partitionSlices:
                 if filename is not None:
                     result.append((filename, tuple(slicelist)))
-        elif npart==2:
+        elif npart == 2:
             for filelist in partitionSlices:
                 for filename, slicelist in filelist:
                     if filename is not None:
                         result.append((filename, tuple(slicelist)))
-                
+
         return result
 
     def genMatch(self, axis, interval, matchnames):
@@ -217,34 +222,34 @@ class DatasetVariable(AbstractVariable):
         returns the modified matchnames tuple.
         """
         if axis.isTime():
-            if hasattr(self.parent,'cdms_filemap'):
+            if hasattr(self.parent, 'cdms_filemap'):
                 start = interval[0]
                 end = interval[1]
             else:                       # Use template method
                 time0 = axis[interval[0]]
-                time1 = axis[interval[1]-1]
-                isabs = (axis.units.find(" as ")!=-1)
+                time1 = axis[interval[1] - 1]
+                isabs = (axis.units.find(" as ") != -1)
                 if isabs:
-                    start = cdtime.abstime(time0,axis.units)
-                    end = cdtime.abstime(time1,axis.units)
+                    start = cdtime.abstime(time0, axis.units)
+                    end = cdtime.abstime(time1, axis.units)
                 else:
                     cal = axis.getCalendar()
-                    start = cdtime.reltime(time0,axis.units).tocomp(cal)
-                    end = cdtime.reltime(time1,axis.units).tocomp(cal)
+                    start = cdtime.reltime(time0, axis.units).tocomp(cal)
+                    end = cdtime.reltime(time1, axis.units).tocomp(cal)
             matchnames[1] = start
             matchnames[2] = end
         elif axis.isForecast():
             start = axis.getValue()[interval[0]]
-            end   = axis.getValue()[interval[1]-1]
+            end = axis.getValue()[interval[1] - 1]
             matchnames[5] = start
             matchnames[6] = end
         else:
-            if hasattr(self.parent,'cdms_filemap'):
+            if hasattr(self.parent, 'cdms_filemap'):
                 start = interval[0]
                 end = interval[1]
             else:
                 start = int(axis[interval[0]])
-                end = int(axis[interval[1]-1])
+                end = int(axis[interval[1] - 1])
             matchnames[3] = start
             matchnames[4] = end
 
@@ -254,12 +259,13 @@ class DatasetVariable(AbstractVariable):
         """Lookup or generate the file path, depending on whether a filemap
         or template is present.
         """
-        if hasattr(self.parent,'cdms_filemap'):
+        if hasattr(self.parent, 'cdms_filemap'):
             id, tstart, tend, levstart, levend, fcstart, fcend = matchnames
-            filename = self.parent._filemap_[(self.id, tstart, levstart, fcstart)]
+            filename = self.parent._filemap_[
+                (self.id, tstart, levstart, fcstart)]
             # ... filemap uses dataset IDs
         else:
-            filename = getPathFromTemplate(template,matchnames)
+            filename = getPathFromTemplate(template, matchnames)
         return filename
 
     def getPartition(self, axis):
@@ -268,7 +274,7 @@ class DatasetVariable(AbstractVariable):
         get the partition from the _varpart_ attribute, otherwise (for templating) use
         axis.partition.
         """
-        if hasattr(self.parent,'cdms_filemap'):
+        if hasattr(self.parent, 'cdms_filemap'):
             if axis.isTime():
                 partition = self._varpart_[0]
             elif axis.isForecast():
@@ -279,7 +285,7 @@ class DatasetVariable(AbstractVariable):
             partition = axis.partition
         return partition
 
-    def expertPaths (self, slist):
+    def expertPaths(self, slist):
         """ expertPaths(self, slicelist)
         takes a list of slices,
         returns a 3-tuple: (npart, dimensionlist, partitionSlices) where:
@@ -316,7 +322,7 @@ class DatasetVariable(AbstractVariable):
 
         # Handle rank-0 variables separately
         if self.rank() == 0:
-            matchnames = [realid,None,None,None,None,None,None]
+            matchnames = [realid, None, None, None, None, None, None]
             filename = self.getFilePath(matchnames, template)
 
             result = (0, (), (filename, []))
@@ -325,162 +331,164 @@ class DatasetVariable(AbstractVariable):
         # Find the number of partitioned axes
         npart = 0
         ndim = 0
-        for (axis,start,length,true_length) in self.domain:
-            if hasattr(axis,'partition'):
-                npart = npart+1
-                if npart==1:
+        for (axis, start, length, true_length) in self.domain:
+            if hasattr(axis, 'partition'):
+                npart = npart + 1
+                if npart == 1:
                     part1 = axis
                     npart1 = ndim
-                elif npart==2:
+                elif npart == 2:
                     part2 = axis
                     npart2 = ndim
                 else:
-                    raise CDMSError,  TooManyPartitions + variable.id
-            ndim = ndim+1
+                    raise CDMSError(TooManyPartitions + variable.id)
+            ndim = ndim + 1
 
         # If no partitioned axes, just read the data
-        if npart==0:
-            matchnames = [realid,None,None,None,None,None,None]
+        if npart == 0:
+            matchnames = [realid, None, None, None, None, None, None]
             filename = self.getFilePath(matchnames, template)
             result = (0, (), (filename, slicelist))
 
         # If one partitioned axes:
-        elif npart==1:
+        elif npart == 1:
 
             # intersect the slice and partition for that axis
             slice1 = slicelist[npart1]
-            (axis,startelem,length,true_length) = self.domain[npart1]
+            (axis, startelem, length, true_length) = self.domain[npart1]
             partition = slicePartition(slice1, self.getPartition(axis))
-            if partition==[]:
+            if partition == []:
                 return (1, (npart1,), None)
 
             # For each (interval, partslice) in the partition:
             resultlist = []
             (firstinterval, firstslice) = partition[0]
             prevhigh = firstinterval[0]
-            for (interval,partslice) in partition:
+            for (interval, partslice) in partition:
 
                 # If the previous interval high is less than
                 # the current interval low value, interpose
                 # missing data.
                 low = interval[0]
-                if prevhigh<low:
-                    missing_interval = (prevhigh,low)
+                if prevhigh < low:
+                    missing_interval = (prevhigh, low)
                     missing_slice = sliceIntersect(slice1, missing_interval)
 
                     # Note: if the slice has a stride>1, it might not intersect,
                     # so don't interpose missing data in this case.
                     if missing_slice is not None:
                         slicelist[npart1] = missing_slice
-                        resultlist.append((None,copy.copy(slicelist)))
+                        resultlist.append((None, copy.copy(slicelist)))
                 prevhigh = interval[1]
 
                 # generate the filename
-                matchnames = [realid, None, None, None, None,None,None]
+                matchnames = [realid, None, None, None, None, None, None]
                 matchnames = self.genMatch(axis, interval, matchnames)
                 filename = self.getFilePath(matchnames, template)
 
                 # adjust the partslice for the interval offset
                 # and replace in the slice list
-                filestart = partslice.start-interval[0]
-                filestop = partslice.stop-interval[0]
-                fileslice = slice(filestart,filestop,partslice.step)
+                filestart = partslice.start - interval[0]
+                filestop = partslice.stop - interval[0]
+                fileslice = slice(filestart, filestop, partslice.step)
                 slicelist[npart1] = fileslice
 
-                resultlist.append((filename,copy.copy(slicelist)))
+                resultlist.append((filename, copy.copy(slicelist)))
 
-            result = (1,(npart1,),resultlist)
+            result = (1, (npart1,), resultlist)
 
         # If two partitioned axes, 2-D version of previous case
-        if npart==2:
+        if npart == 2:
             slice1 = slicelist[npart1]
             slice2 = slicelist[npart2]
-            (axis1,startelem1,length1,true_length1) = self.domain[npart1]
-            (axis2,startelem2,length2,true_length2) = self.domain[npart2]
+            (axis1, startelem1, length1, true_length1) = self.domain[npart1]
+            (axis2, startelem2, length2, true_length2) = self.domain[npart2]
             partition1 = slicePartition(slice1, self.getPartition(axis1))
             partition2 = slicePartition(slice2, self.getPartition(axis2))
-            if partition1==[] or partition2==[]:
-                return (2, (npart1,npart2), None)
+            if partition1 == [] or partition2 == []:
+                return (2, (npart1, npart2), None)
 
             # For each (interval, partslice) in the partition:
             resultlist = []
             (firstinterval1, firstslice1) = partition1[0]
             prevhigh1 = firstinterval1[0]
-            for (interval1,partslice1) in partition1:
+            for (interval1, partslice1) in partition1:
 
                 # If the previous interval high is less than
                 # the current interval low value, interpose
                 # missing data.
                 low = interval1[0]
-                if prevhigh1<low:
-                    missing_interval = (prevhigh1,low)
+                if prevhigh1 < low:
+                    missing_interval = (prevhigh1, low)
                     missing_slice = sliceIntersect(slice1, missing_interval)
                     if missing_slice is not None:
                         slicelist[npart1] = missing_slice
-                        resultlist.append( [(None,copy.copy(slicelist))] )
+                        resultlist.append([(None, copy.copy(slicelist))])
                 prevhigh1 = interval1[1]
 
                 # generate matchnames
-                matchnames = [realid, None, None, None, None,None,None]
+                matchnames = [realid, None, None, None, None, None, None]
                 matchnames = self.genMatch(axis1, interval1, matchnames)
 
                 # adjust the partslice for the interval offset
                 # and replace in the slice list
-                filestart = partslice1.start-interval1[0]
-                filestop = partslice1.stop-interval1[0]
-                fileslice = slice(filestart,filestop,partslice1.step)
+                filestart = partslice1.start - interval1[0]
+                filestop = partslice1.stop - interval1[0]
+                fileslice = slice(filestart, filestop, partslice1.step)
                 slicelist[npart1] = fileslice
 
                 chunklist = []
                 (firstinterval2, firstslice2) = partition2[0]
                 prevhigh2 = firstinterval2[0]
-                for (interval2,partslice2) in partition2:
+                for (interval2, partslice2) in partition2:
 
                     # If the previous interval high is less than
                     # the current interval low value, interpose
                     # missing data.
                     low = interval2[0]
-                    if prevhigh2<low:
-                        missing_interval = (prevhigh2,low)
-                        missing_slice = sliceIntersect(slice1, missing_interval)
+                    if prevhigh2 < low:
+                        missing_interval = (prevhigh2, low)
+                        missing_slice = sliceIntersect(
+                            slice1, missing_interval)
                         if missing_slice is not None:
                             slicelist[npart2] = missing_slice
-                            chunklist.append((None,copy.copy(slicelist)))
+                            chunklist.append((None, copy.copy(slicelist)))
                     prevhigh2 = interval2[1]
 
                     # generate the filename
                     matchnames = self.genMatch(axis2, interval2, matchnames)
                     filename = self.getFilePath(matchnames, template)
 
-                    filestart = partslice2.start-interval2[0]
-                    filestop = partslice2.stop-interval2[0]
-                    fileslice = slice(filestart,filestop,partslice2.step)
+                    filestart = partslice2.start - interval2[0]
+                    filestop = partslice2.stop - interval2[0]
+                    fileslice = slice(filestart, filestop, partslice2.step)
                     slicelist[npart2] = fileslice
 
-                    chunklist.append((filename,copy.copy(slicelist)))
+                    chunklist.append((filename, copy.copy(slicelist)))
 
                 resultlist.append(chunklist)
 
-            result = (2,(npart1,npart2),resultlist)
+            result = (2, (npart1, npart2), resultlist)
 
         return result
 
-    def expertSlice (self, initslist):
+    def expertSlice(self, initslist):
 
         # Handle negative slices
-        revlist = []                    # Slices to apply to result if reversals needed
+        revlist = []
+            # Slices to apply to result if reversals needed
         slist = []                      # Slices with positive strides
         haveReversals = 0               # True iff result array needs reversing
-        i=0
+        i = 0
         for s in initslist:
-            if s.step<0:
+            if s.step < 0:
                 axislen = self.shape[i]
-                slist.append(reverseSlice(s,axislen))
-                revlist.append(slice(None,None,-1))
+                slist.append(reverseSlice(s, axislen))
+                revlist.append(slice(None, None, -1))
                 haveReversals = 1
             else:
                 slist.append(s)
-                revlist.append(slice(None,None,1))
+                revlist.append(slice(None, None, 1))
             i += 1
 
         # This does most of the work
@@ -497,13 +505,13 @@ class DatasetVariable(AbstractVariable):
 
         # If no intersection, return an 'empty' array.
         if partitionSlices is None:
-            return numpy.ma.zeros((0,),self._numericType_)
+            return numpy.ma.zeros((0,), self._numericType_)
 
         # Handle rank-0 variables separately
         if self.rank() == 0:
             filename, dumlist = partitionSlices
 
-            f = self.parent.openFile(filename,'r')
+            f = self.parent.openFile(filename, 'r')
             try:
                 var = f.variables[self.name_in_file]
                 result = var.getValue()
@@ -512,32 +520,36 @@ class DatasetVariable(AbstractVariable):
             return result
 
         # If no partitioned axes, just read the data
-        if npart==0:
+        if npart == 0:
             filename, slicelist = partitionSlices
 
-            f = self.parent.openFile(filename,'r')
+            f = self.parent.openFile(filename, 'r')
             try:
                 var = f.variables[self.name_in_file]
-                if fci==None:
-                    result = self._returnArray(apply(var.getitem,tuple(slicelist)),0)
+                if fci is None:
+                    result = self._returnArray(
+                        var.getitem(*tuple(slicelist)),
+                        0)
                 else:
                     # If there's a forecast axis, the file doesn't know about it so
                     # don't use it in slicing data out of the file.
-                    result = self._returnArray( apply( var.getitem, \
-                                   tuple( slicelist[0:fci]+slicelist[fci+1:] ) ), \
-                                                0 )
+                    result = self._returnArray(
+                        var.getitem(
+                            *tuple(slicelist[0:fci] + slicelist[fci + 1:])),
+                                                0)
                     # But the result still needs an index in the forecast direction,
-                    # which is simple to do because there is only one forecast per file:
-                    result.resize( map(lenSlice,slicelist) )
+                    # which is simple to do because there is only one forecast
+                    # per file:
+                    result.resize(map(lenSlice, slicelist))
 
             finally:
                 f.close()
             sh = result.shape
             if 0 in sh:
-                raise CDMSError, IndexError + 'Coordinates out of Domain'
+                raise CDMSError(IndexError + 'Coordinates out of Domain')
 
         # If one partitioned axes:
-        elif npart==1:
+        elif npart == 1:
 
             npart1 = idims[0]
             resultlist = []
@@ -546,44 +558,47 @@ class DatasetVariable(AbstractVariable):
                 # If the slice is missing, interpose missing data
                 if filename is None:
                     shapelist = map(lenSlice, slicelist)
-                    chunk = numpy.ma.zeros(tuple(shapelist),self._numericType_)
+                    chunk = numpy.ma.zeros(
+                        tuple(shapelist),
+                        self._numericType_)
                     chunk[...] = numpy.ma.masked
 
                 # else read the data and close the file
                 else:
-                    f = self.parent.openFile(filename,'r')
+                    f = self.parent.openFile(filename, 'r')
                     try:
                         var = f.variables[self.name_in_file]
-                        if fci==None:
-                            chunk = apply(var.getitem,tuple(slicelist))
+                        if fci is None:
+                            chunk = var.getitem(*tuple(slicelist))
                         else:
                             # If there's a forecast axis, the file doesn't know about it so
                             # don't use it in slicing data out of the file.
-                            chunk = apply( var.getitem, \
-                                           tuple( slicelist[0:fci]+slicelist[fci+1:] ) )
+                            chunk = var.getitem(
+                                *tuple(slicelist[0:fci] + slicelist[fci + 1:]))
                             # But the chunk still needs an index in the forecast direction,
-                            # which is simple to do because there is only one forecast per file:
-                            chunk.resize( map(lenSlice,slicelist) )
+                            # which is simple to do because there is only one
+                            # forecast per file:
+                            chunk.resize(map(lenSlice, slicelist))
 
                     finally:
                         f.close()
                     sh = chunk.shape
                     if 0 in sh:
-                        raise CDMSError, 'Coordinates out of Domain'
+                        raise CDMSError('Coordinates out of Domain')
 
-                resultlist.append(self._returnArray(chunk,0))
+                resultlist.append(self._returnArray(chunk, 0))
 
             # Combine the chunks into a single array
             # Note: This works because slicelist is the same length
             # as the domain, and var.getitem returns a chunk
             # with singleton dimensions included. This means that
             # npart1 corresponds to the correct dimension of chunk.
-            result = numpy.ma.concatenate(resultlist,axis=npart1)
+            result = numpy.ma.concatenate(resultlist, axis=npart1)
             for chunk in resultlist:
                 del(chunk)
 
         # If two partitioned axes, 2-D version of previous case
-        if npart==2:
+        if npart == 2:
             npart1, npart2 = idims
 
             resultlist = []
@@ -594,42 +609,45 @@ class DatasetVariable(AbstractVariable):
                     # If the slice is missing, interpose missing data
                     if filename is None:
                         shapelist = map(lenSlice, slicelist)
-                        chunk = numpy.ma.zeros(tuple(shapelist),self._numericType_)
+                        chunk = numpy.ma.zeros(
+                            tuple(shapelist),
+                            self._numericType_)
                         chunk[...] = numpy.ma.masked
 
                     # else read the data and close the file
                     else:
-                        f = self.parent.openFile(filename,'r')
+                        f = self.parent.openFile(filename, 'r')
                         try:
                             var = f.variables[self.name_in_file]
-                            if fci==None:
-                                chunk = apply(var.getitem,tuple(slicelist))
+                            if fci is None:
+                                chunk = var.getitem(*tuple(slicelist))
                             else:
                                 # If there's a forecast axis, the file doesn't know about it so
                                 # don't use it in slicing data out of the file.
-                                chunk = apply( var.getitem, \
-                                               tuple( slicelist[0:fci]+slicelist[fci+1:] ) )
+                                chunk = var.getitem(
+                                    *tuple(slicelist[0:fci] + slicelist[fci + 1:]))
                                 # But the chunk still needs an index in the forecast direction,
-                                # which is simple to do because there is only one forecast per file:
-                                chunk.resize( map(lenSlice,slicelist) )
+                                # which is simple to do because there is only
+                                # one forecast per file:
+                                chunk.resize(map(lenSlice, slicelist))
 
                         finally:
                             f.close()
                         sh = chunk.shape
                         if 0 in sh:
-                            raise CDMSError, 'Coordinates out of Domain'
-                    chunklist.append(self._returnArray(chunk,0))
+                            raise CDMSError('Coordinates out of Domain')
+                    chunklist.append(self._returnArray(chunk, 0))
 
                 # Note: This works because slicelist is the same length
                 # as the domain, and var.getitem returns a chunk
                 # with singleton dimensions included. This means that
                 # npart1 corresponds to the correct dimension of chunk.
-                bigchunk = numpy.ma.concatenate(chunklist,axis=npart2)
+                bigchunk = numpy.ma.concatenate(chunklist, axis=npart2)
                 for chunk in chunklist:
                     del(chunk)
                 resultlist.append(bigchunk)
 
-            result = numpy.ma.concatenate(resultlist,axis=npart1)
+            result = numpy.ma.concatenate(resultlist, axis=npart1)
             for bigchunk in resultlist:
                 del(bigchunk)
 
@@ -640,16 +658,15 @@ class DatasetVariable(AbstractVariable):
 
         return result
 
+    shape = property(_getShape, None)
+# shape = _getShape
+    dtype = property(_getdtype, None)
 
-    shape = property(_getShape,None)
-##     shape = _getShape
-    dtype = property(_getdtype,None)
-    
-## PropertiedClasses.set_property (DatasetVariable, 'shape', 
-##                                   DatasetVariable._getShape, nowrite=1,
-##                                   nodelete=1)
-## PropertiedClasses.set_property (DatasetVariable, 'dtype', 
-##                                   DatasetVariable._getdtype, nowrite=1,
-##                                   nodelete=1)
+# PropertiedClasses.set_property (DatasetVariable, 'shape',
+# DatasetVariable._getShape, nowrite=1,
+# nodelete=1)
+# PropertiedClasses.set_property (DatasetVariable, 'dtype',
+# DatasetVariable._getdtype, nowrite=1,
+# nodelete=1)
 
-## internattr.add_internal_attribute(DatasetVariable, 'domain')
+# internattr.add_internal_attribute(DatasetVariable, 'domain')
diff --git a/Packages/cdms2/Lib/xlink.py b/Packages/cdms2/Lib/xlink.py
index 870b5fafd..6270a7509 100644
--- a/Packages/cdms2/Lib/xlink.py
+++ b/Packages/cdms2/Lib/xlink.py
@@ -1,9 +1,11 @@
 """
 CDMS Xlink objects - pointers to other objects
 """
+
+
 class Xlink(CdmsObj):
-    def __init__(self,xlinkNode=None):
-        assert xlinkNode is None or xlinkNode.tag=='xlink',\
-               'Node is not a link node'
-        CdmsObj.__init__(self,xlinkNode)
 
+    def __init__(self, xlinkNode=None):
+        assert xlinkNode is None or xlinkNode.tag == 'xlink',\
+            'Node is not a link node'
+        CdmsObj.__init__(self, xlinkNode)
-- 
GitLab


From 751866f6a32228f8a32432cb65af03cf3427a791 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 27 Apr 2016 09:36:48 -0700
Subject: [PATCH 037/196] beginning of addressing flake8 but giving up

---
 Packages/cdms2/Lib/restApi.py       | 126 ++++------------------------
 Packages/cdms2/Lib/selectors.py     |   4 +-
 Packages/cdms2/Lib/slabinterface.py |   3 +-
 Packages/cdms2/Lib/tvariable.py     |  26 +++---
 Packages/cdms2/Lib/typeconv.py      |   2 +-
 Packages/cdms2/Lib/variable.py      |  15 +---
 Packages/cdms2/Lib/xlink.py         |  11 ---
 7 files changed, 32 insertions(+), 155 deletions(-)
 delete mode 100644 Packages/cdms2/Lib/xlink.py

diff --git a/Packages/cdms2/Lib/restApi.py b/Packages/cdms2/Lib/restApi.py
index 0ee17424d..159d50112 100644
--- a/Packages/cdms2/Lib/restApi.py
+++ b/Packages/cdms2/Lib/restApi.py
@@ -1,4 +1,3 @@
-import cdms2
 import urllib2
 import xml.etree.ElementTree
 try:
@@ -173,7 +172,7 @@ class esgfConnection(object):
         return val
 
     def __setitem__(self, key, value):
-        if not key in self.params.keys():
+        if key not in self.params.keys():
             raise self.EsgfObjectException(
                 "Invalid key: %s, valid keys are: %s" %
                 (repr(key), repr(self.params.keys())))
@@ -183,7 +182,7 @@ class esgfConnection(object):
     def _search(self, search="", searchType=None, stringType=False):
         if searchType is None:
             searchType = self.defaultSearchType
-        if not searchType in self.validSearchTypes:
+        if searchType not in self.validSearchTypes:
             raise self.EsgfObjectException(
                 "Valid Search types are: %s" %
                 repr(self.validSearchTypes))
@@ -226,14 +225,8 @@ class esgfConnection(object):
         # params[k]=self[k]
 
         for k in keys.keys():
-            if k == "stringType":
-                stringType = keys[k]
+            if k in ["stringType", "type"]:
                 continue
-            elif k == "type":
-                continue
-            # elif not k in self.searchableKeys:
-            # raise self.EsgfObjectException("Invalid key: %s, valid keys are:
-            # %s" % (repr(k),repr(self.params.keys())))
             if keys[k] is not None:
                 params[k] = keys[k]
 
@@ -309,23 +302,18 @@ class esgfConnection(object):
         resps = self.request(**keys)
         stringType = keys.get("stringType", False)
         if stringType:
-            return resp
+            return resps
         datasets = []
         for resp in resps:
             for r in resp[:]:
                 if r.tag == "result":
                     # Ok let's go thru these datasets
                     for d in r[:]:
-                        # print
-                        # "************************************************"
                         tmpkeys = {}
                         for f in d[:]:
                             k = f.get("name")
                             tmpkeys[k] = self.extractTag(f)
                         if tmpkeys["type"] == "Dataset":
-                            datasetid = tmpkeys["id"]
-                            # print datasetid,self.restPath
-                            # print "KEYS FOR DATASET",keys.keys()
                             datasets.append(
                                 esgfDataset(host=self.host,
                                             port=self.port,
@@ -364,7 +352,7 @@ class esgfDataset(esgfConnection):
                     tmp.replace(")s", ")"))
             elif "project" in keys and keys["project"] == "cmip5":
                 self.datasetids = genutil.StringConstructor(
-                    "%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)")
+                    "%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)")  # noqa
             else:
                 self.datasetids = None
         if isinstance(datasetids, genutil.StringConstructor):
@@ -393,38 +381,16 @@ class esgfDataset(esgfConnection):
         self.keys = self.params.keys
         self.items = self.params.items
         self.values = self.params.values
-        # self.id=self["id"]
         self.params["limit"] = limit
         self.params["offset"] = offset
         self.mapping = mapping
-        # print "SEARCHING DS:",originalKeys
         self.resp = None
         self.cacheTime = None
-#        self.search()
-#        self.remap()
-
-        # Ok now we need to "map" this according to the user wishes
-
-    # def mappedItems():
-    # mapped=[]
-    # mapppoint=self.mapped
-    # for k in self.mapping.keys():
-    # keys=[]
-    # level=[k,mappoint.keys()]
-    # mappoint
+
     def _extractFiles(self, resp, **inKeys):
         # We need to stick in there the bit from Luca to fill in the matching
         # key from facet for now it's empty
         files = []
-        skipped = [
-            "type",
-            "title",
-            "timestamp",
-            "service",
-            "id",
-            "score",
-            "file_url",
-            "service_type"]
         for r in resp[:]:
             if r.tag == "result":
                 for d in r[:][:]:
@@ -433,66 +399,6 @@ class esgfDataset(esgfConnection):
                         k = f.get("name")
                         keys[k] = self.extractTag(f)
                     if keys["type"] == "File":
-                        # if self["id"]=="obs4MIPs.NASA-JPL.AIRS.mon":
-                        # verbose=True
-                        # else:
-                        # verbose=False
-                        # verbose=True
-                        # if verbose: print "OK",keys["variable"],keys["file_id"],self["id"]
-                        # if verbose: print "FILEIDS:",self.fileids
-                        # if verbose: print "Fileids:",self.fileids.template
-                        # if verbose: print "keys:",keys
-                        # if self.fileids is not None:
-                        # try:
-                        # if verbose: print "file:",keys["file_id"],self.fileids.template
-                        # k2 = self.fileids.reverse(keys["file_id"])
-                        # if verbose: print "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@",k2
-                        # for k in k2.keys():
-                        # keys[k]=k2[k]
-                        # except:
-                        # if verbose: print "Failed:",ids[i].text,self.fileids.template
-                        # pass
-                        # if verbose: print "KEYS FOR FILE:",keys.keys()
-                        # if verbose: print "INKEYS:",inKeys.keys()
-                        # matched = True
-                        # matchWithKeys = {}
-                        # for k in self.keys():
-                        # if k in self.originalKeys.keys():
-                        # matchWithKeys[k]=self.originalKeys[k]
-                        # else:
-                        # matchWithKeys[k]=self[k]
-                        # for s in skipped:
-                        # try:
-                        # matchWithKeys.pop(s)
-                        # except:
-                        # pass
-                        # for k in inKeys.keys():
-                        # matchWithKeys[k]=inKeys[k]
-                        # if verbose: print "matching:",matchWithKeys.keys()
-                        # for k in keys.keys():
-                        # if k in matchWithKeys.keys():
-                        # if verbose: print "Testing:",k,keys[k]
-                        # v = matchWithKeys[k]
-                        # if isinstance(v,(str,int,float)):
-                        # if verbose: print "\tComparing with:",v
-                        # if v != keys[k]:
-                        # matched = False
-                        # if verbose: print "\t\tNOPE"
-                        # break
-                        # elif isinstance(v,list):
-                        # if verbose: print "\tComparing with (and %i more):%s"%(len(v),v[0]),v
-                        # if not keys[k] in v:
-                        # matched = False
-                        # if verbose: print "\t\tNOPE"
-                        # break
-                        # else:
-                        # print "\twould compare %s with type: %s if I knew how to" % (str(v),type(v))
-                        # if verbose: print keys["file_id"],matched
-                        # if matched :
-                        # for k in self.keys():
-                        # if not k in keys.keys():
-                        # keys[k]=self[k]
-                        # print "KEYS:",keys
                         files.append(esgfFile(**keys))
         return files
 
@@ -514,7 +420,7 @@ class esgfDataset(esgfConnection):
         if os.path.isdir(target):
             target = os.path.join(target, "esgfDatasetsCache.pckl")
         if os.path.exists(target):
-            f = open(source)
+            f = open(target)
             # dict=eval(bz2.decompress(f.read()))
             dict = eval(f.read())
             f.close()
@@ -560,9 +466,9 @@ class esgfDataset(esgfConnection):
         stringType = keys.get("stringType", False)
         keys.update(self.originalKeys)
         st = ""
-        if not "limit" in keys:
+        if "limit" not in keys:
             keys["limit"] = [self["limit"]]
-        if not "offset" in keys:
+        if "offset" not in keys:
             keys["offset"] = [self["offset"]]
         for k in keys:
             if k in ["searchString", "stringType", ]:
@@ -615,7 +521,8 @@ class esgfFiles(object):
         self.setMapping(mapping)
         self.remap()
         self.projects_dict = {
-            "CMIP5": "%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)"}
+            "CMIP5": "%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)"  # noqa
+            }
 
     def __getitem__(self, item):
         if isinstance(item, int):
@@ -654,7 +561,7 @@ class esgfFiles(object):
                 self.mapping = self.datasetids
             else:
                 for k in self.parent.keys():
-                    if not k in ["limit", "offset", "text"]:
+                    if k not in ["limit", "offset", "text"]:
                         self.mapping += "%%(%s)" % k
         else:
             self.mapping = mapping
@@ -731,24 +638,21 @@ class esgfFiles(object):
                     cont = f[k]
                     if not isinstance(cont, (str, int, float)):
                         break
-                    if not cont in mappoint.keys():
+                    if cont not in mappoint.keys():
                         mappoint[cont] = {}
                 elif k in self.parent.keys():
                     # if verbose: print tabs,k,f[k]
                     nok += 1
                     cont = self[k]
-                    if not cont in mappoint.keys():
+                    if cont not in mappoint.keys():
                         mappoint[cont] = {}
                 elif isinstance(self.fileids, genutil.StringConstructor):
                     try:
                         mapid = self.fileids.reverse(self.parent.id)
-                        # if verbose:
-                        # print "MAPID:",k,mapid
                         if k in mapid.keys():
-                        # if verbose: print tabs,k,mapid[k]
                             nok += 1
                             cont = mapid[k]
-                            if not cont in mappoint.keys():
+                            if cont not in mappoint.keys():
                                 mappoint[cont] = {}
                     except:
                         break
diff --git a/Packages/cdms2/Lib/selectors.py b/Packages/cdms2/Lib/selectors.py
index 2a35705d1..9ec6c6721 100644
--- a/Packages/cdms2/Lib/selectors.py
+++ b/Packages/cdms2/Lib/selectors.py
@@ -1,6 +1,5 @@
 
 """Classes to support easy selection of climate data"""
-import cdtime
 from .axis import axisMatches
 from .error import CDMSError
 from .grid import AbstractRectGrid, defaultRegion, setRegionSpecs, LongitudeType, LatitudeType, TimeType, VerticalType
@@ -203,7 +202,7 @@ class Selector:
            grid is not None or \
            raw != 0 or \
            result is variable:
-     # result is variable when there are no components, for example.
+            # result is variable when there are no components, for example.
             return result.subRegion(squeeze=squeeze, order=order, grid=grid,
                                     raw=raw)
         else:
@@ -371,7 +370,6 @@ class positionalComponent (SelectorComponent):
 
     def specify(self, slab, axes, specifications, confined_by, aux):
         "Find the next unconfined axis and confine it."
-        n = 0
         for i in range(len(axes)):
             if confined_by[i] is None:
                 specifications[i] = self.v
diff --git a/Packages/cdms2/Lib/slabinterface.py b/Packages/cdms2/Lib/slabinterface.py
index f12738ee7..a0fc8910e 100644
--- a/Packages/cdms2/Lib/slabinterface.py
+++ b/Packages/cdms2/Lib/slabinterface.py
@@ -95,7 +95,7 @@ class Slab:
         a = self.getAxis(dim)
         result = []
         for x in std_axis_attributes + a.attributes.keys():
-            if not x in result:
+            if x not in result:
                 result.append(x)
         return result
 
@@ -192,7 +192,6 @@ class Slab:
 def cdms_bounds2cu_bounds(b):
     "Bounds are  len(v) by 2 in cdms but len(v)+1 in cu"
     cub = numpy.ma.zeros(len(b) + 1, numpy.float32)
-    b1 = b.astype(numpy.float32)
     if len(b) > 1:
         if (b[0, 0] < b[0, 1]) == (b[0, 0] < b[-1, 0]):
             cub[0] = b[0, 0]
diff --git a/Packages/cdms2/Lib/tvariable.py b/Packages/cdms2/Lib/tvariable.py
index 9ebcca9c6..0a2b3cdd5 100644
--- a/Packages/cdms2/Lib/tvariable.py
+++ b/Packages/cdms2/Lib/tvariable.py
@@ -43,14 +43,14 @@ def fromJSON(jsn):
                         dtype=a["_dtype"]),
             id=a["id"])
         for k, v in a.iteritems():
-            if not k in ["_values", "id", "_dtype"]:
+            if k not in ["_values", "id", "_dtype"]:
                 setattr(ax, k, v)
         axes.append(ax)
     # Now prep the variable
     V = createVariable(D["_values"], id=D["id"], typecode=D["_dtype"])
     V.setAxisList(axes)
     for k, v in D.iteritems():
-        if not k in ["id", "_values", "_axes", "_grid", "_fill_value", "_dtype", ]:
+        if k not in ["id", "_values", "_axes", "_grid", "_fill_value", "_dtype", ]:
             setattr(V, k, v)
     V.set_fill_value(D["_fill_value"])
     return V
@@ -103,12 +103,10 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         try:
             for nm, val in obj.__dict__.items():
                 if nm[0] == '_':
-# print nm
                     pass
-# self.__dict__[nm]=val
                 else:
                     setattr(self, nm, val)
-        except Exception as err:
+        except Exception:
             pass
         id = getattr(self, 'id', None)
         if id is None:
@@ -193,14 +191,12 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
                     grid = grid.reconcile(
                         axes)  # Make sure grid and axes are consistent
 
-        ncopy = (copy != 0)
-
         # Initialize the geometry
         if grid is not None:
             copyaxes = 0                  # Otherwise grid axes won't match domain.
         if axes is not None:
             self.initDomain(axes, copyaxes=copyaxes)
-                            # Note: clobbers the grid, so set the grid after.
+        # Note: clobbers the grid, so set the grid after.
         if grid is not None:
             self.setGrid(grid)
 
@@ -277,7 +273,7 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         if mask is None:
             try:
                 mask = data.mask
-            except Exception as err:
+            except Exception:
                 mask = numpy.ma.nomask
 
         # Handle the case where ar[i:j] returns a single masked value
@@ -359,7 +355,7 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
     def getDomain(self):
         for i in range(self.rank()):
             if self.__domain[i] is None:
-                junk = self.getAxis(i)  # will force a fill in
+                self.getAxis(i)  # will force a fill in
         return self.__domain
 
     def getAxis(self, n):
@@ -442,7 +438,7 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         maresult = numpy.ma.MaskedArray.astype(self, tc)
         return TransientVariable(
             maresult, copy=0, axes=self.getAxisList(), fill_value=self.fill_value,
-                                 attributes=self.attributes, id=self.id, grid=self.getGrid())
+            attributes=self.attributes, id=self.id, grid=self.getGrid())
 
     def setMaskFromGridMask(self, mask, gridindices):
         """Set the mask for self, given a grid mask and the variable domain
@@ -626,12 +622,11 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         sphereRadius: radius of the earth
         maxElev: maximum elevation for representation on the sphere
         """
-        from . import mvSphereMesh
         from . import mvVTKSGWriter
         from . import mvVsWriter
         try:
             # required by mvVsWriter
-            import tables
+            import tables  # noqa
         except:
             # fall back
             format = 'VTK'
@@ -739,8 +734,7 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
                     # given direction, a 1 represents a layer of
                     # thickness ghostWidth on the high index side,
                     # -1 on the low index side.
-                    winId = tuple([0 for i in range(dim)]
-                                  + [drect] +
+                    winId = tuple([0 for i in range(dim)] + [drect] +
                                   [0 for i in range(dim + 1, ndims)])
 
                     slce = slice(0, ghostWidth)
@@ -832,7 +826,7 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         """
         ndims = len(self.shape)
 
-        slab = [ slice(0, None) for i in range(dim) ] \
+        slab = [slice(0, None) for i in range(dim)] \
             + [slce] + \
             [slice(0, None) for i in range(dim + 1, ndims)]
         return tuple(slab)
diff --git a/Packages/cdms2/Lib/typeconv.py b/Packages/cdms2/Lib/typeconv.py
index 541785877..e63ad64d3 100644
--- a/Packages/cdms2/Lib/typeconv.py
+++ b/Packages/cdms2/Lib/typeconv.py
@@ -1,6 +1,6 @@
+import numpy as np
 __all__ = ['oldtype2dtype', 'convtypecode', 'convtypecode2', 'oldtypecodes']
 
-import numpy as np
 
 oldtype2dtype = {'1': np.dtype(np.byte),
                  's': np.dtype(np.short),
diff --git a/Packages/cdms2/Lib/variable.py b/Packages/cdms2/Lib/variable.py
index f271acc1d..c96dd5e21 100644
--- a/Packages/cdms2/Lib/variable.py
+++ b/Packages/cdms2/Lib/variable.py
@@ -3,17 +3,13 @@
 """
 DatasetVariable: Dataset-based variables
 """
-from cdms2 import Cdunif
 import numpy
 from . import cdmsNode
 import cdtime
 import copy
-import os
-import sys
-from . import cdmsobj
-from .cdmsobj import CdmsObj, getPathFromTemplate, Max32int
+from .cdmsobj import getPathFromTemplate, Max32int
 from .avariable import AbstractVariable
-from .sliceut import *
+from .sliceut import *  # noqa
 from .error import CDMSError
 
 InvalidGridElement = "Grid domain elements are not yet implemented: "
@@ -335,10 +331,8 @@ class DatasetVariable(AbstractVariable):
             if hasattr(axis, 'partition'):
                 npart = npart + 1
                 if npart == 1:
-                    part1 = axis
                     npart1 = ndim
                 elif npart == 2:
-                    part2 = axis
                     npart2 = ndim
                 else:
                     raise CDMSError(TooManyPartitions + variable.id)
@@ -476,7 +470,7 @@ class DatasetVariable(AbstractVariable):
 
         # Handle negative slices
         revlist = []
-            # Slices to apply to result if reversals needed
+        # Slices to apply to result if reversals needed
         slist = []                      # Slices with positive strides
         haveReversals = 0               # True iff result array needs reversing
         i = 0
@@ -500,7 +494,6 @@ class DatasetVariable(AbstractVariable):
         for i in range(len(self.domain)):
             if self.domain[i][0].isForecast():
                 fci = i
-                fcv = initslist[i].start
                 break
 
         # If no intersection, return an 'empty' array.
@@ -536,7 +529,7 @@ class DatasetVariable(AbstractVariable):
                     result = self._returnArray(
                         var.getitem(
                             *tuple(slicelist[0:fci] + slicelist[fci + 1:])),
-                                                0)
+                        0)
                     # But the result still needs an index in the forecast direction,
                     # which is simple to do because there is only one forecast
                     # per file:
diff --git a/Packages/cdms2/Lib/xlink.py b/Packages/cdms2/Lib/xlink.py
deleted file mode 100644
index 6270a7509..000000000
--- a/Packages/cdms2/Lib/xlink.py
+++ /dev/null
@@ -1,11 +0,0 @@
-"""
-CDMS Xlink objects - pointers to other objects
-"""
-
-
-class Xlink(CdmsObj):
-
-    def __init__(self, xlinkNode=None):
-        assert xlinkNode is None or xlinkNode.tag == 'xlink',\
-            'Node is not a link node'
-        CdmsObj.__init__(self, xlinkNode)
-- 
GitLab


From 995fa79e87d50986eb3bc5a976d0c47d3186239a Mon Sep 17 00:00:00 2001
From: Sam Fries <samuelbfries@gmail.com>
Date: Thu, 28 Apr 2016 10:08:15 -0700
Subject: [PATCH 038/196] Update VTKPlots.py

---
 Packages/vcs/vcs/VTKPlots.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 4d870cd15..8cfe83a28 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -755,7 +755,10 @@ class VTKVCSBackend(object):
                 plot.onClosing(cell)
 
     def plotContinents(self, wc, projection, wrap, vp, priority, **kargs):
-        contData = vcs2vtk.prepContinents(self.canvas._continentspath())
+        continents_path = self.canvas._continentspath()
+        if continents_path is None:
+            return (None, 1, 1)
+        contData = vcs2vtk.prepContinents(continents_path)
         contMapper = vtk.vtkPolyDataMapper()
         contMapper.SetInputData(contData)
         contActor = vtk.vtkActor()
-- 
GitLab


From 6ad67a3f9dcfa3b10cb1579b9a17087a1fbf55fe Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Thu, 28 Apr 2016 14:14:37 -0700
Subject: [PATCH 039/196] Added test

---
 testing/vcs/CMakeLists.txt            |  8 +++++-
 testing/vcs/test_vcs_no_continents.py | 38 +++++++++++++++++++++++++++
 2 files changed, 45 insertions(+), 1 deletion(-)
 create mode 100644 testing/vcs/test_vcs_no_continents.py

diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index a5faf6e24..7024dd66a 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -1016,13 +1016,19 @@ cdat_add_test(test_vcs_init_open_sizing
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_init_open_sizing.py
 )
-# Rename baseline
+## Rename baseline
 cdat_add_test(test_vcs_matplotlib_colormap
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_matplotlib_colormap.py
   ${BASELINE_DIR}/test_vcs_matplotlib_colormap.png
 )
 
+cdat_add_test(test_vcs_no_continents
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_no_continents.py
+  ${BASELINE_DIR}/test_vcs_no_continents.png
+)
+
 
 add_subdirectory(vtk_ui)
 add_subdirectory(editors)
diff --git a/testing/vcs/test_vcs_no_continents.py b/testing/vcs/test_vcs_no_continents.py
new file mode 100644
index 000000000..03630d457
--- /dev/null
+++ b/testing/vcs/test_vcs_no_continents.py
@@ -0,0 +1,38 @@
+import cdms2
+import os
+import sys
+import vcs
+
+# Load the clt data:
+dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
+clt = dataFile("clt")
+clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
+          time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
+
+# Initialize canvas:
+canvas = vcs.init()
+canvas.setantialiasing(0)
+canvas.setbgoutputdimensions(1200,1091,units="pixels")
+canvas.drawlogooff()
+
+t1 = vcs.createtemplate()
+t1.scale(.5, "y")
+t1.move(-.15, "y")
+t2 = vcs.createtemplate(source=t1.name)
+t2.move(.5, 'y')
+
+canvas.plot(clt, t1, continents=0, bg=True)
+canvas.plot(clt, t2, continents=1, bg=True)
+
+# Load the image testing module:
+testingDir = os.path.join(os.path.dirname(__file__), "..")
+sys.path.append(testingDir)
+import checkimage
+
+# Create the test image and compare:
+baseline = sys.argv[1]
+testFile = "test_vcs_no_continents.png"
+canvas.png(testFile)
+ret = checkimage.check_result_image(testFile, baseline,
+                                    checkimage.defaultThreshold)
+sys.exit(ret)
-- 
GitLab


From 9e3f9eeed01503090890e4ee9f3b9a3e52768a4d Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Mon, 9 May 2016 15:42:19 -0700
Subject: [PATCH 040/196] Fixed failing baselines

---
 Packages/vcs/vcs/boxfill.py | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/Packages/vcs/vcs/boxfill.py b/Packages/vcs/vcs/boxfill.py
index e17145437..4003606c9 100755
--- a/Packages/vcs/vcs/boxfill.py
+++ b/Packages/vcs/vcs/boxfill.py
@@ -767,8 +767,8 @@ class Gfb(object):
         if dx == 0:
             high_end += .00001
             return [low_end, high_end]
-
-        contourLevels = numpy.arange(low_end, high_end + dx, dx)
+        float_epsilon = numpy.finfo(numpy.float32).eps
+        contourLevels = numpy.arange(low_end, high_end + float_epsilon, dx)
 
         return contourLevels
 
@@ -788,9 +788,10 @@ class Gfb(object):
                 return vcs.mklabels(scale)
             else:
                 # Create our own scale
-                dx = (self.level_2 - self.level_1) / float(len(scale))
+                dx = (self.level_2 - self.level_1) / float(len(scale) - 1)
                 real_values = [self.level_1, self.level_2]
-                levels = numpy.arange(levels[0], levels[-1] + dx, dx)
+                float_epsilon = numpy.finfo(numpy.float32).eps
+                levels = numpy.arange(levels[0], levels[-1] + float_epsilon, dx)
         else:
             real_values = levels
 
-- 
GitLab


From 08d3eba0f2ae9230343f4838ec8db1ab09a67c4b Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Fri, 22 Apr 2016 11:52:46 -0400
Subject: [PATCH 041/196] ENH #1881: Add plot_based_dual_grid option to plot().

Traditionally, we created a point or cell dataset based on the plot requested.
For isofill, isoline and vector we created point datasets, for boxfill and meshfill we
created cell datasets. We keep this behavior for backward compatibility but we
add a parameter plot_based_dual_grid to plot(). If this parameter is missing or it is
True, we have the traditional behavior. If this parameter is False, we create the
dataset that is specified in the file, regardless of the plot requested.
---
 Packages/cdms2/Lib/avariable.py             |  15 +-
 Packages/cdms2/Lib/axis.py                  |  97 +++++--
 Packages/cdms2/Lib/tvariable.py             |   5 +-
 Packages/vcs/vcs/Canvas.py                  |   6 +-
 Packages/vcs/vcs/vcs2vtk.py                 | 274 +++++++-------------
 Packages/vcs/vcs/vcsvtk/boxfillpipeline.py  |   6 +-
 Packages/vcs/vcs/vcsvtk/isofillpipeline.py  |  27 +-
 Packages/vcs/vcs/vcsvtk/isolinepipeline.py  |  32 +--
 Packages/vcs/vcs/vcsvtk/meshfillpipeline.py |  11 +-
 Packages/vcs/vcs/vcsvtk/pipeline2d.py       |  55 +++-
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py   |  33 ++-
 11 files changed, 260 insertions(+), 301 deletions(-)

diff --git a/Packages/cdms2/Lib/avariable.py b/Packages/cdms2/Lib/avariable.py
index dd2a2792d..f15d7a069 100644
--- a/Packages/cdms2/Lib/avariable.py
+++ b/Packages/cdms2/Lib/avariable.py
@@ -363,6 +363,16 @@ class AbstractVariable(CdmsObj, Slab):
             if axisMatches(self.getAxis(i), axis_spec):
                 return i
         return -1
+    
+    def hasCellData(self):
+        '''
+        If any of the variable's axis has explicit bounds, we have cell data
+        otherwise we have point data.
+        '''
+        for axis in self.getAxisList():
+            if (axis.getExplicitBounds() is not None):
+                return True
+        return False
 
     def getAxisListIndex (self, axes=None, omit=None, order=None):
         """Return a list of indices of axis objects;
@@ -511,7 +521,6 @@ class AbstractVariable(CdmsObj, Slab):
                 
         return result
 
-
     # Get an order string, such as "tzyx"
     def getOrder(self, ids=0):
         """getOrder(ids=0) returns the order string, such as tzyx.
@@ -915,7 +924,9 @@ class AbstractVariable(CdmsObj, Slab):
                 else:
                     delta_beg_wrap_dimvalue = ncyclesrev*cycle
 
-                axis.setBounds(axis.getBounds() - delta_beg_wrap_dimvalue)
+                isGeneric = [False]
+                b = axis.getBounds(isGeneric) - delta_beg_wrap_dimvalue
+                axis.setBounds(b, isGeneric=isGeneric[0])
                 
                 axis[:]= (axis[:] - delta_beg_wrap_dimvalue).astype(axis.typecode())
 
diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index 703c0e938..209fbda42 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -105,8 +105,8 @@ def getAutoBounds():
     return _autobounds
 
 # Create a transient axis
-def createAxis(data, bounds=None, id=None, copy=0):
-    return TransientAxis(data, bounds, id, copy=copy)
+def createAxis(data, bounds=None, id=None, copy=0, genericBounds=False):
+    return TransientAxis(data, bounds=bounds, id=id, copy=copy, genericBounds=genericBounds)
 
 # Generate a Gaussian latitude axis, north-to-south
 def createGaussianAxis(nlat):
@@ -959,13 +959,41 @@ class AbstractAxis(CdmsObj):
     def isLinear(self):
         raise CDMSError, MethodNotImplemented
 
-    def getBounds(self):
+    def getBounds(self, isGeneric=None):
+        '''
+        isGeneric is a list with one boolean which says if the bounds
+        are read from file (False) or generated (True)
+        '''
         raise CDMSError, MethodNotImplemented
 
-    # Return None if not explicitly defined
     def getExplicitBounds(self):
+        '''
+        Return None if not explicitly defined
+        This is a way to determine if attributes are defined at cell
+        or at point level. If this function returns None attributes are
+        defined at points, otherwise they are defined at cells
+        '''
         raise CDMSError, MethodNotImplemented
 
+    def getBoundsForDualGrid(self, dualGrid):
+        '''
+        dualGrid changes the type of dataset from the current type to the dual.
+        So, if we have a point dataset we switch to a cell dataset and viceversa.
+        '''
+        explicitBounds = self.getExplicitBounds()
+        if (explicitBounds is None):
+            # point data
+            if (dualGrid):
+                return self.getBounds()
+            else:
+                return None
+        else:
+            # cell data
+            if (dualGrid):
+                return None
+            else:
+                return explicitBounds
+
     def setBounds(self, bounds):
         raise CDMSError, MethodNotImplemented
 
@@ -1279,7 +1307,8 @@ class AbstractAxis(CdmsObj):
         The stride k can be positive or negative. Wraparound is
         supported for longitude dimensions or those with a modulus attribute.
         """
-        fullBounds = self.getBounds()
+        isGeneric = [False]
+        fullBounds = self.getBounds(isGeneric)
         _debug=0
         _debugprefix="SS__XX subaxis "
         
@@ -1359,7 +1388,7 @@ class AbstractAxis(CdmsObj):
             else:
                 bounds = None
         
-        newaxis = TransientAxis(data, bounds, id=self.id, copy=1)
+        newaxis = TransientAxis(data, bounds, id=self.id, copy=1, genericBounds=isGeneric[0])
 
         if self.isLatitude(): newaxis.designateLatitude()
         if self.isLongitude(): newaxis.designateLongitude()
@@ -1440,17 +1469,18 @@ class AbstractAxis(CdmsObj):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
-        b = self.getBounds()
+        isGeneric = [False]
+        b = self.getBounds(isGeneric)
         if copyData==1:
             mycopy = createAxis(copy.copy(self[:]))
         else:
             mycopy = createAxis(self[:])
         mycopy.id = self.id
         try:
-            mycopy.setBounds(b)
+            mycopy.setBounds(b, isGeneric=isGeneric[0])
         except CDMSError:
             b = mycopy.genGenericBounds()
-            mycopy.setBounds(b)
+            mycopy.setBounds(b, isGeneric=False)
         for k, v in self.attributes.items():
            setattr(mycopy, k, v)
         return mycopy
@@ -1570,7 +1600,13 @@ class Axis(AbstractAxis):
         return self._node_.dataRepresent==cdmsNode.CdLinear
 
     # Return the bounds array, or generate a default if autoBounds mode is on
-    def getBounds(self):
+    def getBounds(self, isGeneric=None):
+        '''
+        If isGeneric is a list with one element, we set its element to True if the
+        bounds were generated and False if bounds were read from the file.
+        '''
+        if (isGeneric):
+            isGeneric[0] = False
         boundsArray = self.getExplicitBounds()
         try:
             self.validateBounds(boundsArray)
@@ -1578,6 +1614,8 @@ class Axis(AbstractAxis):
             boundsArray = None
         abopt = getAutoBounds()
         if boundsArray is None and (abopt==1 or (abopt==2 and (self.isLatitude() or self.isLongitude()))) :
+            if (isGeneric):
+                isGeneric[0] = True
             boundsArray = self.genGenericBounds()
             
         return boundsArray
@@ -1609,7 +1647,10 @@ class Axis(AbstractAxis):
 # In-memory coordinate axis
 class TransientAxis(AbstractAxis):
     axis_count = 0
-    def __init__(self, data, bounds=None, id=None, attributes=None, copy=0):
+    def __init__(self, data, bounds=None, id=None, attributes=None, copy=0, genericBounds=False):
+        '''
+        genericBounds specifies whether the bounds were generated (True) or read from a file (False)
+        '''
         AbstractAxis.__init__(self, None, None)
         if id is None:
             TransientAxis.axis_count = TransientAxis.axis_count + 1
@@ -1646,7 +1687,8 @@ class TransientAxis(AbstractAxis):
             self._data_ = numpy.array(data)
 
         self._doubledata_ = None
-        self.setBounds(bounds)
+        self._genericBounds_ = genericBounds
+        self.setBounds(bounds, isGeneric=genericBounds)
 
     def __getitem__(self, key):
         return self._data_[key]
@@ -1663,10 +1705,15 @@ class TransientAxis(AbstractAxis):
     def __len__(self):
         return len(self._data_)
 
-    def getBounds(self):
+    def getBounds(self, isGeneric=None):
+        if (isGeneric):
+            isGeneric[0] = self._genericBounds_
         if self._bounds_ is not None:
             return copy.copy(self._bounds_)
         elif (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
+            if (isGeneric):
+                isGeneric[0] = True
+            self._genericBounds_ = True
             return self.genGenericBounds()
         else:
             return None
@@ -1675,14 +1722,17 @@ class TransientAxis(AbstractAxis):
         return self._data_
 
     def getExplicitBounds(self):
-        return copy.copy(self._bounds_)
+        if (self._genericBounds_):
+            return None
+        else:
+            return copy.copy(self._bounds_)
 
     # Set bounds. The persistent argument is for compatibility with
     # persistent versions, is ignored. Same for boundsid and index.
     #
     # mf 20010308 - add validate key word, by default do not validate
-    #
-    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None):
+    # isGeneric is True if bounds were generated, False if they were read from a file
+    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None, isGeneric=False):
         if bounds is not None:
             if isinstance(bounds, numpy.ma.MaskedArray):
                 bounds = numpy.ma.filled(bounds)
@@ -1700,9 +1750,11 @@ class TransientAxis(AbstractAxis):
                     bounds2[:,1]=bounds[1::]
                     bounds=bounds2
             self._bounds_ = copy.copy(bounds)
+            self._genericBounds_ = isGeneric
         else:
             if (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
                 self._bounds_ = self.genGenericBounds()
+                self._genericBounds_ = True
             else:
                 self._bounds_ = None
 
@@ -1745,7 +1797,7 @@ class TransientVirtualAxis(TransientAxis):
         "Return true iff coordinate values are implicitly defined."
         return 1
 
-    def setBounds(self, bounds):
+    def setBounds(self, bounds, isGeneric=False):
         "No boundaries on virtual axes"
         self._bounds_ = None
 
@@ -1953,13 +2005,19 @@ class FileAxis(AbstractAxis):
         return 0                        # All file axes are vector representation
 
     # Return the bounds array, or generate a default if autobounds mode is set
-    def getBounds(self):
+    # If isGeneric is a list with one element, we set its element to True if the
+    # bounds were generated and False if bounds were read from the file.
+    def getBounds(self, isGeneric=None):
+        if (isGeneric):
+            isGeneric[0] = False
         boundsArray = self.getExplicitBounds()
         try:
             boundsArray = self.validateBounds(boundsArray)
         except Exception,err:
             boundsArray = None
         if boundsArray is None and (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
+            if (isGeneric):
+                isGeneric[0] = True
             boundsArray = self.genGenericBounds()
             
         return boundsArray
@@ -1989,7 +2047,8 @@ class FileAxis(AbstractAxis):
     # index in the extended dimension (default is index=0).
     # If the bounds variable is new, use the name boundsid, or 'bounds_<varid>'
     # if unspecified.
-    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None):
+    # isGeneric is only used for TransientAxis
+    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None, isGeneric=False):
         if persistent:
             if index is None:
                 if validate:
diff --git a/Packages/cdms2/Lib/tvariable.py b/Packages/cdms2/Lib/tvariable.py
index 27cab8156..152875adf 100644
--- a/Packages/cdms2/Lib/tvariable.py
+++ b/Packages/cdms2/Lib/tvariable.py
@@ -384,8 +384,9 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         if n < 0: n = n + self.rank()
         if not isinstance(axis, AbstractAxis):
             raise CDMSError,"copydimension, other not an axis."
-        b = axis.getBounds()
-        mycopy = createAxis(axis[:], b)
+        isGeneric = [False]
+        b = axis.getBounds(isGeneric)
+        mycopy = createAxis(axis[:], b, genericBounds=isGeneric[0])
         mycopy.id = axis.id
         for k, v in axis.attributes.items():
            setattr(mycopy, k, v)
diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index d98270c55..cee91f99d 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -2333,7 +2333,11 @@ Options:::
                                                     # bunch of plots in a row)
         display_name = "__display_123"                # VCS Display plot name (used to prevent duplicate display plots)
         ratio = 1.5|"autot"|"auto"                   # Ratio of height/width for the plot; autot and auto will choose a
-                                                    # "good" ratio for you.
+                                                     # "good" ratio for you.
+        plot_based_dual_grid = True | False          # Plot the actual grid or the dual grid based on what is
+                                                     # needed by the plot: isofill, isoline, vector need
+                                                     # point attributes, boxfill and meshfill need cell attributes
+                                                     # The default is True (if the parameter is not specified).
     Graphics Output in Background Mode:
        bg                 = 0|1   # if ==1, create images in the background
                                                              (Don't display the VCS Canvas)
diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py
index d6c449b05..86cfcfd0a 100644
--- a/Packages/vcs/vcs/vcs2vtk.py
+++ b/Packages/vcs/vcs/vcs2vtk.py
@@ -180,142 +180,48 @@ def handleProjectionEdgeCases(projection, data):
     return data
 
 
-def genGridOnPoints(data1, gm, deep=True, grid=None, geo=None,
-                    data2=None):
-    continents = False
-    projection = vcs.elements["projection"][gm.projection]
-    xm, xM, ym, yM = None, None, None, None
-    useStructuredGrid = True
-
-    data1 = handleProjectionEdgeCases(projection, data1)
-    if data2 is not None:
-        data2 = handleProjectionEdgeCases(projection, data2)
-
-    try:
-        g = data1.getGrid()
-        if grid is None:
-            x = g.getLongitude()[:]
-            y = g.getLatitude()[:]
-            xm = x[0]
-            xM = x[-1]
-            ym = y[0]
-            yM = y[-1]
-        continents = True
-        wrap = [0, 360]
-        # Ok need unstrctured grid
-        if isinstance(g, cdms2.gengrid.AbstractGenericGrid):
-            useStructuredGrid = False
-    except:
-        # hum no grid that's much easier
-        wrap = None
-        if grid is None:
-            x = data1.getAxis(-1)[:]
-            y = data1.getAxis(-2)[:]
-            xm = x[0]
-            xM = x[-1]
-            ym = y[0]
-            yM = y[-1]
-
-    if grid is None:
-        if x.ndim == 1:
-            y = y[:, numpy.newaxis] * numpy.ones(x.shape)[numpy.newaxis, :]
-            x = x[numpy.newaxis, :] * numpy.ones(y.shape)
-        x = x.flatten()
-        y = y.flatten()
-        sh = list(x.shape)
-        sh.append(1)
-        x = numpy.reshape(x, sh)
-        y = numpy.reshape(y, sh)
-        # Ok we have our points in 2D let's create unstructured points grid
-        if xm is None:
-            xm = x.min()
-        if xM is None:
-            xM = x.max()
-        if ym is None:
-            ym = y.min()
-        if yM is None:
-            yM = y.max()
-        z = numpy.zeros(x.shape)
-        m3 = numpy.concatenate((x, y), axis=1)
-        m3 = numpy.concatenate((m3, z), axis=1)
-        deep = True
-        pts = vtk.vtkPoints()
-        # Convert nupmy array to vtk ones
-        ppV = numpy_to_vtk_wrapper(m3, deep=deep)
-        pts.SetData(ppV)
-        xm, xM, ym, yM, tmp, tmp2 = pts.GetBounds()
-    else:
-        xm, xM, ym, yM, tmp, tmp2 = grid.GetPoints().GetBounds()
-        vg = grid
-    oldpts = pts
-    if geo is None:
-        bounds = pts.GetBounds()
-        xm, xM, ym, yM = [bounds[0], bounds[1], bounds[2], bounds[3]]
-        # We use zooming feature (gm.datawc) for linear and polar projections.
-        # We use wrapped coordinates for doing the projection
-        # such that parameters like the central meridian are set correctly.
-        if (gm.g_name == 'Gfm'):
-            # axes are not lon/lat for meshfill
-            wc = [gm.datawc_x1, gm.datawc_x2, gm.datawc_y1, gm.datawc_y2]
-        else:
-            wc = vcs.utils.getworldcoordinates(gm,
-                                               data1.getAxis(-1),
-                                               data1.getAxis(-2))
-        geo, geopts = project(pts, projection, getWrappedBounds(
-            wc, [xm, xM, ym, yM], wrap))
-        pts = geopts
-    # Sets the vertices into the grid
-    if grid is None:
-        if useStructuredGrid:
-            vg = vtk.vtkStructuredGrid()
-            vg.SetDimensions(data1.shape[1], data1.shape[0], 1)
-        else:
-            vg = vtk.vtkUnstructuredGrid()
-        vg.SetPoints(oldpts)
-        vg.SetPoints(pts)
-    else:
-        vg = grid
-    scalar = numpy_to_vtk_wrapper(data1.filled(0.).flat,
-                                  deep=False)
-    scalar.SetName("scalar")
-    vg.GetPointData().SetScalars(scalar)
-    out = {"vtk_backend_grid": vg,
-           "xm": xm,
-           "xM": xM,
-           "ym": ym,
-           "yM": yM,
-           "continents": continents,
-           "wrap": wrap,
-           "geo": geo,
-           "data": data1,
-           "data2": data2
-           }
-    return out
-
-
-# Returns the bounds list for 'axis'. If axis has n elements the
-# bounds list will have n+1 elements
-def getBoundsList(axis):
-    bounds = numpy.zeros(len(axis) + 1)
-    try:
-        axisBounds = axis.getBounds()
+def getBoundsList(axis, hasCellData, dualGrid):
+    '''
+    Returns the bounds list for 'axis'. If axis has n elements the
+    bounds list will have n+1 elements
+    If there are no explicit bounds in the file we return None
+    '''
+    needsCellData = (hasCellData != dualGrid)
+    axisBounds = axis.getBoundsForDualGrid(dualGrid)
+    # we still have to generate bounds for non lon-lat axes, because
+    # the default in axis.py is 2 (generate bounds only for lat/lon axis)
+    # this is used for non lon-lat plots - by default numpy arrays are POINT data
+    if (not axis.isLatitude() and not axis.isLongitude() and needsCellData):
+        axisBounds = axis.genGenericBounds()
+    if (axisBounds is not None):
+        bounds = numpy.zeros(len(axis) + 1)
         if (axis[0] < axis[-1]):
             # axis is increasing
-            bounds[:len(axis)] = axisBounds[:, 0]
-            bounds[len(axis)] = axisBounds[-1, 1]
+            if (axisBounds[0][0] < axisBounds[0][1]):
+                # interval is increasing
+                bounds[:len(axis)] = axisBounds[:, 0]
+                bounds[len(axis)] = axisBounds[-1, 1]
+            else:
+                # interval is decreasing
+                bounds[:len(axis)] = axisBounds[:, 1]
+                bounds[len(axis)] = axisBounds[-1, 0]
         else:
             # axis is decreasing
-            bounds[:len(axis)] = axisBounds[:, 1]
-            bounds[len(axis)] = axisBounds[-1, 0]
-    except Exception:
-        # No luck we have to generate bounds ourselves
-        bounds[1:-1] = (axis[:-1] + axis[1:]) / 2.
-        bounds[0] = axis[0] - (axis[1] - axis[0]) / 2.
-        bounds[-1] = axis[-1] + (axis[-1] - axis[-2]) / 2.
-    return bounds
+            if (axisBounds[0][0] < axisBounds[0][1]):
+                # interval is increasing
+                bounds[:len(axis)] = axisBounds[:, 1]
+                bounds[len(axis)] = axisBounds[-1, 0]
+            else:
+                # interval is decreasing
+                bounds[:len(axis)] = axisBounds[:, 0]
+                bounds[len(axis)] = axisBounds[-1, 1]
+        return bounds
+    else:
+        return None
 
 
-def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
+def genGrid(data1, data2, gm, deep=True, grid=None, geo=None, genVectors=False,
+            dualGrid=False):
     continents = False
     wrap = None
     m3 = None
@@ -325,6 +231,8 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
     projection = vcs.elements["projection"][gm.projection]
 
     data1 = handleProjectionEdgeCases(projection, data1)
+    if data2 is not None:
+        data2 = handleProjectionEdgeCases(projection, data2)
 
     try:  # First try to see if we can get a mesh out of this
         g = data1.getGrid()
@@ -388,6 +296,7 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
         # Ok a simple structured grid is enough
         if grid is None:
             vg = vtk.vtkStructuredGrid()
+        hasCellData = data1.hasCellData()
         if g is not None:
             # Ok we have grid
             continents = True
@@ -406,38 +315,49 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
                 lon = data1.getAxis(-1)
                 lat = data1.getAxis(-2)
                 # Ok let's try to get the bounds
-                lon2 = getBoundsList(lon)
-                lat2 = getBoundsList(lat)
+                lon2 = getBoundsList(lon, hasCellData, dualGrid)
+                lat2 = getBoundsList(lat, hasCellData, dualGrid)
+                if (lon2 is not None and lat2 is not None):
+                    lon3 = lon2
+                    lat3 = lat2
+                else:
+                    lon3 = lon
+                    lat3 = lat
+                    cellData = False
                 # Note that m,M is min,max for an increasing list
                 # and max,min for a decreasing list
-                xm = lon2[0]
-                xM = lon2[-1]
-                ym = lat2[0]
-                yM = lat2[-1]
-
-                lat = lat2[:, numpy.newaxis] * \
-                    numpy.ones(lon2.shape)[numpy.newaxis, :]
-                lon = lon2[numpy.newaxis,
-                           :] * numpy.ones(lat2.shape)[:,
-                                                       numpy.newaxis]
+                xm = lon3[0]
+                xM = lon3[-1]
+                ym = lat3[0]
+                yM = lat3[-1]
+
+                lat = lat3[:, numpy.newaxis] * numpy.ones(lon3.shape)[numpy.newaxis, :]
+                lon = lon3[numpy.newaxis, :] * numpy.ones(lat3.shape)[:, numpy.newaxis]
         elif grid is None:
             # No grid info from data, making one up
             data1 = cdms2.asVariable(data1)
             lon = data1.getAxis(-1)
             lat = data1.getAxis(-2)
             # Ok let's try to get the bounds
-            lon2 = getBoundsList(lon)
-            lat2 = getBoundsList(lat)
+            lon2 = getBoundsList(lon, hasCellData, dualGrid)
+            lat2 = getBoundsList(lat, hasCellData, dualGrid)
+            if (lon2 is not None and lat2 is not None):
+                lon3 = lon2
+                lat3 = lat2
+            else:
+                lon3 = lon
+                lat3 = lat
+                cellData = False
             # Note that m,M is min,max for an increasing list
             # and max,min for a decreasing list
-            xm = lon2[0]
-            xM = lon2[-1]
-            ym = lat2[0]
-            yM = lat2[-1]
-            lat = lat2[:, numpy.newaxis] * \
-                numpy.ones(lon2.shape)[numpy.newaxis, :]
-            lon = lon2[numpy.newaxis, :] * \
-                numpy.ones(lat2.shape)[:, numpy.newaxis]
+            xm = lon3[0]
+            xM = lon3[-1]
+            ym = lat3[0]
+            yM = lat3[-1]
+            lat = lat3[:, numpy.newaxis] * \
+                numpy.ones(lon3.shape)[numpy.newaxis, :]
+            lon = lon3[numpy.newaxis, :] * \
+                numpy.ones(lat3.shape)[:, numpy.newaxis]
         if grid is None:
             vg.SetDimensions(lat.shape[1], lat.shape[0], 1)
             lon = numpy.ma.ravel(lon)
@@ -461,15 +381,23 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
                     ym = lat.min()
                     yM = lat.max()
 
-    # scalar data
-    scalar = numpy_to_vtk_wrapper(data1.filled(0.).flat,
-                                  deep=False)
-    scalar.SetName("scalar")
-    gridForScalar = grid if grid else vg
+    # attribute data
+    gridForAttribute = grid if grid else vg
+    if genVectors:
+        attribute = generateVectorArray(data1, data2, gridForAttribute)
+    else:
+        attribute = numpy_to_vtk_wrapper(data1.filled(0.).flat,
+                                         deep=False)
+        attribute.SetName("scalar")
     if cellData:
-        gridForScalar.GetCellData().SetScalars(scalar)
+        attributes = gridForAttribute.GetCellData()
+    else:
+        attributes = gridForAttribute.GetPointData()
+    if genVectors:
+        attributes.SetVectors(attribute)
     else:
-        gridForScalar.GetPointData().SetScalars(scalar)
+        attributes.SetScalars(attribute)
+
     if grid is None:
         # First create the points/vertices (in vcs terms)
         pts = vtk.vtkPoints()
@@ -489,8 +417,8 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
             # wrapping
             pedigreeId = vtk.vtkIntArray()
             pedigreeId.SetName("PedigreeIds")
-            pedigreeId.SetNumberOfTuples(scalar.GetNumberOfTuples())
-            for i in range(0, scalar.GetNumberOfTuples()):
+            pedigreeId.SetNumberOfTuples(attribute.GetNumberOfTuples())
+            for i in range(0, attribute.GetNumberOfTuples()):
                 pedigreeId.SetValue(i, i)
             if cellData:
                 vg.GetCellData().SetPedigreeIds(pedigreeId)
@@ -533,7 +461,8 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
            "wrap": wrap,
            "geo": geo,
            "cellData": cellData,
-           "data": data1
+           "data": data1,
+           "data2": data2
            }
     return out
 
@@ -1853,27 +1782,8 @@ def generateVectorArray(data1, data2, vtk_grid):
     w = numpy.concatenate((u, v), axis=1)
     w = numpy.concatenate((w, z), axis=1)
 
-    # HACK The grid returned by vtk2vcs.genGrid is not the same size as the
-    # data array. I'm not sure where the issue is...for now let's just zero-pad
-    # data array so that we can at least test rendering until Charles gets
-    # back from vacation:
-    wLen = len(w)
-    numPts = vtk_grid.GetNumberOfPoints()
-    if wLen != numPts:
-        warnings.warn("!!! Warning during vector plotting: "
-                      "Number of points does not "
-                      "match the number of vectors to be "
-                      "glyphed (%s points vs %s "
-                      "vectors). The vectors will be "
-                      "padded/truncated to match for "
-                      "rendering purposes, but the resulting "
-                      "image should not be "
-                      "trusted." % (numPts, wLen))
-        newShape = (numPts,) + w.shape[1:]
-        w = numpy.ma.resize(w, newShape)
-
     w = numpy_to_vtk_wrapper(w, deep=False)
-    w.SetName("vectors")
+    w.SetName("vector")
     return w
 
 
diff --git a/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py b/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
index 772652172..1eb39085c 100644
--- a/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
@@ -24,6 +24,7 @@ class BoxfillPipeline(Pipeline2D):
         self._contourLabels = None
         self._mappers = None
         self._customBoxfillArgs = {}
+        self._needsCellData = True
 
     def _updateScalarData(self):
         """Overrides baseclass implementation."""
@@ -93,11 +94,6 @@ class BoxfillPipeline(Pipeline2D):
         # Use consecutive colors:
         self._contourColors = range(self._gm.color_1, self._gm.color_2 + 1)
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-
     def _plotInternal(self):
         """Overrides baseclass implementation."""
         # Special case for custom boxfills:
diff --git a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
index e2ee5360e..8f46aa5bd 100644
--- a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
@@ -13,36 +13,11 @@ class IsofillPipeline(Pipeline2D):
 
     def __init__(self, gm, context_):
         super(IsofillPipeline, self).__init__(gm, context_)
-
-    def _updateVTKDataSet(self):
-        """Overrides baseclass implementation."""
-        # Force point data for isoline/isofill
-        genGridDict = vcs2vtk.genGridOnPoints(self._data1, self._gm,
-                                              deep=False,
-                                              grid=self._vtkDataSet,
-                                              geo=self._vtkGeoTransform)
-        genGridDict["cellData"] = False
-        self._data1 = genGridDict["data"]
-        self._updateFromGenGridDict(genGridDict)
+        self._needsCellData = False
 
     def _updateContourLevelsAndColors(self):
         self._updateContourLevelsAndColorsGeneric()
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        if self._useCellScalars:
-            # Sets data to point instead of just cells
-            c2p = vtk.vtkCellDataToPointData()
-            c2p.SetInputData(self._vtkDataSet)
-            c2p.Update()
-            # For contouring duplicate points seem to confuse it
-            self._vtkPolyDataFilter.SetInputConnection(c2p.GetOutputPort())
-        else:
-            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-        self._vtkPolyDataFilter.Update()
-        self._resultDict["vtk_backend_filter"] = self._vtkPolyDataFilter
-
     def _plotInternal(self):
         """Overrides baseclass implementation."""
 
diff --git a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
index af79a45b8..2d9b66472 100644
--- a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
@@ -12,21 +12,7 @@ class IsolinePipeline(Pipeline2D):
 
     def __init__(self, gm, context_):
         super(IsolinePipeline, self).__init__(gm, context_)
-
-    def _updateVTKDataSet(self):
-        """Overrides baseclass implementation."""
-        # Force point data for isoline/isofill
-        genGridDict = vcs2vtk.genGridOnPoints(self._data1, self._gm,
-                                              deep=False,
-                                              grid=self._vtkDataSet,
-                                              geo=self._vtkGeoTransform)
-        genGridDict["cellData"] = False
-        self._data1 = genGridDict["data"]
-        self._updateFromGenGridDict(genGridDict)
-
-        data = vcs2vtk.numpy_to_vtk_wrapper(self._data1.filled(0.).flat,
-                                            deep=False)
-        self._vtkDataSet.GetPointData().SetScalars(data)
+        self._needsCellData = False
 
     def _updateContourLevelsAndColors(self):
         """Overrides baseclass implementation."""
@@ -49,20 +35,6 @@ class IsolinePipeline(Pipeline2D):
         # Contour colors:
         self._contourColors = self._gm.linecolors
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        if self._useCellScalars:
-            # Sets data to point instead of just cells
-            c2p = vtk.vtkCellDataToPointData()
-            c2p.SetInputData(self._vtkDataSet)
-            c2p.Update()
-            # For contouring duplicate points seem to confuse it
-            self._vtkPolyDataFilter.SetInputConnection(c2p.GetOutputPort())
-        else:
-            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-        self._resultDict["vtk_backend_filter"] = self._vtkPolyDataFilter
-
     def _plotInternal(self):
         """Overrides baseclass implementation."""
         tmpLevels = []
@@ -164,7 +136,7 @@ class IsolinePipeline(Pipeline2D):
             numLevels = len(l)
 
             cot = vtk.vtkContourFilter()
-            if self._useCellScalars:
+            if self._hasCellData:
                 cot.SetInputConnection(self._vtkPolyDataFilter.GetOutputPort())
             else:
                 cot.SetInputData(self._vtkDataSet)
diff --git a/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py b/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
index 90c06718d..7101a4729 100644
--- a/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
@@ -15,6 +15,7 @@ class MeshfillPipeline(Pipeline2D):
         super(MeshfillPipeline, self).__init__(gm, context_)
 
         self._patternActors = []
+        self._needsCellData = True
 
     def _updateScalarData(self):
         """Overrides baseclass implementation."""
@@ -25,16 +26,6 @@ class MeshfillPipeline(Pipeline2D):
     def _updateContourLevelsAndColors(self):
         self._updateContourLevelsAndColorsGeneric()
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        if self._useCellScalars:
-            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-        else:
-            p2c = vtk.vtkPointDataToCellData()
-            p2c.SetInputData(self._vtkDataSet)
-            self._vtkPolyDataFilter.SetInputConnection(p2c.GetOutputPort())
-
     def _plotInternal(self):
 
         prepedContours = self._prepContours()
diff --git a/Packages/vcs/vcs/vcsvtk/pipeline2d.py b/Packages/vcs/vcs/vcsvtk/pipeline2d.py
index 4d370db8a..baa2f8915 100644
--- a/Packages/vcs/vcs/vcsvtk/pipeline2d.py
+++ b/Packages/vcs/vcs/vcsvtk/pipeline2d.py
@@ -1,9 +1,10 @@
 from .pipeline import Pipeline
 from .. import vcs2vtk
 
-import vcs
-import numpy
 import fillareautils
+import numpy
+import vcs
+import vtk
 import warnings
 
 
@@ -45,8 +46,10 @@ class IPipeline2D(Pipeline):
         - _useContinents: Whether or not to plot continents.
         - _dataWrapModulo: Wrap modulo as [YMax, XMax], in degrees. 0 means
             'no wrapping'.
-        - _useCellScalars: True if data is applied to cell, false if data is
+        - _hasCellData: True if data is applied to cell, false if data is
             applied to points.
+        - _needsCellData: True if the plot needs cell scalars, false if
+            the plot needs point scalars
         - _scalarRange: The range of _data1 as tuple(float min, float max)
         - _maskedDataMapper: The mapper used to render masked data.
     """
@@ -74,7 +77,8 @@ class IPipeline2D(Pipeline):
         self._colorMap = None
         self._useContinents = None
         self._dataWrapModulo = None
-        self._useCellScalars = None
+        self._hasCellData = None
+        self._needsCellData = None
         self._scalarRange = None
         self._maskedDataMapper = None
 
@@ -82,7 +86,7 @@ class IPipeline2D(Pipeline):
         """Create _data1 and _data2 from _originalData1 and _originalData2."""
         raise NotImplementedError("Missing override.")
 
-    def _updateVTKDataSet(self):
+    def _updateVTKDataSet(self, plotBasedDualGrid):
         """Apply the vcs data to _vtkDataSet, creating it if necessary."""
         raise NotImplementedError("Missing override.")
 
@@ -275,7 +279,8 @@ class Pipeline2D(IPipeline2D):
         self._scalarRange = vcs.minmax(self._data1)
 
         # Create/update the VTK dataset.
-        self._updateVTKDataSet()
+        plotBasedDualGrid = kargs.get('plot_based_dual_grid', True)
+        self._updateVTKDataSet(plotBasedDualGrid)
 
         # Update the results:
         self._resultDict["vtk_backend_grid"] = self._vtkDataSet
@@ -311,16 +316,40 @@ class Pipeline2D(IPipeline2D):
         self._min = self._data1.min()
         self._max = self._data1.max()
 
-    def _updateVTKDataSet(self):
-        """Overrides baseclass implementation."""
+    def _updateVTKDataSet(self, plotBasedDualGrid):
+        """
+        """
+        if (plotBasedDualGrid):
+            hasCellData = self._data1.hasCellData()
+            dualGrid = (hasCellData != self._needsCellData)
+        else:
+            dualGrid = False
         genGridDict = vcs2vtk.genGrid(self._data1, self._data2, self._gm,
                                       deep=False,
                                       grid=self._vtkDataSet,
-                                      geo=self._vtkGeoTransform)
-
+                                      geo=self._vtkGeoTransform, dualGrid=dualGrid)
         self._data1 = genGridDict["data"]
         self._updateFromGenGridDict(genGridDict)
 
+    def _createPolyDataFilter(self):
+        """This is only used when we use the grid stored in the file for all plots."""
+        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
+        if self._hasCellData == self._needsCellData:
+            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
+        elif self._hasCellData:
+            # use cells but needs points
+            c2p = vtk.vtkCellDataToPointData()
+            c2p.SetInputData(self._vtkDataSet)
+            self._vtkPolyDataFilter.SetInputConnection(c2p.GetOutputPort())
+        else:
+            # use points but needs cells
+            p2c = vtk.vtkPointDataToCellData()
+            p2c.SetInputData(self._vtkDataSet)
+            # For contouring duplicate points seem to confuse it
+            self._vtkPolyDataFilter.SetInputConnection(p2c.GetOutputPort())
+        self._vtkPolyDataFilter.Update()
+        self._resultDict["vtk_backend_filter"] = self._vtkPolyDataFilter
+
     def _updateFromGenGridDict(self, genGridDict):
         """Overrides baseclass implementation."""
         self._vtkDataSet = genGridDict['vtk_backend_grid']
@@ -329,7 +358,7 @@ class Pipeline2D(IPipeline2D):
         self._useContinents = genGridDict['continents']
         self._dataWrapModulo = genGridDict['wrap']
         self._vtkGeoTransform = genGridDict['geo']
-        self._useCellScalars = genGridDict['cellData']
+        self._hasCellData = genGridDict['cellData']
 
     def _createMaskedDataMapper(self):
         """Overrides baseclass implementation."""
@@ -338,11 +367,11 @@ class Pipeline2D(IPipeline2D):
         if color is not None:
             color = self.getColorIndexOrRGBA(_colorMap, color)
         self._maskedDataMapper = vcs2vtk.putMaskOnVTKGrid(
-            self._data1, self._vtkDataSet, color, self._useCellScalars,
+            self._data1, self._vtkDataSet, color, self._hasCellData,
             deep=False)
 
         self._resultDict["vtk_backend_missing_mapper"] = (
-            self._maskedDataMapper, color, self._useCellScalars)
+            self._maskedDataMapper, color, self._hasCellData)
 
     def getPlottingBounds(self):
         """gm.datawc if it is set or dataset_bounds if there is not geographic projection
diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index b8c4eaead..bc34e3c9e 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -33,14 +33,21 @@ class VectorPipeline(Pipeline):
         lon = None
 
         latAccessor = data1.getLatitude()
-        lonAccesrsor = data1.getLongitude()
+        lonAccessor = data1.getLongitude()
         if latAccessor:
             lat = latAccessor[:]
-        if lonAccesrsor:
-            lon = lonAccesrsor[:]
+        if lonAccessor:
+            lon = lonAccessor[:]
 
-        gridGenDict = vcs2vtk.genGridOnPoints(data1, self._gm, deep=False, grid=grid,
-                                              geo=transform, data2=data2)
+        plotBasedDualGrid = kargs.get('plot_based_dual_grid', True)
+        if (plotBasedDualGrid):
+            hasCellData = data1.hasCellData()
+            dualGrid = hasCellData
+        else:
+            dualGrid = False
+        gridGenDict = vcs2vtk.genGrid(data1, data2, self._gm, deep=False, grid=grid,
+                                      geo=transform, genVectors=True,
+                                      dualGrid=dualGrid)
 
         data1 = gridGenDict["data"]
         data2 = gridGenDict["data2"]
@@ -54,6 +61,7 @@ class VectorPipeline(Pipeline):
         continents = gridGenDict['continents']
         self._dataWrapModulo = gridGenDict['wrap']
         geo = gridGenDict['geo']
+        cellData = gridGenDict['cellData']
 
         if geo is not None:
             newv = vtk.vtkDoubleArray()
@@ -83,16 +91,19 @@ class VectorPipeline(Pipeline):
 
         returned["vtk_backend_grid"] = grid
         returned["vtk_backend_geo"] = geo
-        missingMapper = vcs2vtk.putMaskOnVTKGrid(data1, grid, None, False,
-                                                 deep=False)
+        missingMapper = vcs2vtk.putMaskOnVTKGrid(data1, grid, actorColor=None,
+                                                 cellData=cellData, deep=False)
 
         # None/False are for color and cellData
         # (sent to vcs2vtk.putMaskOnVTKGrid)
         returned["vtk_backend_missing_mapper"] = (missingMapper, None, False)
 
-        w = vcs2vtk.generateVectorArray(data1, data2, grid)
-
-        grid.GetPointData().AddArray(w)
+        # convert to point data
+        if cellData:
+            c2p = vtk.vtkCellDataToPointData()
+            c2p.SetInputData(grid)
+            c2p.Update()
+            grid = c2p.GetOutput()
 
         # Vector attempt
         l = self._gm.line
@@ -119,7 +130,7 @@ class VectorPipeline(Pipeline):
 
         glyphFilter = vtk.vtkGlyph2D()
         glyphFilter.SetInputData(grid)
-        glyphFilter.SetInputArrayToProcess(1, 0, 0, 0, "vectors")
+        glyphFilter.SetInputArrayToProcess(1, 0, 0, 0, "vector")
         glyphFilter.SetSourceConnection(arrow.GetOutputPort())
         glyphFilter.SetVectorModeToUseVector()
 
-- 
GitLab


From 0d6bfec7174d847b46038d1c86a7ca06beceb3db Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Wed, 4 May 2016 16:24:18 -0400
Subject: [PATCH 042/196] BUG: Fix mean computation for point datasets.

With the new flag TransientAxis._genericBounds_, TransientAxis.getExplicitBounds
returns None if the bounds were not read from a NetCDF file but were autogenerated.
This creates a problem in cdutil.averager, because bounds axes are artificially extended to -90, 90.
For example, a latbnds of [[90, 88], [88, 84], ..., [4, 0]] was extended to
[[90, 88], [88, 84], ..., [4, -90]]. We remove the code that did this.

This fix also improves the baseline for testEsmfRegridRegion.
---
 Packages/cdms2/Lib/grid.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/Packages/cdms2/Lib/grid.py b/Packages/cdms2/Lib/grid.py
index f11ca1764..9930f263a 100644
--- a/Packages/cdms2/Lib/grid.py
+++ b/Packages/cdms2/Lib/grid.py
@@ -496,14 +496,6 @@ class AbstractRectGrid(AbstractGrid):
         else:
             latbnds = lat.genGenericBounds()
 
-        # Stretch latitude bounds to +/- 90.0
-        if ascending:
-            latbnds[0,0] = min(latbnds[0,0],-90.0)
-            latbnds[-1,1] = max(latbnds[-1,1],90.0)
-        else:
-            latbnds[0,0] = max(latbnds[0,0],+90.0)
-            latbnds[-1,1] = min(latbnds[-1,1],-90.0)
-
         # Get longitude bounds
         lon = self.getLongitude()
         if len(lon)>1:
-- 
GitLab


From 2acc77cdf88741f93b8ff79c357d623b993197ea Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Wed, 11 May 2016 09:12:41 -0700
Subject: [PATCH 043/196] Made flake8 pass, removed extra line, unwrapped some
 wrapped lines

---
 Packages/vcs/vcs/boxfill.py  |  7 ++-----
 Packages/vcs/vcs/template.py | 23 +++++++++++++++++------
 2 files changed, 19 insertions(+), 11 deletions(-)

diff --git a/Packages/vcs/vcs/boxfill.py b/Packages/vcs/vcs/boxfill.py
index 4003606c9..b403895fc 100755
--- a/Packages/vcs/vcs/boxfill.py
+++ b/Packages/vcs/vcs/boxfill.py
@@ -742,8 +742,7 @@ class Gfb(object):
         nlev = float(self.color_2 - self.color_1 + 1)
         autolevels = False
 
-        if numpy.allclose(self.level_1, 1.e20) or \
-           numpy.allclose(self.level_2, 1.e20):
+        if numpy.allclose(self.level_1, 1.e20) or numpy.allclose(self.level_2, 1.e20):
             autolevels = True
             low_end = varmin
             high_end = varmax
@@ -755,7 +754,6 @@ class Gfb(object):
             low_end = numpy.ma.log10(low_end)
             high_end = numpy.ma.log10(high_end)
 
-
         if autolevels:
             # Use nice values for the scale
             scale = vcs.mkscale(low_end, high_end)
@@ -776,8 +774,7 @@ class Gfb(object):
         if self.legend:
             return self.legend
 
-        if numpy.allclose(self.level_1, 1.e20) or \
-           numpy.allclose(self.level_2, 1.e20):
+        if numpy.allclose(self.level_1, 1.e20) or numpy.allclose(self.level_2, 1.e20):
             autolevels = True
         else:
             autolevels = False
diff --git a/Packages/vcs/vcs/template.py b/Packages/vcs/vcs/template.py
index 3eaac046b..fd2ee2f0c 100644
--- a/Packages/vcs/vcs/template.py
+++ b/Packages/vcs/vcs/template.py
@@ -56,6 +56,18 @@ def _setgen(self, name, cls, value):
     setattr(self, "_%s" % name, value)
 
 
+def epsilon_gte(a, b):
+    """a >= b, using floating point epsilon value."""
+    float_epsilon = numpy.finfo(numpy.float32).eps
+    return -float_epsilon < a - b
+
+
+def epsilon_lte(a, b):
+    """a <= b, using floating point epsilon value."""
+    float_epsilon = numpy.finfo(numpy.float32).eps
+    return float_epsilon > a - b
+
+
 # read .scr file
 def process_src(nm, code):
     """Takes VCS script code (string) as input and generates boxfill gm from it"""
@@ -1874,15 +1886,14 @@ class P(object):
             if legend is None:
                 legend = vcs.mklabels(levels)
             # We'll use the less precise float epsilon since this is just for labels
-            float_epsilon = numpy.finfo(numpy.float32).eps
             if levels[0] < levels[1]:
-                # <=
-                comparison = lambda a, b: float_epsilon > a - b
+                comparison = epsilon_lte
             else:
-                # >=
-                comparison = lambda a, b: -float_epsilon < a - b
+                comparison = epsilon_gte
+
+            def in_bounds(x):
+                return comparison(levels[0], x) and comparison(x, levels[-1])
 
-            in_bounds = lambda x: comparison(levels[0], x) and comparison(x, levels[-1])
             dlong = dD / (len(levels) - 1)
 
             for l in legend.keys():
-- 
GitLab


From 60d644963a9cdf7a0b4a8467ae9563d1db002166 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Thu, 12 May 2016 13:36:33 -0700
Subject: [PATCH 044/196] Harden histogram a bit

---
 Packages/vcsaddons/Lib/core.py       | 20 +++++-----
 Packages/vcsaddons/Lib/histograms.py | 58 ++++++++++++++++++++++------
 2 files changed, 56 insertions(+), 22 deletions(-)

diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py
index 8421e247c..d1b49bf11 100644
--- a/Packages/vcsaddons/Lib/core.py
+++ b/Packages/vcsaddons/Lib/core.py
@@ -20,7 +20,7 @@ class VCSaddon(object):
             self.x=vcs.init()
         else:
             self.x=x
-            
+
         if template is None:
             self.template = self.x.gettemplate()
         elif isinstance(template,str):
@@ -39,10 +39,10 @@ class VCSaddon(object):
             self.datawc_y1=1.e20
             self.datawc_y2=1.e20
             self.colormap="default"
-            self.xmtics1='*'
-            self.xmtics2='*'
-            self.ymtics1='*'
-            self.ymtics2='*'
+            self.xmtics1=''
+            self.xmtics2=''
+            self.ymtics1=''
+            self.ymtics2=''
             self.xticlabels1='*'
             self.xticlabels2='*'
             self.yticlabels1='*'
@@ -60,7 +60,7 @@ class VCSaddon(object):
             self.datawc_x1=gm.datawc_x1
             self.datawc_x2=gm.datawc_x2
             self.datawc_y1=gm.datawc_y1
-            self.datawc_y2=gm.datawc_x2
+            self.datawc_y2=gm.datawc_y2
             self.colormap=gm.colormap
             self.xmtics1=gm.xmtics1
             self.xmtics2=gm.xmtics2
@@ -78,7 +78,7 @@ class VCSaddon(object):
             self.projection=gm.projection
         self.name = name
         vcsaddons.gms[self.g_type][name]=self
-        
+
 
     def list(self):
         print 'graphics method = ',self.g_name
@@ -103,9 +103,9 @@ class VCSaddon(object):
         raise "Plot function not implemented for graphic method type: %s" % self.g_name
 
     def prep_plot(self,xmn,xmx,ymn,ymx):
-        
+
         self.save()
-        
+
         if self.datawc_x1!=1.e20:
             xmn = self.datawc_x1
         if self.datawc_x2!=1.e20:
@@ -180,4 +180,4 @@ class VCSaddon(object):
 
     def creategm(self,name,source='default'):
         return self.__init__(name,source=source,x=self.x,template=self.template)
-        
+
diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py
index 60f866492..452ac671e 100644
--- a/Packages/vcsaddons/Lib/histograms.py
+++ b/Packages/vcsaddons/Lib/histograms.py
@@ -60,31 +60,48 @@ class Ghg(VCSaddon):
                 try:
                     data_name = data.id + data.units
                 except AttributeError:
-                    data_name = data.id
+                    try:
+                        data_name = data.id
+                    except AttributeError:
+                        data_name = "array"
 
         # We'll just flatten the data... if they want to be more precise, should pass in more precise data
-        data = data.flatten().asma()
+        if isinstance(data, cdms2.avariable.AbstractVariable):
+            data = data.asma()
+        data = data.flatten()
 
         # ok now we have a good x and a good data
         if not self.bins:
             self.bins = vcs.utils.mkscale(*vcs.minmax(data))
 
+        # Sort the bins
+        self.bins.sort()
+
+        # Prune duplicates
+        pruned_bins = []
+        for bin in self.bins:
+            if pruned_bins and numpy.allclose(bin, pruned_bins[-1]):
+                continue
+            pruned_bins.append(bin)
+        self.bins = pruned_bins
         data_bins = numpy.digitize(data, self.bins) - 1
         binned = [data[data_bins==i] for i in range(len(self.bins))]
-
         means = []
         stds = []
 
         max_possible_deviance = 0
 
         for ind, databin in enumerate(binned):
-            means.append(databin.mean())
-            stds.append(databin.std())
+            if len(databin) > 0:
+                means.append(databin.mean())
+                stds.append(databin.std())
+            else:
+                means.append(0)
+                stds.append(0)
             if len(self.bins) > ind + 1:
                 max_possible_deviance = max(means[ind] - self.bins[ind], self.bins[ind + 1] - means[ind], max_possible_deviance)
             else:
                 max_possible_deviance = max(means[ind] - self.bins[ind], max_possible_deviance)
-
         color_values = [std / max_possible_deviance for std in stds]
         y_values = [len(databin) for databin in binned]
         nbars = len(self.bins) - 1
@@ -97,11 +114,27 @@ class Ghg(VCSaddon):
         line.viewport = [
             template.data.x1, template.data.x2, template.data.y1, template.data.y2]
 
-        xmn, xmx = vcs.minmax(self.bins)
-        # Make the y scale be slightly larger than the largest bar
-        ymn, ymx = 0, max(y_values) * 1.25
+        vcs_min_max = vcs.minmax(self.bins)
+        if numpy.allclose(self.datawc_x1, 1e20):
+            xmn = vcs_min_max[0]
+        else:
+            xmn = self.datawc_x1
+
+        if numpy.allclose(self.datawc_x2, 1e20):
+            xmx = vcs_min_max[1]
+        else:
+            xmx = self.datawc_x2
+
+        if numpy.allclose(self.datawc_y2, 1e20):
+            # Make the y scale be slightly larger than the largest bar
+            ymx = max(y_values) * 1.25
+        else:
+            ymx = self.datawc_y2
 
-        #xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx)
+        if numpy.allclose(self.datawc_y1, 1e20):
+            ymn = 0
+        else:
+            ymn = self.datawc_y1
 
         fill.worldcoordinate = [xmn, xmx, ymn, ymx]
         line.worldcoordinate = [xmn, xmx, ymn, ymx]
@@ -166,7 +199,8 @@ class Ghg(VCSaddon):
                     else:
                         # Shouldn't ever get here since level 0 is 0
                         assert False
-
+            else:
+                assert False
             styles.append(self.fillareastyles[lev_ind])
             cols.append(self.fillareacolors[lev_ind])
             indices.append(self.fillareaindices[lev_ind])
@@ -192,7 +226,7 @@ class Ghg(VCSaddon):
         displays = []
 
         x_axis = cdms2.createAxis(self.bins, id=data_name)
-        y_axis = cdms2.createAxis(vcs.mkscale(0, ymx), id="bin_size")
+        y_axis = cdms2.createAxis(vcs.mkscale(ymn, ymx), id="bin_size")
 
         displays.append(x.plot(fill, bg=bg, render=False))
         arr = MV2.masked_array(y_values)
-- 
GitLab


From 5dd0a949f1b5b26c51cbb4513fd165f519031573 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 30 Apr 2016 21:49:57 -0400
Subject: [PATCH 045/196] Using common module for regression testing

---
 Packages/testing/__init__.py                  |  0
 .../testing/regression.py                     | 23 +++++++++-
 Packages/testing/setup.py                     | 14 ++++++
 testing/vcs/test_vcs_1D_datawc.py             | 21 +++------
 testing/vcs/test_vcs_1D_datawc_missing.py     | 29 ++----------
 testing/vcs/test_vcs_1D_with_manyDs.py        | 30 ++----------
 testing/vcs/test_vcs_1d_in_boxfill.py         | 27 ++---------
 ...cs_1d_marker_not_shown_if_xaxis_flipped.py | 33 ++-----------
 testing/vcs/test_vcs_1d_missing.py            | 26 ++---------
 testing/vcs/test_vcs_animate_boxfill.py       | 15 ++----
 testing/vcs/test_vcs_animate_isofill.py       | 23 ++++------
 testing/vcs/test_vcs_animate_isoline.py       | 27 ++++-------
 .../vcs/test_vcs_animate_isoline_colored.py   | 20 +++-----
 .../test_vcs_animate_isoline_text_labels.py   | 11 ++---
 ...vcs_animate_isoline_text_labels_colored.py | 10 ++--
 testing/vcs/test_vcs_animate_meshfill.py      | 10 +---
 testing/vcs/test_vcs_antialiasing.py          |  1 -
 testing/vcs/test_vcs_aspect_ratio.py          | 35 +++++---------
 testing/vcs/test_vcs_auto_time_labels.py      | 23 +++-------
 testing/vcs/test_vcs_autot_axis_titles.py     | 22 ++-------
 testing/vcs/test_vcs_bad_time_units.py        | 12 ++---
 testing/vcs/test_vcs_basic_gms.py             | 19 ++------
 testing/vcs/test_vcs_basic_text.py            | 17 ++-----
 testing/vcs/test_vcs_basic_vectors.py         | 19 ++------
 ...st_vcs_box_custom_as_def_vistrails_exts.py | 42 +++++------------
 .../test_vcs_boxfill_10x10_masked_numpy.py    | 28 +++--------
 testing/vcs/test_vcs_boxfill_10x10_numpy.py   | 27 +++--------
 testing/vcs/test_vcs_boxfill_custom.py        | 21 ++-------
 testing/vcs/test_vcs_boxfill_custom_ext1.py   | 22 ++-------
 .../vcs/test_vcs_boxfill_custom_ext1_ext2.py  | 22 ++-------
 testing/vcs/test_vcs_boxfill_custom_ext2.py   | 20 ++------
 ...t_vcs_boxfill_custom_non_default_levels.py | 24 ++--------
 .../test_vcs_boxfill_decreasing_latitude.py   | 27 ++---------
 testing/vcs/test_vcs_boxfill_lambert_crash.py | 17 ++-----
 testing/vcs/test_vcs_boxfill_lev1_lev2.py     | 35 ++++----------
 .../vcs/test_vcs_boxfill_lev1_lev2_ext1.py    | 37 +++++----------
 .../test_vcs_boxfill_lev1_lev2_ext1_ext2.py   | 39 +++++-----------
 .../vcs/test_vcs_boxfill_lev1_lev2_ext2.py    | 37 +++++----------
 .../test_vcs_boxfill_lev1_lev2_ta_missing.py  | 39 +++++-----------
 testing/vcs/test_vcs_boxfill_polar.py         | 31 +++----------
 testing/vcs/test_vcs_boxfill_robinson_wrap.py | 15 +-----
 testing/vcs/test_vcs_canvas_background.py     | 19 ++------
 .../vcs/test_vcs_canvas_background_update.py  | 20 ++------
 testing/vcs/test_vcs_click_info.py            | 20 ++------
 testing/vcs/test_vcs_close.py                 |  6 ---
 testing/vcs/test_vcs_colormaps_source.py      | 24 ++--------
 .../vcs/test_vcs_colorpicker_appearance.py    |  7 +--
 testing/vcs/test_vcs_configurator_resize.py   |  8 ++--
 testing/vcs/test_vcs_continents.py            | 25 ++--------
 testing/vcs/test_vcs_create_get.py            |  1 -
 testing/vcs/test_vcs_draw_logo_on.py          | 16 ++-----
 testing/vcs/test_vcs_fillarea_transparency.py | 17 ++-----
 testing/vcs/test_vcs_first_png_blank.py       | 19 ++------
 testing/vcs/test_vcs_flipNone.py              | 26 ++---------
 testing/vcs/test_vcs_flipX.py                 | 26 ++---------
 testing/vcs/test_vcs_flipXY.py                | 25 ++--------
 testing/vcs/test_vcs_flipY.py                 | 24 ++--------
 testing/vcs/test_vcs_gen_meshfill.py          | 29 ++----------
 .../test_vcs_gms_animate_projected_plots.py   | 23 ++--------
 testing/vcs/test_vcs_gms_patterns_hatches.py  | 16 ++-----
 testing/vcs/test_vcs_hatches_patterns.py      | 24 ++--------
 testing/vcs/test_vcs_import.py                |  1 -
 testing/vcs/test_vcs_infinity.py              | 20 ++------
 testing/vcs/test_vcs_iso_celine_part1.py      | 21 +++------
 testing/vcs/test_vcs_iso_celine_part2.py      | 25 +++-------
 ...st_vcs_isofill_data_read_north_to_south.py | 22 ++-------
 .../vcs/test_vcs_isofill_isoline_labels.py    | 29 ++----------
 .../vcs/test_vcs_isofill_mask_cell_shift.py   | 29 +++---------
 testing/vcs/test_vcs_isoline_labels.py        | 34 +++-----------
 .../vcs/test_vcs_isoline_labels_background.py | 26 ++---------
 ..._isoline_labels_multi_label_input_types.py | 46 +++++++------------
 .../vcs/test_vcs_isoline_labelskipdistance.py | 26 ++---------
 testing/vcs/test_vcs_isoline_numpy.py         | 22 +++------
 testing/vcs/test_vcs_isoline_width_stipple.py | 27 ++---------
 testing/vcs/test_vcs_issue_960_labels.py      | 26 ++++-------
 testing/vcs/test_vcs_lambert.py               | 23 ++--------
 testing/vcs/test_vcs_large_pattern_hatch.py   | 27 ++---------
 testing/vcs/test_vcs_legend.py                | 17 ++-----
 testing/vcs/test_vcs_lon_axes_freak_out.py    | 31 +++----------
 testing/vcs/test_vcs_markers.py               | 26 +++--------
 testing/vcs/test_vcs_matplotlib_colormap.py   | 26 ++---------
 testing/vcs/test_vcs_mercator_edge.py         | 23 ++--------
 testing/vcs/test_vcs_meshfill_draw_mesh.py    | 28 ++++-------
 testing/vcs/test_vcs_meshfill_no_wrapping.py  | 26 ++---------
 testing/vcs/test_vcs_meshfill_regular_grid.py | 23 ++--------
 testing/vcs/test_vcs_meshfill_vertices.py     | 21 ++-------
 testing/vcs/test_vcs_meshfill_zoom.py         | 27 +++--------
 testing/vcs/test_vcs_mintics.py               | 20 ++------
 testing/vcs/test_vcs_missing_colorname.py     | 24 ++--------
 ..._vcs_monotonic_decreasing_yxvsx_default.py | 29 ++++--------
 testing/vcs/test_vcs_oned_level_axis.py       | 27 +++--------
 testing/vcs/test_vcs_patterns.py              | 22 ++-------
 testing/vcs/test_vcs_plot_file_var.py         | 14 +++---
 .../test_vcs_plot_unstructured_via_boxfill.py | 20 ++------
 testing/vcs/test_vcs_png_to_base64.py         |  2 +-
 testing/vcs/test_vcs_png_window_resize.py     | 17 ++-----
 .../vcs/test_vcs_polar_set_opt_param_polar.py | 30 +++---------
 testing/vcs/test_vcs_remove_marker_none_1d.py | 24 ++--------
 testing/vcs/test_vcs_setcolormap.py           | 26 ++---------
 .../vcs/test_vcs_settings_color_name_rgba.py  | 27 +++--------
 .../test_vcs_settings_color_name_rgba_1d.py   | 22 ++-------
 ...st_vcs_settings_color_name_rgba_boxfill.py | 23 ++--------
 ...st_vcs_settings_color_name_rgba_isoline.py | 23 ++--------
 ...t_vcs_settings_color_name_rgba_meshfill.py | 23 ++--------
 testing/vcs/test_vcs_star_triangle_markers.py | 17 ++-----
 testing/vcs/test_vcs_taylor_2quads.py         | 33 ++-----------
 testing/vcs/test_vcs_taylor_template_ctl.py   | 29 ++----------
 testing/vcs/test_vcs_user_passed_date.py      | 17 ++-----
 .../test_vcs_user_passed_date_as_string.py    | 17 ++-----
 testing/vcs/test_vcs_vectors_missing.py       | 27 +++--------
 testing/vcs/test_vcs_vectors_robinson.py      | 16 ++-----
 testing/vcs/test_vcs_vectors_robinson_wrap.py | 16 ++-----
 testing/vcs/test_vcs_verify_boxfill_basics.py | 13 +++---
 testing/vcs/test_vcs_wmo_marker.py            | 17 ++-----
 testing/vcs/test_vcs_wmo_markers.py           | 25 +++-------
 testing/vcs/vtk_ui/vtk_ui_test.py             | 14 ++----
 .../vcsaddons/test_12_plot_one_leg_per_row.py | 15 ++----
 .../test_12_plot_one_leg_per_row_right.py     | 16 ++-----
 ...test_EzTemplate_12_plots_legd_direction.py | 17 ++-----
 ...t_EzTemplate_12_plots_margins_thickness.py | 15 ++----
 .../test_EzTemplate_12_plots_mix_glb_local.py | 14 +-----
 .../test_EzTemplate_12_plots_spacing.py       | 16 ++-----
 .../test_vcs_addons_EzTemplate_2x2.py         | 24 +++-------
 .../vcsaddons/test_vcsaddons_preview_2x2.py   | 16 ++-----
 124 files changed, 589 insertions(+), 2135 deletions(-)
 create mode 100644 Packages/testing/__init__.py
 rename testing/checkimage.py => Packages/testing/regression.py (84%)
 create mode 100755 Packages/testing/setup.py

diff --git a/Packages/testing/__init__.py b/Packages/testing/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/testing/checkimage.py b/Packages/testing/regression.py
similarity index 84%
rename from testing/checkimage.py
rename to Packages/testing/regression.py
index 9871f3557..68f381ce4 100644
--- a/testing/checkimage.py
+++ b/Packages/testing/regression.py
@@ -12,9 +12,28 @@ import os.path
 import re
 import sys
 import logging
+import vcs
 
 defaultThreshold=10.0
 
+def init():
+    testingDir = os.path.join(os.path.dirname(__file__), "..")
+    sys.path.append(testingDir)
+
+    vcsinst = vcs.init()
+    vcsinst.setantialiasing(0)
+    vcsinst.drawlogooff()
+    vcsinst.setbgoutputdimensions(1200,1091,units="pixels")
+    return vcsinst
+
+def run(vcsinst, fname, baseline=sys.argv[1], threshold=defaultThreshold):
+    vcsinst.png(fname)
+    sys.exit(check_result_image(fname, baseline, threshold))
+
+def run_wo_terminate(vcsinst, fname, baseline=sys.argv[1], threshold=defaultThreshold):
+    vcsinst.png(fname)
+    return check_result_image(fname, baseline, threshold)
+
 def image_compare(testImage, baselineImage):
     imageDiff = vtk.vtkImageDifference()
     imageDiff.SetInputData(testImage)
@@ -53,8 +72,8 @@ def find_alternates(fname):
             results.append(os.path.join(dirname, i))
     return results
 
-def check_result_image(fname, baselinefname, threshold = defaultThreshold,
-                       baseline = True, cleanup=True):
+def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
+                       baseline=True, cleanup=True):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
diff --git a/Packages/testing/setup.py b/Packages/testing/setup.py
new file mode 100755
index 000000000..70f790d7b
--- /dev/null
+++ b/Packages/testing/setup.py
@@ -0,0 +1,14 @@
+import os, sys
+from distutils.core import setup
+import cdat_info
+
+sys.path.append(os.environ.get('BUILD_DIR',"build"))
+
+setup(name="testing",
+      version=cdat_info.Version,
+      description="Testing infrastructure for cdat",
+      url="http://uvcdat.llnl.gov",
+      packages=['testing'],
+      package_dir={'testing': 'testing',},
+      install_requires=['numpy','vcs', 'vtk'],
+)
diff --git a/testing/vcs/test_vcs_1D_datawc.py b/testing/vcs/test_vcs_1D_datawc.py
index 1595a6cd7..8e671bb93 100644
--- a/testing/vcs/test_vcs_1D_datawc.py
+++ b/testing/vcs/test_vcs_1D_datawc.py
@@ -4,15 +4,13 @@ import vcs,numpy,cdms2,MV2,os,sys
 src=sys.argv[1]
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
 
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import testing.regression as regression
+
+x = regression.init()
 yx =x.createyxvsx()
 
-data = """-11.14902417  -9.17390922  -7.29515002  
+data = """-11.14902417  -9.17390922  -7.29515002
 -7.51774549  -8.63608171
   -10.4827395   -9.93859485  -7.3394366   -5.39241468  -5.74825567
      -6.74967902  -7.09622319  -5.93836983  -4.04592997  -2.65591499
@@ -30,14 +28,9 @@ data = numpy.array(data,dtype=numpy.float)
 data = MV2.array(data)
 yx.datawc_x1 = 0
 yx.datawc_x2 = 80
-yx.datawc_y1 =-12 
-yx.datawc_y2 = 12 
+yx.datawc_y1 =-12
+yx.datawc_y2 = 12
 
 
 x.plot(data,yx,bg=1)
-fnm = "test_vcs_1D_datawc.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_1D_datawc.png", src)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1D_datawc_missing.py b/testing/vcs/test_vcs_1D_datawc_missing.py
index 07e6f560a..d6caabc8e 100644
--- a/testing/vcs/test_vcs_1D_datawc_missing.py
+++ b/testing/vcs/test_vcs_1D_datawc_missing.py
@@ -1,19 +1,11 @@
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-yx =x.createyxvsx()
+x = regression.init()
+yx = x.createyxvsx()
 
 data = """
--999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. 
+-999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999.
 0.059503571833625334
 0.059503571833625334 0.05664014775641405 0.05193557222118004
 0.04777129850801233 0.0407139313814465 0.029382624830271705
@@ -42,16 +34,5 @@ data = """
 """.split()
 data = numpy.array(data,dtype=numpy.float)
 data = MV2.masked_less(data,-900)
-#yx.datawc_x1 = 0
-#yx.datawc_x2 = 80
-##yx.datawc_y1 =-12 
-#yx.datawc_y2 = 12 
-
-
 x.plot(data,yx,bg=1)
-fnm = "test_vcs_1D_datawc_missing.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_1D_datawc_missing.png")
diff --git a/testing/vcs/test_vcs_1D_with_manyDs.py b/testing/vcs/test_vcs_1D_with_manyDs.py
index 1caba446e..28a6a7a8b 100644
--- a/testing/vcs/test_vcs_1D_with_manyDs.py
+++ b/testing/vcs/test_vcs_1D_with_manyDs.py
@@ -1,31 +1,9 @@
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
+x = regression.init()
 d = numpy.sin(numpy.arange(100))
-d=numpy.reshape(d,(10,10))
-
-
+d = numpy.reshape(d,(10,10))
 one = x.create1d()
-
 x.plot(d,one,bg=1)
-
-
-fnm = "test_1D_with_manyDs.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
-
+regression.run(x, "test_1D_with_manyDs.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1d_in_boxfill.py b/testing/vcs/test_vcs_1d_in_boxfill.py
index 292fe98a3..1da574394 100644
--- a/testing/vcs/test_vcs_1d_in_boxfill.py
+++ b/testing/vcs/test_vcs_1d_in_boxfill.py
@@ -1,29 +1,8 @@
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
+x = regression.init()
 d = numpy.sin(numpy.arange(100))
-
 b = x.createboxfill()
-
 x.plot(d,b,bg=1)
-
-
-fnm = "test_1d_in_boxfill.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
-
+regression.run(x, "test_1d_in_boxfill.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py b/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py
index c350e4c59..f850f977e 100644
--- a/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py
+++ b/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py
@@ -1,42 +1,15 @@
-import vcs
-import numpy
-import MV2
-import cdms2
-import sys
-import os
 
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
+x = regression.init()
 data = MV2.array([4,5,6,7,1,3,7,9,])+230.
-
 p = cdms2.createAxis([2,5,100,200,500,800,850,1000])
-
 data.setAxis(0,p)
-
 data.id="jim"
-
 gm=x.create1d()
-
 gm.linewidth=0
 gm.datawc_x1=1000
 gm.datawc_x2=0
-
 gm.markersize=30
-
 x.plot(data,gm,bg=1)
-
-fnm = "test_1d_marker_not_shown_if_xaxis_flipped.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_1d_marker_not_shown_if_xaxis_flipped.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1d_missing.py b/testing/vcs/test_vcs_1d_missing.py
index 3a6880bdd..8c124e09c 100644
--- a/testing/vcs/test_vcs_1d_missing.py
+++ b/testing/vcs/test_vcs_1d_missing.py
@@ -3,15 +3,11 @@ import vcs,numpy,cdms2,MV2,os,sys
 src=sys.argv[1]
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 yx =x.createyxvsx()
-
-data = """-11.14902417  -9.17390922  -7.29515002  
+data = """-11.14902417  -9.17390922  -7.29515002
 -7.51774549  -8.63608171
   -10.4827395   -9.93859485  -7.3394366   -5.39241468  -5.74825567
      -6.74967902  -7.09622319  -5.93836983  -4.04592997  -2.65591499
@@ -30,17 +26,5 @@ data = MV2.array(data)
 
 data=MV2.masked_where(MV2.logical_and(data>-4,data<-2),data)
 
-#yx.datawc_x1 = 0
-#yx.datawc_x2 = 80
-##yx.datawc_y1 =-12 
-#yx.datawc_y2 = 12 
-
-
-x.plot(data,yx,bg=1)
-fnm = "test_vcs_1d_missing.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.plot(data, yx, bg=1)
+regression.run(x, "test_vcs_1d_missing.png", src)
diff --git a/testing/vcs/test_vcs_animate_boxfill.py b/testing/vcs/test_vcs_animate_boxfill.py
index 2bbd53c26..641b59edb 100644
--- a/testing/vcs/test_vcs_animate_boxfill.py
+++ b/testing/vcs/test_vcs_animate_boxfill.py
@@ -1,20 +1,14 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
-
 gm=x.createboxfill()
 x.plot(s,gm,bg=1)
 x.animate.create()
@@ -26,8 +20,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isofill.py b/testing/vcs/test_vcs_animate_isofill.py
index ba5ea0e8d..591d66362 100644
--- a/testing/vcs/test_vcs_animate_isofill.py
+++ b/testing/vcs/test_vcs_animate_isofill.py
@@ -1,20 +1,13 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
+x = regression.init()
 gm=x.createisofill()
 x.plot(s,gm,bg=1)
 x.animate.create()
@@ -26,8 +19,8 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),
+         regression.defaultThreshold)
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline.py b/testing/vcs/test_vcs_animate_isoline.py
index 41e0c7318..cea333d3b 100644
--- a/testing/vcs/test_vcs_animate_isoline.py
+++ b/testing/vcs/test_vcs_animate_isoline.py
@@ -1,33 +1,24 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-iso=x.createisoline()
-x.plot(s,iso,bg=1)
+x = regression.init()
+iso = x.createisoline()
+x.plot(s,iso, bg=1)
 x.animate.create()
 print "Saving now"
 prefix= os.path.split(__file__)[1][:-3]
 x.animate.save("%s.mp4"%prefix)
-pngs = x.animate.close(preserve_pngs = True) # so we can look at them again
+pngs = x.animate.close(preserve_pngs=True) # so we can look at them again
 src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline_colored.py b/testing/vcs/test_vcs_animate_isoline_colored.py
index 4fed45c86..4519ac16c 100644
--- a/testing/vcs/test_vcs_animate_isoline_colored.py
+++ b/testing/vcs/test_vcs_animate_isoline_colored.py
@@ -1,19 +1,12 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 
 iso=x.createisoline()
 levs = range(0,101,10)
@@ -34,8 +27,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline_text_labels.py b/testing/vcs/test_vcs_animate_isoline_text_labels.py
index 7a3be1fc5..0e83c02c6 100644
--- a/testing/vcs/test_vcs_animate_isoline_text_labels.py
+++ b/testing/vcs/test_vcs_animate_isoline_text_labels.py
@@ -1,11 +1,7 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
@@ -27,8 +23,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py b/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
index cf009e4a8..02711e219 100644
--- a/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
+++ b/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
@@ -1,11 +1,7 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
@@ -36,7 +32,7 @@ pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
   print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_meshfill.py b/testing/vcs/test_vcs_animate_meshfill.py
index 78d6b1551..aa89a8639 100644
--- a/testing/vcs/test_vcs_animate_meshfill.py
+++ b/testing/vcs/test_vcs_animate_meshfill.py
@@ -1,12 +1,7 @@
-import vcs
-import cdms2
-import os
-import sys
-import MV2
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
 f=cdms2.open(os.path.join(vcs.sample_data,"sampleCurveGrid4.nc"))
 s=f("sample")
@@ -38,8 +33,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_antialiasing.py b/testing/vcs/test_vcs_antialiasing.py
index 34f43e040..f744d3e04 100644
--- a/testing/vcs/test_vcs_antialiasing.py
+++ b/testing/vcs/test_vcs_antialiasing.py
@@ -1,4 +1,3 @@
-
 import vcs
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_aspect_ratio.py b/testing/vcs/test_vcs_aspect_ratio.py
index 1e59304a3..9e3cb3767 100644
--- a/testing/vcs/test_vcs_aspect_ratio.py
+++ b/testing/vcs/test_vcs_aspect_ratio.py
@@ -1,27 +1,20 @@
-
-import vcs
-import cdms2
-import sys
-import os
-import MV2
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
+src = sys.argv[1]
-pth0 = os.path.dirname(_file__)
+pth0 = os.path.dirname(__file__)
 pth = os.path.join(pth0,"..")
 sys.path.append(pth)
-import checkimage
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",time=slice(0,1),squeeze=1)
-gm=vcs.createisofill()
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",time=slice(0,1),squeeze=1)
+gm = vcs.createisofill()
 
 def plot_a_ratio(s,gm,ratio):
     ret = 0
-    x=vcs.init()
-x.drawlogooff()
+    x = regression.init()
+    x.drawlogooff()
     x.open()
     x.geometry(400,800)
-    y=vcs.init()
+    y = regression.init()
     y.open()
     y.geometry(800,400)
     for X in [x,y]:
@@ -32,19 +25,13 @@ x.drawlogooff()
             orient = "port"
         fnm = "aspect_ratio_%s_%s.png" % (orient,ratio)
         X.png(fnm)
-        print "fnm:",fnm
         src = os.path.join(pth0,fnm)
-        print "src:",src
-        ret += checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+        ret += regression.check_result_image(fnm, src)
     return ret
 
-ret = 0 
+ret = 0
 for ratio in ["1t","2t",".5t","autot"]:
     ret  += plot_a_ratio(s,gm,ratio)
 
 
-sys.exit(ret)
-
-
-
-
+sys.exit(ret)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_auto_time_labels.py b/testing/vcs/test_vcs_auto_time_labels.py
index 2dfa661a9..e5422b29c 100644
--- a/testing/vcs/test_vcs_auto_time_labels.py
+++ b/testing/vcs/test_vcs_auto_time_labels.py
@@ -1,18 +1,7 @@
-import vcs,cdms2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",longitude=slice(34,35),squeeze=1)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, cdms2, os, sys, testing.regression as regression
+
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",longitude=slice(34,35),squeeze=1)
+x = regression.init()
 x.plot(s,bg=1)
-fnm = "test_vcs_auto_time_labels.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_auto_time_labels.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_autot_axis_titles.py b/testing/vcs/test_vcs_autot_axis_titles.py
index ec485ccc0..dcc0f00b1 100644
--- a/testing/vcs/test_vcs_autot_axis_titles.py
+++ b/testing/vcs/test_vcs_autot_axis_titles.py
@@ -1,7 +1,4 @@
-import vcs
-import cdms2
-import os
-import sys
+import vcs, cdms2, os, sys, testing.regression as regression
 
 testConfig = {'a_boxfill': ('clt.nc', 'clt'),
               'a_mollweide_boxfill': ('clt.nc', 'clt'),
@@ -11,10 +8,9 @@ testConfig = {'a_boxfill': ('clt.nc', 'clt'),
              'a_robinson_isoline': ('clt.nc', 'clt')}
 
 # Tests if ratio=autot works correctly for background and foreground plots
-src = sys.argv[1]
 bg = 1
 if (sys.argv[2] == 'foreground'):
     bg = 0
 plot = sys.argv[3]
 x_over_y = sys.argv[4]
 if (x_over_y == '0.5'):
@@ -26,12 +19,9 @@ else:
 pth = os.path.join(os.path.dirname(__file__), "..")
 sys.path.append(pth)
 
-import checkimage
-
 f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
 s = f(testConfig[plot][1])
-
-x = vcs.init(bg=bg, geometry=(xSize, ySize))
+x = regression.init(bg=bg, geometry=(xSize, ySize))
 
 # graphics method
 if (plot.find('boxfill') != -1):
@@ -50,10 +40,4 @@ x.setantialiasing(0)
 x.drawlogooff()
 x.plot(s, gm, ratio="autot")
 name = "test_autot_axis_titles_" + plot[2:] + "_" + x_over_y + "_" + str(bg) + ".png"
-x.png(name)
-
-print "name:", name
-print "src:", src
-
-ret = checkimage.check_result_image(name, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, name, sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_bad_time_units.py b/testing/vcs/test_vcs_bad_time_units.py
index b89c2f7a7..0bdcd16c2 100644
--- a/testing/vcs/test_vcs_bad_time_units.py
+++ b/testing/vcs/test_vcs_bad_time_units.py
@@ -1,8 +1,8 @@
-import cdms2,vcs
-import os,sys
+import cdms2, vcs
+import os, sys
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,1))
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,1))
 s.getTime().units="XXX-))rvv"
-x=vcs.init()
-x.plot(s,bg=1)
+x = vcs.init()
+x.plot(s, bg=1)
diff --git a/testing/vcs/test_vcs_basic_gms.py b/testing/vcs/test_vcs_basic_gms.py
index daa144d9d..2cffb86e6 100644
--- a/testing/vcs/test_vcs_basic_gms.py
+++ b/testing/vcs/test_vcs_basic_gms.py
@@ -1,6 +1,4 @@
-
-import sys,os
-import argparse
+import argparse, os, sys, cdms2, MV2, testing.regression as regression, vcs, vtk
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -25,20 +23,9 @@ args = p.parse_args(sys.argv[1:])
 
 gm_type= args.gm
 src = args.src
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
-
 bg = not args.show
 
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -143,7 +130,7 @@ print "fnm:",fnm
 print "src:",src
 if args.show:
     raw_input("Press Enter")
-ret = checkimage.check_result_image(fnm+'.png',src,20., cleanup=not args.keep)
+ret = regression.check_result_image(fnm+'.png',src,20., cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_basic_text.py b/testing/vcs/test_vcs_basic_text.py
index d46bd4f48..d69f37ce0 100644
--- a/testing/vcs/test_vcs_basic_text.py
+++ b/testing/vcs/test_vcs_basic_text.py
@@ -1,11 +1,6 @@
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
+x = regression.init()
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
 txt=x.createtext()
@@ -16,10 +11,4 @@ txt.halign = "center"
 txt.valign="base"
 txt.angle=45
 x.plot(txt,bg=1)
-fnm = "test_basic_text.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_basic_text.png", sys.argv[1])
diff --git a/testing/vcs/test_vcs_basic_vectors.py b/testing/vcs/test_vcs_basic_vectors.py
index 561f7f2d8..37626e670 100644
--- a/testing/vcs/test_vcs_basic_vectors.py
+++ b/testing/vcs/test_vcs_basic_vectors.py
@@ -1,6 +1,5 @@
+import argparse, os, sys, numpy, cdms2, MV2, vcs, vtk
 
-import sys,os
-import argparse
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -18,21 +17,11 @@ args = p.parse_args(sys.argv[1:])
 
 if not args.show:
   src = args.src
-  pth = os.path.join(os.path.dirname(__file__),"..")
-  sys.path.append(pth)
-  import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
-import numpy
+  import testing.regression as regression
 
 bg = not args.show
 
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -95,7 +84,7 @@ else:
   x.png(fnm)
   print "fnm:",fnm
   print "src:",src
-  ret = checkimage.check_result_image(fnm+'.png',src,checkimage.defaultThreshold, cleanup=not args.keep)
+  ret = regression.check_result_image(fnm+'.png',src, regression.defaultThreshold, cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py b/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
index c012c09ca..7d81b8981 100644
--- a/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
+++ b/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
@@ -1,30 +1,12 @@
-import vcs
-import cdms2
-import os
-import sys
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,1),squeeze=1)
-
-x=vcs.init()
-x.drawlogooff()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-gm=x.createboxfill()
-gm.boxfill_type="custom"
-gm.levels=[1.e20,1.e20]
-gm.ext_1="y"
-gm.ext_2="y"
-
-x.plot(s,gm,bg=1)
-fnm = "test_box_custom_as_def_vistrails_exts.png"
-src =sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+import os, sys, cdms2, vcs, testing.regression as regression
+
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,1),squeeze=1)
+x = regression.init()
+gm = x.createboxfill()
+gm.boxfill_type = "custom"
+gm.levels = [1.e20,1.e20]
+gm.ext_1 = "y"
+gm.ext_2 = "y"
+x.plot(s, gm, bg=1)
+regression.run(x, "test_box_custom_as_def_vistrails_exts.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py b/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py
index b2cdf8319..2444cc8a8 100644
--- a/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py
+++ b/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py
@@ -1,23 +1,9 @@
+import vcs, numpy, os, sys, testing.regression as regression
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+s = numpy.sin(numpy.arange(100))
+s = numpy.reshape(s,(10,10))
+s = numpy.ma.masked_greater(s,.5)
 
-s= numpy.sin(numpy.arange(100))
-s=numpy.reshape(s,(10,10))
-s=numpy.ma.masked_greater(s,.5)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.plot(s,bg=1)
-fnm= "test_vcs_boxfill_10x10_masked_numpy.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x = regression.init()
+x.plot(s, bg=1)
+regression.run(x, "test_vcs_boxfill_10x10_masked_numpy.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_10x10_numpy.py b/testing/vcs/test_vcs_boxfill_10x10_numpy.py
index 079105198..a45aa5889 100644
--- a/testing/vcs/test_vcs_boxfill_10x10_numpy.py
+++ b/testing/vcs/test_vcs_boxfill_10x10_numpy.py
@@ -1,22 +1,7 @@
+import vcs, numpy, os, sys, testing.regression as regression
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-s= numpy.sin(numpy.arange(100))
-s=numpy.reshape(s,(10,10))
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.plot(s,bg=1)
-fnm= "test_vcs_boxfill_10x10_numpy.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+s = numpy.sin(numpy.arange(100))
+s = numpy.reshape(s,(10,10))
+x = regression.init()
+x.plot(s, bg=1)
+regression.run(x, "test_vcs_boxfill_10x10_numpy.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_custom.py b/testing/vcs/test_vcs_boxfill_custom.py
index 16685d971..5330a1103 100644
--- a/testing/vcs/test_vcs_boxfill_custom.py
+++ b/testing/vcs/test_vcs_boxfill_custom.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import cdms2, os, sys, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -24,14 +18,5 @@ boxfill.boxfill_type = 'custom'
 canvas.plot(clt, boxfill, bg=1)
 
 # Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
 # Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom.png")
diff --git a/testing/vcs/test_vcs_boxfill_custom_ext1.py b/testing/vcs/test_vcs_boxfill_custom_ext1.py
index dd16ab837..7a5e2005e 100644
--- a/testing/vcs/test_vcs_boxfill_custom_ext1.py
+++ b/testing/vcs/test_vcs_boxfill_custom_ext1.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import cdms2, os, sys, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -28,14 +22,4 @@ boxfill.fillareacolors=vcs.getcolors(boxfill.levels)
 canvas.plot(clt, boxfill, bg=1)
 
 # Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_ext1.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_ext1.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py b/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
index 68b5a9a41..744071671 100644
--- a/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -29,14 +23,4 @@ boxfill.fillareacolors=vcs.getcolors(boxfill.levels)
 canvas.plot(clt, boxfill, bg=1)
 
 # Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_ext1_ext2.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_ext1_ext2.png")
diff --git a/testing/vcs/test_vcs_boxfill_custom_ext2.py b/testing/vcs/test_vcs_boxfill_custom_ext2.py
index 959fc2c6c..d45950bdb 100644
--- a/testing/vcs/test_vcs_boxfill_custom_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_custom_ext2.py
@@ -3,6 +3,8 @@ import os
 import sys
 import vcs
 
+import testing.regression as regression
+
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 clt = dataFile("clt")
@@ -10,10 +12,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -27,15 +26,4 @@ boxfill.fillareacolors=vcs.getcolors(boxfill.levels)
 
 canvas.plot(clt, boxfill, bg=1)
 
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_ext2.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_ext2.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py b/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
index 7363d2250..b84db2bb5 100644
--- a/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
+++ b/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import cdms2, os, sys, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -25,16 +19,4 @@ boxfill.levels=levels
 boxfill.fillareacolors=vcs.getcolors(levels)
 
 canvas.plot(clt, boxfill, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_no_default_levels.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_no_default_levels.png")
diff --git a/testing/vcs/test_vcs_boxfill_decreasing_latitude.py b/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
index 009b947a9..1f727e3b5 100755
--- a/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
+++ b/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
@@ -1,23 +1,10 @@
 #!/usr/bin/env python
-import cdms2
-import cdutil
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import cdms2, cdutil, os, sys, vcs, testing.regression as regression
 
 f = cdms2.open(sys.argv[2])
 ice = f("variable_6")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 900, units="pixels")
+x = regression.init()
 
-#gm = x.createisofill()
-#gm.label = "y"
 gm = x.createboxfill()
 gm.boxfill_type = "custom"
 
@@ -44,12 +31,8 @@ tmpl.legend.y1 = .03
 tmpl.legend.y2 = .055
 tmpl.max.priority = 1
 
-#tmpl.crdate.priority=1
-#tmpl.crdate.x=.8
-#tmpl.crdate.y=.95
 txt = x.createtext()
 txt.height = 20
-#txt.color=242
 txt.valign = "half"
 txt.halign = "center"
 
@@ -69,8 +52,4 @@ gm.datawc_y2 = 30
 gm.datawc_y1 = 90
 
 x.plot(ice, gm, tmpl, bg = 1)
-fnm = "test_boxfill_decreasing_latitude.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
-
+regression.run(x, "test_boxfill_decreasing_latitude.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lambert_crash.py b/testing/vcs/test_vcs_boxfill_lambert_crash.py
index f1827882f..f445ba555 100644
--- a/testing/vcs/test_vcs_boxfill_lambert_crash.py
+++ b/testing/vcs/test_vcs_boxfill_lambert_crash.py
@@ -1,17 +1,10 @@
 #!/usr/bin/env python
-import cdms2
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import cdms2, os, sys, vcs, testing.regression as regression
 
 f = cdms2.open(sys.argv[2])
-a=f("Z3")
+a = f("Z3")
 
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200, 900, units="pixels")
@@ -24,6 +17,4 @@ x.plot(a(latitude=(20,60),longitude=(-160,-120)),b, bg=1)
 fileName = os.path.basename(__file__)
 fileName = os.path.splitext(fileName)[0]
 fileName += '.png'
-x.png(fileName)
-ret = checkimage.check_result_image(fileName, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+ret = regression.run(x, fileName)
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2.py b/testing/vcs/test_vcs_boxfill_lev1_lev2.py
index 5c69d7af3..c5b5cbcbe 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2.py
@@ -1,25 +1,10 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=.5
-b.level_2=14.5
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt",slice(0,1),squeeze=1)
+b = x.createboxfill()
+b.level_1 = .5
+b.level_2 = 14.5
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2.png")
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
index 594949238..67f87029e 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
@@ -1,26 +1,11 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=20
-b.level_2=80
-b.ext_1="y"
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ext1.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt",slice(0,1),squeeze=1)
+b = x.createboxfill()
+b.level_1 = 20
+b.level_2 = 80
+b.ext_1 = "y"
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ext1.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
index 9e355d1e1..dc7958c59 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
@@ -1,27 +1,12 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=20
-b.level_2=80
-b.ext_1="y"
-b.ext_2="y"
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ext1_ext2.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt", slice(0,1), squeeze=1)
+b = x.createboxfill()
+b.level_1 = 20
+b.level_2 = 80
+b.ext_1 = "y"
+b.ext_2 = "y"
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ext1_ext2.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
index 375c93d8a..398325eab 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
@@ -1,26 +1,11 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=20
-b.level_2=80
-b.ext_2="y"
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ext2.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt",slice(0,1),squeeze=1)
+b = x.createboxfill()
+b.level_1 = 20
+b.level_2 = 80
+b.ext_2 = "y"
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ext2.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
index 984179e5c..d2a39a1ba 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
@@ -1,28 +1,11 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-f=cdms2.open(vcs.sample_data+"/ta_ncep_87-6-88-4.nc")
-s=f("ta",slice(0,1),longitude=slice(34,35),squeeze=1)-273.15
-s=cdms2.MV2.masked_less(s,-45.)
-b=x.createboxfill()
-b.level_1=-40
-b.level_2=40
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ta_missing.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-raw_input()
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/ta_ncep_87-6-88-4.nc")
+s = f("ta",slice(0,1),longitude=slice(34,35),squeeze=1)-273.15
+s = cdms2.MV2.masked_less(s,-45.)
+b = x.createboxfill()
+b.level_1 = -40
+b.level_2 = 40
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ta_missing.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_polar.py b/testing/vcs/test_vcs_boxfill_polar.py
index e4f534c9d..869d09802 100644
--- a/testing/vcs/test_vcs_boxfill_polar.py
+++ b/testing/vcs/test_vcs_boxfill_polar.py
@@ -1,33 +1,16 @@
-#!/usr/bin/env python
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
 
 f = cdms2.open(vcs.sample_data + "/clt.nc")
-a=f("clt")
+a = f("clt")
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 900, units="pixels")
-
-p=x.getprojection("polar")
-b=x.createboxfill()
-b.projection=p
-#b.datawc_y1 = 90
-#b.datawc_y2 = -90
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
+p = x.getprojection("polar")
+b = x.createboxfill()
+b.projection = p
 x.plot(a(latitude=(90,-90)), b, bg=1)
 
 fileName = os.path.basename(__file__)
 fileName = os.path.splitext(fileName)[0]
 fileName += '.png'
-x.png(fileName)
-ret = checkimage.check_result_image(fileName, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fileName)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_robinson_wrap.py b/testing/vcs/test_vcs_boxfill_robinson_wrap.py
index 81b3206bd..de66e516c 100644
--- a/testing/vcs/test_vcs_boxfill_robinson_wrap.py
+++ b/testing/vcs/test_vcs_boxfill_robinson_wrap.py
@@ -1,15 +1,7 @@
-#!/usr/bin/env python
-import cdms2, cdutil, genutil
-import vcs,os
-import sys
+import os, sys, cdms2, cdutil, genutil, vcs, testing.regression as regression
 
 # This tests if extending the longitude to more than 360 decrees is handled correctly by
 # proj4. See https://github.com/UV-CDAT/uvcdat/issues/1728 for more information.
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-
 cdmsfile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 clt2 = cdmsfile('clt')
 clt3 = clt2(latitude=(-90.0, 90.0),squeeze=1,longitude=(-180, 200.0),time=('1979-01', '1988-12'),)
@@ -19,7 +11,4 @@ kwargs = {}
 kwargs[ 'cdmsfile' ] = cdmsfile.id
 kwargs['bg'] = 1
 canvas.plot(clt3, gmBoxfill, **kwargs)
-fnm = "test_robinson_wrap.png"
-canvas.png(fnm)
-ret = checkimage.check_result_image(fnm, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_robinson_wrap.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_canvas_background.py b/testing/vcs/test_vcs_canvas_background.py
index 1d39b330d..2c72b51f3 100644
--- a/testing/vcs/test_vcs_canvas_background.py
+++ b/testing/vcs/test_vcs_canvas_background.py
@@ -1,19 +1,6 @@
-import vcs, cdms2, os, sys
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(500,500,units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
 
+x = regression.init()
 x.backgroundcolor = (255, 255, 255)
 x.open()
-fnm = "test_backgroundcolor_white.png"
-x.png(fnm)
-
-src=sys.argv[1]
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_backgroundcolor_white.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_canvas_background_update.py b/testing/vcs/test_vcs_canvas_background_update.py
index 454f0ef09..a28c08713 100644
--- a/testing/vcs/test_vcs_canvas_background_update.py
+++ b/testing/vcs/test_vcs_canvas_background_update.py
@@ -1,22 +1,8 @@
-import vcs, cdms2, os, sys
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x = vcs.init()
-
-x.drawlogooff()
-x.setbgoutputdimensions(500,500, units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
 
+x = regression.init()
 x.backgroundcolor = (255, 255, 255)
 x.open()
 x.backgroundcolor = (255, 255, 0)
 x.update()
-fnm = "test_backgroundcolor_yellow.png"
-x.png(fnm)
-
-src=sys.argv[1]
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(x, "test_backgroundcolor_yellow.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_click_info.py b/testing/vcs/test_vcs_click_info.py
index 0fe587792..c16a6c0d4 100644
--- a/testing/vcs/test_vcs_click_info.py
+++ b/testing/vcs/test_vcs_click_info.py
@@ -1,7 +1,4 @@
-import cdms2
-import sys
-import vcs
-import os
+import os, sys, cdms2, vcs, testing.regression as regression
 
 testConfig = {'a_boxfill': ('clt.nc', 'clt', (200, 200)),
               'a_mollweide_boxfill': ('clt.nc', 'clt', (222, 322)),
@@ -11,15 +8,7 @@ testConfig = {'a_boxfill': ('clt.nc', 'clt', (200, 200)),
 # Tests if the info produced when clicking on a map is correct.
 src = sys.argv[1]
 plot = sys.argv[2]
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-# Needs to set the size of window so it is consistent accross
-# test platforms
-x.open(814, 606)
+x = regression.init()
 
 # data
 f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
@@ -47,7 +36,4 @@ fileName = os.path.basename(src)
 fileName = os.path.splitext(fileName)[0]
 fileName += '.png'
 
-x.png(fileName, width=814, height= 606)
-
-ret = checkimage.check_result_image(fileName, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fileName)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_close.py b/testing/vcs/test_vcs_close.py
index 7bf007224..3457a648e 100644
--- a/testing/vcs/test_vcs_close.py
+++ b/testing/vcs/test_vcs_close.py
@@ -1,7 +1,5 @@
 import os, sys, vcs, cdms2
-#import checkimage
 
-#src=sys.argv[1]
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
 cdmsfile = cdms2.open(vcs.sample_data+"/clt.nc")
@@ -9,8 +7,4 @@ data = cdmsfile('clt')
 x = vcs.init()
 x.plot(data, bg=1)
 x.close()
-#x.plot(data[4][1:89], bg=1)
-#fnm = "test_vcs_close.png"
-#x.png(fnm)
-#ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
 sys.exit(0)
diff --git a/testing/vcs/test_vcs_colormaps_source.py b/testing/vcs/test_vcs_colormaps_source.py
index 1dcc7d880..2cfc02721 100644
--- a/testing/vcs/test_vcs_colormaps_source.py
+++ b/testing/vcs/test_vcs_colormaps_source.py
@@ -1,27 +1,12 @@
-import vcs
-import argparse
-import cdms2
-import  os
-import sys
-
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import argparse, os, sys, cdms2, vcs, testing.regression as regression
 
 parser = argparse.ArgumentParser()
-
 parser.add_argument("-g",dest="gm",default="boxfill",choices = ["boxfill","isofill","meshfill","isoline","vector","1d"])
 parser.add_argument("-s",dest="src",default="vcs",choices=["vcs","canvas","gm"])
 parser.add_argument("-b",dest="baseline")
-
-
 args = parser.parse_args()
 
-x=vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200, 1091, units="pixels")
-x.drawlogooff()
+x = regression.init()
 
 exec("gm = x.create%s()" % args.gm)
 
@@ -55,7 +40,4 @@ else:
 fnm = "test_vcs_colormaps_source_%s_%s.png" % (args.gm,args.src)
 x.png(fnm)
 baselineImage = args.baseline
-ret = checkimage.check_result_image(fnm, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+ret = regression.run(x, fnm, baselineImage)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_colorpicker_appearance.py b/testing/vcs/test_vcs_colorpicker_appearance.py
index c92534071..4ccba61fd 100644
--- a/testing/vcs/test_vcs_colorpicker_appearance.py
+++ b/testing/vcs/test_vcs_colorpicker_appearance.py
@@ -1,7 +1,6 @@
 import vcs, vtk
 
 picker = vcs.colorpicker.ColorPicker(500, 250, None, 0)
-
 win = picker.render_window
 
 win.Render()
@@ -16,8 +15,6 @@ png_writer.Write()
 
 import sys, os
 if len(sys.argv) > 1:
-    src = sys.argv[1]
-    sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
-    import checkimage
-    ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+    import testing.regression as regression
+    ret = regression.check_result_image(fnm, sys.argv[1])
     sys.exit(ret)
diff --git a/testing/vcs/test_vcs_configurator_resize.py b/testing/vcs/test_vcs_configurator_resize.py
index b6179626d..7692e6218 100644
--- a/testing/vcs/test_vcs_configurator_resize.py
+++ b/testing/vcs/test_vcs_configurator_resize.py
@@ -20,9 +20,7 @@ png_writer.Write()
 
 import sys, os
 if len(sys.argv) > 1:
-    pth = os.path.join(os.path.dirname(__file__), "..")
-    sys.path.append(pth)
-    import checkimage
+    import testing.regression as regression
     src = sys.argv[1]
-    ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-    sys.exit(ret)
+    ret = regression.check_result_image(fnm, src)
+    sys.exit(ret)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_continents.py b/testing/vcs/test_vcs_continents.py
index 9ae3d62de..c102df9de 100644
--- a/testing/vcs/test_vcs_continents.py
+++ b/testing/vcs/test_vcs_continents.py
@@ -1,22 +1,14 @@
-import cdms2
-import os
-import sys
-import vcs
-import EzTemplate
+import os, sys, EzTemplate, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 clt = dataFile("clt", time="1979-1-1", squeeze=1)
 
-
 # Zero out the array so we can see the continents clearly
 clt[:] = 0
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill = canvas.createboxfill()
@@ -64,15 +56,4 @@ for i in range(12):
         canvas.plot(clt, template, boxfill, continents=7, continents_line=cont_line, bg=1)
         os.environ["UVCDAT_DIR"] = current_dotdir
 
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_continents.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    25)
-sys.exit(ret)
+regression.run(canvas, "test_continents.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_create_get.py b/testing/vcs/test_vcs_create_get.py
index ec525d1b4..adb879d64 100644
--- a/testing/vcs/test_vcs_create_get.py
+++ b/testing/vcs/test_vcs_create_get.py
@@ -1,4 +1,3 @@
-
 import vcs
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_draw_logo_on.py b/testing/vcs/test_vcs_draw_logo_on.py
index 65271eb15..4a0c28d2c 100644
--- a/testing/vcs/test_vcs_draw_logo_on.py
+++ b/testing/vcs/test_vcs_draw_logo_on.py
@@ -1,19 +1,9 @@
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = vcs.init()
 a=numpy.arange(100)
 a.shape=(10,10)
 x.plot(a,bg=1)
 fnm = "test_vcs_draw_logo_on.png"
 x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.check_result_image(fnm, sys.argv[1])
diff --git a/testing/vcs/test_vcs_fillarea_transparency.py b/testing/vcs/test_vcs_fillarea_transparency.py
index dc3a8bf4b..831b3e029 100644
--- a/testing/vcs/test_vcs_fillarea_transparency.py
+++ b/testing/vcs/test_vcs_fillarea_transparency.py
@@ -1,13 +1,6 @@
-import vcs
-import sys,os
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, sys, os, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 
 fa1 = x.createfillarea()
 
@@ -29,8 +22,4 @@ x.plot(fa1,bg=True)
 x.plot(fa2,bg=True)
 
 fnm = os.path.split(__file__[:-2]+"png")[-1]
-x.png(fnm)
-src = sys.argv[1]
-
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_first_png_blank.py b/testing/vcs/test_vcs_first_png_blank.py
index d11c59e6d..1e0bd8e28 100644
--- a/testing/vcs/test_vcs_first_png_blank.py
+++ b/testing/vcs/test_vcs_first_png_blank.py
@@ -1,20 +1,7 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 T=f('clt')
-v = vcs.init()
-v.setantialiasing(0)
-v.setbgoutputdimensions(1200,1091,units="pixels")
+v = regression.init()
 v.plot(T,bg=1)
-# This will write a blank plot to a file:
-fnm = "first_png_blank.png"
-v.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(v, 'first_png_blank.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipNone.py b/testing/vcs/test_vcs_flipNone.py
index a76e271f1..79b69ffdd 100644
--- a/testing/vcs/test_vcs_flipNone.py
+++ b/testing/vcs/test_vcs_flipNone.py
@@ -1,26 +1,8 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
-
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1,level=(0,10000))
 x.plot(s,bg=1)
-fnm = "test_vcs_flipNone.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipNone.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipX.py b/testing/vcs/test_vcs_flipX.py
index 0de7001aa..e211bf16a 100644
--- a/testing/vcs/test_vcs_flipX.py
+++ b/testing/vcs/test_vcs_flipX.py
@@ -1,26 +1,8 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
-
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1,latitude=(90,-90),level=(0,10000))
 x.plot(s,bg=1)
-fnm = "test_vcs_flipX.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipX.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipXY.py b/testing/vcs/test_vcs_flipXY.py
index 8dd0f8d89..779a0fe7e 100644
--- a/testing/vcs/test_vcs_flipXY.py
+++ b/testing/vcs/test_vcs_flipXY.py
@@ -1,26 +1,9 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
 
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1,latitude=(90,-90))
 x.plot(s,bg=1)
-fnm = "test_vcs_flipXY.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipXY.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipY.py b/testing/vcs/test_vcs_flipY.py
index 7194f3f30..5efa57745 100644
--- a/testing/vcs/test_vcs_flipY.py
+++ b/testing/vcs/test_vcs_flipY.py
@@ -1,26 +1,8 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
-
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1)
 x.plot(s,bg=1)
-fnm = "test_vcs_flipY.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipY.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_gen_meshfill.py b/testing/vcs/test_vcs_gen_meshfill.py
index b726a20a6..e59940554 100644
--- a/testing/vcs/test_vcs_gen_meshfill.py
+++ b/testing/vcs/test_vcs_gen_meshfill.py
@@ -1,39 +1,16 @@
+import os, sys, numpy, vcs, testing.regression as regression
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 
 data = numpy.array([1,2,3,4])
-
 blon = numpy.array([-1,1,1,0,-1])
 blat = numpy.array([0,0,1,2,1])
-
 acell=numpy.array([blat,blon])
 bcell = numpy.array([blat,blon+2.5])
 ccell = numpy.array([blat+2.5,blon+2.5])
 dcell = numpy.array([blat+2.5,blon])
-
 mesh = numpy.array([acell,bcell,ccell,dcell])
-
 m=x.createmeshfill()
 
 x.plot(data,mesh,m,bg=1)
-
-
-fnm = "test_vcs_gen_meshfill.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, "test_vcs_gen_meshfill.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_gms_animate_projected_plots.py b/testing/vcs/test_vcs_gms_animate_projected_plots.py
index 2e521cd4a..3de234e41 100644
--- a/testing/vcs/test_vcs_gms_animate_projected_plots.py
+++ b/testing/vcs/test_vcs_gms_animate_projected_plots.py
@@ -1,15 +1,5 @@
 # Test animation of projected plots
-
-import argparse
-import cdms2
-import MV2
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage  # noqa
+import argparse, os, sys, cdms2, MV2, vcs, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Testing animation of projected plots")
 p.add_argument("--gm_type", dest="gm", help="gm to test")
@@ -19,18 +9,13 @@ p.add_argument("--source", dest="src", help="path to baseline image")
 p.add_argument("--keep", dest="keep", action="store_true", default=False,
                help="Save images, even if baseline matches.")
 p.add_argument("--threshold", dest="threshold", type=int,
-               default=checkimage.defaultThreshold,
+               default=regression.defaultThreshold,
                help="Threshold value for image differnces")
 
 args = p.parse_args(sys.argv[1:])
 
 gm_type = args.gm
-
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1091, units="pixels")
-
+x = regression.init()
 s = None
 
 if gm_type == "meshfill":
@@ -72,7 +57,7 @@ pngs = x.animate.close(preserve_pngs=True)  # so we can look at them again
 ret = 0
 pdir = os.path.split(pngs[0])[0]
 p = pdir + os.sep + "anim_0.png"
-ret = checkimage.check_result_image(p, args.src, args.threshold)
+ret = regression.check_result_image(p, args.src, args.threshold)
 if ret == 0 and not args.keep:
     for f in pngs:
         if os.path.isfile(f):
diff --git a/testing/vcs/test_vcs_gms_patterns_hatches.py b/testing/vcs/test_vcs_gms_patterns_hatches.py
index 4030429a0..a7681a420 100644
--- a/testing/vcs/test_vcs_gms_patterns_hatches.py
+++ b/testing/vcs/test_vcs_gms_patterns_hatches.py
@@ -1,14 +1,4 @@
-# Test the use of patterns/hatches for plots
-
-import argparse
-import cdms2
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage  # noqa
+import argparse, os, sys, cdms2, vcs, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Patterns/hatches testing code for vcs gms")
 p.add_argument("--source", dest="src", help="source image file")
@@ -22,7 +12,7 @@ p.add_argument("--lat2", dest="lat2", default=90, type=float, help="Last latitud
 p.add_argument("--lon1", dest="lon1", default=-180, type=float, help="First Longitude")
 p.add_argument("--lon2", dest="lon2", default=180, type=float, help="Last Longitude")
 p.add_argument("--keep", dest="keep", action="store_true", help="Save image, even if baseline matches.")
-p.add_argument("--threshold", dest="threshold", type=int, default=checkimage.defaultThreshold,
+p.add_argument("--threshold", dest="threshold", type=int, default=regression.defaultThreshold,
         help="Default threshold")
 p.add_argument("--non-contiguous", dest="contig", default=True, action="store_false", help="use non contiguous levels")
 
@@ -98,7 +88,7 @@ fnm += nm_xtra
 x.png(fnm)
 print "fnm:", fnm
 print "src:", src
-ret = checkimage.check_result_image(fnm+'.png', src,
+ret = regression.check_result_image(fnm+'.png', src,
                                     args.threshold,
                                     cleanup=not args.keep)
 if args.show:
diff --git a/testing/vcs/test_vcs_hatches_patterns.py b/testing/vcs/test_vcs_hatches_patterns.py
index 8cd1766f0..8a467259c 100644
--- a/testing/vcs/test_vcs_hatches_patterns.py
+++ b/testing/vcs/test_vcs_hatches_patterns.py
@@ -1,17 +1,6 @@
-import os
-import sys
-import vcs
+import os, sys, vcs, testing.regression as regression
 
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-baselineImage = sys.argv[1]
-
-# Initialize the graphics canvas
-x = vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200, 1091, units="pixels")
+x = regression.init()
 
 # Create a test plot for listing all the hatches and patterns
 style_list = []
@@ -77,11 +66,4 @@ plot_title.y = [.9]
 x.plot(plot_title, bg=1)
 x.plot(fill_test, bg=1)
 x.plot(fill_info, bg=1)
-
-testImage = os.path.abspath("test_vcs_hatches_patterns.png")
-x.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(x, "test_vcs_hatches_patterns.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_import.py b/testing/vcs/test_vcs_import.py
index c12242125..01c3d0447 100644
--- a/testing/vcs/test_vcs_import.py
+++ b/testing/vcs/test_vcs_import.py
@@ -1,3 +1,2 @@
-
 import vcs
 
diff --git a/testing/vcs/test_vcs_infinity.py b/testing/vcs/test_vcs_infinity.py
index 235f551eb..daf406016 100644
--- a/testing/vcs/test_vcs_infinity.py
+++ b/testing/vcs/test_vcs_infinity.py
@@ -1,28 +1,16 @@
-
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import numpy
+import os, sys, numpy, MV2, cdms2, testing.regression as regression
 
 s= numpy.sin(numpy.arange(100))
-s=numpy.reshape(s,(10,10))
+s = numpy.reshape(s,(10,10))
 
 s[4,6] = numpy.inf
 s[7,9] = numpy.NINF
 s[9,2] = numpy.nan
 
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
 x.plot(s,bg=1)
 fnm = "infinity.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_iso_celine_part1.py b/testing/vcs/test_vcs_iso_celine_part1.py
index 586918d36..a8fcc5ae7 100644
--- a/testing/vcs/test_vcs_iso_celine_part1.py
+++ b/testing/vcs/test_vcs_iso_celine_part1.py
@@ -1,22 +1,13 @@
+import os,sys, MV2, numpy, vcs, cdms2, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
 src=sys.argv[1]
 pth0 = os.path.dirname(__file__)
-pth = os.path.join(pth0,"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(pth0,"celine.nc"))
-s=f("data")
-x=vcs.init()
+f = cdms2.open(os.path.join(pth0,"celine.nc"))
+s = f("data")
+x = regression.init()
 x.setantialiasing(0)
-x.drawlogooff()
 x.scriptrun(os.path.join(pth0,"celine.json"))
-i=x.getisofill("celine")
+i = x.getisofill("celine")
 x.plot(s,i,bg=1)
 fnm = "test_celine_iso.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_iso_celine_part2.py b/testing/vcs/test_vcs_iso_celine_part2.py
index f99f821da..c1c1df5b1 100644
--- a/testing/vcs/test_vcs_iso_celine_part2.py
+++ b/testing/vcs/test_vcs_iso_celine_part2.py
@@ -1,26 +1,15 @@
+import os, sys, MV2, numpy, vcs, cdms2, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
 pth0 = os.path.dirname(__file__)
-pth = os.path.join(pth0,"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(pth0,"celine.nc"))
-s=f("data")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
+f = cdms2.open(os.path.join(pth0,"celine.nc"))
+s = f("data")
+x = regression.init()
 x.scriptrun(os.path.join(pth0,"celine.json"))
-i=x.getisofill("celine")
-b=vcs.createboxfill()
+i = x.getisofill("celine")
+b = vcs.createboxfill()
 b.levels=i.levels
 b.fillareacolors=i.fillareacolors
 b.boxfill_type="custom"
 x.plot(s,b,bg=1)
 fnm = "test_celine_iso_2.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isofill_data_read_north_to_south.py b/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
index 5f12f65b0..8d1cc949c 100644
--- a/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
+++ b/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
@@ -1,24 +1,10 @@
-import cdms2
-import vcs
-import sys
-import os
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
+
 f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 clt = f("clt",latitude=(80.0, 38.0),squeeze=1,longitude=(-180.0, 180.0),time=slice(0,1))
-x = vcs.init()
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.setantialiasing(0)
+x = regression.init()
 gm = vcs.createisofill()
 gm.projection="polar"
 x.plot( clt,gm,bg=1)
 fnm = os.path.split(__file__)[-1][:-2]+"png"
-src= sys.argv[1]
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isofill_isoline_labels.py b/testing/vcs/test_vcs_isofill_isoline_labels.py
index d230fc8fc..192ffc4bf 100644
--- a/testing/vcs/test_vcs_isofill_isoline_labels.py
+++ b/testing/vcs/test_vcs_isofill_isoline_labels.py
@@ -1,24 +1,10 @@
-import vcs,cdms2,sys,os
-
-baselineImage = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 isofill = canvas.createisofill()
-
-# First plot the isofill
 canvas.plot(data, isofill, bg=1)
-
 isoline = canvas.createisoline()
 isoline.label="y"
 texts=[]
@@ -35,13 +21,6 @@ for i in range(10):
 isoline.text = texts
 isoline.linecolors = colors
 
-# Next plot the isolines with labels
+# Plot the isolines with labels
 canvas.plot(data, isoline, bg=1)
-
-testImage = os.path.abspath("test_isofill_isoline_labels.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(canvas, "test_isofill_isoline_labels.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isofill_mask_cell_shift.py b/testing/vcs/test_vcs_isofill_mask_cell_shift.py
index 9bc867113..199b1586d 100644
--- a/testing/vcs/test_vcs_isofill_mask_cell_shift.py
+++ b/testing/vcs/test_vcs_isofill_mask_cell_shift.py
@@ -1,25 +1,8 @@
-import os,sys
-import MV2
-import vcs
-import cdms2
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,1),latitude=(30,70),longitude=(-130,-60))
-s2=MV2.masked_greater(s,65.)
+import os, sys, MV2, cdms2, vcs, testing.regression as regression
 
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,1),latitude=(30, 70),longitude=(-130, -60))
+s2 = MV2.masked_greater(s, 65.)
 x.plot(s2,"default","isofill",bg=1)
-fnm = "test_vcs_isofill_mask_cell_shift.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_isofill_mask_cell_shift.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_labels.py b/testing/vcs/test_vcs_isoline_labels.py
index 77033f2c8..460235431 100644
--- a/testing/vcs/test_vcs_isoline_labels.py
+++ b/testing/vcs/test_vcs_isoline_labels.py
@@ -1,20 +1,8 @@
-import vcs,cdms2,sys,os
-
-# ('/path/to/filename', '.extension')
-baseline = os.path.splitext(sys.argv[1])
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 isoline = canvas.createisoline()
 isoline.label="y"
 texts=[]
@@ -33,24 +21,17 @@ isoline.text = texts
 # First test using isoline.text[...].color
 canvas.plot(data, isoline, bg=1)
 
+baseline = os.path.splitext(sys.argv[1])
 baselineImage = "%s%s"%baseline
-testImage = os.path.abspath("test_isoline_labels.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
+ret = regression.run_wo_terminate(canvas, "test_isoline_labels.png", baselineImage)
 
 # Now set isoline.linecolors and test again.
 canvas.clear()
 isoline.linecolors = colors
 canvas.plot(data, isoline, bg=1)
-
 baselineImage = "%s%d%s"%(baseline[0], 2, baseline[1])
 testImage = os.path.abspath("test_isoline_labels2.png")
-canvas.png(testImage)
-
-ret += checkimage.check_result_image(testImage, baselineImage,
-                                     checkimage.defaultThreshold)
+ret += regression.run_wo_terminate(canvas, testImage, baselineImage)
 
 # Now set isoline.textcolors and test again.
 canvas.clear()
@@ -59,9 +40,6 @@ canvas.plot(data, isoline, bg=1)
 
 baselineImage = "%s%d%s"%(baseline[0], 3, baseline[1])
 testImage = os.path.abspath("test_isoline_labels3.png")
-canvas.png(testImage)
-
-ret += checkimage.check_result_image(testImage, baselineImage,
-                                     checkimage.defaultThreshold)
+ret += regression.run_wo_terminate(canvas, testImage, baselineImage)
 
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_isoline_labels_background.py b/testing/vcs/test_vcs_isoline_labels_background.py
index da1d1e6ac..08088836a 100644
--- a/testing/vcs/test_vcs_isoline_labels_background.py
+++ b/testing/vcs/test_vcs_isoline_labels_background.py
@@ -1,26 +1,13 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 import random
 
-# ('/path/to/filename', '.extension')
-baseline = os.path.splitext(sys.argv[1])
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
 (latmin, latmax, lonmin, lonmax) = (-90, 90, -180, 180)
 dataset = cdms2.open(os.path.join(vcs.sample_data, "tas_cru_1979.nc"))
 data = dataset("tas", time=slice(0, 1), latitude=(latmin, latmax),
                longitude=(lonmin, lonmax, 'co'), squeeze=1)
 dataset.close()
 
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 canvas.backgroundcolor = [100, 105, 105]
 
 isoline = canvas.createisoline()
@@ -50,11 +37,6 @@ isoline.labelskipdistance = 15.0
 
 # First test using isoline.text[...].color
 canvas.plot(data, isoline, bg=1)
-
+baseline = os.path.splitext(sys.argv[1])
 baselineImage = "%s%s" % baseline
-testImage = os.path.abspath("test_isoline_labels_background.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, baselineImage)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py b/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py
index da5e5d3c3..8c6e13a7e 100644
--- a/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py
+++ b/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py
@@ -1,30 +1,18 @@
-import vcs
-import cdms2
-import os,sys
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-iso=x.createisoline()
-t=x.createtext()
-t.color=243
-t.height=25
-to=x.createtextorientation()
-to.height = 55
-tt=x.createtexttable()
-tt.color=245
-iso.textcolors=[None,None,None,242,244]
-iso.text=[t,tt,to]
-iso.label="y"
-x.plot(s,iso,bg=1)
-x.png("test_vcs_isoline_labels_multi_label_input_types.png")
-
-src=sys.argv[1]
+import os, sys, cdms2, vcs, testing.regression as regression
 
-ret = checkimage.check_result_image('test_vcs_isoline_labels_multi_label_input_types.png',src,checkimage.defaultThreshold)
-sys.exit(ret)
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt")
+x = regression.init()
+iso = x.createisoline()
+t = x.createtext()
+t.color = 243
+t.height = 25
+to = x.createtextorientation()
+to.height = 55
+tt = x.createtexttable()
+tt.color = 245
+iso.textcolors = [None,None,None,242,244]
+iso.text = [t,tt,to]
+iso.label = "y"
+x.plot(s, iso, bg=1)
+regression.run(x, "test_vcs_isoline_labels_multi_label_input_types.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_labelskipdistance.py b/testing/vcs/test_vcs_isoline_labelskipdistance.py
index 04ecdfabb..4b119e0bc 100644
--- a/testing/vcs/test_vcs_isoline_labelskipdistance.py
+++ b/testing/vcs/test_vcs_isoline_labelskipdistance.py
@@ -1,21 +1,8 @@
-import cdms2
-import os
-import sys
-import vcs
-
-baselineImage = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 isoline = canvas.createisoline()
 isoline.label = "y"
@@ -36,11 +23,4 @@ isoline.linecolors = colors
 
 # Next plot the isolines with labels
 canvas.plot(data, isoline, bg=1)
-
-testImage = os.path.abspath("test_isoline_labelskipdistance.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(canvas, "test_isoline_labelskipdistance.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_numpy.py b/testing/vcs/test_vcs_isoline_numpy.py
index ced140e77..147f2f499 100644
--- a/testing/vcs/test_vcs_isoline_numpy.py
+++ b/testing/vcs/test_vcs_isoline_numpy.py
@@ -1,21 +1,13 @@
-import vcs,cdms2,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
 x.setantialiasing(0)
 x.setbgoutputdimensions(1200,1091,units="pixels")
 x.drawlogooff()
 fnm = os.path.join(vcs.sample_data,'clt.nc')
-f=cdms2.open(fnm)
-
-s=f("clt")
-gm=x.createisofill()
+f = cdms2.open(fnm)
+s = f("clt")
+gm = x.createisofill()
 x.plot(s.filled(),gm,bg=1)
 fnm = "test_vcs_isoline_numpy.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_width_stipple.py b/testing/vcs/test_vcs_isoline_width_stipple.py
index b563436c2..5da8f91f4 100644
--- a/testing/vcs/test_vcs_isoline_width_stipple.py
+++ b/testing/vcs/test_vcs_isoline_width_stipple.py
@@ -1,22 +1,8 @@
-import cdms2
-import os
-import sys
-import vcs
-
-baselineImage = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage  # noqa
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 isoline = canvas.createisoline()
 isoline.label = "y"
 texts = []
@@ -35,15 +21,8 @@ for i in range(7):
 isoline.levels = levels
 isoline.text = texts
 isoline.linecolors = colors
-
 isoline.linewidths = (1, 2, 3, 4, 1)
 isoline.line = ('dot', 'dash', 'solid', 'dash-dot', 'long-dash', 'dot', 'dash')
-
 # Next plot the isolines with labels
 canvas.plot(data, isoline, bg=1)
-
-testImage = os.path.abspath("test_isoline_width_stipple.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage, 30)
-sys.exit(ret)
+regression.run(canvas, "test_isoline_width_stipple.png")
diff --git a/testing/vcs/test_vcs_issue_960_labels.py b/testing/vcs/test_vcs_issue_960_labels.py
index ee45216cd..7da2104ff 100644
--- a/testing/vcs/test_vcs_issue_960_labels.py
+++ b/testing/vcs/test_vcs_issue_960_labels.py
@@ -1,29 +1,19 @@
-import sys,os,cdms2,vcs
-import vcs
-src1=sys.argv[1]
-src2=sys.argv[2]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
+
+src1 = sys.argv[1]
+src2 = sys.argv[2]
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",time=slice(0,1),latitude=(-7,5),squeeze=1)
 x.plot(s,bg=1)
 fnm = "test_vcs_issue_960_labels_1.png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src1
-ret = checkimage.check_result_image(fnm,src1,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src1)
 b=x.createboxfill()
 b.datawc_y1=-7
 b.datawc_y2=5
 x.plot(s,b,bg=1)
 fnm = "test_vcs_issue_960_labels_2.png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src2
-ret += checkimage.check_result_image(fnm,src2,checkimage.defaultThreshold)
-sys.exit(ret)
+ret += regression.check_result_image(fnm, src2)
+sys.exit(ret)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_lambert.py b/testing/vcs/test_vcs_lambert.py
index 17a50209f..c78e71e9b 100644
--- a/testing/vcs/test_vcs_lambert.py
+++ b/testing/vcs/test_vcs_lambert.py
@@ -1,27 +1,10 @@
-import vcs,cdms2
-import os,sys
+import os, sys, cdms2, vcs, testing.regression as regression
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 iso = x.createisofill()
 p=x.createprojection()
 p.type="lambert"
-
 iso.projection = p
 x.plot(s(latitude=(20, 60),longitude=(-140,-20)), iso, bg=True)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_vcs_lambert.png"
-x.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_lambert.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_large_pattern_hatch.py b/testing/vcs/test_vcs_large_pattern_hatch.py
index 3bcf0827d..782cfb14b 100644
--- a/testing/vcs/test_vcs_large_pattern_hatch.py
+++ b/testing/vcs/test_vcs_large_pattern_hatch.py
@@ -1,31 +1,12 @@
-import vcs
-import sys
-import os
-
-baseline = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1090, units="pixels")
-canvas.drawlogooff()
+import os, sys, vcs, testing.regression as regression
 
+canvas = regression.init()
 fillarea = vcs.createfillarea()
 fillarea.x = [[0, .33, .33, 0], [.33, .67, .67, .33], [.67, 1, 1, .67]]
 fillarea.y = [[0, 0, 1, 1]] * 3
 fillarea.style = ["solid", "pattern", "hatch"]
 fillarea.index = [1, 5, 5]
 fillarea.color = [50, 50, 50]
-
 canvas.plot(fillarea, bg=True)
-
-testImage = os.path.abspath("test_vcs_large_pattern_hatch.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baseline,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+fnm = os.path.abspath("test_vcs_large_pattern_hatch.png")
+regression.run(canvas, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_legend.py b/testing/vcs/test_vcs_legend.py
index 26beec7e5..a352bc080 100644
--- a/testing/vcs/test_vcs_legend.py
+++ b/testing/vcs/test_vcs_legend.py
@@ -1,5 +1,4 @@
-import sys,os
-import argparse
+import os, sys, argparse, cdms2, MV2, vcs, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -14,20 +13,10 @@ args = p.parse_args(sys.argv[1:])
 
 gm_type= args.gm
 src = args.src
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
 
 bg = not args.show
 
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -81,7 +70,7 @@ fnm = "test_vcs_legend_%s_%s_ext1_%s_ext2_%s" % (gm_type.lower(),args.orientatio
 x.png(fnm)
 print "fnm:",fnm
 print "src:",src
-ret = checkimage.check_result_image(fnm+'.png',src,checkimage.defaultThreshold, cleanup=not args.keep)
+ret = regression.check_result_image(fnm+'.png', src,regression.defaultThreshold, cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_lon_axes_freak_out.py b/testing/vcs/test_vcs_lon_axes_freak_out.py
index ecb63f2a5..f18328f58 100644
--- a/testing/vcs/test_vcs_lon_axes_freak_out.py
+++ b/testing/vcs/test_vcs_lon_axes_freak_out.py
@@ -1,30 +1,11 @@
-import os,sys,vcs,cdms2
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt")
+s3 = f("clt",longitude=(0,360))
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt")
-s3=f("clt",longitude=(0,360))
-
-print s.shape,s3.shape
-
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 x.plot(s,bg=1)
 x.clear()
 x.plot(s3,bg=1)
-
-fnm = "test_lon_axes_freak_out.png"
-
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_lon_axes_freak_out.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_markers.py b/testing/vcs/test_vcs_markers.py
index 50f4f00d1..21b7a671b 100644
--- a/testing/vcs/test_vcs_markers.py
+++ b/testing/vcs/test_vcs_markers.py
@@ -1,28 +1,14 @@
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 m = x.createmarker()
-m.x=[[0.,],[5,],[10.,],[15.]]
-m.y=[[0.,],[5,],[10.,],[15.]]
+m.x = [[0.,],[5,],[10.,],[15.]]
+m.y = [[0.,],[5,],[10.,],[15.]]
 m.worldcoordinate=[-5,20,-5,20]
+
 #m.worldcoordinate=[-10,10,0,10]
 m.type=['plus','diamond','square_fill',"hurricane"]
 m.color=[242,243,244,242]
 m.size=[20,20,20,5]
 x.plot(m,bg=1)
-fnm= "test_markers.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_markers.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_matplotlib_colormap.py b/testing/vcs/test_vcs_matplotlib_colormap.py
index 06b9f49b9..baf0e1bb0 100644
--- a/testing/vcs/test_vcs_matplotlib_colormap.py
+++ b/testing/vcs/test_vcs_matplotlib_colormap.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,24 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 canvas.setcolormap(vcs.matplotlib2vcs("viridis"))
-
 canvas.plot(clt, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_matplotlib_colormap.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_matplotlib_colormap.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_mercator_edge.py b/testing/vcs/test_vcs_mercator_edge.py
index eb6d79cdb..31f6cb83b 100644
--- a/testing/vcs/test_vcs_mercator_edge.py
+++ b/testing/vcs/test_vcs_mercator_edge.py
@@ -1,24 +1,9 @@
-import vcs,cdms2
-import os,sys
+import os, sys, cdms2, vcs, testing.regression as regression
+
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 iso = x.createisofill()
 iso.projection = "mercator"
 x.plot(s(latitude=(-90, 90)), iso, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_vcs_mercator_edge.png"
-x.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_mercator_edge.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_meshfill_draw_mesh.py b/testing/vcs/test_vcs_meshfill_draw_mesh.py
index ef214e648..08801d7a6 100644
--- a/testing/vcs/test_vcs_meshfill_draw_mesh.py
+++ b/testing/vcs/test_vcs_meshfill_draw_mesh.py
@@ -1,23 +1,11 @@
-import vcs,cdms2,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.drawlogooff()
-fnmcurv = os.path.join(vcs.sample_data,'sampleCurveGrid4.nc')
-f=cdms2.open(fnmcurv)
+import os, sys, cdms2, vcs, testing.regression as regression
 
-s=f("sample")
-m=x.createmeshfill()
-m.mesh=True
+x = regression.init()
+fnmcurv = os.path.join(vcs.sample_data,'sampleCurveGrid4.nc')
+f = cdms2.open(fnmcurv)
+s = f("sample")
+m = x.createmeshfill()
+m.mesh = True
 
 x.plot(s,m,bg=1)
-fnm = "test_meshfill_draw_mesh.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_meshfill_draw_mesh.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_meshfill_no_wrapping.py b/testing/vcs/test_vcs_meshfill_no_wrapping.py
index 967758c78..9ee4a99f1 100755
--- a/testing/vcs/test_vcs_meshfill_no_wrapping.py
+++ b/testing/vcs/test_vcs_meshfill_no_wrapping.py
@@ -1,23 +1,7 @@
-#!/usr/bin/env python
-import cdms2 
-import os 
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-
-f=cdms2.open(sys.argv[2])
-h=f("heat")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 900, units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
 
+f = cdms2.open(sys.argv[2])
+h = f("heat")
+x = regression.init()
 x.plot(h, bg=1)
-fnm = "vcs_test_meshfill_no_wrapping.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "vcs_test_meshfill_no_wrapping.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_meshfill_regular_grid.py b/testing/vcs/test_vcs_meshfill_regular_grid.py
index 9a955f9ff..77a390b22 100644
--- a/testing/vcs/test_vcs_meshfill_regular_grid.py
+++ b/testing/vcs/test_vcs_meshfill_regular_grid.py
@@ -1,22 +1,7 @@
-import vcs, cdms2, os, sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
+import os, sys, cdms2, vcs, testing.regression as regression
 
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x = vcs.init()
-x.setantialiasing(0)
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
-x.meshfill(s,bg=1)
-fnm = "test_meshfill_regular_grid.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.meshfill(s, bg=1)
+regression.run(x, "test_meshfill_regular_grid.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_meshfill_vertices.py b/testing/vcs/test_vcs_meshfill_vertices.py
index 6317ef071..103e68109 100644
--- a/testing/vcs/test_vcs_meshfill_vertices.py
+++ b/testing/vcs/test_vcs_meshfill_vertices.py
@@ -1,19 +1,8 @@
-import numpy
-import vcs
-import sys
-import os
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, numpy, vcs, testing.regression as regression
 
-x=vcs.init()
-
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1090, units="pixels")
+x = regression.init()
 
 data_values = [ 25, 45, 55.]
-
 data_lon = [ 5., 10., 15.]
 data_lat = [ 5., 10., 15.]
 
@@ -50,8 +39,4 @@ m.levels = [20,30,50,70,80]
 m.mesh = True
 
 x.plot(numpy.array(data_values,),mesh,m,bg=True)
-x.png("test_vcs_meshfill_vertices_issue.png")
-src = sys.argv[1]
-ret = checkimage.check_result_image("test_vcs_meshfill_vertices_issue.png",
-                                    src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_meshfill_vertices_issue.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_meshfill_zoom.py b/testing/vcs/test_vcs_meshfill_zoom.py
index 1026f0ff0..c366d8010 100644
--- a/testing/vcs/test_vcs_meshfill_zoom.py
+++ b/testing/vcs/test_vcs_meshfill_zoom.py
@@ -1,14 +1,4 @@
-#!/usr/bin/env python
-import cdms2
-import os
-import sys
-import vcs
-
-# We test if gm.datawc zooms in correctly into the plot. This works only for
-# data using a linear projection. It does not work for geographic projections.
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 flip = False
 if (len(sys.argv) == 3):
@@ -19,12 +9,10 @@ fileName = os.path.splitext(fileName)[0]
 if (flip):
     fileName = fileName + '_flip'
 fileName = fileName + '.png'
-f=cdms2.open(os.path.join(vcs.sample_data, "sampleCurveGrid4.nc"))
-s=f("sample")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-m=x.createmeshfill()
+f = cdms2.open(os.path.join(vcs.sample_data, "sampleCurveGrid4.nc"))
+s = f("sample")
+x = regression.init()
+m = x.createmeshfill()
 # m.mesh = True
 m.datawc_x1 = -20
 m.datawc_x2 = 20
@@ -33,7 +21,4 @@ if (flip):
 m.datawc_y1 = -20
 m.datawc_y2 = 20
 x.plot(s,m, bg=1)
-x.png(fileName)
-ret = checkimage.check_result_image(fileName, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
-
+regression.run(x, fileName)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_mintics.py b/testing/vcs/test_vcs_mintics.py
index 01f65848b..302c22fd4 100644
--- a/testing/vcs/test_vcs_mintics.py
+++ b/testing/vcs/test_vcs_mintics.py
@@ -1,16 +1,10 @@
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
 box = x.createboxfill()
+
 # Should ignore the string here
 box.xmtics1 = {i:"Test" for i in range(-180, 180, 15) if i % 30 != 0}
 box.ymtics1 = {i:"Test" for i in range(-90, 90, 5) if i % 10 != 0}
@@ -23,10 +17,4 @@ template.xmintic2.priority = 1
 template.xmintic2.y2 += template.xmintic1.y1 - template.xmintic1.y2
 template.ymintic2.priority = 1
 x.plot(s, template, box, bg=1)
-fnm = "test_vcs_mintics.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_mintics.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_missing_colorname.py b/testing/vcs/test_vcs_missing_colorname.py
index 1ed68b475..9b3db7545 100644
--- a/testing/vcs/test_vcs_missing_colorname.py
+++ b/testing/vcs/test_vcs_missing_colorname.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -11,10 +8,7 @@ height, width = clt.shape
 clt.mask = [[True if i % 2 else False for i in range(width)] for _ in range(height)]
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 # Only have to test boxfill because all 2D methods use the same code
@@ -25,16 +19,4 @@ boxfill = canvas.createboxfill()
 boxfill.missing = "Medium Aquamarine"
 
 canvas.plot(clt, boxfill, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_vcs_missing_colorname.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_vcs_missing_colorname.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py b/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py
index 9f30f40ae..4dcd059a0 100644
--- a/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py
+++ b/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py
@@ -1,27 +1,14 @@
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
 
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-t=cdms2.createAxis(numpy.arange(120))
+x = regression.init()
+t = cdms2.createAxis(numpy.arange(120))
 t.designateTime()
-t.id="time"
-t.units="months since 2014"
+t.id = "time"
+t.units = "months since 2014"
 data = MV2.arange(120,0,-1)
-data.id="data"
+data.id = "data"
 data.setAxis(0,t)
-x=vcs.init()
-x.setantialiasing(0)
+x = regression.init()
 x.plot(data,bg=1)
 fnm = 'test_vcs_monotonic_decreasing_yxvsx_default.png'
-
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_oned_level_axis.py b/testing/vcs/test_vcs_oned_level_axis.py
index 25479a56b..ecb708c77 100644
--- a/testing/vcs/test_vcs_oned_level_axis.py
+++ b/testing/vcs/test_vcs_oned_level_axis.py
@@ -1,24 +1,9 @@
+import os, sys, vcs, cdms2, cdutil, testing.regression as regression
 
-import sys,cdutil
-import vcs
-import os
-import cdms2
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-ta=f("ta",time=slice(0,1),squeeze=1)
-ta=cdutil.averager(ta,axis="yx")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
+ta = f("ta",time=slice(0,1),squeeze=1)
+ta = cdutil.averager(ta,axis="yx")
+x = regression.init()
 x.plot(ta,bg=1)
 fnm = "test_oned_level_axis.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_patterns.py b/testing/vcs/test_vcs_patterns.py
index fed4f41a8..c407f7cfe 100644
--- a/testing/vcs/test_vcs_patterns.py
+++ b/testing/vcs/test_vcs_patterns.py
@@ -1,19 +1,8 @@
-import cdms2
-import os
-import sys
-import vcs
-
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import os, sys, vcs, cdms2, testing.regression as regression
 
 f = cdms2.open(vcs.sample_data+"/clt.nc")
 s = f("clt", time=slice(0, 1), squeeze=1)
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1090, units="pixels")
+x = regression.init()
 iso = vcs.createisofill("isoleg")
 iso.levels = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
 iso.fillareastyle = "pattern"
@@ -21,9 +10,4 @@ iso.fillareacolors = vcs.getcolors([0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100])
 iso.fillareaindices = [1, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
 x.plot(s, iso, bg=1)
 fnm = "test_vcs_patterns.png"
-x.png(fnm)
-
-print "fnm:", fnm
-print "src:", src
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold+5.)
-sys.exit(ret)
+regression.run(x, fnm, threshold=regression.defaultThreshold+5.)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_plot_file_var.py b/testing/vcs/test_vcs_plot_file_var.py
index e20f947a6..0dd68945d 100644
--- a/testing/vcs/test_vcs_plot_file_var.py
+++ b/testing/vcs/test_vcs_plot_file_var.py
@@ -1,8 +1,6 @@
-import vcs
-import os
-import sys
-import cdms2
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-V=f("clt")
-x=vcs.init()
-x.plot(V,bg=1)
+import os, sys, vcs, cdms2
+
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+V = f("clt")
+x = vcs.init()
+x.plot(V, bg=1)
diff --git a/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py b/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
index 4aa1d24eb..535ff432e 100644
--- a/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
+++ b/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
@@ -1,21 +1,7 @@
-import vcs
-import os,sys
-import cdms2
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-
-import checkimage
+import vcs, os, sys, cdms2, testing.regression as regression
 
 f = cdms2.open(os.path.join(vcs.sample_data,"sampleCurveGrid4.nc"))
 s = f("sample")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 x.plot(s,bg=1)
-fnm = "test_plot_unstructured_via_boxfill.png"
-src = sys.argv[1]
-x.png(fnm)
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_plot_unstructured_via_boxfill.png")
diff --git a/testing/vcs/test_vcs_png_to_base64.py b/testing/vcs/test_vcs_png_to_base64.py
index 6b88b3511..f6d78e2fa 100644
--- a/testing/vcs/test_vcs_png_to_base64.py
+++ b/testing/vcs/test_vcs_png_to_base64.py
@@ -1,4 +1,4 @@
-import vcs,numpy,cdms2,MV2,os,sys
+import vcs, numpy, cdms2, MV2, os, sys
 
 x = vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_png_window_resize.py b/testing/vcs/test_vcs_png_window_resize.py
index 0adeeb66b..a6346ca28 100644
--- a/testing/vcs/test_vcs_png_window_resize.py
+++ b/testing/vcs/test_vcs_png_window_resize.py
@@ -1,20 +1,9 @@
-import vcs
-import sys
-import os
+import vcs, sys, os, testing.regression as regression
 
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.open(814,628)
 x.plot([1,2,3,4,5,6,7])
 fnm = __file__[:-3]+".png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_polar_set_opt_param_polar.py b/testing/vcs/test_vcs_polar_set_opt_param_polar.py
index 8c508ca3c..4e777fb2b 100644
--- a/testing/vcs/test_vcs_polar_set_opt_param_polar.py
+++ b/testing/vcs/test_vcs_polar_set_opt_param_polar.py
@@ -1,29 +1,13 @@
+import vcs, cdms2, sys, os, testing.regression as regression
 
-import vcs
-import cdms2
-import sys
-import os
-
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
-s=f("clt",slice(0,1),squeeze=1)
-x=vcs.init()
+f = cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
+s = f("clt",slice(0,1),squeeze=1)
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
-i=x.createisofill()
-p=x.getprojection("polar")
+i = x.createisofill()
+p = x.getprojection("polar")
 i.projection=p
 x.plot(s,i,bg=1)
-fnm= "test_polar_set_opt_param_polar.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+regression.run(x, "test_polar_set_opt_param_polar.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_remove_marker_none_1d.py b/testing/vcs/test_vcs_remove_marker_none_1d.py
index f868361b0..e112f86bc 100644
--- a/testing/vcs/test_vcs_remove_marker_none_1d.py
+++ b/testing/vcs/test_vcs_remove_marker_none_1d.py
@@ -3,31 +3,13 @@
 #
 # J-Y Peterschmitt - LSCE - 03/2015
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
 dummy_data = numpy.arange(50, dtype=numpy.float32)
-
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 gm = x.createyxvsx('test_yxvsx')
 
-
-# Remove the marker
 gm.marker = None
-
 x.plot(gm, dummy_data,bg=1)
-
 fnm = "test_remove_marker_none_1d.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-# The end
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_setcolormap.py b/testing/vcs/test_vcs_setcolormap.py
index c3e639660..78d7fad81 100644
--- a/testing/vcs/test_vcs_setcolormap.py
+++ b/testing/vcs/test_vcs_setcolormap.py
@@ -1,32 +1,12 @@
 
-import cdms2
-import os
-import sys
-import vcs
-
-baselineFilename = sys.argv[1]
-checkImagePath = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(checkImagePath)
-import checkimage
+import cdms2, os, sys, vcs, testing.regression as regression
 
 cdmsfile = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data = cdmsfile('clt')
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 t=x.gettemplate('default')
 x.plot(data, t, bg=True)
 
 # This should force the image to update
 x.setcolormap('bl_to_drkorang')
-
-testFilename = "test_vcs_setcolormap.png"
-x.png(testFilename)
-
-ret = checkimage.check_result_image(testFilename,
-                                    baselineFilename,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_setcolormap.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba.py b/testing/vcs/test_vcs_settings_color_name_rgba.py
index 0b6aeef22..0fa6ec288 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba.py
@@ -1,28 +1,13 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-data=f("clt",slice(0,1,))
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+data = f("clt",slice(0,1,))
 gm = x.createisofill()
 gm.levels = range(0,110,10)
 gm.fillareacolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_isofill.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+fnm = "test_vcs_settings_color_name_rgba_isofill.png"
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_1d.py b/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
index 0fe844c20..8bca782a6 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
@@ -1,16 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data=f("clt")[:,5,8]
@@ -18,10 +8,4 @@ gm = x.create1d()
 gm.linecolor="salmon"
 gm.markercolor = [0,0,100]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_1d.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_1d.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py b/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
index 043d331c8..34228513e 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
@@ -1,17 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data=f("clt",slice(0,1,))
 gm = x.createboxfill()
@@ -20,10 +9,4 @@ gm.levels = range(0,110,10)
 gm.fillareacolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_boxfill.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_boxfill.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py b/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
index 200293dbe..c23edc2a6 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
@@ -1,17 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
+x=regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data=f("clt",slice(0,1,))
 gm = x.createisoline()
@@ -19,10 +8,4 @@ gm.levels = range(0,110,10)
 gm.linecolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_isoline.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_isoline.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py b/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
index 4a0858d1d..0b3ffc871 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
@@ -1,17 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"sampleCurveGrid4.nc"))
 data=f("sample")
 gm = x.createmeshfill()
@@ -19,10 +8,4 @@ gm.levels = range(0,1501,150)
 gm.fillareacolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_meshfill.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_meshfill.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_star_triangle_markers.py b/testing/vcs/test_vcs_star_triangle_markers.py
index 4564e4059..2d75e05d0 100644
--- a/testing/vcs/test_vcs_star_triangle_markers.py
+++ b/testing/vcs/test_vcs_star_triangle_markers.py
@@ -1,9 +1,5 @@
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
 x=vcs.init()
 x.drawlogooff()
@@ -16,12 +12,5 @@ m.x = [[.1], [.3], [.5], [.7], [.9]]
 m.y = [[.1], [.3], [.5], [.7], [.9]]
 m.color = [200, 150, 160, 175, 125]
 m.size = [50, 50, 50, 50, 50]
-x.plot(m,bg=1)
-fnm = "test_star_triangle_markers.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.plot(m, bg=1)
+regression.run(x, "test_star_triangle_markers.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_taylor_2quads.py b/testing/vcs/test_vcs_taylor_2quads.py
index 3458acb91..84f102615 100644
--- a/testing/vcs/test_vcs_taylor_2quads.py
+++ b/testing/vcs/test_vcs_taylor_2quads.py
@@ -1,17 +1,6 @@
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import vcs, MV2
+import sys, os, vcs, MV2, testing.regression as regression
 
-
-bg=True
-
-#
-# First create some sample data
-#
 data = MV2.array([[-0.50428531,-0.8505522 ,],
  [ 0.70056821,-0.27235352,],
  [ 0.05106154, 0.23012322,],
@@ -19,20 +8,8 @@ data = MV2.array([[-0.50428531,-0.8505522 ,],
  [ 0.85760801,-0.08336641,],
  [ 1.14083397,-0.78326507,]])
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-if bg:
-  x.setbgoutputdimensions(1200,1091,units="pixels")
-
-td=x.createtaylordiagram('new')
-
+x = regression.init()
+td = x.createtaylordiagram('new')
 td.quadrans = 2
-x.plot(data,td,skill = td.defaultSkillFunction,bg=bg)
-fnm = "test_vcs_taylor_2quads.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-if not bg:
-    raw_input("Press Enter")
-sys.exit(ret)
+x.plot(data, td, skill = td.defaultSkillFunction, bg=1)
+regression.run(x, "test_vcs_taylor_2quads.png")
diff --git a/testing/vcs/test_vcs_taylor_template_ctl.py b/testing/vcs/test_vcs_taylor_template_ctl.py
index 40b78f0dc..b6f610c7d 100644
--- a/testing/vcs/test_vcs_taylor_template_ctl.py
+++ b/testing/vcs/test_vcs_taylor_template_ctl.py
@@ -1,20 +1,8 @@
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import vcs,MV2
+import sys, os, vcs, MV2
+import testing.regression as regression
 
-bg=True
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-if bg:
-  x.setbgoutputdimensions(1200,1091,units="pixels")
-if not bg:
-    x.open()
+x = regression.init()
 
 ## Create a template from the default taylor diagram
 t=x.createtemplate('mytaylor','deftaylor')
@@ -61,12 +49,5 @@ t.xmintic2.priority=1
 # Create some dummy data for display purposes
 data=MV2.array([[1.52,.52,],[.83,.84]])
 
-x.plot(data,t,td,bg=bg)
-fnm="test_vcs_taylor_template_ctl.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-if not bg:
-    raw_input("Press Enter")
-sys.exit(ret)
+x.plot(data, t, td, bg=1)
+regression.run(x, "test_vcs_taylor_template_ctl.png")
diff --git a/testing/vcs/test_vcs_user_passed_date.py b/testing/vcs/test_vcs_user_passed_date.py
index 643d2019c..bb5b18a0d 100644
--- a/testing/vcs/test_vcs_user_passed_date.py
+++ b/testing/vcs/test_vcs_user_passed_date.py
@@ -1,18 +1,9 @@
 import vcs,cdms2,os,sys,cdtime
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
+
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",squeeze=1)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
 x.plot(s,bg=1,time=cdtime.comptime(2015))
 fnm = os.path.split(__file__)[1][:-3]+".png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_user_passed_date_as_string.py b/testing/vcs/test_vcs_user_passed_date_as_string.py
index cd5d777f3..e9bdf83e6 100644
--- a/testing/vcs/test_vcs_user_passed_date_as_string.py
+++ b/testing/vcs/test_vcs_user_passed_date_as_string.py
@@ -1,18 +1,11 @@
 import vcs,cdms2,os,sys,cdtime
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
+
+x = regression.init()
+
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",squeeze=1)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
 x.plot(s,bg=1,time='2015-02-23')
 fnm = os.path.split(__file__)[1][:-3]+".png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_vectors_missing.py b/testing/vcs/test_vcs_vectors_missing.py
index fd53c97e1..5e8b08e5b 100644
--- a/testing/vcs/test_vcs_vectors_missing.py
+++ b/testing/vcs/test_vcs_vectors_missing.py
@@ -1,6 +1,6 @@
 
-import sys,os
-import argparse
+import sys, os, argparse
+import vcs, cdms2, vtk, MV2, numpy, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -14,28 +14,17 @@ if not args.show:
   src = args.src
   pth = os.path.join(os.path.dirname(__file__),"..")
   sys.path.append(pth)
-  import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
-import numpy
-
 
 bg = not args.show
-
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
   x.setbgoutputdimensions(1200,1091,units="pixels")
 x.setcolormap("rainbow")
-gm=vcs.createvector()
+gm = vcs.createvector()
 gm.scale = args.scale
-nm_xtra=""
+nm_xtra = ""
 xtra = {}
 import cdms2
 import os
@@ -52,14 +41,12 @@ if args.show:
   pass
   #x.interact()
 else:
-  fnm = "test_vcs_vectors_missing" 
+  fnm = "test_vcs_vectors_missing"
   if args.scale!=1.:
     fnm+="_%.1g" % args.scale
   fnm+=nm_xtra
   x.png(fnm)
-  print "fnm:",fnm
-  print "src:",src
-  ret = checkimage.check_result_image(fnm+'.png',src,checkimage.defaultThreshold, cleanup=not args.keep)
+  ret = regression.check_result_image(fnm+'.png', src, regression.defaultThreshold, cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_vectors_robinson.py b/testing/vcs/test_vcs_vectors_robinson.py
index 49052c679..5cde92c82 100644
--- a/testing/vcs/test_vcs_vectors_robinson.py
+++ b/testing/vcs/test_vcs_vectors_robinson.py
@@ -1,13 +1,7 @@
 import vcs, cdms2, numpy, os, sys
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1091, units="pixels")
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 u = f("u")
 v = f("v")
@@ -16,8 +10,4 @@ p = x.createprojection()
 p.type = "robinson"
 V.projection = p
 x.plot(u,v,V, bg=1)
-
-fnm = "test_vcs_vectors_robinson.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_vectors_robinson.png")
diff --git a/testing/vcs/test_vcs_vectors_robinson_wrap.py b/testing/vcs/test_vcs_vectors_robinson_wrap.py
index 86491cc2f..3cb30f06f 100644
--- a/testing/vcs/test_vcs_vectors_robinson_wrap.py
+++ b/testing/vcs/test_vcs_vectors_robinson_wrap.py
@@ -1,13 +1,7 @@
 import vcs, cdms2, numpy, os, sys
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1091, units="pixels")
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 lon1 = -180
 u = f("clt")
@@ -19,8 +13,4 @@ p = x.createprojection()
 p.type = "robinson"
 V.projection = p
 x.plot(u,v,V, bg=1)
-
-fnm = "test_vcs_vectors_robinson_wrap.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_vectors_robinson_wrap.png")
diff --git a/testing/vcs/test_vcs_verify_boxfill_basics.py b/testing/vcs/test_vcs_verify_boxfill_basics.py
index 7d08a2b75..0d7c4c2d0 100644
--- a/testing/vcs/test_vcs_verify_boxfill_basics.py
+++ b/testing/vcs/test_vcs_verify_boxfill_basics.py
@@ -1,4 +1,3 @@
-
 import vcs
 import numpy
 import cdtime
@@ -12,12 +11,12 @@ b=x.createboxfill()
 assert(b.projection == "linear")
 assert(b.xticlabels1 == "*")
 assert(b.xticlabels2 == "*")
-assert(b.xmtics1 == "") 
+assert(b.xmtics1 == "")
 assert(b.xmtics2 == "")
 assert(b.yticlabels1 == "*")
 assert(b.yticlabels2 == "*")
-assert(b.ymtics1 == "")  
-assert(b.ymtics2 == "")  
+assert(b.ymtics1 == "")
+assert(b.ymtics2 == "")
 assert(numpy.allclose(b.datawc_x1, 1e+20))
 assert(numpy.allclose(b.datawc_x2, 1e+20))
 assert(numpy.allclose(b.datawc_y1, 1e+20))
@@ -71,12 +70,12 @@ assert(b.name == "test_b_ok")
 assert(b.projection == "test_bfill")
 assert(b.xticlabels1 == {23:"Hi"})
 assert(b.xticlabels2 == {23:"Hi"})
-assert(b.xmtics1 == {23:"Hi"}) 
+assert(b.xmtics1 == {23:"Hi"})
 assert(b.xmtics2 == {23:"Hi"})
 assert(b.yticlabels1 == {23:"Hi"})
 assert(b.yticlabels2 == {23:"Hi"})
-assert(b.ymtics1 == {23:"Hi"})  
-assert(b.ymtics2 == {23:"Hi"})  
+assert(b.ymtics1 == {23:"Hi"})
+assert(b.ymtics2 == {23:"Hi"})
 assert(numpy.allclose(b.datawc_x1, 56.7))
 assert(numpy.allclose(b.datawc_x2, 56.7))
 assert(numpy.allclose(b.datawc_y1, 56.7))
diff --git a/testing/vcs/test_vcs_wmo_marker.py b/testing/vcs/test_vcs_wmo_marker.py
index b4478372f..854e4a9be 100644
--- a/testing/vcs/test_vcs_wmo_marker.py
+++ b/testing/vcs/test_vcs_wmo_marker.py
@@ -1,14 +1,10 @@
 
 import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+
+import testing.regression as regression
+x = regression.init()
+
 
 m = x.createmarker()
 M=1
@@ -22,8 +18,5 @@ x.plot(m,bg=1)
 fnm = 'wmo_marker.png'
 x.png(fnm)
 
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "wmo_marker.png")
 
diff --git a/testing/vcs/test_vcs_wmo_markers.py b/testing/vcs/test_vcs_wmo_markers.py
index 5785e0925..5162eeeee 100644
--- a/testing/vcs/test_vcs_wmo_markers.py
+++ b/testing/vcs/test_vcs_wmo_markers.py
@@ -1,26 +1,20 @@
 
 import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
 wmo = ['w00', 'w01', 'w02', 'w03', 'w04', 'w05', 'w06', 'w07', 'w08', 'w09',
        'w10', 'w11', 'w12', 'w13', 'w14', 'w15', 'w16', 'w17', 'w18', 'w19',
-       'w20', 'w21', 'w22', 'w23', 'w24', 'w25', 'w26', 'w27', 'w28', 'w29', 
+       'w20', 'w21', 'w22', 'w23', 'w24', 'w25', 'w26', 'w27', 'w28', 'w29',
        'w30', 'w31', 'w32', 'w33', 'w34', 'w35', 'w36', 'w37', 'w38', 'w39',
        'w40', 'w41', 'w42', 'w43', 'w44', 'w45', 'w46', 'w47', 'w48', 'w49',
        'w50', 'w51', 'w52', 'w53', 'w54', 'w55', 'w56', 'w57', 'w58', 'w59',
-       'w60', 'w61', 'w62', 'w63', 'w64', 'w65', 'w66', 'w67', 'w68', 'w69', 
+       'w60', 'w61', 'w62', 'w63', 'w64', 'w65', 'w66', 'w67', 'w68', 'w69',
        'w70', 'w71', 'w72', 'w73', 'w74', 'w75', 'w76', 'w77', 'w78', 'w79',
        'w80', 'w81', 'w82', 'w83', 'w84', 'w85', 'w86', 'w87', 'w88', 'w89',
        'w90', 'w91', 'w92', 'w93', 'w94', 'w95', 'w96', 'w97', 'w98', 'w99',
        'w100', 'w101', 'w102']
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 
 m = x.createmarker()
 M=7
@@ -37,13 +31,6 @@ for Y in range(7):
 m.x = xs
 m.y = ys
 m.list()
-x.plot(m,bg=1)
-fnm = "wmo_markers.png"
-x.png(fnm)
-
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.plot(m, bg=1)
+regression.run(x, "wmo_markers.png")
 
diff --git a/testing/vcs/vtk_ui/vtk_ui_test.py b/testing/vcs/vtk_ui/vtk_ui_test.py
index facfd7f45..40bbeaef1 100644
--- a/testing/vcs/vtk_ui/vtk_ui_test.py
+++ b/testing/vcs/vtk_ui/vtk_ui_test.py
@@ -1,8 +1,4 @@
-import vtk
-import vcs.vtk_ui
-import os
-import sys
-import time
+import os, sys, time, vcs.vtk_ui, vtk
 
 
 def init():
@@ -101,15 +97,13 @@ class vtk_ui_test(object):
     def check_image(self, compare_against):
         """
         Checks the current render window's output against the image specified in the argument,
-        returns the result of checkimage.check_result_image
+        returns the result of regression.check_result_image
         """
         generate_png(self.win, self.test_file)
         pth = os.path.join(os.path.dirname(__file__), "../..")
         sys.path.append(pth)
-        import checkimage
-        print "fnm:", self.test_file
-        print "src:", compare_against
-        return checkimage.check_result_image(self.test_file, compare_against, checkimage.defaultThreshold)
+        import regression
+        return regression.check_result_image(self.test_file, compare_against)
 
     def test(self):
         self.do_test()
diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row.py b/testing/vcsaddons/test_12_plot_one_leg_per_row.py
index b3cfa0fba..0c0298527 100644
--- a/testing/vcsaddons/test_12_plot_one_leg_per_row.py
+++ b/testing/vcsaddons/test_12_plot_one_leg_per_row.py
@@ -1,14 +1,7 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
+import os, sys, EzTemplate, vcs, testing.regression as regression
 
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -20,9 +13,7 @@ for i in range(12):
     t.legend.priority=0 # Turn off legend
 fnm = "test_12_plot_one_leg_per_row.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
index 42e0f64e4..b57b76724 100644
--- a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
+++ b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
@@ -1,17 +1,13 @@
+import os, sys, testing.regression as regression
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
 import EzTemplate,vcs
 import cdms,EzTemplate,vcs,sys
 ## 12 plots 1 legend per row on the right
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
-bg=True
-M=EzTemplate.Multi(rows=4,columns=3)
+bg = True
+M = EzTemplate.Multi(rows=4,columns=3)
 M.legend.direction='vertical'
 for i in range(12):
     t=M.get(legend='local')
@@ -19,9 +15,7 @@ for i in range(12):
         t.legend.priority=0 # Turn off legend
 fnm = "test_12_plot_one_leg_per_row_right.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
index b6ca9eb34..d46e7b9ac 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
@@ -1,14 +1,7 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
+import os, sys, EzTemplate, vcs, testing.regression as regression
 
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -26,10 +19,8 @@ for i in range(12):
       t=M.get()
 
 fnm = "test_EzTemplate_12_plots_legd_direction.png"
-M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+M.preview(fnm, bg=bg)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
index 40899f264..73b7c8dbf 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
@@ -1,14 +1,7 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
+import os, sys, EzTemplate, vcs, testing.regression as regression
 
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -25,9 +18,7 @@ for i in range(12):
       t=M.get()
 fnm = "test_EzTemplate_12_plots_margins_thickness.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
index 043e03de4..6e9398fe8 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
@@ -1,12 +1,4 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
-
+import os, sys, EzTemplate, vcs, testing.regression as regression
 ## Initialize VCS
 x=vcs.init()
 x.drawlogooff()
@@ -24,9 +16,7 @@ for i in range(12):
 
 fnm = "test_EzTemplate_12_plots_mix_glb_local.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
index d17eb1a69..5d4cd293b 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
@@ -1,14 +1,6 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
-
+import os, sys, EzTemplate, vcs, testing.regression as regression
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -18,9 +10,7 @@ M.spacing.vertical=.1
 
 fnm = "test_EzTemplate_12_plots_spacing.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py b/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py
index 2619fe5ef..ee645d16c 100644
--- a/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py
+++ b/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py
@@ -1,21 +1,11 @@
+import os, sys, cdms2, testing.regression as regression, vcs, vcsaddons
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcs
-import vcsaddons
-import cdms2
-
-f=cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
-s=f("clt",time=slice(0,1),squeeze=1)
+f = cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
+s = f("clt",time=slice(0,1),squeeze=1)
 
 bg = True
-
-M=vcsaddons.EzTemplate.Multi(rows=2,columns=2)
-x=vcs.init()
+M = vcsaddons.EzTemplate.Multi(rows=2,columns=2)
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -25,9 +15,7 @@ for i in range(4):
 
 fnm = "test_vcs_addons_EzTemplate_2x2.png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcsaddons_preview_2x2.py b/testing/vcsaddons/test_vcsaddons_preview_2x2.py
index 754aa5cea..a0318f25a 100644
--- a/testing/vcsaddons/test_vcsaddons_preview_2x2.py
+++ b/testing/vcsaddons/test_vcsaddons_preview_2x2.py
@@ -1,22 +1,12 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcsaddons
+import os, sys, testing.regression as regression, vcsaddons
 
 bg = True
-
-M=vcsaddons.EzTemplate.Multi(rows=2,columns=2)
+M = vcsaddons.EzTemplate.Multi(rows=2,columns=2)
 if bg:
   M.x.setbgoutputdimensions(1200,1091,units="pixels")
 fnm = "test_vcsaddons_preview_2x2.png"
 M.preview(out=fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
-- 
GitLab


From 569843aec62da106d9a7452983f49c999adcf4aa Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 07:34:01 -0400
Subject: [PATCH 046/196] Updated to current master

---
 testing/vcs/test_fewer_colors_than_levels.py | 20 +++--------------
 testing/vcs/test_vcs_no_continents.py        | 23 +++-----------------
 2 files changed, 6 insertions(+), 37 deletions(-)

diff --git a/testing/vcs/test_fewer_colors_than_levels.py b/testing/vcs/test_fewer_colors_than_levels.py
index 2779d4b6d..c500cf19e 100644
--- a/testing/vcs/test_fewer_colors_than_levels.py
+++ b/testing/vcs/test_fewer_colors_than_levels.py
@@ -1,18 +1,9 @@
-import vcs,cdms2,sys,os
-
-baselineImage = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data = dataset("clt")
 
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 boxfill = canvas.createboxfill()
 
@@ -21,9 +12,4 @@ boxfill.color_2 = 250
 
 canvas.plot(data, boxfill, bg=1)
 
-testImage = os.path.abspath("test_fewer_colors_than_levels.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage, checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(canvas, "test_fewer_colors_than_levels.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_no_continents.py b/testing/vcs/test_vcs_no_continents.py
index 03630d457..a5c3e8d04 100644
--- a/testing/vcs/test_vcs_no_continents.py
+++ b/testing/vcs/test_vcs_no_continents.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 t1 = vcs.createtemplate()
 t1.scale(.5, "y")
@@ -24,15 +18,4 @@ t2.move(.5, 'y')
 canvas.plot(clt, t1, continents=0, bg=True)
 canvas.plot(clt, t2, continents=1, bg=True)
 
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_vcs_no_continents.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_vcs_no_continents.png")
\ No newline at end of file
-- 
GitLab


From 9990b92bbd4c29b95800d8a9feb3f14060158074 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 07:36:49 -0400
Subject: [PATCH 047/196] Ported common code to testing common

---
 Packages/testing/common.py                    | 22 +++++++++++++++++++
 Packages/testing/regression.py                |  1 +
 .../vcs/test_vcs_fillarea_basics_no_plot.py   |  8 ++-----
 testing/vcs/test_vcs_verify_boxfill_basics.py |  7 ++----
 testing/vcs/test_vcs_verify_proj_basics.py    |  8 ++-----
 5 files changed, 29 insertions(+), 17 deletions(-)
 create mode 100644 Packages/testing/common.py

diff --git a/Packages/testing/common.py b/Packages/testing/common.py
new file mode 100644
index 000000000..7a32bcec5
--- /dev/null
+++ b/Packages/testing/common.py
@@ -0,0 +1,22 @@
+def test_values_setting(gm,attributes,good_values=[],bad_values=[]):
+  if isinstance(attributes,str):
+    attributes=[attributes,]
+  for att in attributes:
+    for val in good_values:
+      setattr(gm,att,val)
+    for val in bad_values:
+      try:
+        setattr(gm,att,val)
+        success = True
+      except:
+        success = False
+      else:
+        if success:
+          if hasattr(gm,"g_name"):
+            nm = gm.g_name
+          elif hasattr(gm,"s_name"):
+            nm = gm.s_name
+          else:
+            nm=gm.p_name
+          raise Exception,"Should not be able to set %s attribute '%s' to %s" % (nm,att,repr(val))
+          sys.exit(1)
diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index 68f381ce4..bcc088202 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -138,6 +138,7 @@ def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThresh
     printDart("ValidImage", "image/png", os.path.abspath(bestFilename), "File")
     return -1
 
+
 def main():
     if len(sys.argv) != 4:
         print "Error:"
diff --git a/testing/vcs/test_vcs_fillarea_basics_no_plot.py b/testing/vcs/test_vcs_fillarea_basics_no_plot.py
index 870aae9be..9a8e38ce7 100644
--- a/testing/vcs/test_vcs_fillarea_basics_no_plot.py
+++ b/testing/vcs/test_vcs_fillarea_basics_no_plot.py
@@ -1,9 +1,5 @@
-
-import vcs
-import numpy
-import cdtime
-
-from vcs_test_common import *
+import numpy, cdtime, vcs
+from testing.common import test_values_setting
 
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_verify_boxfill_basics.py b/testing/vcs/test_vcs_verify_boxfill_basics.py
index 0d7c4c2d0..0768ff9f9 100644
--- a/testing/vcs/test_vcs_verify_boxfill_basics.py
+++ b/testing/vcs/test_vcs_verify_boxfill_basics.py
@@ -1,8 +1,5 @@
-import vcs
-import numpy
-import cdtime
-
-from vcs_test_common import *
+import numpy, cdtime, vcs
+from testing.common import test_values_setting
 
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_verify_proj_basics.py b/testing/vcs/test_vcs_verify_proj_basics.py
index f1e1fa0a9..822a6fc0b 100644
--- a/testing/vcs/test_vcs_verify_proj_basics.py
+++ b/testing/vcs/test_vcs_verify_proj_basics.py
@@ -1,9 +1,5 @@
-
-import vcs
-import numpy
-import cdtime
-
-from vcs_test_common import *
+import numpy, cdtime, vcs
+from testing.common import test_values_setting
 
 x=vcs.init()
 x.drawlogooff()
-- 
GitLab


From a2ce6ac2f478aae2a184d4f618bbd3542325a1e1 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 08:01:52 -0400
Subject: [PATCH 048/196] Added testing to the modules to be built

---
 Packages/testing/regression.py |  2 ++
 Packages/testing/setup.py      |  3 +--
 installation/control.py        | 11 ++++++-----
 3 files changed, 9 insertions(+), 7 deletions(-)

diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index bcc088202..b272d592f 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -27,10 +27,12 @@ def init():
     return vcsinst
 
 def run(vcsinst, fname, baseline=sys.argv[1], threshold=defaultThreshold):
+    """Export plot to a png and exit after comparsion."""
     vcsinst.png(fname)
     sys.exit(check_result_image(fname, baseline, threshold))
 
 def run_wo_terminate(vcsinst, fname, baseline=sys.argv[1], threshold=defaultThreshold):
+    """Export plot to a png and return comparison with baseline."""
     vcsinst.png(fname)
     return check_result_image(fname, baseline, threshold)
 
diff --git a/Packages/testing/setup.py b/Packages/testing/setup.py
index 70f790d7b..85c270ed8 100755
--- a/Packages/testing/setup.py
+++ b/Packages/testing/setup.py
@@ -9,6 +9,5 @@ setup(name="testing",
       description="Testing infrastructure for cdat",
       url="http://uvcdat.llnl.gov",
       packages=['testing'],
-      package_dir={'testing': 'testing',},
-      install_requires=['numpy','vcs', 'vtk'],
+      package_dir = {'testing': '', }
 )
diff --git a/installation/control.py b/installation/control.py
index 7a3a52091..49ed5d9af 100644
--- a/installation/control.py
+++ b/installation/control.py
@@ -1,7 +1,7 @@
 # This file is used to control the behavior of install.py.
 
 # The search path is used if the X11 directories aren't configured.
-x11search = ['/usr/X11R6', '/usr/X11R6.5.1', 
+x11search = ['/usr/X11R6', '/usr/X11R6.5.1',
              '/usr/X11R6.4','/usr','/usr/openwin','/opt']
 # Here is where they are on OSF1 and perhaps similar systems
 x11OSF1lib = ['/usr/lib/X11', '/usr/lib']
@@ -48,14 +48,14 @@ else:
     make_code = 'make'
 
 # List of packages to be built
-packages = [ 
+packages = [
     "Packages/pydebug",
     "Packages/cdtime",
     "Packages/demo",
     "Packages/help",
     "Packages/regrid2",
-    "Packages/cdms2", 
-    "Packages/esg", 
+    "Packages/cdms2",
+    "Packages/esg",
     "Packages/ncml",
     "Packages/DV3D",
     "Packages/vcs",
@@ -63,9 +63,10 @@ packages = [
     "Packages/cdutil",
     "Packages/unidata",
     "Packages/xmgrace",
-    "Packages/genutil", 
+    "Packages/genutil",
     "Packages/Thermo",
     "Packages/WK",
     "Packages/gui_support",
     "Packages/distarray",
+    "Packages/testing",
     ]
-- 
GitLab


From 477e09750e554da288341d72ffd1d0cf39bce308 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 14:21:05 -0400
Subject: [PATCH 049/196] Marked export to gs as no longer supported

---
 Packages/vcs/vcs/Canvas.py | 63 ++++++++++++++++++--------------------
 1 file changed, 29 insertions(+), 34 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index d98270c55..8de655fbc 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -5309,47 +5309,42 @@ Options:::
     def gs(self, filename='noname.gs', device='png256',
            orientation=None, resolution='792x612'):
         """
- Function: gs
+        Function: gs
 
- Description of Function:
-    This routine allows the user to save the VCS canvas in one of the many
-    GhostScript (gs) file types (also known as devices). To view other
-    GhostScript devices, issue the command "gs --help" at the terminal
-    prompt. Device names include: bmp256, epswrite, jpeg, jpeggray,
-    pdfwrite, png256, png16m, sgirgb, tiffpack, and tifflzw. By default
-    the device = 'png256'.
-
-    If no path/file name is given and no previously created gs file has been
-    designated, then file
+        Description of Function:
+        This routine allows the user to save the VCS canvas in one of the many
+        GhostScript (gs) file types (also known as devices). To view other
+        GhostScript devices, issue the command "gs --help" at the terminal
+        prompt. Device names include: bmp256, epswrite, jpeg, jpeggray,
+        pdfwrite, png256, png16m, sgirgb, tiffpack, and tifflzw. By default
+        the device = 'png256'.
+
+        If no path/file name is given and no previously created gs file has been
+        designated, then file
 
         /$HOME/%s/default.gs
 
-    will be used for storing gs images. However, if a previously created gs
-    file exist, then this output file will be used for storage.
+        will be used for storing gs images. However, if a previously created gs
+        file exist, then this output file will be used for storage.
 
-    By default, the page orientation is the canvas' orientation.
-    To translate the page orientation to portrait mode (p), set the parameter orientation = 'p'.
-    To translate the page orientation to landscape mode (l), set the parameter orientation = 'l'.
+        By default, the page orientation is the canvas' orientation.
+        To translate the page orientation to portrait mode (p), set the parameter orientation = 'p'.
+        To translate the page orientation to landscape mode (l), set the parameter orientation = 'l'.
 
-    The gs command is used to create a single gs file at this point. The user
-    can use other tools to append separate image files.
+        The gs command is used to create a single gs file at this point. The user
+        can use other tools to append separate image files.
 
- Example of Use:
-    a=vcs.init()
-    a.plot(array)
-    a.gs('example') #defaults: device='png256', orientation='l' and resolution='792x612'
-    a.gs(filename='example.tif', device='tiffpack', orientation='l', resolution='800x600')
-    a.gs(filename='example.pdf', device='pdfwrite', orientation='l', resolution='200x200')
-    a.gs(filename='example.jpg', device='jpeg', orientation='p', resolution='1000x1000')
-""" % (self._dotdir)
-        if orientation is None:
-            orientation = self.orientation()[0]
-        r = resolution.split('x')
-        f1 = f1 = float(r[0]) / 1100.0 * 100.0
-        f2 = f2 = float(r[1]) / 849.85 * 100.0
-        resolution = "%4.1fx%4.1f" % (f2, f1)
-        nargs = (filename, device, orientation, resolution)
-        return self.canvas.gs(*nargs)
+        Example of Use:
+        a=vcs.init()
+        a.plot(array)
+        a.gs('example') #defaults: device='png256', orientation='l' and resolution='792x612'
+        a.gs(filename='example.tif', device='tiffpack', orientation='l', resolution='800x600')
+        a.gs(filename='example.pdf', device='pdfwrite', orientation='l', resolution='200x200')
+        a.gs(filename='example.jpg', device='jpeg', orientation='p', resolution='1000x1000')
+
+        NOTE: This method is marked as deprecated
+        """ % (self._dotdir)
+        warnings.warn("Export to GhostScript is no longer supported", Warning)
 
     ##########################################################################
     #                                                                           #
-- 
GitLab


From 3ef4b77b6e8bc3f21b44acb7cad9cfcd3351bfc5 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 15:15:07 -0400
Subject: [PATCH 050/196] Cleaned docs

---
 docs/Legal.htm => LEGAL.htm |   0
 docs/Legal.txt => LEGAL.txt |   0
 TODO.txt                    |   5 -
 docs/ChangeLog.txt          |  20 ----
 docs/Changes_3.3_to_4.0.sxc | Bin 12334 -> 0 bytes
 docs/Changes_3.3_to_4.csv   | 133 ---------------------
 docs/Changes_3.3_to_4.pdf   | Bin 26249 -> 0 bytes
 docs/Changes_3.3_to_4.xls   | Bin 45056 -> 0 bytes
 docs/DeveloperHowTo.html    |   6 -
 docs/HELP.html              |   6 -
 docs/README.txt             |   2 -
 docs/ReleaseNotes.txt       | 226 ------------------------------------
 12 files changed, 398 deletions(-)
 rename docs/Legal.htm => LEGAL.htm (100%)
 rename docs/Legal.txt => LEGAL.txt (100%)
 delete mode 100644 TODO.txt
 delete mode 100644 docs/ChangeLog.txt
 delete mode 100644 docs/Changes_3.3_to_4.0.sxc
 delete mode 100644 docs/Changes_3.3_to_4.csv
 delete mode 100644 docs/Changes_3.3_to_4.pdf
 delete mode 100644 docs/Changes_3.3_to_4.xls
 delete mode 100644 docs/DeveloperHowTo.html
 delete mode 100644 docs/HELP.html
 delete mode 100644 docs/README.txt
 delete mode 100644 docs/ReleaseNotes.txt

diff --git a/docs/Legal.htm b/LEGAL.htm
similarity index 100%
rename from docs/Legal.htm
rename to LEGAL.htm
diff --git a/docs/Legal.txt b/LEGAL.txt
similarity index 100%
rename from docs/Legal.txt
rename to LEGAL.txt
diff --git a/TODO.txt b/TODO.txt
deleted file mode 100644
index fb03af1f8..000000000
--- a/TODO.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-- Fix ESMF build
-- Verify individual packages
-- Verify if we can build using system
-- Consistent install and build directories
-- Install headers and lib under their own package name
diff --git a/docs/ChangeLog.txt b/docs/ChangeLog.txt
deleted file mode 100644
index 5f7d49505..000000000
--- a/docs/ChangeLog.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-2011-06-22 : recap (probably forgot many things):
- 		- grib2 support
-		- restAPI support
-		- GUI code moved to uv-cdat repo
-		- forecast support
-		- gridspec support
-2010-06-17 : got Qt to build automatically, (or use the binaries) 
-2010-06-17 : got --enable-vistrails to work and build SIP and PyQt
-2010-06-15 : got the right LDFLAGS going to compile Fortran extensions under mac
-2010-04-08 : build system fetches sources, no more gz in repository!
-2010-04-08 : preliminary merge with Qt branch. You should be able to build against Qt (which needs to be pre-installed independently for now)
-2010-02-11 : added netcdf 4.1
-2010-02-02 : added correct link calls for icc under linux
-2010-01-28 : fixed tiny-tiny leak in cdms2 and cdtime (setting dictionary item
-w/o decref tmp pyobject for int/string)
-2010-01-28 : latest netcdf4 daily from Ed, fixes a LOT of memory leaks.
-2010-01-13 : change default cdms2 compress value to no shuffle/deflate 1/defalteLevel 1
-2010-01-13 : newer NetCDF4, daily snapshot that has better chunking parameter, etc..
-2010-01-13 : latest Scientific Python  (SP)
-2010-01-13 : got vtk to build in framework mode under Mac
diff --git a/docs/Changes_3.3_to_4.0.sxc b/docs/Changes_3.3_to_4.0.sxc
deleted file mode 100644
index e811c67769e89d740d242f887021e5ac6dfc5dac..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 12334
zcma)i1ymisw>9oithf~`?(XjHZWniVcXw}#OL2EC?(TAN*W#1{AN=eu>%DKi+%>Z%
z$v%6MGnw3z%vO*Fhd>8;PeDma45IC5$#`$q?+47=VPj!q;_P8>VrXw~Z2>TJwy?9M
zceOR9b8@kzbGNak0~lHZ6#ig<dBeXmyczwI;NPwQcDBwYw$AV9Gu?H^RgT(De))i%
zicH5+n$bk-K{@he$5Tmivz<PZ&&%J+Xv2d^Wh4T@{Zs4JuaUgko*b`9i`xXHk^!8x
zV9Uv&CCN4kL@>GhAkQ28FD~ffr&{JDs#iv34Q);RBB<XL$e%H_v-vu0@ob)N_g;TL
zbeR##@xM6!L{sj_Lg=egfYhWxU4>CG+G1~exH(>5Rc@R&XX7s*eiRsYO*v=(Vn#Tb
z{^Xs?-@nN_zAt~#w3K>c$6mj+FiLpff&`EmV|KgmU+7-B(X1=kWVQU2c?SgQ&H4aG
z2pL@Q*4MMN(8nvVmbtY7L6QLAv)IO5o;iUfUwa)Zn?EQ0%Kcoytv$yYzUz0BlZS`q
z$G9@L@kOaYZ>Cg5(j<DP)!Fqti^P82+zSHOCT~-{F_+iIiFF7)`A|HB=1#Ad@BPkV
zTwCm>1!x#CTcb$(-aqp9IoApZ4=##ki+8xW2ceQKFwHZ7ypD^;WdeT44gfvt3>A2S
z%mCzBZDwhUK=XTVr>hp|Zrfnu@Uc-si^^j4^rx?vSev(WxRS2@XtOIuyn8se?lk#7
zqbdm9OUJGy;qM)yQd*@85e-%<TT+Aj_XFv*_jM~x^661?O+cKOlLx%5SjM%BNV@u%
z%lwa-%oQ+`Jo2)cdy!PEuClzF>u(J1!)rN~sj_5P71XpT5qvx7;?Je?sX<_-pgSuL
z`*;_yMvxhsn!Khg_){bqazN9d92Ub@sPU(_e!O&u&uMOjYqdRzhn+pen)`*2Z~<-3
z*1ZIvsmWHR9iBFQNoKD7E9Ng7x0>PY>ll%1`Kuom?tqG6K+k&vW)d8#2Bv+*{I!7j
zjOI!IgUuHLa9dtpbN)^L1DYr=gohk~6=8&>tJP16I?-FZ7_A9co#xG-Cq&}R5<1RS
zr;;XxEM~&v4r2Nk5&CdhI|(MA7J$HU&`+<MnYQD6Sx!z5kyV}@0NqHB)wb}wZMhNF
ztEii;Ree0zLT4DXc~y*9Y|<UQGN)i;V=+-CW54Bmzu50Q4>OBY(n4$S==Sn{N4H2H
zxba)1Hv+_Kvo|%iaF2}?X1NjbnPSSV<YOD9s^UcA80r_7`M+LYrJr=lM5Zd|v>-VL
z^l*C)ebGws&EH~`lLvVi>yoAEMb1@a02h0K?-VMFiB@p1N$+hg%ey4-sz`Se;G>d_
zi~I1QOk|cjlwW%G5%FN^h9Eg}kmI3n4*#-(vVo{@pHU^QxoG8^>t}>gkrLL`&j&vX
z;v{A}I(BySs-##d`i{BFfqa_E$7)i%72BEH#9i7bQWeX59xA+<u0qqIh^8*)S`qVZ
zIvqZ>MRRA4_v(4iJM^W!wrx)B`eaHK%VNu=Ic$)HEjda`LaYlLy%dt?4XO(Tt>08W
z<dd~5mZFAr=#4A;PO51NhDcR~l^#%`G~k?E7~YWH&ndm(V%~5REuX<1y>%*-ZPpt{
z;Eq+m9@MS6I?3mIzf1j1RyLJF!}`Oi=<KP>=&8m9LGO~S%j?3u?cUdq(5PI5y_)Lj
z%#r!RFqMiT^{FwOpD0JYC7_W|mqy=R&*uW>KBF*YsHKcaoNuMJDkuDCwY!aS)R&BS
zzw}fr4(c#0VVPhawTaSSFJi2b>g**Sv^B8EWaBOo5!Bg?%xI(3xjGK=aH4m~l2Em*
z;~l2ZOeMSg4x8BRbcy={cBl#yLJ-=3N_i?beEdavpRd=?d1vtoE@D2T;G8+&BMVDP
zvfT&94r<-ok2m=hzi#$t13|NdGlDIbh?Fh)ydz6#vWcnc5@aqxwh}TrKAM|E%lul}
z?^nx99JV=juub;09bTu_c0A%d6&d8o%(6A@HVm%&Hj9|<kh|W!)&WFJvM>3JMnQ{1
zz8Anyw_(pT0mYv9xegXVO4hzl6pj7-N>)hCx5?e+s1tV@AJH5nAaYhB`kbJrVEU_|
z%=skYz*lmX5@I>%HN^vJOG^~V=~25ivj{bXk&0ia9rDQQ09;bSi{Q>l8r7+&2kO)_
zMgjM1&T<0;XR`gNS)t0D@)pTiZNY=3#9M>2;`^uFwf26NkK*~sRe*UX{yCmPozLk|
z@ch<LqEuL$ZIb>~IR^Sn!1CuCwW$-Qq@sJn;93)5(FGsR4+LTq+6%~ycW-PKmBy)>
zPe&Cwy@xBZc22f>Q@_R3Lw3^mTfe&AFi+YYkYB?k)q{<#H(ap*q&hMVu7cy5SPEtG
zJr;Px@$$A1=7+4hTI|DYHt=1&-I}`u!oEFwIlC)3A0~qA%gPEzsn}EZU}~QQi5Bgh
zq*=1I@5Q8xT@P&L)UAe+YdM9QDrJ#!)IQ@PZpIY0-4HO=PR^tq{8leqKJx$UqfvEv
zP*y&vH=4k}&vs4D&t?}}f@P9ppc-ENbKd-5a>-_>=rD4hox6jvEKEPaG2tG(Ka;%^
zWo+DN*MeRu>(1GCV}SC*DVWn^1)VfYo4-xseaMDfm56`f<-OaA2iMu$mSqMX;kUrY
zJu3o}Ji|dsoF3wkmN98|(^REVdO^8f_XJk=I0AXG8R0mSU%tsT2A_UQFr;z7e?0vZ
zQ5#)r1>T{7`N64|JnU0k9^=k1hniM3zwayiXtV*S_pC0-DQvx`=m+bzzI7^xuIy^D
zm|RPPLzdY^)bT2vy3W*GGXcZ<ZN$4#u&ipj>hGGFVoJd>J(Rr2(cs~lq%rFipMDnm
z_eo-b-GZm=euT$@fx!TpW-%g~VR-!%-rzF>1^fKUJ$a_T191B3pt5biUza<hkAN%1
zO@!xtKWd3y4h=T~4gD2EW4G^i+*7MRIx?esKE1g7bAE_OCJy{bTu5j2K@Woe727PV
zefF4T7S<-|7E#1cq|W4Fg8BzqG^im>W;8z}b8q{$V3}WgF@bDh_!tNxmm#;wrgNAz
zcqA5sZKoIDpjVOj_M@IUVxcaBG>Z@2y(E@)Yd+u_!sA4YWnSgOD<#fG$csBTrhZ82
zoLLP9DjBNQ-^s#zk(@43GGV`64heoR!FAKP{jR4KlW3W<YjYiB<`c1R0XM{uH)(#)
zfB=z6l$QjX+pTi1j%+E@p~yoAv0s*YfQH$yKcwF$6{ku1kd_%as`yiemaJ{^;u2t*
zlFD<{KH7!Ju(&bnB72-ICgw$cw;WbSJ?R`8ac;^4XZFAcCt`SEoEYP(ks2z&$+N<L
zU|BkG?i&+f0`tHLlLiPl@9s03{EhEq3}%>znY)Fs3rj+5Ce+(gp1GqR{S_W7>rn?c
zwYxVQc@cF=DH*H6p*EM}kz$(#tQKn^DZT^r+>Qv`mF`p^q}l~*Rh49*O0b|Wc8M*p
zc+o5Hs`~&B_k3EJ022EKqLvaGv=K^16y#c&B7nTJqLkAzJqL*W0n9L3Bsjix`}$~k
zSiBKmgy})(5Qg=FnL1D~k9eMtzhtXs__DbsX7_?ZBOX81(t)<Wx+DZ8wdDb7K#@do
zCy%(<UHAnhL0{*zP9tO++lrM5TnGBHgb9?KWE+sOd{Dk_P&I`pBNLl?P(Ev5w&v(M
zftIoT2&UjMafjQlXtI=1)~ZcXOf~d)vp{bNMB&j8$kM9zB4xM$w}p`|fR`sRIU-GD
zJG!Wr&@gh_qQv;_ye3nV%#1I*6dr+6&AQZ$QH%wQw5TdoR6#-41(D^I7Lm*=NApxl
zVXJi>Svcny{XJGQ2a!R6!w#M$aKTE$hJk_EPOs6~@7>o(@oMDbQbEin;+4TdQ+0}D
zmg2~#mn~l<uq|21iL=L{Gs-eFLy9}?n*8MXU4+3X&<cbWnrP#tFe2Il{&MWHb))EU
zrluL@6Gz3+0GG;PogA=WXt&l`vg;5B(JCl3qWHsb&(|?i)?xOn-BA76kt$fdPk~iS
zG23P<d%eq-0hu=`M9(2=U6sVfLoE|G;9q(S<s9UikE_59AXmj{1e2&`vJK%heheck
zEQNDeN-QGde-jrHUuEZ#!k0rJB<ObF(PswYbAt+yKOZCeZc$8p5zHJFcj$~!E~J$|
z-4S<mJ;xtz=~7>pm<SSGG=+V#`5<kv(Il^Y&|Rm!>U*-EB}*5pemD>gs>9t^AkUYG
zr*FDiB}c!ZSDju=Nd>={T)Zmb-k!AHEnRF9t`mvn8Wf+wGOJ{Q1;m8Ci7*wiDzsYR
zCpa7BRT7}TNB!KcZwk%uA{5zY1jBR!@1>>C6+S(b6fRRaO3)iz=Z}dpl!d-U_r&3E
zw&e$MRmg(j>15fxSnX!e)mG@+v^Wi=2P*Qd#NR_KnSW!HDR0O+!VGSutkv8Y=L4k#
zcrYl7Uu(BRFU6x%Oo$1^y%=dm3PJSTo}7IBJ|H95sGf9^j}Y}R7PpK+I0`L}cryii
zu&HL-g<IzIF8f>OVH<tp=qTXFI^fCpK()g`1)8S1PJ64@rw9RUu>4ME5v?1m-_!o|
zJ1I`x+awz|kqGgqgoKK45%re4K(cDkHXcP{0;uZr7BDfi2=&(0%!HxlM<)|NI@>Qm
z3TlzlEIL_EFCNB1LnD6WfUU#fK-!hQo4S>9@lY3=iJg{Da?H&^OI4*wRbk2v9cM!K
z{elIr-FlEeBpznc?mZF&B7(A(_SrHooSz$6iFqbMGqt#uq51;m2b~`mC-<v`jqn`%
zN5Vss6HKB59s|P4CH~9cTx);xN&mW##5D?=#Tj8I!0?4Q^bB1cGl{FGtM6W5#8At5
z1IG6Jyr%3OB6%Bt0~b-Hlr{Xvm`^Pw5|60qcJ_=<iVFhPx(bmyob1;G1;a|p3hWBM
zc%Hq6F(CtVIO)%)Fh=MiniK*=5mp`jV)?n#-7Kyr1UZUmzX>1+!zpNC=zj7e{?1N*
z>ZL1muPC_~{m?qM2gF`=MYBp(f}RKi$zUzQRz|ddjfX^zSQo@*G&J=W!S047O7bOL
zVx8PH%X+?|`EZ;e7S^g1%j!%8_H6;%<NK57c$if#3EzOgm4s0d)mUa!<%EYlW&|qS
zW$rg8m6*`ToFC#mkJ0necb;JI49%A`^Ir;((~Ag)?qf{9imDYgXp5S3Z<0AzMME7Z
z3myoS1=MhJmw$Ub-|f0TVwW~F)0mwKAMXn!#4Mjt?@ZKKPjfNpxm_wm6}F=u<5Pmi
z_#*Ql$swaR3=y+}H9Xo3TTmVY_eJ)ZIyzD^7_kRP>ewc&_<}PZ<<sBVg9I7=F}Kh9
zNNN-Z2^Rs{4NfM_*9S%=;rxJjn8zx$KYq9mZhZ$7a!dg(9t&8vlLu%xHXTTOg{vPe
zNLgfM*Z9a2pVoeRj(mV1pAb#vN!EQ=#bVl7v_y%H<Ebfxns2yz_?>B7xTyLna<74U
zEfM7BY4WpFi~;@qX}Qu;-q#>j^70Oh1jDXvI+S7h0*7QV+NW(CD4^k`tB2(pBDgUQ
z$+S2-n~Ac@C!0}SMWHY}<^vr>=uuK^;Y>}jq~@=&?NH(L{xq3otul9ROen(GMyM%?
zkBr4kTwIBU#PV~f6=Wyw4-)8nm$F+y{gi8zoFv5H;nGWm5o63J3QY9IU{5_p%LNn&
z=njwq8RRWJ2Ueb8T0gr@c^uP9!2?3Tt+PGht2+{(Me=&}@QqHqz7f#2%1LjYyKAV0
zb^3IU@cv3Tl@1eyi^9UFue+j<^?0CwCIogXaITb_@b@lBH3gT|87#uXXMv$h`537b
zekkriCGbVjQKp=akC_m##bt(DcivNG-0xQER>lFvWAL-L%!oJs#74BU1#GZRHbYEM
zHjXl0v@5P!JyI&M*%$7Mc&%E+>^jW}0$C6M2vZ5v#cnNp%xUB2uTv`K&+fRBz7qp)
z^;W#Rb(e{!V^C$v6(DVf$MaRcW@Ux6rb3p0J;a1lr<w(gboiup#MW$K@*OTX8$C1i
ziLkQJT^$tbVFn_t^=6wvMX0+_f`mYU%QXH4LI#!`7(QeeT^TJRpzgZ-w06co4Iv(I
z2Z2xy4bsnOk25bQSu8f}mS0d%+}&LOhLWV?b?Sowz_!_DoRlB=*t~_#`5Fz&=~$1R
zYBmuVkSLh2i<!Clr9+0GaITA6xet*Hfd!>9G!f0|aQ{P>$p{~RM)dK`0K=NkL)%7n
z>u_rnrdE8D4v`EH4$%lbIs!I?b|38s6puNL4a|qatK-<;yQ5KP60AO3Wp1zHvTh1+
zS9khhsgOZ{(kL!IB@LEKrL#!rs|r_EpnDaWncW0`B7yS&Q|JRuWMT6w&S#**Hskar
z!U2RCG3=D2A$hO2YPBS}E5676j(Fa0<&EU*l0=rP8sX&Xn^JuVS>eJimGB98>UM1}
zAPex~V?$ZPy07}%-qE2F&6$|=$HaEV7;)hIdLhB{_crOMC)!^%{S*NPmMO|K@$JvX
z`1$$Nf_8SlMwo_ujs!-G`6)mDDq=WtDG$+u%zUCM*FL_w9_)H7WOE*BoSrTp3cDtC
z6Rs{*P(O>$iu6~g?6A64i8o3Pw2ye|bL^k9%yAzZQa!#QbkL{IaC6|(fs1QM#*O}R
zW+vhG0Dqf~$^go-WEgrM3?JL8qhlYM(^jTth>-(y@@57_YZZ!JGr%r<Dpjms18zzV
zv%<^pv!ae-wFa+?fh|k!C2LDk_Z=>>=!1^a3m6ae?$e9<4F`T??N|&2Vxb(s#4<Xl
z;9v^pQSuQpI^x3*SyMdnUpWK=!|;$c3sFlapnrUkw~LA#V(myPXfh-F5i%fw?r|on
z&Ry3fKMEpCRk$cp1vRbS+!wQ#e#{>{{^Ci+5xX!~RZ;Zv{N1kPM3Ppsek7(GArtT$
zv`tWf4Dux#7qTn5a@;)Z8Q^W>-OK@;!c2^aMPQ4*7i{h|5W~D)iE+PzRE1jfrUfKx
z!7n1-AcO)o3^??7f(y%yVwGh@gHHXKbapip%Y=vf&m8gXTwB<sraAA~gMDniUbk)D
zVd~nG9h3xJ=o5b9-56+DCO|w5AYcQ|G;Vs2zCJu}MC2hbbC*vG=nIW-Zbcu?YZkFS
ztBbsfktkvmw2JlF$lmgQn70^?+x}kU(Qkg<h3A&>J52{?>zH-__K~3w-JVD@xhuZi
zKj8Vim4U@J8g94sc#{p4*q6tpa|JE_ftO~vB1Ki8CKD(<vl>U+xmH}YIg2{3Ye>V=
z{6&|9rsIcbR#sUbQXZoyngU+}LHM?P1a{etCppo&ijFr%CSJvFq5Q*?j{d`36;QMA
z$j3M-(NG)Q<v5QoQd!1Y<%M!ZaORhLlk83mm+YCv;9cg;6vvr2I<8y<InU5?@a|}k
zg`a3#HD7Eo!O_i0ze@`R9Z6M7Xm!&L<n0lbz>tguj9f{x#-<Xtd?x(in3h&%Ku2e|
zLm|Z@7q!P@7}}#f&b|(5<xV3iZI>0WC->|Dm1+<u=Ql2raG}3-XQtvz+w-kwwu%8g
z7c=KD*9Bx9CaQa>=S#KNh?0m8VV|!-ZFNeHh_{SSW;QeHV=8fW@fi4qU{@a8wG5ng
zX{5-WD;Xa6NqSuYT;J_(qw4vL^n(X1s2%uCAGw~r4Vr2bh8)FF#vJWyf$jJuh%X+?
z2dRh0FUXXuYU%9Ve1?9-U}{jp_g*+i7Bkp6_+6GM6&Ou-1fF$wS)&m9C2ZHfbj##1
zb;}AjH-$gIm+3V2#+f#KJ@b<6YQjCKhv-a_xMPI9;g0XH#@K$e29^}a6JqsGBNXCS
z&o0ufiq}>S<WOz?_#$UkG|RE4g_EN5@v6=&oI{oYI-yOTOJ4<YNBiU6oH_nkvs7qc
zFP4BaA>1;d23*$<E{Jpuit$qsIBC=|He7h5({5yDlrJ!=fd1VEnX%fM?$(=xGJDf9
zhsZ2v9dkEV$27j>e41kLawn&VBlUsrx6A-QzY<a*D{*_tvy`r$Gq3x7>Q4}SrDG-8
z$594mP6C#QY72HT^!6l4Hb>4v_xN5z+WGE}-9h}k`You_kw{<Jh5CDpgle2P$Xz$h
zC_IEDnnmn3EM$`!Qb?&mVD%G?OQh{W97fIEWlzQRj#QKyh2kl{4Wle4{4O-5We&3@
zNAjkK8$oV%l-m+HHy|=%fr!Cw@~EBtrSKhL@A)WE%fwVn%Qbg~Ml_|S=<-F<fPZzs
z8%0;}kpR*&|7Kz4z<~MZW-*)|HUgo}T_a0SV=hjj5Tome$jEQbU8p-4b%KvmSA!R?
zl!Y+QJNK^(OB$%SovNA<(MQPlV?2qe#FH*x_H}l;l1m|nBd68@Op{E@^eECTRjs-9
zKX?)mp25jsuXIOX1p1pfjaGPtd?y(4Op}Xex{wfvV5muI_@InRjA>&g;Dd_Mtsb2b
zZ<NZ^-Cq#Apvd9q;4Iq@F04RQLh?B>B>b{Ud^4F5$$rjIsCX#{GqMHh3X0@j8_2Mx
zuK1^dr@0ml;rK`;46*8zIxIQss?P7bp>uXf)8y(1Ye+2q#N&2OY{n@g%3=TthpCzW
zo>W5&w*={Ns*PmAE7AqOHB=6tl2?RCM9!=B)eiZMr{*81r0|+>fVR<wzplQyyXYyk
z1{VqrYD6g}3HZMDfk$<}B`Vd)uMT-OgP4v@+pI-Mr2I4BahQx6kgGMqrj7=D4X~fM
z#mwcM^`O7E>n3HL^OHTZRPYKJ^dq3x^bJ@hyEj#j&JZV(6{#+RNxK1BxAhwA!@3<w
zQm#+mnJ*%2CSiQvU_0%QT3{}+HRMS78X8wNA^!Cd$ccv*5NOPR6dt6KeimP!VV0)e
zr<$(onuR?IR-Vq#q1D%(LmeXp8IfNjoz}2yJwOGzGj3rt{XGKvTTwVA>p}>WrzQ2)
z<SYzYewaWMDY_&PBlTqY>li7(h^rAio~Kb2li7?-d2zTI$n&B?b3k333F1*-<mGbc
z%Xm(kv1`j$$T(066U?)sOC@R`d*1Lf;z32lo^}REGU<;<iL{Yvft|7jTyO?`1Pv#*
zr=v+@`#uonmr!7@uRYv&5v}1t7ut<pPd{&(!YhR2pT(Ra0#MRyD8iu<?vchFFG#j)
z@CN6z{HaaOx}rf#)xl&SL5W7ws<*kA>8#gVfn(n`h3<EPX@ep!)2%!$Gi*e4M!M9=
z5Hq)11@)-@9i-(~pzE~4^{0=j6|nk{J@h25S=ai*lMDU%Mr1Ziz5LmFJ}%L7^}5_E
zT+cIoPM*i2QPx5<x{b;F5dfDm^h)5{g0gg4T4`|gEQRS424ZzJ_Z+H%Ae4;E^I-K9
z6o%n$v?(&S_(L#`MQ`n##aCdjWZVy~?A34U<F3vc@O?COw9Qjvig=>6CrUP3NdC46
z0U#DL8Du%xbVPaZU;$D()^_xD*hQMessPzvgX+xkAFwOQ*f<Z^upajB@pZsHqD8%m
zMu&APFYM~;@vD5nw&W<6+2`&Di(tHue?B?k1rZIpFaDest=BW*p#_7WQKf!m?lxkT
zm*QAQo4`t=mn9Ym?i5cuZzFq{a_Fsw=CFgO_i~XI6^H^tnOnv*>ia;ee8*WGOT~D2
zkq7~Bi%^V|vwTX!SfS+Wuqpw*^ob}6n<BEpvEJMJQ6v{tH6-`_&gq<afsal8*;}WW
zBjCMc=#!w}gOWz!-0LDw9o#K%N~!;WzU@cm=i>^9XCQes?=Kj&OwE3?&xAf-(oN9w
zS|7%r<fk-tjw<sM&@<Y`&d;?RpTy%r116{#rhdi-pqApq%>7nNhAGoyD_TCM1U;$0
zD85E<7<5N^7P6n_ZFFM(a%$tDn<h1|qPYy=rX>#TM$;DdWw%gYqf^N_Dm9UgeJb!Y
zze(S;Kd6<yylu{ZK7iFh&srBTxVmCJX`=3TRa3V1Q&Q%XMz6m#)`o!nl8oWWRsu?r
zok?=zUKj4V!wV7QXZE!;@0tenn=tTEcE&8t5Ki_$N~QDTyxV8p`mTA=y5AL<O|pAm
zM;U&~&Yuhm@SK#|^E{)M`t!)+a3owl?Y)94NJGPX)qL@!#|HsX^8^L?-@miMy?tkO
za`vz`aeDvEI<H}4x5|e6(y5<YB0LQSAmy~cEp7xWx2OTNK)Zol;$uQ3Lrake$lEsV
z@*QOF&`e#e?`<tI@7?L$S$Cr6@pAE1#{klzu<lu(iHql6prl?V*Lt7%{d$1<_UquM
zd|5VqrJm34lV1&kb<yOzG<%ZDsN{;Y7w}46i`l+yzjsm&@`;*DVcKTClh9oeH9xp-
zW!#4F?*3pEy!`d;;!$i}kxH3g?`#?oqxRRBb>(wR^1LRpeRGIg6HN~T!q`d^^v#iG
zyBJ3^shcB41qdzH2HsQ!3Q`ta`!`W)5Qg(hY`f{zvpaLI@`P!}7KE4I_So%Pd3_bM
zZjdI~_uIFXLA6EgpNxR0G(Y{*ez9dM*}}Bj$jY&4j9nL_;FAm<fr`Z{LqnOdd*SgQ
z0(**H4$e%(vnk9{s0B{+iOpH<S^^&E?xE*rOl<}}0hANbUD{_LTu~>>vFSdU(S6}Q
z_USH<^`_r}O5RGuxVL&`taN{_6-_U^xSdl~A@A;|#ZdXIaT$`vyGs`&!f7ZnWqh`%
zt+g0d4oiMmO{_dIv6*b#Olh6KUAVxe%R})=bc@@ajEG4{=@9zdTS!_br?eZkWbwtw
zU^)2-($pi!DITo|64ovq-F~C^19uooC8cSvJ$AVA4stZ}_FefvNrjwK&uA%;TV1R6
zSn}Pl>?T6<?nfyLT1MpxCLT{Q&Dv?h3?g2=3}1uQ9wOAxF1K9%$Y{$CP=|4`UaqQ2
zp%h7^&@q(bCBaJ~E5Q+~TKJMA5bfOe%3>m0!?#~QxgMlqYADU87MgqRyOdsx;_7j5
zhb?bbcSMK#S;!KUEbTmsh)p<dyE^^&BGuKja!H^ntk#)azMOuhl2jQK*jsi<F`Ba)
zG6Le2;CW><mzkgT3ebwVAgf&QG@?vtP1)Ga`y`*mH?cSB^2MsT6i%86XLnmBd~A5V
zTPJ-4cd%&jDDXLKBgylne7&UF{D7RL-Hq$2m2z1yLP}?<z~(cu)m(Kl3>J|=6BK!y
z7VbIm7Xhs=FCAXCjlui4?s}g(EviuP4MsWI-Qx#^Yg>0OLhNHtHels$Del&X7YaZJ
zId`?e#3Qh#O57C2adSQ0#4fl{t$wkG=N(wWETc8p?W}U)^A1@JF4Zqmek5fSRU29W
z{2)^k3Rcyscs6OavS!G{Jw;_t>=ltfW5J`Ky0uMZXcsy;d{Rr9CYZg6pp~6>_ORR-
zQR}P`5lcwWPLYjZ;`KdI+o>1A6w*;JfWlR$rYq>4>pOw^oUO`kR8oe%1Zc>ep)5f|
zh?a_-JA3vprOpwr!As$Rw1W%Sl|7M{XTBNw>Wt@_#dQkRGuP3k?ET&Mcnv8oR+~?X
z#&L;CqKIw4$nZn-9=ncEPI8esONfkSq61ymKx_Zia+Dnlawu6LrA}5>Gy)zD+Uf4%
zR<iafp-%BOHV=!$MnEW?94#7`MT^9|nJ6-0Hjlbd)jp(bhj{J_fSQbT1096Q8^tBe
zjp;=ZwKThGY>klH(3d3>so18vDQpE19M46KS|xMeG!rOm-gKLZH9&p4@J;pZ)l)e9
zQ3X9aJG&~dW=g75kvzIoG-R6hzCM%_MXwS8^LBHGF|A{@DuvoLx16n42e0MHvlbui
zYboeK9XSLy3xUxcui}RxyqYL{qqANq$J=r$ug*lPi-;eRR8f3=<=N+#49ylqGq=pG
z`1=gCZlpm?m5`r!BZW(H6rOK)>`_FDkE7DjtOAAlyF4ef7<l0(F^bK$t{1jT=CE<@
zv-grPZvuWLDYom}1&H0z@98q=9{#v-tNO|RJ{#lO1ktNOf`AZVfc!NZze`=-RWH(&
zO0MVNARzC@AG#S6XT$fon1}!EMC5AX==7$X;U#9IV<09nu?5%}TiBZM6053+(Q**;
z^TF|a5|I~H(Nqv6vNJWc0GMzY+W}l`-efbhZ}J;MA_Y|;8A)LxVp@87d3zIE`9Cmp
zc8+HB^dc%EM1ST*{xTtYy9qtLs2nj7@t<UL#?HpXd_4an^IPj|ow)w2;w3hBcDCoD
zr?-2%$6u!J_h4dRV4(jqPfYZufxESZt<^txH#avrH|9Uu!^p|WN&iRf-%w+~KRA0A
zN9#XO#sGQ~Ym@i>InguHG1C8yeedi)=>Mq|?*QVzee@q*{ilEL5I&ywGnbi(t%;+d
zvz;T~e|qiBLvKDJqL8t$b#bTUq5m&1&nH@1HWpS2Ar}j4V=fLhPG(A4+P^3PjwXMI
zplFQ^olW@OJi|iEz(mWyqQc0?#mdUX!uS{4f8uzI0bKuKz{sJ($iT(K!NtJFL;rW>
zpN+P6w0L{$|A}T-VPxfEX5wPx{EPnIK=0hvhPGxdhGr&wCbqPy%6~BaQ}idRiLr(A
z+mNFLcmUp_1U^RAzli>;{6E3QE^pp@8@GH4Dr^#rjIu18Z`^-4>u<Av5PurFIGH%o
z8k?9}*qRs<{Q<N!v@zi&mb5jsBVr_`|I3O0Ul`N>4aWR`gR%Sr`{#{va(?TSld}cj
z&lWlx8d;mr0_<FDo!{P1;y()kCf3$}m$5RjzLWg*25`N1>tDHlSpO*g-y<}H_Xy3z
z`TZIE5u=@D*f`}$Ap3W1R9!xJrub(Xr&aXP5_KBnDPE@7fy>x$GxYu-%GCD*KRzEw
zY0ptx*CkU`2|n^@EjBAE#;xq4_tDYnW)MnVgV}SEq!8Rzz(lza!$QI_(+|05;IDjb
z_nCs4)$0%;BuoJBYMG1hkyz4%z}~4<EMZr<aH$~u8nG=5GAvB)CMPV9ZU@~|c>8=-
zq#Z$x+V81O0c*9{GOCm7`c3*kE%9)*^-(OH5yKlzbmVp_+mkAhG~&d|aMmyaCF3<-
zK5>_)#I)8#Gqu0mh5A_GD!g5N$;5${*xKi^$BN&2BU7hMFUqdlEL@NuU*6(69V9!>
zIKbcvklfSBN53adA;cCc1@prns;4ob0t^@qgJwC?@W8z*yy^8YBqvNjk~V&qOMQj;
zc=F=`Lj-7Db)^1LGjU^-|60&KG#ox{YJDCu>A5cXc8i)#r(@74{qQ_C0|ovO71`1P
z@F<*GG=<L%?eDcqE3EIzXog&S#jTcu5r%^3#Ud0<|1!*Hrye!FD}Cim_$V#t92I()
zfeKZPZP3Rx7t;2UJ$>oViX<HRRe9R2e8clF-RnmENxoYhFBF3zAJ^ZV^SlH-$nK2S
zac#vYrai}z=)U=<m$WJ+VD1&feX>_SNK?-V!v)CwQ<G-|*8Qa0>2kncD6$5z=Qqi|
zwBV#x@-OfS&f`?MToV5sT}x9-O{!KHf9P58UDaQzg3X-ylBEKBpU>JcdstkSw^oyl
zV8Ru=AUFd1=QS7#GM*K)z%T}&FWZv8!L1`qpHKTjI7-b^6m22l%sRuTP^-^N?Oh$i
zTzi65)eH2=#`zdc6+i}z*6k-z41>YOv3$-snHC;%uK^iPu~Wv&u)i1Jh(t~DHxNKX
zW^Cr39$eD2swS{u+E21zFz7qrX6L5CjO}ZC=%C!rb!TqWtrt+Nd=wA7VDCJf(cDQP
ziyZstVET#d5E}zV1hOtbwxM3%KdtG4I&_iU0cK|SSwi)<hw1OhiD)m8{oZa!gndk9
zQRyF(JBQaCzj$I_a0jQgGn3O55e+^M@TlMsZtRdz5sIg6?5b4BN*IKHoe{etV)e!y
zY^#E>M*dL-{}BbbGFa<${gq6LuJL&qE}9!yJ&$B<WH6}wlHw;kY=cm384JX~r<MwP
zovp)F{Jo-pvk?FKb=60d(FYrQJd}ksrBF<T?@oF+^RQMCyMLhoUq;<><EO(Zn>h7&
zn#WW4tF43q&0t4Dt9^J)W^7Ht<9f(!-;zfvNAVL2KD~pTf;^X0cTp1C*4m>pUh=on
zA2gNuW>TYGX9p>FO<%4?EMrldj)uP#zkHiL7F+1+xvNP2J);M@LuW?q)*GsIp}srU
z7-fy{oWqab=C1KWeSzpu1C0bf%BWr}V%&saUu@Zej<H;yPOJju%P;A23QGg_Fre<W
z<CdjmEq)pBn0%obSA);D^<=hd)$|zJ<J@sQwTRff6x_{w3vu8fg5NJRsnR%RbP^|z
zqvM9_UL4-ZuVro?olAGTV<IWhx&Hb2#*?I$b$)DD+xz)3-|YM~lJ$HLyDHackRDpD
z0e2|{{(}rxma5I<4C~0*&$H5V&KGkej<`6`7BrHiI%?%fn+Y(p!e9QA5kPBWw&U@=
zo;!TI5za4npW90Ht~w#=e`TGGDjl13L}WjvOgy69PmbL^mGJmY90Z;0FNS`8@*K@c
zZJAHH8};U@)AlBGSXDw=By`$PElz4NOStP{n<v7r(zStBgB+Z7*w*y^w!GiJj*sIB
zrO%Gt25h0k8VUrd3PnM%0|g(1F@7Xu#9^sVI1$Pl7l6S_B^L<uooR;di~4bK;li$n
zni!}LVgB`aG8bt)kiJbY9AsHGgH7e<*$dST@)ow`g!zWxceMNYmoPO$yI(Hc*EqL9
z$YK^?_BZNZJ)NP2^F-DbJJLC5puG5~@b+RpX>?Ej(mn;BW{8nXkghCC5R-TYy$M>J
zi2+Zhz`Aaw8i!h$TL5t&qRSy`g2MEB5~1Dqv9EgmV^JU8P66@w7Di$d{T)XBPvYs1
zZ+{uGqAG&4l5%47HiotqrY25*$fzr3YAkyhkfAm^<a2?x*|Q}O+yMiW=qoEh%+Ban
zoG3$yH4~qFpGt#vglZw$CAZJp^zD!CW^f#efL3PA8vU>^wIlN@1k4;N7t_?%?D9uW
zdZ(q?j`Ug<F2Y%hvx3QCO_Ooo{9$SihcIOGKlWGh6Ys0ibr!W6keqi3cHf{c>Iiq+
zjU$izI$;e$b7~TE{^3^lnn+6hH4==}v}8JzxOOe>6;-*yqz@pDJygl8C5hBEP*(xq
zU*bTBNnwH8Pc>yAKPJs)66g*iUsIQ=a6cU8FJ;ul67GH~bxak$-uQsw!W$R*d)r!?
zCT&t`%+R9BLb+YDY@Gf*3RfOQQ~v$toO7^u_kn_;gZ%IM^*7kRrN7nee_Q?S_J7}H
zdDpD}D+#?_{+ojRzYYI=LGZo_^RHC;wrTUPEt$XV-s}IT<-))0I7r?^_irJ_`{Lnm
ztG}E1@8jk@p!ipk`TU>Z#eZA=yUX6k)xVM-?f){$6r>@c-eFL0KP^xYkWRYy-2Vfs
C;-x?U

diff --git a/docs/Changes_3.3_to_4.csv b/docs/Changes_3.3_to_4.csv
deleted file mode 100644
index 762b7014b..000000000
--- a/docs/Changes_3.3_to_4.csv
+++ /dev/null
@@ -1,133 +0,0 @@
-"Impact";"Package";"Section";"Change"
-1;"Cdms";"Axis";"Time dimension does not generate bounds automatically anymore – use cdms.setAutoBounds('on') -"
-1;"Cdms";"Curvilinear grids";"CurveGrids can be supplied in the axis list."
-1;"Cdms";"Curvilinear grids";"Added conversion routines: rect => curve => generic"
-1;"Cdms";"Curvilinear grids";"MV works with curvilinear grids"
-1;"Cdms";"Curvilinear grids";"Can read grids from scrip remap files"
-1;"Cdms";"Curvilinear grids";"getMesh returns a transposed mesh grid if requested."
-1;"Cdms";"Curvilinear grids";"On reading grids from files, check for cell convexity. Where that fails by default fix cells that straddle the 0 / 2pi boundary."
-1;"Cdms";"Curvilinear grids";"Added routines to write grids to external files in scrip format"
-1;"Cdms";"I/O";"Trapped seg faults on closed file objects."
-1;"Cdms";;"Fixed memory leaks."
-1;"Doc";"Tutorials";"New tutorials"
-1;"Exsrc";"DODS";"Now builds by default under Linux, updated to Version 3.4"
-1;"Vcdat";"DV";"A number reflecting selection/operation order appears next to selected variable"
-1;"Vcdat";"DV";"Templates and Graphic Method Listbox available (on/off) in Variables and Calculator area"
-1;"Vcdat";"Editors";"Template editor accessible from VCDAT (click and drag technology)"
-1;"Vcdat";"Editors";"Graphic Method Editors updated, preview/reset option added"
-1;"Vcdat";"General";"4 mode: 1-user defined layout; 2- Browse Variable; 3- Data Manipulation; 4- Graphics Display"
-1;"Vcdat";"GUI";"Layout changed, now has mode for plot edititng, letting you copy/paste and edit existing template and method"
-1;"Vcdat";"Plot";"Annotation pop-up available, with preview/reset/apply/cancel"
-1;"Vcdat";"Plot";"Page Layout Table to control what is displayed on the VCS Canvas, can turn on/off plots with one click"
-1;"Vcs";"Animations";" update to handle two or more on a page"
-1;"Vcs";"Animations";"Can read from a file"
-1;"Vcs";"Animations";"Can save to a file"
-1;"Vcs";"Curvilinear grids";"Handled automatically"
-1;"Vcs";"Projection";"Added gctp package to vcs, 28 new types of projection avalaible"
-1;"Vcs";"Templates";"Template editor by clicking"
-2;"Cdms";"I/O";"Added writeScripGrid, readScripGrid can read from a SCRIP mapping file"
-2;"Cdms";"Libcdms";"Removed spurious calendar error"
-2;"Cdms";"Time axis";"TransientAxis getCalendar returns DefaultCalendar if none specified. This is consistent with FileAxis.getCalendar()"
-2;"Doc ";"Tutorials";"Data can now be anywhere on user's space"
-2;"Genutil";"Arrayindexing";"New module, allows array indexing, e.g:C=genutil.arrayindexing.get(A,B)"
-2;"Genutil";"Filters";"Added filters module, so far only “running average”, 121 and custom"
-2;"Genutil";"Statistics";"fixed a bug in linear regression when trend is 0, probability are not set to missing"
-2;"Install";"All";"Updated external Packages to their latest versions, see README files"
-2;"Install";"CDAT";"Mac OS X “offically” supported"
-2;"Install";"Cdms";"Fix CDMS build for FreeBSD"
-2;"Install";"Cdms";"Added --cdms-only option to express_install."
-2;"Install";"Exsrc";"Now can build indiviudal package or exclude some"
-2;"Install";"Exsrc";"--list option added"
-2;"Install";"Exsrc";"--help option in install_script "
-2;"Vcdat";"0D (numbers)";"if no dimensions are present, then show the single vaule in the Defined Variables window"
-2;"Vcdat";"1D plot";"Overlay applies, use page layout for other types"
-2;"Vcdat";"DV";"Different mode of variable selection single/multiple"
-2;"Vcdat";"GUI";"Remove the 1-Plot, 2-Plot, 3-Plot, 4-Plot per page"
-2;"Vcdat";"I/O";"Simple binary file reader added"
-2;"Vcdat";"I/O";"Can read column arranged ascii files"
-2;"Vcdat";"I/O";"Save netcdf now has browser to go between directories"
-2;"Vcdat";"I/O";"Simple ascii file reader added"
-2;"Vcdat";"Menu";"New interface to user menu, more robust and intuitive"
-2;"Vcdat";"Scripting";"Added limited recordings of the functions (not available on beta)"
-2;"Vcs";"Boxfill";"Now can specify levels just like in isofill"
-2;"Vcs";"Isofill";"Now has legend otpion (i.e can specifiy where to  put values)"
-2;"Vcs";;"Linewidth option added where line are controlled"
-2;"Vcs";;"User can click on the plot and get worldcoordinate/index values, and actual value"
-3;"Cdms";"I/O";"picker selector, let you pick non contiguous values"
-3;"Cdutil";"Times";"Yearly and xDaily Time Bounds setting, plus accept either slab or axis"
-3;"Contrib";"F2py";"Added F2PY, fortran wrapper"
-3;"Install";"HDF";"--HDF= option let user point to HDF libraries"
-3;"Install";"Scrip";"Source code distributed but not installed"
-3;"Vcdat";"DV";"Replacing the eval call for the exec call. Now the user can enter any command"
-3;"Vcdat";"DV";"Fix for the automatic Numeric and MA conversion to MV. The user now has a choice to convert Numeric and MA to MV. That is, the user will need to select the ""Automatic Conversion to MV"" toggle in the ""Preferences"" menu to turn this feature on or off"
-3;"Vcs";"Plot";"Very High Resolution Continent Files generated via GMT are available on the web"
-3;"Vcs";"Templates";"Scaling, now can scale fonts"
-3;"Vcs";"Text";"Bg option works"
-4;"Cdms";"I/O";"Implemented isClose()."
-4;"Contrib";"Install";"Automatically built"
-4;"Exsrc";"VTK";"VTK (beta version) added to distrib, not built by default this is experimental"
-4;"Genutil";"Xmgrace";"Now can pass arguments at init time"
-4;"Vcdat";"General";"Retains user settings"
-4;"Vcdat";"General";"Predefined Region"
-4;"Vcdat";"General";"Can define colors using intensiy"
-4;"Vcdat";"Taylordiagram";"GUI updated"
-4;"Vcs";"Primitives";"Projection accessible"
-4;"Vcs";"Taylordiagram";"standard dev added to xaxis"
-4;"Vcs";"Taylordiagram";"taylor control the xtic/ytic/cticks"
-4;"Vcs";"Yxvsx, Xyvsy";"Auto now works again (no more blank canvas)"
-4;"Vcs";;"Improved thread support"
-5;"Cdms";"Cdscan";"Added –nofiles, --execfile option"
-5;"Cdms";"Cdscan";"Added --notrim-lat option"
-5;"Cdms";"Cdscan";"Added --filesxml option"
-5;"Cdms";"Curvilinear grids";"Raise error if nonrectangular grid has no bounds associated"
-5;"Cdms";"I/O";"Added gridftp protocol"
-5;"Cdutil";"Times";"fixed times.setTimesBoundsMonthly for end of month storage"
-5;"Cdutil";"Vertical";"Vertical Interpolation Tools (sigma2pressure)"
-5;"Contrib";"IaGraph";"Package of interactive graphing tools. IaGraph will create line plots, scatter plots, and contour plots"
-5;"Exsrc";"R";"Added R (statistical package) sources to distrib, not built by default"
-5;"Genutil";"Xmgrace";"safe/nosafe problem solved, version controled before passing the argument"
-5;"Ncml";;"New Package"
-5;"Scrip";;"New module, interface to scrip regridder"
-5;"Vcdat";"DV";"Icons tools added"
-5;"Vcdat";"General";"Keep (or not) Windows in front of main window"
-6;"Cdms";"Axis";"Align isLatitude, isLongitude test with CF convention"
-6;"Contrib";"ComparisonStatistics";"fixed a bug for TimeComponent 18, the seasonal weights used in computing the annual means were slightly off"
-6;"Contrib";"Rpy";"Added Rpy package, works if you have R on your system"
-6;"Exsrc";"Pbm";"Pbmplus replaced with netpbm on Linux and Mac systems"
-6;"Genutil";"Statistics";"Geometric Mean, Median and Rank functions added"
-6;"Vcdat";"1D plot";"Fixed multiplier annotation for 1D plots."
-6;"Vcdat";"DV";"Support for chemistry attributes"
-6;"Vcdat";"General";"Exit Popup to retain settings (can be turned off)"
-6;"Vcdat";"Menu";"Option pull down from main menu was changed to “Preferences”"
-6;"Vcs";"Animations";"bug fix for pan and zoom"
-6;"Vcs";"Templates";"Ratio options now supported, let user definean y/x ratio or if lat/lon let vcs find a good one"
-7;"Contrib";"ComparisonStatistics";"Handles fortran NaN"
-7;"Vcdat";"Annotations";"Changed annotation so a blank text field will print nothing in on the Canvas"
-7;"Vcs";"boxfill/isofill";"Extension bug fix, if levels set after ext_1"
-7;"Vcs";"Taylordiagram";"Taylordiags single precision ok now"
-8;"Cdms";"Cdscan";"Fixed bug when file has 'bounds' dimension."
-8;"Contrib";"ComparisonStatistics";"Updated doc"
-8;"Vcs";"Markers";"0 means no marker"
-8;"Vcs";"Taylordiagram";"fixed taylordiagrams.script and also listelements('taylordiagram') now works (x.show returns None)"
-9;"Cdms";"Drs";"Fixed the cycle process for DRS files. Must close DRS files before you open them again."
-9;"Cdutil";;"Removed Propertied Class dependencies, replaced with standard python (object/property)"
-9;"Contrib";"ComparisonStatistics";"Bug fixing"
-9;"Contrib";"Pyfort";"New version 8.4"
-9;"Vcdat";"General";"User can defined the name of its default template/graphic methods"
-9;"Vcs";"1D plots";"Bug fixes for graphics methods. That is, if the data only has 1 dimensions and 2 or more dimensions are required, then use Yxvsx to plot the data"
-9;"Vcs";"Printer";"lanscape/portrait argument bug fix"
-9;"Vcs";"Taylordiagram";"Skill drawing, bug fix"
-9;"Vcs";;"Major changes to the VCS graphics methods"
-9;"Vcs";;"Fixed attribute settings for missing attributes"
-10;"Cdms";"I/O";"Fixed bug in dataset.write when time axis is a float. This shows up with Numeric V23.1"
-10;"Cdms";;"Added xmllib to ditrib since it'll be abandoned in future python"
-10;"Cdutil";"VariableMatcher";"Added comment keyword"
-10;"Genutil";;"Removed Propertied Class dependencies, replaced with standard python (object/property)"
-10;"Vcs";"Colormap";"Added the NCAR Color Map to the initial.attribute file."
-10;"Vcs";"Colormap";"Cleanedup"
-10;"Vcs";"Templates";"fixed JYP comments about ""inconsistency"" on comment#1 of UL1of4 template"
-10;"Vcs";;"Removed Propertied Class dependencies, replaced with standard python (object/property)"
-11;"Cdms";"Curvilinear grids";"auxcoord, gengrid modules"
-11;"Esg";;"New Package"
-11;"Install";"CDAT";"Building GNU tar on older systems..."
-11;"Install";"CDAT";"--psql option added"
diff --git a/docs/Changes_3.3_to_4.pdf b/docs/Changes_3.3_to_4.pdf
deleted file mode 100644
index 408ca428a82faabccf24180853af8b797fb397aa..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 26249
zcmb@sV~l3e)Al=U+ue7&d)l_AZQHhO+qR}{+qP}nw$A)toaf2Ghm)KydsV7x?@D&E
z*2?e7MJgvGLc>7I3<phm_`JCT2hBi0Phe|c0SC>^O+Y7QY-8$VM!@(lrvL{{KtMnz
zVs7PR?C@{3(swczGB&g|GKPcZ<%NTGbaF7(w}yju%PP_yt3Kp(*us$ev0H&_ofIbm
zV}?DnbKL0#`2=ynx$Xb+@<RszDM^wce=7gTv)z&c3uT7|H_X50>)u@HY193-eDe1>
zj`sb2=;`@>pws<%c^KL0{=RFf?73Hkzo@yQvOZdQSN195zeBK?`T2VA_4#i7xqA>@
zUDgdAZK>h=zPQ-=_m#X@-_Ng$n(2;RqOa7=dwgBr&zl+i9ii_QbJ(}Jkg(hkHE8*p
z5jE}QZYKM^*E?P9Ioti3+3s)~&&)m_zf+KFA3l5Rz*@USCbptCI^8w0cgp1Gb$YVF
zp`D-m2M9%4oG)1or}H5Ui^|2ERp{s(6_VpxuV~*1?EB4m>%|X!z{<#7AZvT_uAGJj
zh<3L~R|;b4c&A(_4KaM!L@U^FpbzbZ>Ie6_dxnu^%QPHH9jkpkDn?6LHjKB6?Eze8
z>w(OTwh0H)JB{?@=Q;r4(A1<9q!#_#8_nT55|JBWY0g}mhG@aRkkLBF=bfRFw?O&j
zDIJISjA>*)BDX5DGXKsh%GFH;{Q`=?FeU{`>0)ghXV9%V%~4ADnc8QV9Bam)UbLbd
zocDpuEgqO@#4)mscfy3Y*=ibKOEIUFOPX1=Y!KV*;pm+*Bw{7Lj7@FNv|<|0sw1g(
zIp=lVuz3TrEMdZB$|K$wU8P=}Fr{9+KRKrWUyNGh%%I9bGlXY*_b*1Ba+bDl8iC2A
z5T;1mjPGU;bzDozKeQU$H<=<%8Pdv4v+gu_el{z#t$YfPj8>J(<k5jXejhX=Mi*=z
zftl9IJ)I)^S8NEFQMF9rhbO#Hr#HpAH<)IuG>NTf5Y^155GGtFQ-n03%9W-d+UOIQ
zaCeN!c?@C6AnVefn!_;VA*}khCN9&9YDQ3;`Pdg1dd_1XyA`rfi@*GPVBH%Hq#eX>
z*BXs>*k50(>Nmu`P4OO0gv%S{-lexdxV9IMMND2IpUw@yeWrxgQO`I{+%@c~Q6HO9
z>wFd-YKM+9@vWz5<D`c;sqzpp;Ns>2hM6Dp7!bU}e&@^HN<CubjE_SUu%Nk{Nekok
zmyMzrQr^SL7>j$SggT>yTI)0Mcs|<>7D4N}KSPZX8_i6iy*IXHc62~^FWcr`XLhS^
zO>o`3joJ0JTCTg|sIOXW-DI;|ktrgabxGfeL@Jl|82`TOibhy7I}8WH79eZ2Y^+!D
zo8orop+yl5jTf7s7wu${*_7Q2tokh`PD#+-v2%A-kb3qPJkm7m1@8P)KJZc}h|YbU
zIpOnIyk${2|E9CR7WqpAA1W*Qtx%V^Y+EX~ZA+w)4SUctkA&HV7U@ZwTnE(R+4cQ~
zaEANKvjkmuo%igAjqa<J>_Os-<cDU5#6ZO3o;x%1S=P=oNAuR-h?-z>C3<NIZIZCa
zzm2V<7@SQPIMAi!rvYGW5c@YjmoA4{Bl@A*KuyC?M1&+N4t)|i@7@jpFKXF8{(zsY
zpA#~sk#Fq8d#rrxNLK)LPiZx`D+nc@duYt;mW#cw8hs{<>P0p_bdZrl+(th={%UeQ
zV^nBe=-4gNmJkB(RXk9s^TSoJTigN3!kt*#1Byk+JtAXn*HunXJD6rOOm@q=bL7-@
zpE11I=etdx5<M>)6u<6Q(uvwGcuz3cW%+d(QTAXPO8-&cy&!g!3l8Bx{cUMu<r1D0
zVPaTi4I<-(Fc!yrc0TZ!F9Zrb-*HT*+T2T?LuRWZ2a;G#ZH#^wT`oR}aM^XQr{zFk
z#5GbCDy50PktBH#-bFrXZm2ngLD)SiQ$gR&-;j3b9b1P-WX;~V;@>QVu{!xHf&X@h
zPrpdM)SmdvS*O>671fR6&#e#^Zp-|G;<F8U^e{T|(-q3zjOo3g%;V?|O2v!BLw?RX
zWPN!T1P7U$vwO5q{YbiiNv{a}BRcWecSZJ_7ZH;cH(hJ-b$PK+(urjR&gXJQes%GK
zv2VgCtzEuoq}fdN^=wJKNhi7&rF*mA@sh3SbES5_;IeMz?;w*1RcD&~M(Qy=7w<?y
zFTp{*A9ZShdPz!)C`tJ?r8g8$p=p!-H{M23LE#)5KHtH@l18@5lu<sqJwI|dKoY|u
z0d2i`d%og<ax;eSxL4eMcnCr|SvvWLMjT@`i3k_SAjI<wL0gSVix|qrMw%eI!fzIP
zj6R;Le21Z&J=0gXakbBL^(g?mp(-w$`hcqzuG5%Zpw#xMcG9A>Qa&fW9H|QF@VQfD
zJ;hmOaxzl~v(u=sv!l_I^Ty6L+?3nH&RtC0{mN!s0EKy0NsP~CJr)~bp$#Ib&?Scb
z`DmL-OTb%d!8@VCSLJ#2sYp&YagHGY0B;H`=|D%?W*Y--q8R$n4e>=D%|P(3N|pcR
z1Yv%&Z9M0aCaA3AxE@vN4M=fua1a$Kk!cE?$gor)v3z2}!ln$GF-)?vE^larDi6Kl
z@(yT{yxQT>WLQwia42xSHN@9#);ABAcyI;B!CLD$suteilUw-MaceKLF^p#FJLH~d
zY?6vxPy*$scNxVBIZ_DU$U@Dr&&0FyX!Qy=9F=!yRlHsoCt?85@%G2VC6Ax6U;-ks
za{5J$t;RuT5YxaN4gkFe5(V$A0TQO$iuy>Ac8}uocMR=wR_uiFZJEMyP#r%4=mRmE
zkkfAlF=3Ku9z(dLKIPOy0Z!5;9Fb~5SR0DMda$q`m{nZ_FtI~uP^y0Dcu)H(1W(aQ
zDg>t&4!*P?3^zoMF7S-Nf`1S=b<(@jy(+%XBmtuPu|<`{RvcCFH`~~FRhv|>?#f^&
zd6KlaJ2Wt44=oZIX7OteAJUbAu{DS*zwxQ;<doB@ey}7E8!chPnm`S=7~4u2>adHv
z8OAqvc-_e{!#Js%7)n}@D@;j|Zhk<!&^gPMGlN}s@tNOdM({nQDc`4D=hVBbjBdSy
z%lGVWss<G<O;`hjt3dZLGn$lpGm!{NO9bP91_%qm$>@rmUC#4M;NAY#F5c;*L1l^E
zf*>$hd{<M~aTtFU1U%Nl4fZE$D%xJta78=1yiYHTi%$rK&EIfHjw3SjVy2SZ%O!Kv
z(A+IMUVYG-^Q;E1a6|kPT<xZ)%z+-SVh&wdZ#q_G7*?aAg}<gvtL-dvE)kp-hDC+0
za1F~1NLBs!YdUe%EAcWayMJ3iew)lCQVU~nA2fnIxO4Sz?(@5vXoA7Vl#^;Yi`#?f
z!~5~5fnbj1kZM1?A{TS>1IOzQ1GDl5@r5droJc#56Q-(i|2aaI%dv+Vt#J&;W}{6!
zEvP%kXHUv+mIM$<jOMX%eRtKOT~hiWm_Of9qlLDnICM#B5leJdYe@!k>8NjNRF25B
z_Yd2i<r3kBeP=9mZ|R;OW}5Y49!T=cVPEZbeU_d{tFr9sD_l`0Hhda;6i4n<UwaFg
z=V8#l&CLa^YMZyeq%+Gfx(gUAbh_dvxDj=qBx>UR5?vWY)c7O{9kE>jAupv3uv6v+
zTw^I>*YS=Vu}5;9(=+Wug_$B{5%qNsIck2ZNKF<SA;=E>^~$L*kOqQO(VXJt$;flh
zADnLvn8oaeSIE&Hfs2{wXO)oNH?$9$3S)vQ3t2oEzT8JG;rL1UT5b<2#^U%j@>_x5
zD;17hWRxwR`4v7qa#k-;=TBd_YsGWF*?NOH-JOUc)y^z8vp2lw_KP})!yS?JD3UQ>
zW2rm)4lIX=8Y;l|XFe(mvmj3yZZTJu$GXCj_#qQc5Gv|H%z1iIGP{9Yd;t&;sGp$3
z4cT&|YJv8MSOhwHf<r(o1dpQy6OUuaBHDSQvJ)`5)#Vx1hG&{s7YrUynVfd(A=ZbR
zxd3L3!H_-7WRYWmKXWT`(&!{CB!?PN+JJWU5doW}D~T6&K*Ly?N65m=JWAL%%sJbP
zFHy=7D|3&c!U)!|tjDbL`LgJ-(mV!m=azJPQzcjv9F-W`05d|`qNvYH+q!rzHKst6
zUA(-~a-Y-&Fg#oAa+jOzZc!?$j7j>P{nqh-5jU!elRqE_<M$4apWa8xa*ODU=!2AZ
zA|MEn19dtfA1{JmoP5JG7PIKnP|Ih)S3IX(36eW&TVb=*bg`Zz7!<ocjXB0uvcxwK
zmS8q|q=6hymvlK^3Nc-%hgkJI%DVh!S0IqFXz&uik-5|92m2x?!^(GJo7)z5-RJE@
zQkTp(A@LJ+7{Z)BX_HJa&oCYL=utJ4!E%68>jeGkXk98(3Aegg(z585v<nmY>l>lF
zkKN3!{)b*Yy)fqqIpE(t%D?qintqJx6@r-$&X0no`Fh+d9Z?_1LeA7?MIqJX>lH>y
zoR(t%gatgxlFxmjQ4N-7VjAwuO)On$3yVRxdlGa7xic*6BL7II;7xV-+#sW%6FYtc
zT%tN5NR>-4#d+ok|Dy`RF_>558lE7}cW?GiPFgoryw;x(Nb>zm%8YnK#+|_6m%N}V
zrI(nIkDpv+cyxjFlMps4O+ETC*QR<F_nx7AE^dSU{DOV$_t9T7qQ@!%A+PfX0^TkL
z-lE@Op3?P42|K#9=Gsfh-CvNsmm{<^{4$7u^q1(HZ&R)zEroQlg1PO3>UNErH`rXd
z_^;RwQel#|*D{X)R;S)ge30-hd_k6%DGl_R{ka69OV98sLet_9X7OjiEn$|&9jow+
zW0@r~Zl$n>8TR?4ER{cCpnKI^@gu+7My?`S`ojdr8%SOvGUM9XW2nIdaAF^|tHM2>
z`0kZ>YEpzd{BKX!jU_gl8^q@6s;mFZ{mKtPK;k6Too4}4%)uz%osJej3rs7bH$#$r
zx)eQE3e5X(04PC|q)91Y1a_><m#6AQH7X@-bg$F`C7p<yZVpOu3f^=4W^sWldDpy_
z9T@AFY1uNZHy>dm(xzlXVDd+OV2SL}8)qtG;A}C>Nz-y<h^0g*SFKMGlyT0YNl=&U
zlaZ-x)oGZ8DgNwbuyE*SAcA-S%~ALsDBqEdkJd;c*dd9jw$FaNmC@588pnN-+$0`H
z*ev)>wkhbagKWA_@qh)`IAclGb}LI*4oE4-$u_FZ5*cm%B-$K`d0`3HzS&>2%gPfz
z%K2vT`gKI+^(T?Z3-N8k=MUA-iHl%_>I+|f<BU=_Y*3~Zc8Wh>V6D+@M^XkZ#(Fu-
zgzoC{NN%aVE&PZv;Y|3VlVzD%IJZ#r8RYd*uS^hf3<r~zFo@hb=rw@aiZOu|KE#4j
zrSxorCZI!XykbaP>DzLwGAEZ9ZcT!N9cBM5B_p#^kb(P22#7SF<+?K13u}1Guyvnw
zw98N^TrpXs#lhKeY09ra!j0AQ$8KD`ao)4;oTMF~m`KY1=Bd`1F^8zs2TF;(7Z4-Q
z{EJkG*nu87Au-Zerex3mIMMa}nYM+>f?1((Z_Y3!#zzqr{#3m9l(gi@yIYuKJwy6J
zGrHTDV{d=1N`CUNiZqp8FbR>#7|*3L5^IPsyowTvYv?BG39$ySfb6riq-I_OYA~nG
zu>0iDZQN^!u0JwU&7_&WtQ7I7v%3WNEAEIMO%>X)O_<0I-N4ZpIjAzmiT{@PqU<Bx
zWxT}xtzt{#-0qF^>j1SmXZ2F}S5{ibSYreEmv0v$ffZiq*9QNUsg%3R8x(8p)Gw*;
zQpqCa-o(toPg&*Sg7(KbjFlHjc^PE5OHMQeGnOwNIuwColE1vMqo`68U^pfg6BbH}
z+iJb~iH(aL1P3g?I{8$^E4wW)ojN1*+HZA1ewx}-KpPedC%RG~7O{QdVB6Fd7Mw{R
z%`VZr=>==p$2IAA_b{m?@H15QD8mly5u<_FL0(o<4V3|V?1|!p(JPH&#@5nMN~%JV
z3hU0?Nm|es;LJ3e@?rPfZG*PN{UBZ=EVOZ&Leu&J*2wodCoi}xlhW0He3tY=0Oso{
zQ@=Q%Soud>*!r>kEQZs5jmxIF6{xe(GtkzCU&bm-U7I3o4thk270Nv~VP|SqvD<_g
z!c|-3g}8%<7z6oZmuey&xg)~3+JM#()CHM0#$0hIZ(k5%Sx#*)BbzNm`^gN9>2q9i
zCTrDBchsI=*|Cote>Tn|l+Y#aE)Hln;q)_kFVhDc$6te_{5=3k;0*SYXn9VITY)B&
zu)1<6<0j#wN^W_N#g>lP@q(>yi#vp4W;teT_Em0WiJ-~sRwW@-pWn)~=o}u@Rru@x
z<+Twh+`271=DG#+tX8-TEe5(;>q#LMX}?2_)g=4$7gw4aDL%Gm;*DVC2vjv!Yy`(L
zms0Y$m4x&${cSuPHFP*mpMa(>DeaT2zVF1vJm@0h;!@GM7_M2M;o?e>jUNJoGTZqf
z{Ych@@@kyF-Zk=1GJU49(%Yy3cSkVT>aeLWre${Ns+=+QqQAPFV32A&q0wts>=0MD
z{X2KYU!fHckN(l5MDqo!+xh1mvUKubT|G&~MQIS16^bo)A>xg3jFet!fcFC!yN!bT
zZIngs#e^m(mBdk_PC<0AorBK8o)#lH01QvrCXkhbQ>$7@$`P2-G1h$0DOX|)*d|4-
zl~l#IvZwO2U;Roz-UFw$#2khU*y66*gZSzQuO=cvPk!zwZc`9GE62+6$k6$viQa5Z
zfv@ca{R2DhQi6V#JkbJ3NFzLa>3bi;>gev{S8Vww`Od$Scc*Jens>V^cxrHa!BE2;
z_r-<`VGg&PhAQ=%C>Ek!7J!GNwR<xgMmIRB(X&N>o&(d4SD;0+mtaGgb`N4&J7&5q
z*N`KU`Fc^-=+o0#lu+^*fG*O@#6m;h4(=|R<T4l9DT~Sy50e?oxksK-Xu}u<0#Ih0
z`{;NK>rkV;lyILQ%O4aW?Ewc(h`eq33_8@QRX1l9X(^7m(ed-*j5F%@0+#Svkx@a6
z{~1FI(k|Vb)A2^DkRo&IuO>qxYrczRkRIj8=iURv|A<e^WAwXT8Q_cNb{;8LRbYM=
zd=)$Nr7(*0>(<49K?#M&>-|6^lnmAQ@6eLgyot~89il{a_rkZ6u5FdPB}eR&8cFjl
zDQ#taF3uCp#P7H~Rcz0RgrlE>bOizv29jS<gOb$P{K0CebCVZ68t{C&n~sdgoLphq
z+UX)s2{_CxBt?I>K7xet_W6?hUIgF%lXB1hWGZH+eWwu}b5SIsP8CFI#Z=;xOGIW{
z^PLDhitU@B>&FUE>2(IGAq&(Z?q{l|d1tED&(yAK@Idis6ce}H#%KhlWpbNfJVZvy
zu{WQ~wTIfcYz0tFxUgLaWDa0Ah(YOMDi}PIKG6Zhb8PUNE7gLG<D6$y@W)`<FLuNv
zs#a#lMyfcTT^~C}QxD1w;(L1LzXy?Pa(ud646gM){fp;K@^0GJJ;=O0Bynuh+c9^o
zzBc0E9k0$JiyjVRm7F8)u|0~;x?~r&GnTP^1to0ctgZ3drVfTJ${AL_fzfv2t-MnP
zcK%9$glXaw%@lJ``Y0`cvq@J<`&#b=d1JucxPc4mCNulKY|=f~xlU&>mpfNF00<6g
z<=%@Ez|}c{KGz*$0<QZuWB^={cfSO#Xwwq>3UU3;5aNVLu;fw}sD*uCaoY);=1Ntq
zsmgGCw<^KmpY8B5)GvBp7$HE9X`K6pP?}1@hd%Hs57?+1A<{FTSHYP>D_YAhh&&o>
zy`gq>!nAzHW=KMwvbP}C%Mldy11n`bGgFOD2d1Sst(eK_y7SeCIXk-yJDVvCwe#9J
zU`=2oC=DyR#+1#O>VDwy)Ee21%(IQWvWS?s*I#5X1@Tzb+#07J)JUYO!dz2}1$x$9
za2#2ftGD%T=nua@aeB2MiVyHaP%L41Y5Mht{9STvtu7?#51+RXPktbLyHBHxZH)f6
zApS4>A9^|*^nYaZ{}B=pu&{IdlidHS#Pq*Q|4nsg{{N)9Z)r}%T(BZ<vCaPMI{TgN
zNfLp9f~eJZA=vqU_DTU8G4xLT&=sGfK0W4DZEF9He!OZf7hBT5TMWFs!Ek*G{kTuv
zhIq??`1(HarS184db>Fpp-a6<t^OHL1-#enC4}5B>5i*vkakCQ`+U89oQ%9)JrKiw
zsUqF&-Cymb`tp7jCF=HkxxB5pV`wGS9Jc#vcYocz*<;-4vO4Z=a0`2}39Aadvy0x;
zM`sT~m&E$I4Dp8R>VK28>%V}HK;<Jiy<#HJT^~(QVCR$D?r6UkE)+77g9j4)7WsT$
zI0^kY2Y<V%|JG!cU>I{%R5-xM2IlCI7+#%lOoSEOj_T?3T><(g*jd;G6KRPtoAD}X
zxHGLjy<p3E8&32IQ)7Rxd<>91NGDj#sG7n17Rh}Qm*1Uf-^{5|<g?|L)Sr7e-nn2>
z&pq~B_U6xh%lpdVZQNOKo_e`7TGFK2fUD-vRn7NWcT36C@HU@W{wT(G*2bx$ZSpFL
z6kLdHUmdQ1(e`A`&AeE6!*FX`&7y<)X<Q~tW;G~#J@%p~Iestf4wg6%dA8CvtCp3p
z9#pW3!PIv2R&euAz1e7hI{4Sz#;Rj4HuYvOHuV(V7Ogg!$D_N$vO?xEg~YXzBP}3o
z>1}X{-bIYF#`L9h74^_iNw}aUB}&z<v>{qt9NRCq+`AWS`5uzPV@dHW(FiBZZAtE8
zj{7tQ9o-%9NFYxcIRn_Y9k={c9!p?m(of)hJr&7)zTajGMGvl^BWw%BKk}vH5O^Mf
zs~&v`jg=LA!lroB<V+PNHP6_~PX+26kc*{n>zi1;_l+-M6A!Nc)G#`?=ga;?B`G(>
z0>i%dM(nEi1UxZ$6Y0tfyNy~grpA6?RFbzRV)Bl$AfjRy542mt?oBgz;B}x-<##_=
z)afN=?H>Fdn&@M#b2CzDyUof(>VQsFn)qDOFGHXMNGOV@EJjx!K}ZQxd%SjQq?<VV
zhME>S|M^i<Wj&?zjEDDm3=m1{OcaOa9=i}r5T=qipeD;D5tcL}@pPPodFN;9W^$7s
zQ+1=5Qz)`iUI|@evtKSClH)2OZ`RQ-^WiKL?cKqz^}XHKeowy+-|e(O<7XKm?=YR$
z4)Jza$`#OCO>zh@6_8Y@?c?kX4JGExcvoO?b>?tVs~-c4fKyJq7m;h*kf7_nRl6PA
zO%kQQrg#92lf#z?Ns%J*DIK-Z6+~1z$MJuIg9S5*cQ=t(+~P2BExxr2j47xfQDFVq
zYpciU{85p2Pz#Sn)kb=GQsPFowH6!R_pG_F=VDAnhx4)<QS}S6Ntbln_M%memEx+&
zZni|Lm96+<!|(?z3-ZPmTk;mR`ro0j|3vs1@yW(17f*&^qILmf?F<hc@<1k-pWBP0
zyxgErm)2}Vfy>nxS?V|<X&Ar<2j4dUK%E}h@K&wSKnyZ-eX>Jz00J)W=Ne$2-t$-f
zcwjmaO*rEKXR^Oo3<T#ApUS_NKm82XFGPGoDelRCW1*$g@%zICV=e}}Y-D$(75;p0
zMly)+0dE~6u+6C&QchHue}>-*RE~}F_kHrxZf75hADU{z40>+Gu8Ia{967f$-9?B?
zfVR2YtvFsMv`JhJBpdz-+FJUw-tKe!MA9wa?6s1R;485*NRZA9aL!JHW^D)#4tbb!
zJ)bMg?6n@%jE+v~%U^Itr_!d6bY5Z+rC`BPuSZEWR$X7PUs~%CI&D0>GUS8c94jc-
zEk7{JMdbQxX6Dc?O!l_LyQFQO>>CX{TmXS{=-+rde+%k<oKOY2O68!G_wX*j%x^^z
zl~GGBRgMV(Ln4Ec(rQKh22e?Pf|?D7WCyMl&{pgtKr@AF`76#>B7ur8YjzbTYbM?;
z<-PgWW=&y0!G!!b?0zgmV=xhxU8>0mHgjarwA~KJTs4ZJfS`$;_CI<Ia-a&RCK3pi
zAdG{66GU*+7vOMtDw_s}P;F$W4wj}nxCDt36>BQg8$A6BkGm3(ILS)OL~ylYO@b5T
z(Igx6a8}MRmRf<{w9`*!XFz~};*{c@!TfecK{)@1*C6;>gp@qS*|}QdQ-U#$nM5-~
zIq}O}h5)P%s+x!^N&nCT`A?3kk6?Ya0H>2_c*F(i&MwIYx+u0+uxuH=6uGrcNXER$
z)&idCZxm)qWTfts8qTo{lTe^{E+?~yEjSRuZ(`o~i7rWG9#yW6kqt-AOFXw~4T@z0
zy1qp4zt4ZTjQa#}816rCwf+hpN`IE!?``;P!SUxWc;ZuP)#KJSq=_mAMZGt%?yn-U
z=zk{vh)}^YZvmI6$(sNUQ^4$4tTZ*tWvCJt^Cx$(@QS$lxOxM{yJf~pC8#J0`ek?=
z9846~)m%XkVjaW5ehc(jz(Vr*oS&x+apW4r_mk?DSZN(ukBclN)%2n(Ft~+^G1Hv6
z0(lxmDldCDG{M#vLqV%}v#Nm##mL`cHw_W!Ryxlci>^xm@QK!jO@Hx94YliSQkC{^
zNmq<eF)nYHQd}l<az;*}ks86A%Z~S(^;v|MF+i!SS)Wq0-&cuRn%ApU0}8g2$aTEA
z`^eWhN)mc6pekmOQxPxYUk{V-$!(O}$MSk<E^IR3VLg{Nqlb5^J@NJRE*bzWN(js0
zwS}&80_InNj#OhCtytJk_e-+JDW0{<11ZWDAi&Y#q$rKgR@Re~WDuh&26C6P80yDs
z0MOmw0#*-8978-dv#~77cb{oXZT>PWOZY#ADh6mX`p$7U3S%!=-ilYEOdgzioteIQ
z$qf$fz%&-miP|yg>ob%Y1z&n3zt>Di(26PBCt~uNj45bB;41})S>Qd6*G;k<v0n{s
z>)_G;?y{K1G=+2N^H<Wh$gil>4uKpDX;ipzIn0kUJcNZsBHr8%RhuNxfWv43$aHvE
z{nJna-<ChQhSJ~yPs23atg)`&rp_<h$%xNQ8rduw0kdgR*ti$`QBkQbEomx2cDRSA
z0#XS?hfl955il>tv0m~4nDaa9AxEcKc>Pu_0R1e@diGi`D|)B%;Nc`#D89JtFM=%$
z%C*z(-k{=uY!N^v?vlqlZ|0`^UAVk!;1Rf4w`hfEpOB9yEKVoO%@*X2+Gpg+OzZ6f
zeC%ujci$9E6aKNu`!L&GFgren=5Oc9#3`K-h4+H4RVb+hN8uVS{|HU89~&}|MeKK8
zpPj_I0vERGK%@!ef9F|Ultl9jJKB$>XVy$BUziQ7YHaa|iT65S$M{H)C#+i3-(}Nu
zgz2<!OvkRTWdt4%LXV~5{d3jDCm@d71fJzYV90<aj5l`e9#<ofM9ZD){z^zKM539F
z1GF0PBgNPL&L*_e8PrxGa9c%Zh!h^6SmW+;Ny@cFKkKE3LwzEJX*`&x?v)Zkb`#Tc
z+@0YNoEf!s{t-1w9kIll)TW^CJi#;^EZN=4tJgcXhnt~FJ6W}i_eXkS>lbmZpVR1i
zy^Do@3rQuSC#Id)nl8&~!q&iD>-~YX`tQxYT9NPp+$h4|9Sy|kqmAA;GZP0%J6tBa
z4mV@nd5;B3Ga;Ur5`RcQCM7lzd5p;ty}W$|5D3aLS_ha_!4~m~nRB)AW?~Npc8DeQ
zk!@OY9K3$NwF3<*Z?pQ9YIMH6eGhF;t|b*{-p>ibfp#8UhiSSXjE8wN^o~bbYt!iT
zw8`efsacK~q~3yS^aJg1eIk;l8&QGXYI*JOTEB8Aa=X1Z9|E3P3(PwkG{n`3oaCaI
zxz9y3C&Iy;CDZEBv|I+6N|LHG6Gu6M7kRpwOOgBxwKO0O1kSj!qeFOT#=ZAKK8THN
zv{pZQ`Kr-5708Zao4r2TidaBeyLsSwZ@;ag7aRxtt8W_sy1LX04fO9;6Qm&R%`*}z
z)GJ&Pt>*izRYB{FTbsYiitek~jaWq$B<w2E4<kY2!heQ9<KXrh^>5WQa`UZ9?yCt{
zyIa_@cKI7p#S}a*CK&D26Vh0>KHB)|wbC+hnY6lwkB5H`sn5VT^m`X8iZF~}h%nq~
z6TLcp{=s=*0wLVXnXtE;{5XL5r12*!@{23{r+BANbhiUdLfCEt74KKRR$G^ivpp4G
zYLA0y`PkWO=FGTSz@c<_>cd)1)$2*6Y>m&yuCGHrz@c+d)@jKNLy@yZ!5kY7TSnbc
z5(74PLKwu~Gcj}N{!p4p={wfb{*GN;+=twl2x@So)&_LMkmf?@itF)SjCF&jdr%p)
zEg{<F_R&sXqZojLe5)P9!Uj)tmm+zUtTGhYH7Th^|2dd;0f+y@CJdPF>WZ>)<w%k9
z!|O$*d_kTd{zg6RI)WQZH9d1PE`MgLiTR~PQL+l<E7ou1{yYo74h|uewfNmN-jCLD
zEe{fFCv=NTTU<G?{uFtJSy-DLrHyA+xQX>cSM*W$i!JbzB88zc?2@W7?3JUk)xf%#
zLT9{cGGj9F-w#N%#z#=NbA$-;DwaN=-b&En=9UwhYMVn<=@&~WoPH6stbk>UwDplN
z=5SbxPPi@wmWv<KOv?D<8*@+xtvGKI_WXtl1tkA_L&l`^^#|l@7lAC7fD7Cm)Q0lo
z9AQ!en(r3E<ddgK>}Jm>Op^TS^11K*#4r<PotY;JcdQEv7|WL5sGzuvdD7vIDRuqM
zApCHqswl5s^|A}JnbdcW#f~K}PG^)?F|xFO;F}Y~x5r;_X?6%;+cegF=^SU?_&xb;
zk<)XrasnYpnByW#^oHkeT9o?C7a5A_v`}m(C~)S2VLdw3{sZLXY#jdo$e#a!$^XTk
zOe`F%|D8Qq|99y>?5X5#XG}mRr*Hbt*7YC1mz=(Xv5gY}%YU(`w6T%7zJRS8f#$y)
zJpn5NBLN!+BZ2n+>=(4Par*ayBf)=sWdA&4|FMSs7v{44Oa7ncvj5*k{Rf`^Z==}%
z`>6lrW8?UrjsLGcw*M6VXDb5%0}~Sq<Nwv#wxsE<hbp{WQ{H`?b2ce;ZTOpTEY5!u
zB0LV(wVx2Nx;6ljfCNc^AQS+kBqOhfMnr%KhNJ|)a)Tn|<jOx9<VK7aS%2DIIa1zQ
zJwXedKK)YeVUqBR^5cXpF{kYMtlC5Q=4<<-Yb86ReHhy4&kGT}yU9`#ot@4{2MFQg
zZvl3r<t@0F#A5_7vmdY+>FkET^~ckK6tM(C?*l%{N1ddO^S-a;xX-x)<U<O|^1NK6
zcQ<p8PaLp4Cn|mZ+T#ewm%cC`UF8<o=&tP1T}bDa{=6SE71_!@8%#Gf!sO1VpWPM3
zUUvbxa>?M&XKyiYM=WSK2D;ioav4B`-}o>V7dVKJ5ppUJU*7!MLjgcY8xe@(Aq?^G
ztgyE40S|RzFHj>e`Gs@mkKhGkR`G(Bte$3SZbC*%PEuA`16aAUR16erOI$-+CAJnC
z9wH_xE;2ScJ)Z#^?+E17_pktFx6xVsfk;V_C;-d5%j3@tY2!mMBn4zszDpy3C!qQ-
z!mNrC&<DgE(qaI5K6WEG4Zm_JfX7c@PQDT73#=<@Tcl$as1aUT#B^@KjI2E@=TGFU
z%jq7fM(nnL;;c^Wni2D<vqm%tG`A@IIZDcMk1c-t7~$I^F)S~Qe)sp|1LH?$FYiuC
zF7GPsfD2Ib%g3|%)y9d?4)16I2%Lcw<VfBukz4SyFz8WWbf4CrH)M%hHPCoHo&C68
z6qzJ?iHo0;ZzmYmdZUsoSWRBu>>2>#dd>3$#&7K{gyp){2X0@T)*$3rAz4;|WlR(d
zl-zA>St<H*tG+N%P^rnFxGihM#=5gA@IbJ5HJDHEW#Qgw%`J2Lx)jM2N&7mR3b^lx
z2;dK_Gk1>8h4{qmtfZ_>$!2z@fy*vrfQzP@8Rlp3x^=>7lx~#vsy~D**88dDxC8RX
zlB~qY%J>9GM2_3X%|eN-Pb-SEQSRDptT9+wdf;_m*KMG1&GfsGgN)Aet~}Gki`&IT
zS@z8@5--~Jc(kqSv(PwF6$#s9dUysi4!i+Qq^K$n+<nOG2>uzvDwZH(?$6$mmh;B%
zw!xwPT&|(x<~oX5D5EiT2OAxm6u}e}qcl;0PLiA36xbT8{^sV=Q+e5I)UED4v^C;n
z3@+M4Hr0}1?-LxcC8n|mNtrz|LbwP)VxBX|p?27_Dpo53Zt!@>eomTjzoF=;UTn^G
z3IUul{;autF!#8P`3wV&J(;?BmZl;!FtC1cDahFdZK*I>nS_QiC;hyH!yq(g6NmJy
ztgp=keL<u9Wt<roo2_<}CtU_MSJ+T*8$l(k6g<KyrUn;O1aAd<zU800k-F&?=L^Y;
z88|hiS61z@v}ro=48(}b(q#~z(Q~4+svy0N^b^oH^dMFVB`<pD0&=meAbl`6kLVx{
zHH`-Gngji@y|g*MD@aS0U*O_{UV6?05$_ElOAh@Oc?&SpQiv$0lqW_<hieU&P>gEg
zDkgYPXSoGh<`(xkJUG~#)D$9(`*}vf4J#hiYb`h3NvJ%dpI~M^AeCy7A#RN1?{37s
zBE(3rrGM+q_DJGnklm$LyTL&Oz$~z}{?N01)ED|qJ7(3#C6aSp>FT9A!25=3Pkjm!
zH?wJbw_}-nGrz`@{K*AcE}aDF-mzgatXxU2i`PgT3xiY24T3WdU-Vm+=IqB|@nA>O
z3Q|lC=QRCEtD-o^ZjF4e=$GpNsg(;;%aT!d;Y_>Y$o(zIo2x9(JlrDVj?!_b$g;pI
zHw}3eFJEACsrcAEaEPN-Pc<ccW$SU0`UzRf2O+^P$x{UR6hjD}nzfBBXmm@qhdIJk
zKr|qDWrFfbZ8YeUMb<I&sA9B&9K9@*PZT~!+dt=4A~WIWLg`wHn<>rTsHQ<f;dIu%
zi_pA21w_I~JXOcu;85RZ<>=6$r{S^&o&jgbM>;Vko?sRVlI3>kic=y*sHnK(eZQ(1
zDw~w7oZc@@N-Fr?+5quP_jWfL0r4D_TMJF6^%RQbIUJ4i(P`kBQe#g-n$wRIbczzg
zEq%fxoj&dj{ji)zG7IlEu2bFmxJ+;LTLYd<_-qeF9MzTnhh1RK6dCl8L`4mbJt&dr
zarcRRsm@=ZGG!TstWvMUe2br)%)hD}oKoVO;&`sjcnm$9{UFg%pwHMY9tS2l^6On%
zb1p%Z{pi}XynLnlYj4`YRw<lu?{<#Qy075&;k5+PcrA~^@qKCxukLNnw$|n$<u;~<
zYfj;)Z38}P;0QIb7ynk{_y&bXpP+50nSFn7B^?>^QYCwNuVHcK!@B@;idsL!6jOkK
zLBTAFqiZRO6o7$7d%=OEw;L8!8$#gEDUqnW4AM>9|HK{cGzhk36c(i)u2)Gr+G}=w
zy&mF!)n<h&FaWHRSm04It>~Pc_yN~xdDPk%RNv|EXdi&V=XCRBjj-d%r~7(5zIX|R
zwV?VkrXu6yC0*TlUpG_a6QJTs9?hLC2pPejjAvInSC=7|O4_v~z>K+3iD*|QAqeSQ
zc?<3|032-DcLe;#&uR%N6@WpnO_OGT=*GYKPkvM|U9@Io&$DE8fmx`hR&1)L_bc)2
z`huQ7=h0Mea+&J6No(^VkGV{BJ%KKgce|3FJxZZTO^%G^LFbS8;Efs#jhQSfZ2v32
zXP+jDNqP4O<ccUbNsB8)@P}krUj$?ygguEEC^VlCR^P6WdfEkIZHi!+q{$WQ_seU%
zc5an!&t9J;6+ADC;y*cW4Moz03+A88S$}KUx+`Z~&ZKdh#kG{r!v|tcF*Jg^Oe6ll
zt=&(pB;ITgl+E0rP=r1}{j_nCp!z|NV$u1H2EkN6%a4D_ZTlv<>-F$OqV4p0H+*g7
zj7#|aeH#RK3nG$Xaz+oDr`MlUF6Iwc(J@WAix{qp9sO}-M<sw6{V_?MF*VV+XXfJR
zK6ph2I%}n3Sl5KXMD4;#7IoE6($GHdQ3Xy7(h0ZxNHER))?E9uDD<8Gs=oZ01Ph&Q
z5(pHWHR0z>!p(cn+QaH-Ts9fM7Af;he4~YekK$*$+1ec@W@!HTYHB=t9QPILQz$Om
zk6J|!n1kO1nSd-R?Un44YB6Z93Wufchmi$HZmU;{-r|&-4pp!*O3s=d)`~u4ogkkT
zli!-Y_pP!VPGmCzjuZEsk%I|r*X@s-5lj)WyTIMaIE(QFerzsxxaCei;kYH8mS)Fu
zOV7}zC||V3>T*ct?fl9*ghz!dPhMw<*XLmM2k_+?pkWk~c{uULbq8tEGm<?+a5vl_
zpTe?UKp@0`D18MzzqCk5RNq<m6k4GwYDpgBTXGtQn!>+<mtE{Fi>QtC*G<z;yJN^o
zq$;9okivUUa<cocn`4?(`$xmr-~&lM@80D{NgEI|c9EmG`ib7Qe124e-n5VjS}je_
zi=gej+D%RrqP^d*SIch9Vldnp!3rVwZhCY2+{Uv|cW@=SZLfn5yq~Z6UmML`U3Fk&
ztjwLyLi6!h9ml2^Q9XVO08ggnDXFQ9@2Tc`Mm23J`ZR$*xu6$Zvj_9~y`b|Jla#>1
zK$3bsSrYo~q|9a7H3W)zX%M|+b~4k@z9bK@XJ0G0-6t+yVLx8sA_Sd+>4FYp3Ks5h
zsSdgP;KicucqKBx8DfYf5YK%;hx(w?g*|U}Wl5~<Yijgq$sCw~=!s+y-+`0~I?7P{
zBjF>%BT5Pqv9jR}FAT5TgMo2!%mwha`wgq=U_3S<0e?NFkvVsXFffl}lTUxtEZAt<
zLaIUMH>{G&N&EF|#yN8EZ{s0?R!xm+pTGH;+{4^ccCy%i)P!5aAqog&BB!Wz3`JrB
zw=YGsK-PMI*2?oK%D1ovOf^s%a9JjPEhMWG2c;g$daa>SaFym7iiL|L{-PL+0wPHx
z6xI_7B_@UhmV2#3p8DG_s=)9|_}s?zpX8kZa-gp@jgY3dnCLMqC_df401}UI%XoDC
zhyBr`16a4mW^b0@SYC2f=ngJLPq=`m%`Z|72t8NLDNfptMQtw%F+ITv8`2zoJowko
zAhL7#G9v64_*sgq(?j?)Aj0f6Ox?$CVONg^OJ%Ll>%y3K?gPA4h&zGlwO+NTNSlbP
zIy-1ijXOa3P>xQP-7L}&3#&-&6AqP1IGW?dU6^g4TiN`Oc0h|H=^}gp5adv9AMdBG
zpOGkNTYO=i5niPLkBH6GAk#ZF&U3|%h2TEzdQa0`d%(_iOQ~4}jXaV$1|LV;XIQ|3
zj|7n(MKN2&rB|v#zW{B+7j9`0VCgCdu`!3%O1^~x0UpS59>jil5|@GV8#AgBQQ1|u
zO`v8AhTH=}&M44oIr!LJ<P1=^%cM0H_ck~*tPgRBq<1wNyWnrFeI^^A5*5W<)bg9l
zP|%u_2unI2m)3D`%{Bq7%v>&3C`=x@M34qe>^lL-jGbX3K2`Dp8Y+&iyg9uV!#B#4
zMyI~i?aR@4;sljV##73CD?G7STD$SI0fcUM>dBZc8+BbYhO@J#o}R|@P<y6XDY`a8
z^Su?{%jvR}h5fumoPl7H@(4fyA{pmzIl+t)s-ETngIW))ecHIF<cnAs39nwkLy<m;
zZfTRFtrUnyVQK5;q`QT`42Q|mZMX83xjn>4+dIU5KiK}!(wwEhE4Qw@0Zfec0IoG?
zMLTH-yRIddt!JV_8>yLKPnyUU$G&5_h%CY1e)uE6X(?l-7G||4G}?LZU@YmNVD5R~
z@_Tdi?kkvwAp5>+zPPj03u@llk^@ZLc-s<TK+IULuW0i47t-Q!VGe6adkvn5YDf)o
zU0eD%6jF%3fY4ZuTueU@SluiNG|<d71kg~g`6dR&w@leD$j;tqlh-WpEMtxXYFAGY
zkkrUMDF%&<6+v0>jH=%KU;|{4FI;E@rV3VE|4$C~Ct7+a0up{hwo`i{x7itp^k`1}
zrt|qgxN&Y$Um%}@+XfsCLQ%D9uM#Jx&y%K=yZfJAyQA08?${RF0sSY@>bMi{NKudx
z&G+|<Kv|WyW-3#cD~Ul#v)(^+1B_cBRD)ndD2BbD0`$Z_lyKk1@SC_LNkBc9MRYdy
znmCWT!J-d=ZFiMFu(Cu~y7^y{7C*Bs7PgI^G0sv;>GRUrn9Y>eUZO;<c(FD%!_I#-
zqRMBfedrgcUx|7srR{i3g=SIE5E6iTZ1V7<9CxSM2{XN%z4DOr)n4}ZKoLq@>QhCG
zR3x$X##Sr|j4U|lEdxo)(;=Lu)6LHOAro>6Sa9)Rz`fn<khcoRSx8x+><unK6{Hs8
zx^C;>_`=^*jJI*d4f4L$Ieo^Kd_G}Ta!$y!sDQ9MfN-#(7&rAQPF1avrC!`;_8eQE
zz;;GvZ~lC<lWbzH!Xd}>L1W4Jlkw9rV(ZQNmjyh+0tbxUI&znMAgWM(fmj6BRcd)R
zJ(a)A_-@p6E<6X%1C6?6PF%VLPPDlsP@S<u7bxubw6KG&$+ww8)*@omBEq<VZIjzG
z0~I&Xi+EmAx84I_s$DXz&hkkRHI_0k&*W=5swIRm(p1iXg_Mx5Ds^5uLh<_7KqN(t
zQU?dKIESumb;4Cen@Nlc>9&?P8N%y4!jew>D8_n~i`}6SBO;bo)zJ}PQ5yi+a{J<e
zz${<2f4-}WZ$9Y?Z&psO3Gz1sH#Ixd_)M=PwkTQ<5PiCDE%4G8g|EG<e;t%KlivQp
z=mkmFFH+~l!Yim7EPW$!W}J)fq`Aj<5D(3e1xF_B%OwU9*N;h$>=UlZVZSSPP<q=x
zJt33Pn;Al*Qlb8N%r3@#*(BETxbZely+2)R2<7dsxUGEIwp?ov<*3nG?Qng<;|9E}
zH-rv*{Bv(VX~faTqwDmAVhT$LO6i#+8~nN-@$8jJrL*ra-&)8>%uEEyXiD5-GSTWA
zzeAG0YNo=c93fXA?hRz7FWlZNpQF)WfsezDYN4#q88Ls2S-CUge)MBJfR+0C^{Xt!
z_jO&thsLk*Zar|(X4oa?<iNAiTa|WZd3$Q(yF=@{y@~8Cw<=^M)q29UO6O~BeLo>#
z-&qpkr`V{_?rajvxTO9qn`y=;jBR(^b=%s<f==j`kEhG_EYXcpW()3O`z*M0bF>_5
z4^QwfF9z&kCCNx%mM!sfcDHSk<Ngbu`fecyViSB{rNJy}1Q-tp@M>55EHX}hQZ_5U
zD#Q1E(=vDx&$^h^T---S)VBt}D269MHFg7oqfJ<dRHAR6%2@99q;Ur8{;)H>Cg#+4
z1q8qQt|Rg3sRfOf<1rhR@p`t}^kn9_*&2}5^4ew&gYjvvEWC8wzvGEOIIo^3HL$3P
z4KyHS5wpG2+q;2e%RJiwTrb{hLfJS3OHq{Z?6(pk1Q|278{e7}E_>_p$1;M2fF*T+
zIT3_<V?U}TK=>NKRmn|VD9s`7(k!k1on|@c_hDWon*RIx!9(9U2853X(!rb%BCs*+
z#4pu?M9l0<UtmL6{0tY0?+p|Uh`n8v*%Nh_aIIj&{#s$=<gm0x$jnxrn#-H1mx#~?
z%C*l8TaUAdIj}CIH;l7N9Zo7kFM1hKoT`KqZZEnMU1nihuKr5r>!#dXz8<I>*p0SH
zmamI+CyTSJk^JYHAnyiErgF1K(d-3{5~1r(1O_X#<cpPb@H)0EAhBwQKsF>KgRrFN
zju~-*M1UPm6oUc)qcJrzqR^r-JJ3iC?EgsPaCN)Ei}b5KM;CNg%gMT{{a%1Ms#w(U
zv@5PU*)+1RjOW6({nG;%-oo#P6psCB<Si;@t8ie%5kY~5<oeNjX+M{W4_gF7uA598
zSKC;E4Sj5BGiNhxb-`%be8)4zi@Z_y>n~WfSK|Zs3izm$qU97)wOTY4OBYM>AW>9e
zeu03QOgWU&Sii>zP9w!xmAG9RTk)W%{Aoy^TK`Nj8(Z1UY$i{+;C%^;dt2igO7k};
zIpMcFg@SG(H@)e`C_h?*ZnyVnpZU>`8K=u(pRdYJx8Ce{dw`+kve#m%Z}@^EZ8mqF
z+tl>@JWCpm^T)gFFn(v1&;BqOC4|w?@XXia6RBihhCkP>d2Iw|?-OoLo`?=f7Nle_
ztcN(Wqgt1-2z^Nb4EgP7p=jI@(Ju6!5IuGj<un1!in*}=g%01@_~E7n6xhN(xExan
z>}6{Tw^wD=R12hTuP5Ff-nm+{X1Yj7+*A_2Aa%i~8VsZK9jZaJ*n<W7f*LM1mb0E}
zWemt2+^QG-AQs%>&MQq+2nQkNF|b65E1vrK7TP_gKOXO9q-Gc5E3z)s!?oh{{l!0M
zX+$vS0a+#kR<rm$IE%?OOCCTx3Epo}^xLB&ex9O$5ReJ!QXa>cX$N{PUv}!#u`n)y
zs?RT5^1C?+aLI*(CC~k_wqQ8Ym6WB`b(Y}Zav+k|b-)%IOFo9}a?)7SDu|o7eykKz
zxmpWk>Ll8U7oG3gLvfF}6gV_r!Q!RyY=B&Ti}J+T8?0fVa<`&Ao^;*&cQ;hlZ$$K_
z`$7^t_5o4dt_k0Lhz%9p6J6-o-!F-M5d$sY%7`%O0$SF=d|q<^%8ftkyw+(c5oWV_
za(j;AH}5v6Jn<5ebpxhbQbvl%kAgDuCqUrT`zGtMGgFVBCkHdkfyleluexY!Ik?2A
z)5m1Ewz#-C6`eMV=daJjsXGpvmxs^Px;x(H>vS9;+yexHjv69IkF1ILS-a;*GWt>F
zVL}K(XM>5X@>5cM=ye4?r-zgIKyyKKbW@Y4YZc~GZ!xax@zcZ@ZADIkhl_&zUFU}l
zK9k$+<{5~iD6y~8YG1w)d&U`7Il~-mvPn^#b4K4Er44$8XH>gL6&e66lOnh<2{A$;
z+olooW`C7|Z*WjL0%ne4N+9oGTk2-;h9v~+-VLg7o1psFRA_@qEY3&7efMlNw^ysa
zS&l79*@N}|B(X^IW@4gbF7+OY`jg;9P9ZrgLuQmm_BaWgNPGxMjJ-ICeXpVu<LQz?
zam%N1Mmm@c3Zu;$nuL-|_?1OY97+PfD!zViVB?l}AjG}yRE0lOkqHDYt_1Z^{nhf`
zkEA{ZruRqX=q$iTtyWnAx0hJyH!s632qOrbFPJ8|o>+%D7|xaf7|sXP>fl+&J+Tia
zKKs=NMEJLSdO0j%ZbHw84ykZ=0DQ32{Z&ap{<OOnk3fq?WS!vt^a@)=a++EkdoDwA
zUrsjxJ(|Km1dNoHPJovxK^%!fZh5>-AsLy>EY~LxFkQv`1j&HwnP(?PF6WVFwvyw5
z_rX|RjCc2xk4W5TzIc2|jmi}2YSrK`ptq;YaP7mu=##;Fd%7~O8P~YZ{Lc|pIGIu5
z={IPu>Vz9FLZ=i@wZUS%_LS{|W39bKAoDt5gw6#aOfu1=2<$q3Z+23#Qfm<lCJ~mD
z*`22?Ed3S$)9Wg<HzvX9@W7`QT3GquC^Y`9q-XO^gYhS5k**H!+wu$LvK7zw?eX;6
z{zHp&MkiQo@@RVCxI(vs^GREgG{L4)cN}w_#owR=ZkO}gJaLQ@)AEsjjM<Gt+_|?M
z)(qK=sH;%v6!`ty&-6Ixrq@8*j4@gxoK`z3GN?`97qXb>&C{B7;RGAm3wm~=0@Zrl
zm`O_=S)5F1!_A$QXs;wwF5q$4e~?lQ`B(e_2rcHS!Z=8YZe_Jj^X24FGL`G~AR!eV
z593LH5@=u$acVR+kI7`jMAr3geR1q}oBcsqq8pKp^S4(|pdlYc1w|UW`Fu*r@$^v3
zLEGO!q`w;&SX7w1;=+ylBuIE}p+cC;O_dIT6hmOxz-JHy_+G)|bXz<UCTZJKeKH@A
z3I`v$Hh*nl)aNIIS=n5@q${*uK-wsb*k>L|{w$&fllSL1<;UXq(_}sZ_lgw?iseIh
z+hl_{P8fa>UnizzWj(d7kvY)4%OG$x8oYts_tqKWjwJa=iQ>O3#xDpPXmKiw3WZj$
z$Qu+=RI_<=j~JnVa01hO)yD)NG5#NweFapMTlc<{grrh}z#t$<$S}je3?(Jq4bt5m
z(jna-sI&so-AI>6HwXwwDBUOx{~5&lyI$}2ee3_Ob>@9%pXVg|oMAnC@ALMgq2|kG
z8!C8(EE-7yG;KYVDIh7w<YO^=GY!*GDnN>dY1)k<mFzFicE6MvQF%7mjpQnJ3mDxU
z8+tAKV1m&0!B@0*cYECiKGzaP-0g-Hz3ynzcMD1gQ+5zef6I?LAng-4cr53V(XhH%
z@<owzqD@K~6O7Aa56C!Fr`zf(jtr*+8&>Pqov_l~RAEOx_(4tz!y@g{7%;rkt~Sf?
zBp`_ThnSkCWiT^VI-o;%#6+Z-3!_JAb>BkJP?#Z_%NL-iX({EXWGD=+v|=hbSx?)L
zI*)(V6mBlAC;LME&fX|Xm_zGJI=T`T-&s~Cjg%gXxm%ad_NoakHEm^CUYc(!yR>=x
zyxddV6e&n3I1Bb}!j?5_ZBh*!EEIQjOPIQUzC-vUoVCAuO3rPJedhD!+{XgFf%lZ>
zKPEAKm$j*F(ikDZLinD<uaEFrorWLBm(EZ|bxiO+;tpKO@X))hfjwk`Zuvs@@y*62
z9|PI8L&r~91Sjm~d|b?1Cd(UN2oh1JaXrXpM2p{c`j6!JRuY)|SQPG1^yvt5+^)%@
zw9;QjA@FlXJE$h_pT5;p+`2Bu=JBdkMZ}wAfATf!KDs>7jPB<KTWeNQA-vAT73hV7
zvn07Z>P8gZaf<d)>T_8amFAkx`OgiON&b;-yA370SWoY5l0<H2G__nXpWrNh8`+)z
zY)Td|&pqxvL%B!4GyUBZ9-D^TxP4d+{zA@7K$re~g;}QKaB-2=-}3(BSgKke{68&U
zm));t#uv%>^vs0XcPJK=?DC(H)f?9y6gX;}$luB9n&8Q<&Dk{rS1wjaR(OvEtYA42
zz0Y>qUU53fJ$Z$5Q^;4Z^zDr85VW~w_9(B*lFV4cz-@K|G?AcFX38wYVzSg|pK*qM
z+!;L>%|^X0)L0d$_Uh?n=LzSu+O$YrpqV>1qkdGh;&}JS4a_&FBYLv(NN6460!gn)
zEAX&qUDBIygz;xqZ5$W~m5-)*V&z{nu{7`ETvTDqJYKbtWWd`ztcHAy<lC>V^qILl
znszgMCmT~%h4e&^q+v+t)<e_Blp8h%gnq2Y(>Js~!|TbxV>_AV2}PgDvqFlTo3{!3
zC^&vReLGRM>Uf$gt`+m5ikQgW?4pX2&JJ=>h4j__>CK5pY6_}a>SJGQ`_?u;O34n`
z_G&JWDru>+6+6e!bjfRS7iXo<%7eUB7(GTgM<M;@IYlWt+pnEQZqdJ#ajsJog*+;9
z4rfi%(b15fgcjF7s@dM5Vz9-QU~HT>6*14sXl6535!&nx<q{P!rW$AAjY#c~DK2t}
z<6dKA%wH#GTw^rmCR^(^Q5}t&=yMwFudN;B<b{^ua_w!u8c5qD)l}Bz#GAT!kxxcO
zOqltOMWceDvebIW4=uNx$yjw%Rki=HS4q~#BJa~!Zn?R|psrFF@f*4vT;&W2r2w*M
z6yycwQSwHxJXbyV4dhP0?HhrOWGr^PB6ZYL@e(2-cgj59jPE6?e&5<Eb1zhQ4^)E4
z=EYDn)q5hLy4&NT$-xpshD(7ijAUrMEP)pHOnDehJmXt);%yx%HzsD5Tsoah2uonJ
zzP(Le);GgnV~Vaq3_f9-3uIv(bs#kvrSr{X;v5jK<mVrnk2}CDoeB=ks~!+p+Hl-^
zx2}j=h@~dLwWbMG3Hx@YYT>zV5@~3m?cKJmWX{;9`=gk|X(!CB-ef>Y5$8=Ua@@`B
zCmShP;KJwIXge}S`>*?o-c9t0QmU11@#ZWlIgxY6s|(@mowlTTp38E1VZQP&o+N!f
zc!`8ZmFs=wa`31BJ;MBd<$VBU1491pmcWK^WcY_)f(YD^LD<N_z~0Q((Z>Gz@Ef>H
zM$ZcFAR#D#xR|7zo|!4?Svy>3Tk4rO0N{T8R}Kbntl&>K2^J6=1OWd40$@;10PO#q
zCBz-|EX@oAtW7M90KlKc1RM+y{uVIy>uL~PKnM`Ue<FnRY{iVsOibY*7~DYt(Uzj4
zk(CMndJRDI9DYoX5eR_rf%wBQg6-c<_<?Zz>h_=K`v87F-bX7=IxK@9KXBXc@@-Uk
zkU(AjC^jnf3n@U*xi0t=CEU5JRixv!(8c-M=@0B4?c0=f`32b$jFXhkK}9K2dj=cp
z8EL(>n~6hnC#P!e4tuYv!f4i|M#e|b7m6EIgznKfobB2roqZonaWB{IjA82<IBvBf
zqxxnxazKBHhSwAoK{dUlpXy~5<>6S5*GA?2ea3JY$|yN<v>&@~0rAK$nx)0?=40QF
zaCYMv@V(S@^~Ros@|6@6ZjpPF=S@W^yX@Lk$czjZ9G-$AJtoe)H3~?bxF(wN?P{%-
zr_B<@Ykg3bn0)X-KFC;ThBr!mk}(zdTS!w2FwnlhZ5CE2gCxyN#hW*tHU>>N^XOUG
z_^URj`k->qZ6F<V&D}VVr!8qg$Kp8J@#hQ8WAVf~Xdb3F!1lyKe<LgNM%Gtl@=G}y
zDi!hjqngpZcL#RNaeSeck$kvB&=V;8&`>9JQbv@}oaRm!QcDoqK@5%$Un}Ve2BYnP
zeCi;*0mlhpuSi-leX^`?BkY7B>&wdGAj(_WKvY4h79YAJfXE8Xitix3=-6$K9fh}1
z`m};n0e*v`Y-pH#1wMR!A8vX2W3JE~<@xj7y@N_6C9pzs1BZ`6iNz%JFo)(%h(uaA
z+d-FZ-a!{o1cxt&zs*C=cerzx_kiGlzwIz^%XCM;9dZPOc*ae%9zM{-rn;p&NuXwt
zMxN6cwOH>_a$e&2(rLu`!a)f4+z0MqNrJY@p!BZ}NpQEJe_eCe8ttU-c;$(tWbb5z
za1{C}7SwYvLa6tDT7YKu4vs>mdiK9o6WojK=XXx%&k9t{3>{4!;I2y`PEG&^2L}KG
z`I!*k5zp7MD>OVz2QES6AQBV|{)q+x0T6fsfqt&OpC|;969oDTaa9i-#c{oC0RZr?
z@`ySSNJKr?^~2HEEh12CFyIyXsstRxdEEmD6#T1SQ1DfM5Un6u{nh(x<TVP${<Fuw
zauKb<`*T$SUg{ckJ>s7|zwQsB&g;=!qY&Q^{XwK(B@p-P=W7%g!v5C`AjWo;uE%-p
zAolyd3b@*O;d>Nf8&k4Twl+hYC<eINqyF5v{w+$1?SF+y{oSdH6AXka!teXpjGFs1
zLgkl_t5eUniX2_Iww1D9$c#|z(dq&oNDHS5rl+UxNpMNu{usuCfdRtEr>V&ROWwG<
zTNw&|6TE_Pe`^g|@PVjhAV$mh9u;^ch-+_tBZom;^~dMDwJd@56z}R6d3=ZCh3*6U
zF8!;GsTT}tx)de&k&f)nbLAe3TsNS|{0jG(PL|pdlKRBF{T)0d-#_Id-oms-(*3qj
zQ~AzwEEZfXE#A2+d85CwaOi_^@mw-yv_`~0XZ1q6Jzbm6BHz8`YUhJOkLlgfO=u%k
z=lWFL{fPd_w7wfVne#P88Fog87)tYQ10MH!QX(kiCoZxzq1q_TDH~gB9iD0)ah0$6
zwzoo@brKp2i96#=?r5W4u5|ik@^@nQ?E0Ak6MKpC`?jJLk{`L`(X;T<fSI`INiOER
zkEpF5_#kIS`s*@T`WAm-<`K_o)_aC|L1ayTyUX7@i251&RzRgKHxK$ib6W`}pWkBv
z^bK0o=Kaj%aHN18+eBcA?^0{h4y+zYYY?MToUXJvEHkm!$N3XlTB{qBtZKS|xV+00
zA1&43F7lJq*tOCp2eAryD}B#hg6LJGw{>z4569pEr<85$O!c#!>ocWKf;x#CKvfv6
z4=R|cvD?a`HNw(HOPl$LPAI#BWvKmFH@=_++TOp#(^=vt_9pX0cI-d`pD0$9u{}Su
zUi%$S7hXzTdT7(`XX}GR-W(=0m$K#!+uX_Kvi6UO;qCXann8|2$OV0d!miJT!ud?`
zJMm}rIq-FBhDpR$e46hZ@ZI#Qg&k30xtixce_N)&6NAr6DefUWCA39<oaxUj&!=*6
z`t&P`m+28UeM+6*Q{a8u5Ay<q-`0i{Khr)!o|MB#Y8AQRJs7?4eUPRVjZ$A%?Linm
zjFX;;W@gj=lhw0FpN0mIEN{5RKG8~O%f1zEX@E)+hOIHeLMmJf!>t1bO45@x-Cb?p
zl-OB^ZeRoo9(j_vy1d_MLUoerbj&KKXr?*Z!}2`%R_yqUiLvu|uf2PhsEoFjvHKRI
zr>pfV`pWcVa(WJ#8SbI?9?PE9pIcwArOkSL8O%4Fu9PvF87*&No6pjcHJovo>)YR=
zLFtp_pr{p@d}sK1=zPAs(!kHOdnl*OXJc`|mxy-7i}LwQe%A{bshfdFejG-H#5XCP
zmfa1}x<j{N_X(2}2bBl_d*)}n5R#KXXIV8JERdO{x0sb%h@0y_Vh3Gu3`|ZOHF{@B
zhXvI0pd;&<?{iRbo55qI@Yc!-LXC6coQpc-*2ym@)NJXy=OvWzkjL%^vr}V&Y4N5h
zEB8_>t$8tfamzf~cgrLsb&;$x!+yGazm0NRIy$?;z*r)wZr<EjAtfSa*n{xHr4YX?
zg(XwXw`uBL*5qCykB^RRs;s#u2tLTVXBzh}b!i%Q;u@(P|L~<)r=)}V&y_CfKAm<r
zefcP*?jXX`%IeKA#~mTcLLj+Q=#+eI&)1NE6IjXp(gnsqIqgE?16yK8`XTmCTC@IL
zi5!k1;DOcQQK)^?lBfy)GH&m5%uA;yZ`>(49WL1fj>q?V)Yy2N^bUBvSiH$T+p>&B
z_jGXi8TZ*6hd<((knlm72~zA$Kk21iVD|Ly<i@41`-bM`a7$ZVF`@eNV*g>6X}!JI
zrcRto{Fvw*mvgw~slD*=fil|b<QKHdOAD7C&#|MYFBdEGnpi?{I<zy+aP*xMSpAzZ
z9poQJMmu}mFp@1LRmX)0$1k!cF%SZRh{3Yq{e;T$ZU;0<v`maEShcZZv=M1<K7^_Y
zDk>P^#PXjXyhB-0q6ZDHpKKEemR6y4D8{AG%z;>0Z=t3}Mo8KQzI14<6i+4!#{8g^
z<v4N<eQMeiR&YpT`Ifq=lL!T;+oOoG7UV-^{iJbEbCOZ#tBk%ezTY`zhJY@=W5yGL
zyt2DuPIt&V1s)j;e9v4U+^vha`9;lqQVw%3p!BGliJeqZ!u-y?+e_m1x~MfypA+~S
z`j~2k1qkb11KjG%0z8Ry9#79*e)82})v7WZSq-cc1JEyj6Zo3s&3)X064m><<zhJ3
zQt1aT*wNB<hdqYU4eGhnF<(>!a~Zwd&B|4z8qH7>4*497!l^*jcaW$hpc2nGCY2L~
zY><tMg<BVrxz<ec7O1<Q$)%Pb3<xI=z*iLHs`pPO0We?*P&nOFuB9Jam44SEUGJs5
zk4)cKuL?UK6iVXo5|py~ZfRKwaq17Li@$%{s2rJ-GpgtKWHT{{l6H9IeA>HO-PPM}
z^3mQhP9<lhlHz0ah_k2Bm&j`TwJz=y1)MrYY&2<h_>QxL;U&*riXBe2^Qe1TR!=JU
z#3=66eq$J=#Pm7Qu8__NM^SjIBaKRG&v=TVOiB20GaaMqB#fdb@581QpMs*$?3C9=
zn^8o(QPk|we1&=vhv!Az*`58b$-53&H;eLX?dcK}kBpUNj5{jGwj&g?W`2A*L#t@c
zsC~xrO&{GbJqX8t1X}?~-^*BpfcnDk{(U2p_&3`Y9JbkF?E>nZ*b!x}eJ_>N-D6dv
zQ(_i?!1S?J@z-MIpm%I`hVGFnq13$JDR5JRmdtsTMvtPERl|Co)N>J^0@LU}^7ayd
z96L8yn%*DBOsjYnpo;aMSQrl^)Jwypd5ZgA-!+@cOP&vkBcZY>%w@=1J=q)yBQ6W0
z58|%dD+`K>!hE!z1PRmFv98cX88?@yp`__19tKVgfN@`lK~Y4w9!bOidrHj{X`0I)
ziid37(7+$!B3Mhkr9bmDgkVvTMyuzcScxwTJf-r`8{m6zkbkmG=X<^{nHBw1UC0uI
z<Mi$0ju(rI?LY=gxExg#O<y_=wm!l9_p`*n??JouClNPOnIf2AmLHl40>Wa8MNE$t
zm5X_ksa4vA4e)k^Y#1S;`c2LJQ^68%<8KshWcim?k-%u?yN-p_H|4NA(xc<bQ<s&S
zakUnpA-XbiVl5G<z_9IQ^~5&JxO7dY9Cxr3hP(8AQRq!IFf!pS<f!{4RZHeo(AA9G
zyPTb51FM-XFIhVFtT}6c;B_`C;l>1X&%Q`w3ka8WP412IyyqUotXMS&o>;VM(x|=J
z`mi9;EB8VzE6!u?$wOO<+XeKCrek@_H}!6P@ZhT$dVLt!UJ<R;rpHYz(YS&A%Fx)X
zBQ7m|RSHRa;$=_QormDIUa<NxpR%1BZ30_tS^1+!@sDy87GNl0L<1eQG_VkV3z8zW
z$L^N>?NewLVuALpn|azZ;W}I5)ht8y-N~h#EX=hUJ<KT=g1yYUHi>#S<Grl$jU4#7
z4&+vOs9RPmb1mIU3+QOI1Puny7Wi4^PWdWuB$i)FuQ8%qp^wm^e<P)2a*)m-{IDoc
zOhU*&{cSC*B$0T-+~~#HxC98xJr*T^hE;yqbt7QLN|JN-+^9{~or!I~K<?yt@ifY6
zW-nr|izX`b@gnVd*IC1;$o!CEuXE##{q9)XM?E#cx7HP#IReniLrV4YEviHO;vp-M
zn-v-d7%rdKQp)XuTukGmyMix*<@@QL)IN}j&g$3T?GcMDE=w<K)|&reHs5c#o%9YH
z0*Pt1D6XAxLSv=u1r$onv^;m+rU>^e`=BG{8WDaG!>J!n?Ef_9gW~hs<k+S#+Gep&
z?zPF{${Hf6DV!D$Kg4cVWT?{J4$hjUk^pE1NcoogcQrwMGrxhjM)j?A1kci9EsS*f
zK6J*-yY9dKxC%Czb2nZa<$(>CpC&yC`@!?hvUXd2UCeN$d%=F7Ch;RnHJ=1!?$H^S
z_xyrJ+JughyG7=zYW5U2DA#f{m+y8K;C&U%dyRSxim&M@$R-_u1Tpc{D$If31O5C8
z+xkoM9wt9@eu}I;SQ?HxulnS!W38cLyB5>4#`g5U;9ILIID5jfc#}X*&|x<>E~TNj
zT}diW!fxo{C=0si`yDBbppWJh>B`;2<Hl^}%y-8a+x2C-7ja?Q4;Ps|9pT3wY5S~f
zd`z^k-8;o&pyE9PjvTIgcD`b2GSjTL=%BZWx0;sF?9CkXmR8uBp^8{Lw+l=}FTzE~
zCPY`K*^F2$EDdfk)iv`2;?J7aSnAvxKGhd**rayO`dVPm^#kyjUEk=^YYXegHjN4q
znjHCawO7;BI2uZ6*q9Hwuncn<q62rOhqPG+gEf-t9B-5;-Vjq<OskVR%a3=9eOX_j
zDvNvGBg$7ez36-S;kZV%oW#CgQ14Snb{UrgM)fPd!#6j#UMKnwx^#YzG^78JPO_D5
zZ&ItKD%bYo9yby3Ogyo~r){=cb7jV5;uzua?|zAr!@Y^?YT=;G3&{aieyq}2CVC&f
zw~@lDZZR6c?efb*WGt0_9V)llRP3KAloAU*UUp%mx=@}x?%l-c!K=ixK2FSRk9+GP
z%29B+?2aMz)FBc^oW$Lbf6*`0F8HmXJg**^yrl0k-6P9`t!7fhxIz3BtHOoXJR;pI
zDexqGBtHQ>Elhe-e$H>p#ssMaKgmlu+gg1PS#c15&Rn3qdR5}X)@3KNcJyaWvC*^L
zs^=^o8$Q-sb*5u)D(^B5zK}w7+gsj_9GTTG+&j{482F~sOv)yp1=S@Sot-koA3zV$
z5ZX}*<d_$yleA|elfuFe84UFk5%V#lh@db^Rq?v*9!wP)4;D2jq_F|h>m#{Y<D_!?
zeGfx>5a?;VsTtN4yi6wS2Aa2eNtLI2n=bs73ecS3IRrbQ$U~?X-CI)kaB#g_Cr*~~
zaeLpryGC6ffjxNJCKtqQ8gl1_@k&a}8Y3m<F%2gcCFG-X_wYPQEmY19JBM;WI-mSL
zN=9>wq_rPnuSy6F55z5{r?D7iVdeSHlmx8@<R|0}F}U0|E}JYtU-tE;AhYGk(ZQ)P
zexRLi=Y<!!{UNMqI@saeQG<5x_&BJJx_1tP;EYmqZfF8;xKc3wy$MQ+5E&%2L5bW$
z;=zl)rNgDe@TgT|+|uKX$EDt1(?9cVv2QPu=)T$u7zFT8zi(IBb=?i0CljihkF!g(
zd%kO=#due6O?I4c09v0hD<>&BmWU=4cXX$0E_x6}Ag)0md-drpr^Jr4vCNTs@4cF2
z_M@@L9W`z+Sx%S{JZQedW!0R#*C<0yO>!Zc-S>r7VL^3YDppo6pzYK1nM{@J(p^B>
zDm%(ePyyVpi@<|D@j0_`xXiv>yJq1CvLUd?NyA^}8)XUyfAq0Tc7$BpiQ^QuSCxD=
z3o!M3Rat8$-)%}m<8+DNFkDkxL*oHwVeI;pM*5}}N|{dT?AR>9X1s}`Na*hP^u!3&
zs>S-+Mm)_ckKMp^Ypj6K;#Hl=2Rd_~CZA4>%_ft6bYzz=+Lvvz%`2$UOl6o5J$av)
z&TFsk8uOl9OXyX&^v2LQ6K6N?(95dXMpM}e^{sU?%SdPTAIvZ6UTP*TO3L)FwL~1)
z8JLuNC|elFau1&@j5juJh0xKVprCY|^P<embo32<GqrgtX;LHp)MvXQ&xS<wJ=Yh;
z6+de=G~ShdmO(7fl~|<UX411qo#WYr-O=W)M4-`Ji}b<BZR{_fq9z<Or{AKOxZAO#
zaP7|Mr>u3%xDDda9L)r-bB#e_yuY-Qf8naZ6r%Na*M81j*zRIjLrFH)ur^wr#JVdi
z!;_RV?6tISpWkhJX@75I>^ozV)<*$C*SKAE7$My*@f>|)&nL;9p{%XhU{$=W;q=JU
z(ev~g8I|wD!AgXXiQofJ^12`0Wpw1_Hj_#2`>pd?hwjH;zcE%Ie@)j)YqB*lf6G_#
zNtmIf3F&(=Dcxqvw)Gj4%2!jBxp&)6f}~Eb>YqEySPV~g<dHb~M0!3xnpBn027rgR
z&eS=YFOfsev&a7fw|+&#Tv6!k>_E2P$aO^Q;Qtr7F7!LO&LU`IY50>Nf}cA79~gEJ
z_!q+tg>y=9a`BpO`Y#kaBD@bl4*fTZ9Z}U!)PLgG+5R>9knOji%Iok$FbC)LF#b2m
z9@}I7oS!i8&~F>q<4E8bK7=YPCWT7UAUBN;Tx26!gN7T&R2Qet#SKU7_0{%u-eYBw
z39uHOZ<y4MXAjL9t9<viN&U%!;DkzDB)0HO-mdSNqLz_nXVO~FDNsRmC5uFSflKh&
zwqh%As!ok!?Ltu2<H+2itgtb3%tU1&r~dA~G{vg?QPGR6^ep48W6gwDc{KBO$M18p
z!=3Akvs)iOq;e6u-|X8yqogyU%lM>fSk*qI{ZKJx$#sn@bNzCtPr=D-RMlwN5@vMj
z^-RJ9gY<q^3DP}MLH_;{qhOT4VCtQ_`i|4U4Y_iIPbmG^=={C5sFrs@6(Y~?yw~f$
zOOW|BSQsouh4TY@C&V?!UpDyTLR;2tKDIfHM|{*M9#tMFm}wXe#8kpx?vzDodK{(O
z?{<;GpL6|X{b4(pQSf30^;3&Z-_Vy*PPOoy%AKdMwWVi+T}d>go7OGd+MIfp(KB7E
z6aJl_{+C!)1b2C@4gZUu{x3TGA4zI3eBB@v@;8$D7wwE7aS`M#7z(?hd;dn@!lj5D
zMEXU#BIw#{*;So@zbIlvJy3WZS9M*Jxo}eV8igqTt1JSEAe0eEgbe<CjrtE%?%#rh
z|3>8^R>GfD?r-6_BJgnAzrSMw1YkqVKD-osI)0BJX8X&C$HQ}#&3a`IV7oSTA*N5x
z-o`-D$PrHRv%+6)f!GLsui{_!BSgIOwGmIj$ic?R-oOYR!Th&y;eP}@Ltqd9l>M(w
zM$*g>VN<vsDZCHAuKydwFUj@P3E4Qotr+mF<@c_E{>2a}qzAVW+L-(t>{Um9MPvV?
z>~D5G_#_xQ8NlDv!6WiSL<9(AhbK-T5Xb>HJ8(St`JICU{)|X4cml%{JP!oV<3M0R
z@H@N!{00QG{gnT82f~B8S%u*5`*1X~u@;8E2n4_&%mqiX13_$X+bReOVrKzD>48A{
zhyNVmGkYUr94v(O1pfKg1>k@{zz_f;=lVh@0OA5z|B^vqF!%)jF5>{g_x*p!V4Og>
zN#Hje2m%JfXXiH=5XSj0GAJ0XNdLgW*x?%Un+!fk_&WJR2K{FnY*2QDV*Xwh2A}6Y
zWE`M>;y~<d;D5FOmvO+?$#3<6;J>&3oGTDJ6!M1*0{RmN0df2}SM1O~+J!*a|0Z*^
z*Mpmv?SH-^M$yd8=<2Tt*Kk$^8ymP*Bb4~(+GQ2DHnstP5Sj@WAoSJ2QP1A-rxJi5
QoQNM+I=UyaA~;z853qar_5c6?

diff --git a/docs/Changes_3.3_to_4.xls b/docs/Changes_3.3_to_4.xls
deleted file mode 100644
index aabf7d192034c44d1ff0f6151d443b30e2aba7ea..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 45056
zcmeHw34B~vdG8&`yA#KDoW)t3YuT}-L>g`0;&_oPc}px?v1G>y4)$o~N*a4KGnqRh
zX+j_(5Flj1*_=2e!C_5kY17w77ofGn(x%0I<+Y_ipsl+A1-cjtG|+(F|NotH&$(9~
zn*`|l0nONF=AQ4K|5?8CZReajn$P|DoRd%g)+K-MOn9wxs+}K?Pj{w-=rLR~$!oUb
z^x%T|ACHfZE287z^0n755eI&RzV624RU=J8nvBFbpNcdMX*$vjq?t(PAf1ae3+X&0
zh=4N(X)e<FNb`^`K)Mj=BBYCv<|AE#bScthNS7lmK)M3SMOuhdgR}@~G18StOOTc#
z)gsj)U4?WtQaw@wQXHufsR^kWsRgMOX&KUTq!majkyasHgR~myTBJ2dYmwF=tw(A@
zYDe0D)PdBAv=M0&(q^PBNL!JvL%JSm8&Vh2cBCCh-AFee?L?vuFzpIm|1yYybRK>F
zX<t>4m|&b$sKlMdcgHy{@4`xTh4-<MrVmxW{FzuUDHJ%oZgg@^!8z#k;WLNNSNodl
zRK+N_K_6ZTo<02LQB`s>tXRnz!oLLiQO0~{oqp*PDMcrZ>+D%7twsB^fgBF|aF(Kx
zTYCt?YcixR=Om>M9$G5C%KofE`@z^>Y5UaglWZKX>C>RaQzknLm!FtAdo1Qm!9f1r
zsdAp1^gP}j=O2*vI(h1O$6%t6@qD*4S1K0LiPEV1-}u@feMrREhqZW*IZuEx3-*K*
z%sSrBm-pY4GM`W-apz<#<-D|W_C{(y{nMVRHYs<Xyw}M4EWF2@-xbV@LTN_LFQEMR
z9fvqGRZe=kv#m6mc6$<8zZqsNbklLtlbv?3AoJ7QnD3lI+v9h9f<u_t4m&XkhL)W`
z@1mVhH`i9WxnR;|5G5Ghn#DDX>K83)+E;h&-r69uw{F3tg)r+dzyS&OU$@gs-4ZQS
z1G^6k9TbW;Me9MHRV{^QsF_%5LT$R}N*HgL=Y8zTRc_ETo!wh2guQjwYC=sz4f?gO
zwrw9;iNp58`tPd^<F0bUpsU;nVm&+rY<;*fHG*h+(zQ^N>(|>xSHRK0rI<zoylFQ8
zOVbv4Qk<XTd~e*vtOl>+F6opfp?~+4(J2>|%AbkOS}T?RRi?)rm&4PqfX`PeKAf6J
zuR8<1{tWc1D$=K-+V9f9PnbuV(@OLhf+v}m3H@R3Vho<}U%gcSc^zXgH9B|Bv`Nz@
zIg_5eFpw)npHqR3N=6q}prfAr+=XYTr!qYsv|D>#<#xk#*Kr1e{!F9?{Rz{<{S4E?
z{hUA_oum61rb|5)=u(g7!#x*Q?thpb?oU8(2LBPX-X8QnpzC=t{G32H{S4E?{S4E?
z{rsJomHQc{hx-|(hxs;vZu%3Zhx-$zhx>E&xt049ric3zric4;8ePxJ6=xW4m>#Y_
zOb^#Tfo{edriaHHrlTI*Qm?SyftU~GeS*9hdRQOB<tNmiucR03e?Yh8r?WGreoGg9
zH|4|f6D}X7Lm&4C^@saUdZlrN`xBn8aQ%wTC84rEHFK3cId=}_<aGLDmsX~KbXH~h
zx+#_E4_sE6K5s7Rd`0;amgjK)!*q;ybWRNY$2vF6H<TY;Sb-kq&uR0n?N694<u&~@
zej55iRg;|AW7XsQoO5W3e9k>INj}d%G*v$59hxqm7aW=<pUxpiJ}(@bJpLd8auwKe
ztwCqB_FG)ijyqpNMkt_<f#UoI-^QG=F~>Q$=wV!rImeG5$H$m+<j4u<NNYb9EGL{3
zDBq8N<45`(ryuzftaVK4lI4ulhk8Jj%Gm0WalxGcetf*&IReZ$>O2AZ_=zJ<YyY^@
zdID{nK%bb7AoaI8M^OLa!-uh2?{`|+9#-wGNJk{~JBt>51^HHI@9bH6eHg5$_9GaM
zId@?J@P4l75zF=rdS0>lUS|^MG3WW%G$hA)?jtehWyf{Df$JZk+y)1$O%JQPypxgj
zoQJ@{b5dB>6=jv@V&z!|Es6VSydT7S))_z~>B>5gE3znpck);(=9~klDT(`Pjd*$~
z5BHMJ5;x8rhtZ*dLL%w8iA=#uq{?n7@7BA0rJ|cmWZgc`EqM8K-b=Z~oZFu(47rJH
zt~lrwTt8Vz=ZlN5>)?&J#Y7(qBy-tfA(wIUnOqSCQ8H0XXLCLxlufxpv5?3@M*LDf
zj|^*b2k=I+n@FY5Mmmd1y2~U2o<cX$nUtF^<dUB6L#<aV>q{3A;it3dB33!Bs>&_0
zD!JzlC6a^btmk8ECMc9oBo8JAJZY~#ohf>Sbanuv%P0EM8Pwyk=B>FzKD{1WhtfG!
zqtNQnDHj9QePOTi<>Fv2>kj2orHsd6Cz9-+n=Z!pI8LuOl+PrJSdX&o=0YMrm`=J~
z9xy3)TiP%7<wo7aa3Y;a^Z{O*%Qoiv`|H@u?gZ9uAS!4_B9knkCX6kCsR0i+rqamy
z?Y0ru6O!&hnicjJazpO!j?VU8w>FtcCl5+LQia5TTlA8H*<2<!P_DyrWJ1?;<(U5z
zdY`HX+r8nmH_}+}e6Q%{^2Ia;E%St~Z1%EVA(7dO%`Gy*TkSSClzfOq%IgRJQ*I_v
z#&Y~xx23_|P{@t=o*CV>Zfk?vnJ6aQu0%GS=MYizweGTpfLmC~r+qN7j5Yt}U0XBQ
z;gZ_i<X|E@z`kX3Bko|rm!@T6MdHYebTK<n?`FIr1aH7aA0U+Za$`Q>7a`-A5f*a2
zQA`926!mP$ts$ADN!UPx{9;qCT}v>=ozLYPpc!V~>fMoaaWKlG#za1!DK|oIl3oTz
zq?=Nwd)_OP9a52z!9>wb`)*2&6dXe<l(XF(J#I%LJDl+AMFoqcLKgQZB%)WoYB`tn
zT$x_*Xm`?|!QQ5agq8kM$I;VB=>*SGl+%%%3sD-%6+AYTaP#OMoEF-Gjwez|a1t(5
z#DmWu*-s2B`4%E}P`5}`DK}9n=Gay;k;#<Nbax?lz)Nx(p+fDV$^*$_9!)6KLobGt
z7*ES8H%n_#&O^p?7>~h`vzY`1c)FEt9~-h;hiEJk*`kYW9!x%P1$RSfz}=J{_54ji
zb0dXx(d(f}+FVFOKsX|k6+VW?*we9dYd35~K2I6oh#}J3(tXL)kbedCO}(MqFlNNh
zmkQ}z$p=2;WmAcQ>lF&Q0=AL2X8j_@F6?$8qDn8Y7rl8f=NFY}M6vA<)3hoo)(vv!
z7m|w|2P&4JRwb2<^+ysgvGtf7FdWmK%cs3m-f_sRL}8#b<YD}nv>{aP_Ls84>eU#)
z=)QcixNj)!`y5CulrK@r6xTL4HC<VU>Jt7z7g`8z_#nG!a6E~M`rQBl0bB>ELodMo
zl6RZseb6=GM7>WAr0OL@gXd;)Ni}$I!G$E}AoDbp&@zY;TJT_qT`%KF*L4B6R_c*z
z<PB8?qw(wDYat{p`LcE`il^j8cGxQveTtC2<x{Se8OGKUxs-%iD_}z1zA{Y^xiAZZ
zNt8iOr4w**Lw-FhC~7LZ2|u5tNmB~N2I#dgKzBHuFeYJ<;}pDpui#;5Qa#3r{YAJ&
zPaF@J>U$|z3)-G59mgnzTa!&Ey&9OnJ}={oc`BxdV0|F4QyeFmDizZi_#6^_j2eQ_
zi_5-TDVy?d^01j%u6H$YgAn&@$(2a;ib=&41dHSe6%d{d7G3OD29_5>3NPvv@;NP?
zG?m6|p||V>w$h5ZogPaJdu-cGgQ^}w&*vah^e5o7*a0xC;AL`{U3jYDBqSI*bEDVi
zL-BB^p|5Dt*kUpVFX$D$Oc|)*#E?V~gM|`J7%Z1`u^(Z_GMsLt`-RIzWz8`w!`csm
z2^nQpT*`OGYaGJ7rcrTAprm3ln;U4bVP5oStNvUjpKcy*iN+3^-IHdw+`e=+0Xd-J
zqkD-I3W#HaW*T#dDK8DdP8JZJz~?|{lIgTSe4OR<&@^T-AE|!hjQYgS709uH973n!
zi05Tt;0qW-u7EfL=1h0MRu*X=)6?Z;OQ@egm0=QV6z2+MD$z>7gcovsB^U#GG?XjB
z@eR{-ip`^ELy({haV9;48A_p@Bvm0hAeNV&#;BfKi%8812`vqJpQytGZ^(^;OBtMU
z(U#F%`FSsy?k~f|4pX`huos#1LCz^AKgZI*ZB<uT0uV=6o$&@>L)~04&p4ts9f$ui
z)uqes2!cwHHa8E8CFaPlYob|(ey3A#w&4&&7pKHCItuA94!(9V5Mkvi{y}pbF4a`{
zH2_f_$rUoGWKKAOaHlbyO?jib)p{wFNES;7F{L9eCvG5{?uWvi!4pA`^9Z)kuoBu_
zL81_eKzIXEg8snLqz6iLDQbW)2OWVqpdQkpDGMNd80|!uSawB2l_j8E;PfIq%b*rU
zo_T~bh(*{kEDqgc0?`fTpo4ZT-3OuC1gnaJP9+eVTDotlr&W@`djtV06r>u?y>k-|
zF&i52xi;W6R9Hz~J_jkI9O5pfyRQHhmB3j9?h?pt;s9E%<fAc0IB~EOUFuc=0j_$K
zVv_;l*U`%kcwc<9qxY}p@+ghv5tl_=6!9wS_-09y!ZOE0-UT%$OQ8WN;b8&9<1ylR
zbXUUtl$1;0noFzT_$-NX8T{iSD}hpF+~JWQk>|TEO8au11bztDamg2PFstg>5URb_
zS&Cd1?{z>o$e~oJFpTAe7~}zjB=kNVpm~f>7*WXLiVK(wd_9$iGRm0)d;MY@@}#Cq
zxj&yuQ-Ci=js9vXbJ1P9&P}pLUO>07u1Uhgb?v5bAJkzY3q-mBN7aG0B^r``skl!<
z7EtLzX%>ALX6Q+m=MRF)7!<<hQeu2~26Py*I_R<jMim9`){+O;pNfy+1X|S&A(i`Z
zfLxJCrEunh9&~~0HN?mnD#S2i@nSjd-l+7SVR1jC-Vb?lfJdb~{66pd=toLoTCl9i
zZFF1mX;<7Qta{vppaHumwh|xe{vhj(!bN7G-qI~{Uoik3LtG1G*xI-Qj<r{m1A(B2
zD3LJ*Y$vUXaDY%Z*T=xukDm`ELqJNtz&z7BCJy>>RMeSEGNMJm%yT&(k15c!7lS;)
zfQ^g^A@`j-I(x8(Kzk|c-UtT}8bHBe3rnN*!4i$*+U*rYG+N`!P}Od_WgN$J?#5#P
z?QXU-)CUxz7;&SR5aUoAb5ME}!NCTi5UoI>0Ot+Xuwqpb0}7lm><40AM!_jWSi{O;
zvg9a*b`c^93AO@DEaE(h2&J*iH$<$-gZ??NZll9KhOnnR?3dwH+QA5Mph{*F0}0Fp
z+>vr}eVIh|Aaw<+p*lo*up<SD%!`8}z<MiTA>!^FR@bQNdIle7utFUvgrC`kPOKd&
zoq}?wMAd0P`e9fyp!l-C(f}3qoKd8tx#OZ#9tK41smu?wv!Ma5vydKYKrE$=Ci<x~
zx1m9#Yjh~1@8DUForyG-!m^B@;=n0Tl@VcJ8I0`^>JyzM7@)#IgdKh^ndXWd>j_ma
zOZ9VgRmc@{2ve|@hk{_Liu53H9Xa~SVRXR>48n&{<;4FY_$LWUl*68Yhe#4KB#|Vn
ze{kt;jl__>xf~R-)=v)%C0ejA;QOV5$M9flLUs`bjAds~EpZF%Jt7i}i2EOqydtZJ
zyBdHsEVUn#h+tZrIK4Agh?t=wR8yQavJ7wNG!gzPQVcAFk5r7<i7s)62}^kd!og~@
z&h=3oJ2I39EU)PUxnociwZgkuA#8<6C;Gic>`>u@{6xSz1b7Ckzj_xDw&)f+fb{_w
z+~<LfD2PdsG0|e{<rx%sdvYj)B_Q=%`?*?`Q#(am9${#VdfBtU;&L{jfOw8!D;i>_
z$!pj*FNF!Y-t+R<BPy`}b?%J_Q?Qppdj+G)H6dn`8Z#m{=!UV~R@n_BsBF(5h(VCH
z4LmEBQe3e4+j7|f$za`sNKSS&IyNc8j9?{~rS;+pfm-g1NzD3btVijl;dY80!P+=*
z6P;@LW#~%Gv1XFM5za2|_6`Dz{~RzTh^t#^coR%2>y3x%t*xVN%q3kgQ+AD@!{7#t
zB>I#stFRGPnE~=rED#ZCa5aw&DlW5NZ`%>UmnFW#oNbQI2v)D;CxiSlxQZAMW$Ilo
zKCrrDEqp4gh|AsJCc9SKUcZ6TkM3eZ{f?+bQMQC3NCEc5m9Qd(pn>*<d$c|*3HNaM
zTo0{ERtF)MxdoKU;-A{-WmqySwIs0b>p7RZL^tEHg=Ty*WTE}y6FR;QY-S*=3iQG6
z4Xi=}az8+k(a}i38=wX=paAm_cVLAJFYdcd-1E!zscliLk|F8T8rT!ctKKreu=VB#
zoLhS)(}{3f?HF38s-<5P5K&FKfTgfVm)0oix#zh%H@0_nZB#B8bIdJENHE-}+tJz8
zqc>b>w>A|#Z$nQfBB;Qya-*gpmxW!^ivo1e8x_m6Pj9%yv38&@rMRelWK3atI9*Cv
zQ^8=_8%<`Q3()!@4_>^Xf#FwBt*9ZQ|AvM^FOv_b;H7S4pJ;K>g=1^#gx?Z}>(^l$
zSfn222TL}xnleTUgT2%&R;u6%LXJUnk5Qjm+``jxKN*#i1}#FUmomGijOVEQ;D2+c
z*g}kN2N;TNQaW!$kzgy7K$g0C`&F#I+OC&9x4u9ol&=mpM&f!Dw6=&*auR{+y&J!F
zX{7MdI#E95u4_l*GKfI+w_|Nz$DpU+-xvf$989vD#K<6AvQN$_?cCX;CO+<VaRn(c
zm8AxjmixDeRD>lsdSG-A-yN*|Q*L*GVGOqLQ*K8FMj3$#3a7ARBiz8zMF!R&rHqS-
zUiAKbt%}_m(JfX|uESbHojshVj6`7nSH+jLhZdFCUXq#Nz_JNgM!GyihI-q{>>o8M
z-<L#4$i06bo>tGC{{R-&&`El2wLhebVnbV%rHMp9k;arlkk}G;3=w=n;u#+_M<ff=
z)?)T{Cx{$FbL>FCouL-7lCFecRB2fe2bDvxx^XfBgN6eZ#9i>jd9RV1uZ0A5%mY6s
z#snQlG}3dBP8=r-kW3g&S{dA^3)X^3y<dxl*ar8tCl0eQ5R%Z&np)o~wglUh=C+T?
zhk>9gRyK`nfq{yv_+Htwz+2I#CER}OSrxU*p|JWq;Sz7N9s8<SGr7B4TI0=afpI~&
zhV?DkgM$Ku4%AL~>~wJ{21}pQ2Q`75%bKmfgryNATTX-^Ho*x_SE7iOP61oy%3@(V
zOg2IfdSz%%3Zl@VH!N2MMaZM=9ql_^xr4KjJo`YZKFEq2E{NvD;g~zH%s@{{c`&_4
zf*!b3*fJgUV5gFdDvBA`7RyU@QGciyK?sBWImBYz$fahoMQ_wg3MKB+gpwrtPqd}5
zUtBI^%V=t72)uYn+3Wyow?PB`(yVl~tB^uPR2W#-ZqA9Wm+qWmoxu7qhrLsd9I*iu
zu;@;x#Xz-sn7&Cwne{MY6j7@e>39v77;;zv&az7Jd{RvfiVk2~F|fHc-DbnruYoFN
zOELvKfGIL!=|{NA>4xV7qi`UzLn7v~yCpp^=<f6oI3i%cT{OgnI>H)8Gz^n?TE^Mn
z2J_~wUhxKISC$oW93x&Io`medRER>6E95vd5q<1Oo!lPj#R3hIwqYRfMq-MA+m6##
zZrD~@{Xko>c2c|Xlw7}qYuMU4u3HU%AOU5F3xXP$@ZG)F<8gAlyS3a441(c0yY4${
zXss9qp1~_a$te5@?hEcwr_CO_t47CQ+E^D{!Rq03SJpBJ6Bw8Ce9;8wuuugYywa*8
zke?7&&=d#f*)*;xDBw=?9<r|lSnr<sh_*4;fhFJR4RF@f&WIvH{BUwW6t#qA5pKMU
zXfkr7zylu~{^%o9gkP|r*loqa$4ATXPWn_VbUwQ!aKYEz)GZnW&S2D7?G@dcbasvN
zp{b4ui<9LV*>F(SV^K55w`*H-u78>FGFXY?iD%i#tf!pG5>hQVd`#m;e_%144bIUy
zkj>k7;jA9A3kl9}+3%MTeh>NaI3=N>A@AQBELPP-gW8oyx;uK@J+3}(rx=4(<pG#|
zx2BYZ9;C5Ch49pbig5p^ls4NTWD6#er)shtv|?G8*w~ikD{x_#WDpUIo^}`B81|ts
zl*1}W2b5O!0?&opsWO}}tT>FGI_5zYS_ZD;ko4kC$BEP_?u27A_crzgdfbNqNppji
z3cJnVOJFCgKw*1DPVa8DqN~D6o@ZduA<r17kW^M{qDef}H7lQ_DJJd*+*T+Z$QIm&
z2Ht=q1b4KG^VCRf2OzN2`vQ#2tywWfS9BQ0nP&u8$BfGMFh>bZQOi7z5oa)Z(PyIt
zY}M~56=f|R4P3+qDGQASy%doB_(q15jiM)9BY^FtR0*o<FR>E_Gn?|@U1Y#=6EYtd
zVIQYBa-yjsTZt<Nd9H%9>F~1$`b3?cteDF}sBsVoj&#VjknB?;bT4AlEy++H?8bsc
zuhbbkLhc(WFWfe#--0R64(!9>9ypS7wMW*X1@=jVzykzvBSjS?;lz+Tu6oC;Rx`8_
zeAM^3jisBXLGw~|^Uy;af?=QB&t>hh>|mh(YFWmbxV&ajhzdML^C}6|Fp30#<edla
z0viDjEHMY%E>1`|82Su6Wy52NAp`<gBJc>s$0~oY$fZ$#KbW;MU*^(Z4&`wj$RHkI
zkX>SgmkhFn?>lK1kPFNL(A|AQK;vDG8=?U<j=%DKL&R+3z9hN=KM!M}ZAv2-;W9Jw
z{7WKRkEdXuq&QPU!JXXMvnNop&_WcWf;)Z8mzF>SlFpgJmIrthM9^`VAoZ+{dXNaw
z7J@|zfr*mv?Fj6x!t8YC@(B4bL0G16vm|f=cs@o647azzTkxxHjT74u+UIj|&6Rrw
zD{US*j9}I6V(nhQ^$@Ojn84LKuXsRF!qo_9T=yrPL0P49?aq}t4<B6TGPfIO4|Q;V
zfS4rSeXQ;;hHk46T1J}JjJhqC8+tvaQKVH_F?GbMH$NHLn`0W~ynrk>jw)$oq=Gqy
z`ir!(B+t>Yguz-5=akqsh1|i+Z^5iERsol}&F^kcY=?WZkHGlbAkY%h185(N!D<nb
zEc>)YZXNfhJv_1{!C0Ql3C5nX0SLxp4k@xqaS_AdNhs`)W*YUeA=dXBu`e&{WhlF>
zCS?7ij~c1hiGD_(XlP&aqOdc@#t$??7MVDnln^h+Gq;he1DRrlvOYkQo<y5Vl__0n
z*HUqeIM~fd<&rRIU5NtD^cXKRX&vL<v=GOgAL60Szr)i19Ou}029FyYiN$i0XXAT+
zP@H~19-8E=&*52`Cng^o{|;Du#nfZtCt(-PofdoQBM1;)#5&+*gl2V!9InCi_oMk6
z;LzWMI=_z3d5AamVBh_&S;xjdiwN(f^A0%=;OzbnXUCp;3Wv#`MH?T%k<W{BkBv`S
zh~?!x=eEsQy%jHTlFxuWpTF>E><{4nzj@KI@!MgQpGBQtU5alhq0Tsz?RAVA!Js!R
zI5u7Y_uqrMu7kVn!FX23G2O94{>P!lpRNKEp-np`#d0Gs+GFsAH88wagF*6&9UFi2
zOGq#Bi$ll84>zfAeAWGV)w2s;cCPvN=Z=j(l$Vn0&t1d&EQjZvU7_Kr*)Tr#)K1j#
z(3spS|KP1toB#B$<@!xh#Uzyd;?(AIPR53(IkEhuhaNbvXcXm-<N4VIxW1S7k^h|Z
zr~J~t#D*t7gZ!yjdcRb|x6|<tJE8jK9UFgm_ObEbJ02_U{|fudynxF;{B>3T!;ho?
zdqKn8JbVbLoA|1kA3KRY5Z}%A*gvGlkl&B*;<Y*DAEu-pyzxHjKF&)%SQ^XEeuC{G
zznFD8>t~&c9r)ZHUQca~y#V@yRdb#!#<EXid=EcId{ykp6S_Y#KH@co^*TpmlP*5w
z?7bc3PvKG5aa@zWn2+o770bSN@KEW&Pahk9qzTt!sOxVO{!#WL=3u_0Ag*!mZ*e_U
z%}+4jxFPfPNYj{e#n-Ezy&^N_EWQi%?O$9~vjOcMe}(u%C>L{Xn{iZ?Upm{td~@zl
ze--NE{VCwr*c}iq<d44nu7@6~n)$nf{6zlv#QveW=Yg4~_;a{T%Fp@KW2*dNN9FI(
z_`_}F=O?d!{t>p1cG|-Ak%re@NSJTRm)w7m*J?fvx3NI%egr1?0R789Ek9WSWN$w{
zCRNS3?iqYS-%u&~#}}S^X8tCm7}7mR=d2$i=FPAn<~}<TbFg2+*Rk=Z#*n(Fq*p=?
zK#x^bPntYs>a^)IW}b5{OxJ{0)tvTIomDlp9B06nSn1oaCBbV3F4T84XGgxHIkRe2
zP{g?!{ksYMj3*uE`x(c%a~PsG<~V<JH_R#K^QDLJecU6C^Ny$RUCyKY4tEv0b`IWC
z_#QW}=O{aY@AbX>?6?1-tFL?gKA~SF^sD9lPN9s+x58D^SK=XdFfaCI+<AWTJbZT>
zMsZ2Ul9~;OPO;sczyq>akIPpm>#_Lb*;~z$_6zWxKv>70R8Su4T+;m(Y5cA7z66cq
z`Eb0xv7TQ-46bW&{rrU){6Dq64%hwiI<>yb`K!y%ch;=u(V$*wtgUH|H`QQw1sgc<
zWNT}7^=@idRkMEGj5P~8cXagL)V&d*Cby=$cWv0VwFBQUY;0`DzAyZ#1cY9pv9Yta
z)9v22wWrrbWsQv+x7WBegT-Qgbz|em$Vi;q#9kcnBMbVCdiS)<hmsorh^LCF8nmaH
ziS!H9R61E)H)Hx51kCGDg|19|e4H%VBuD4*?yfC3>AnqXxI_j!B5N919xx5BjOXDn
zHu2;<FjojL)7S9qE;}%>IgqgLgeFl^mzXjAtQF${s3AE9vQ^8W4mt*4(6uwv@skI~
zN+4C4b*)WJYZ~Q>6`!?Bcr*#ozUdDNzna0Gy>2XuaZ=`mE5|;mx2~nBX?a6)OGDGD
z-j?Ro%U7;$y5{CJjdJTJ_Lm2(-lj}qAY3c=MKBHPFzVn^INBJnbWLNxXSDN51!xBz
zErR$k<_{V(b5v3MetE>F!q6xu0rQ%pe0)uSrvFp|q<2t%)r4olu#DA#)ck_m9GIP^
z<-IK}tCwH1y0!Ti(dHl?kcUWq1zlE!p&8M6&0(n!AaG7UoxKS-JV8N!Rh-rthG&H6
zHTy$WylN5hOMSg6?#<(*y@2Od`g8kKDD%oZS0pfPPw`27FGb=`>{SO<gu>fb6Fa@i
zaP(|dpB{WqUlT-yWLyv{^7<z(F3vbiqC>SJCqA($9*M|*0=czm`Kq{Dys7u)Vb>af
zTb4z`{E*Q=FDefr2vH|S{jZYGXRrPAxF6hoC4v80`&=pVw;}6<&_ARHVYY;{<5f&C
zwBmn+C{G;c8DzO(Wkqp31G%-OWjX#RkbPAIw=A1bsSUValVvNluS%9Nom*kcu@%xy
z!@eEQi2XNN<9L23-i_~s_Q-ef!@D78)##tyBA;!DlwQ4;kxew<zzHWdk*=NHh@G}E
zVrrYX>vZP9pS^{%naS3Mrj-p%&ArVlS1-d_WkqDIa#p6E-AtagvNz$vPbpfvvWfpc
z9{(3W#?CU<GnmR(=k`~&F016{m#<tAA^&oi#<Q%oe+K#eIk|ajH^OrY>VKo3{{<7^
ziGw|Z1UEFViU^;MXU{-hxw5hZUlS$2Cc&~@tv0)VHVd53r~W56_%jVN<m7ktxZ7TH
z@;nClxj1>fQf%+uihUP8$YNth9aS|o#am#HU)j+Y4(Nul1l*QO9<&wp<69nHBe%XP
zuDEp<_<WzmR6d@<(Vw>Iy|R}P>XwO~&(_B2!wh1_iK9GQEfog2#*Fc7!0-@*h9`!Z
zcAI755Q7n}!SjVULtQtc%ef7Q*8CG;vHBPusKK8n|BvbW$3Hs$(Q_W3{%h3(lWVJA
zmILE0@;AJlsxMa65`gn=9?Dl&1L?%>7{@iK<~2CYnIGc=5RT)@d&Re>am*b<RRmW#
zcRJ`oEI5e{K4Z=<+?(ON5y$L&eR&EG+MPR@CvIpO^LNVqDqL+n-`=mrz!neN`#5Be
zJvdX{KjbV#ZPTVr!$XhqcQe|9iw(g=2YRKRG~jaqIG-n=YBn!%-A39w$6^-=jT!u%
zQGA@dJD~B2Aw_eXfJS~P+H8ZBvwy)pdU;Te4@{_Xmc|Dp6s^f%`9PDRxuy*~q@`)6
z?m>NR?DKR(J7lnYB0^y&6FbIoc%Vzu98(_|ugb-Tg8J~Fm!^3G0gb#;<xZM5@X(y5
z{pD1Y6JC*ns+?omz=L6$cFJJ!AeyFS3}5h2+G(_ThBlkyl{WCOnl5*#X#)?gX<D1%
z%SDEkH&{NDqWbroAJjMB&>T}AzWb-kS-#*wHBEEw4ft}Ip&d7E;GsBOE@$e)19Ab@
z(C~m8Xq2`6_pvWOf-3NJz0-^LKNO{iA;*8!a+i{LF_eaXwr_^0$b5OIM&+vT*ST*r
z_p0%W0(+2qQ`HPAmHf`Ysr4+$XIXk2JnAB~P_cTPl_pn>zr)9;W{9IiUyMH^s7h7?
zH(wt0VJ+3b&3A4#_t@U+G>%elag!prNrKy}ag$KbCC=+Lj+$X{lOwpv5!__KB{hyc
zw74k|+>{7zis1HZ95u}1rbcj6Be<!8yG7%ug%*b=|3ZAhb9cxE<C`Y9gvL=*EpB=Q
zH$8%zE;u|GF7rUSusF`Vod?dRZIAPE3I5oK!cpTX@oL^@MsPC)m%?4n=1jqP8b_;O
zaWuxNy;#*0NG01N<{ZKFYYeT1#hfdc0nk{I&r;3D6Uaf1eN1Z{EsMp?65Ih@l7E4I
zzWm++YvG@}yaZ1_tFh7gSloGnJFIbZ1M`i9p9kDJ=Pxvl7Rusg1J{Fxp;XUi1J@(J
zT*3Bc3yyO}U7!`SxH;0^kjC-Ju=#j;TGcW~`kB)>S~`oH8^Q4}^jq9q!EvtH9<8Cp
zogcyRZ~D&{d(M8IFF5j_I9g1L;~!R^kLSTv&*nvN^90AaCXQCs;x35bE)X1NitSw>
zIP#x3T3(C0FoL^KaO5~~7YdGiB#zeE;x3BdE)pC$N8Ck%BR_~E#^NrH;4Y5fE*2d5
zK^(2T#myHS$IKS^dkOPpBq~QaWB-VwC$PGANd$LE1b2zF$2N$gx3IWNBe+W=xJw1c
z{t-uyVsV#6aF+=#uq~Gfj{PH!UdQ4tkKisB9DBs^T`oAbLmWNRrN+iD0PZr81(sX@
z++|X;%2kVJQnk=~Z4)#-)fK>PlYiCD7o1LK3)Ci4wE&)^(!XUDsS`^wW^vJqjwej)
zu+Y$KJXT|9j`6RH49&)4iw(`jURPG2Eip73kKu7l-51xiQEO;69;-7n8;@OOXf__h
z!|uV@Onvo+X5)?qL$mQ%+|X=1)@W!p9&0i*%a>+Dv+-Dqq1kw>wE}IKq1kwBxuMy3
zY=xoOc#JWInoApxtuizlk6mMEw$G~#&GO}1L$mSN8bh=3*jhug@z^>;vu&(5G!>6g
zJ6zDHSxy@@z=f2znFz>*c5HOMf}84IHGatFZ;&I7GI}`>>fS=&78u+@;I5EW7I6!q
zjjsIC6LFMdi{lc?;`pm9S4fYDs}UUAA&#<caf>3jMZhhPN3)4r1l&UBUX7zZ*zqlv
z@qHdNmRt;6oAcKuSB>9=`hvz$XKZ^{N_+R|l2=N5fw#X>+Ph!ls8<%ZL~!rYxFv!+
zWOD510~$x&v$&-Z+)}|ksBufBy@xc8`e|`o-!5>1_VBAj$UUrawSs#@<EX=yFLl7R
z8C)H3ZO*4Pt`4}hxUc4cdT!ghO4<ve5<Vxsz<E^Tu9Eg1(>U4!+uqd?9G@&-V4|6;
z1^1U4M|)v$_0rFvy?W_q5a-uRKPg|77upot&xS~Q4bt9CDDfjmL4&mSZjGbevAB2y
z7mwiLf_q%!XqzmqF@kFp+yRtedyRs7O5<q1EUqbnYl`5S1ox!I(Z*REf3jkM6Y!-u
zf@>Ds6B<W5XmKrq3!<kM!Mz71*v}Th9o0D6N{efa;93Ruw8pgx?!6jEdunmZBDiIO
z%WB**!O=Ey{%CXUye|i?%|v<2foqfBZ(u){1Gg5xR;6$&1Q*z)6_NH<NP9uFwnEyY
z-DP{U-L{`ABe<1<3sw><1xGtf9PPWst%~6At6Z3s0JlnTYc-DJwBx%bg1aVyyGC%d
z=WLJtwe78r;8sU)s|80NLL5JBd)G#A*9wl_f&9EyaBUjLaatU{u!#``^0G#7?Had6
za2qs^<Fq*1j|C>`S}V8?jaw_YPL1O@EpA-|w@z>yHIBbcxey*njgP&zxb?ubnfBHL
z*X9J#_Ilvf$}b8sjvJFWZenEi>hg@^+R*;QIF6iAG#kf}Kf0WWiO2~>vvC~xp=dUa
z<H#q{IDSR5aolF1wZ$CAHEnD$<!l_c)zEAlcb%cxIPQ8wb4-2P49&(XU4~}kxb22!
z<G3A$X5+YSL$ht%U}!du+i7Svj_VN`e{#^$dQCYS$L%sS8^`T7G#kg=XlOQ$+hb@p
zj-v-rytjS6S!fr7_rjO`l#SO3jq_pSxV;8z<G5Q4&Bk%BH#FPEK0~u{TpMUs{@Wt*
z-xiVoHj)3YqCD+Fdj!`W!L>(l?Sf;RLLBATwznaI+aNgBOnb6HaEw!kqpVw8M+Da)
zI7S%6bqJ1e198*`i|dTwIwQDF!7*+ijyhxQ<woF?9pX;UM&K4mTN{B{D8Kee40Wmo
zJy8AF1k56_CB$q3W|3ev0kc?67T8<rT$lV#C|8S{f$5Te)zS-U-<9%<sVut%cbAxG
zc?)V>V!Xr_)VkE67P2SQ&?}LqwGp=!xDNTvQ<bYij`4%aRf9*23Wy`#;;xhSsKqRK
zowP^&RXMgt%_WXyEbe;2QG;3Xdcje5RgO4nEpe>P;<iaYgD7+xa2-w%y>7$!I^}m|
ziDSE#FI^E_m*5z$u)Qw9F$y7$eX{Ls7hDkiY!}>QjoU7`DH_M#TilKaZinDr)VLjj
z`?AJyoEFy|!F3C+TI0F}H%a4Y!7T0u;Fg#u?FQhM$S=ood^Z5M)S-3b`1omYJ0rNA
zf}_nNZl~aWPvc0nxE{d;-nvI{LA2EaT&?pljia@+xZVh^H-hVx_I_XEXi+V07jPXW
zI^fY)hw-|*fTNzO@zLs9+-|{L02)WMTW}2~M_gRvXqheU#t81lNP9O*dw;8OwB8oC
zN5&Vlw+FZ-rk{Icd>_|1T6l}Q3Ah_fRB;n<H;7GQuWtfwr}GJoqZg<%`hGKTR~g*R
zz+L710ZI^eGjLbqEJWq#DYiSu@J=i8I^ed;KV?PeGwMxG_u_7Yu_Aj>YlE>Odr@oL
zIgav_rduMoTLc$q^(}%6bmtbqJ+E=}J+?jkm2k9h05tad^@97P$+5jpX&n6%?eFY1
zmfQ#24zsr8H;Q+dHR(RI*A4DFvZvNLAr@PP-*P_F4%@^CEhtBSyC3ywzijr@5{71D
zu|7kyzBOrRHWo`6nvKQ03bcMhv$5EKp}8h@8Z<N;i=_?C#$x2P8i$R=4jP*6b4F-w
zF&%TXkq6Xzg|)F5bzjkJES57g8;j))&9-r?q1jlhU}!e(@+;7ahGt{2lA+mHY}n9j
zEH+|jHWnK-G#iVR4bArXHbb*9>KhEr#$vY{nvKQYXlOPTdy}ErHtsMq8;k7+&C36N
z(VM^<;;%?Ri$0AK)Q$b3Z=XSqdP*6!xP<6UpqUBLn?)Lz5WQKfag<|=>l0jH_4@>O
zp~m$IZiU8C)-5h6{S0tP>E|qsOG-aq&^YRYwF{{TE)~J01oy`pN1d@aFM{(TI8ShY
zq;b?M+t2<8u3vC}f)eCszu^8<<EVQUHz2sc>JJF+vl=%bxDy&j{j|8j2yQUa-k{*9
zFPuN>u*Ib#xO4=U794eoIO@5@9f;r#2#%V@@f{Exb&NRLf=1&H4g%L?;?#q{H3{Y*
zFwOXTNUDYmKDXj8Cn*n;0cNZGQyzx1-(t>*hH!VY@i0TEaWgEks%HqbZjoIY_Jo$j
z;<CWC8eA5*R{U*ch06lB3_t&%MnLP+f;6v<qsv8bxd<*NI9da?M~+)uK7z|faCyPe
zVh~4;TimS?+^vG6k7ZwO6&x)Hapb=pUm=1k2#z{ITtRTOGQ@GFEY24kV{Df61-H%Q
z$QN27;y7CtSB&6_f}?$6Ek$XMmWepds~ulSaDk^U36A!PxRT(;G>)@lal^oEmKd8Q
zhk@Jd{GG-P1GmLFsd1bSiyM*lf(T(m+IzFcjYxZUY8>ao;zp&tz(S5ndx3=<mG&5e
zaeR~;iz`ce0j@0V1-P=b_ZE%gJX#zd8*6pm3K~~*w+W6Ty-hH8X$<GkV%{Ja#@EEW
z!C>AXn73&R=g(qp7ffJLZx;+Tg>Bs~xVLK@XVc=|7{R?!aDgR$qu}15ah!K6CvO67
zv;3_gw(ut4Haq`_5-P`e`zMW~6f8G-eg|;7OniC=aJ!6ExdZL(HoIjvPl(XMQP|mI
zHO6wPG^3L-v_J7FG)jt&PVNq#T%x=vn)SGxQ$@288s}2cY=p*noJiw5DVmMYI0uU6
znh1^jRWuu+-EC+#LVKs7*$C|(Lvu`h_ZpgwMD8;*8=>8AXf{Hle5=0L2<?!e*)|?9
zG#jBkXlOP<d#D2KVMDVK+9QT$BecVYW+Svm4b4Vqj~SYc(2f|I?epV?X8H1NL$eXu
z6NY9Zv?mSCMrcnNnr-7fhNdDkYQq?4*EyU~uCvA<|JTVs<pn8;E9AKdmVGnst~B24
z&CrgO=BcnZLpxSElmc2ON~^`)DY(G;-YK}Rp#<x^Q*d9`I7+m|y(NNsi{SoQ<K7~;
zZ)hB)-s0XWxWM|pRdC<bxVH-KTN+2ru(-Pf7g*7|1V?#bKkpLUzi1q_hq2?uZM^q3
z;M$!4_cq|#oiA$K+ko5Pd`aV|VYa=uOM8J;e7m&ww8p(%+WS|HqZV4+J0iGu2rjUu
z?-1O-X&mR(;_i;%?iSoEl;9Zd7TmvU9Ou#E-WkEYQ*i4w?wx`=rE%1Fi@Qf~L7aJy
z;4aj-dj$7wjiXhtxO*eGdj)s7#@#Ep?`Rw?#|o^B)fny*Tp(xn2`-Sc`vmtN8h3vL
zcYkDj_Y3a3y3YFr_dSi{9NYHZ73t@@1owT7dzaw;Q{!l*EbdSQcSvx5qH%`=_malZ
zlC8iFtQy|~g1gD&sDHyINB#Q&a@0RsJBxczZ~<Q)jPT__Y43*`M~i524@LU<kl_9c
zB`7Zs3GQW$qn23Q!x7xWk?}n&xc6xsE$a%|^<bTkM8@}s;NFiC>K@1U0gXEx!5tPH
zqe(_2hXqH>VQK4w8bj-B+j>+m0Z$$k%!g2dxJL!|jK<M|Tij!U3v~7|!OhaR$ADY!
z{FTPh%3Iu#2<}J(cSPEIR^#XiEbei^1v>P&;698J?C0Zx`!$WDx3IW(M{w^J+^=ig
zy9M_VjiX1gxF;gGCj=MB<`aVZ4UMDMvA8D<j&ty&;Mm6}1@oI4L$72pPem|K8O&3H
zc}`>KwN_xCP>uOLf(dlsJ%UM@dz6viLXPK|Dnbi(!@}{|nXZGlOx<Uk#t7{w>QhmN
zWA@N!r4`NYp}p78tjDFCt8z9%qeLs3jnF996KRxNMY9naB~;N|6H!r470pIylqE&8
z5gMgN(QJf9*-<pd)JG{%G#iOfRus)fXp{p*vk@BSS<!5S#(Auk??pJK4bG9G*$C}_
z8JdmIe!Bwge;b;O&^~HtHbVOyL$eXu?;4to(0<R*Y=rjvhGzTxF+;O_`M9Ck2<;Pw
zW+SveFf<#X{h^`RHjWvZ-9tMHnw9^fM*gX1M~%)L6`dguY5yso7W1@VXdkJOPYZ^e
zRXOSmc}g5**y7$RIQl%c^<LA~d!?<wu2RM==6!+*?A7}O!`We5?-Ly7g>6v}Ebjdg
z-1{TA_Y015M;vv<;yw_;eL!$QtndNB{h7wGzZUmFgX7pfXvX%z$k?cFE9A_D{r`|)
z0^9Q;!R$o|b&q`fKe{dIq{TfWxIYJtZ9QY!dPds1OJk_B7W1rN=;v7bvx1>VQ8~8t
zIgO)UUt{|CVc=GqleG^6x7s{U_F=Slt@D3Tp0Pf4e~tXBl05Z3oTtu*^VIinp1K~+
zQ_sVB>UfanxG9@!q$SE;aECpi^o8@3mvEjE6V6i-!g*?NI8W^j=c&2jJhe8Qr^bf!
z)YfpGni|eiOM^T;P0bWO0Zs1_co2I%8&7B1fBdFF<$v@v$5F4gl*ZFMZ)n!jeA3XY
z<@l7LSx@t6L$jXdGZkomWN6mY{IQ|A#%jM{Xx7s(Hc)-Gp5{*t&3c;88k+6%2}83M
zmD;N6vz~@prD)dEP@5FZdKzkkqS-bm?TThS&0iRr^)!E3fyPKqVXde6yrEf7^Vf!E
zJ<S&k&3c+I8k+SqUote?=NApl^5x5hW<AYU49$9)zcDoHY5vyGY#V=PXv))2508Tu
z$Tjnny>Om#7tT}W!g<PDI8Rv%=P765JY_7Lr+kI;l&x@{auv=~rows3Q#emq3g;<D
z;XGw1oTvPR^OT)%o^lh;Q)a?>%1by;SqbMUC*eG0B%G&wg!7b*aGr7z&Qm7BdCEgL
zPgw}(DF@*^Wgy7Q6M&Ow$12;WS4yQFJBfO=9vVCL_l9Qe*gqJWmCJuLG;7EH$<VAF
z`)UQ+*9^_tv9BANYwXxR8=AFa-!L?5$G&N3){cG4&}^Uo#n7z2{;Q!`JN9peX6@L&
z8=AFarwq-u@ohu1cI-QbX6@L2RG@v=(5xN%o}pPg_I*RMcI-b5&Dyb-49(iH9~heL
z^A8Qp^5wq_&Dyb-4b9rI9~qjpV?Q=D+s3$|Svz(Tv_P(D$0&Q@JmoH&r_6=(l(%r6
zvKG!$&cb=hSU6Al3g;<X;XLIkoTp5M^OUD>p0X6qQ;x!U%1}5@`3dJKJK;R#CY-0t
zg!7b_aGtUf&QngpdCEvQPx%PvDI4KD<szJ?Ooa24hj5;<5YAH$!g<O-kpFqwF@(~m
z*|C_RSvyu`XjU$(4b9py{#^lO7p)zeT!A*l(5xMsYG|&pW77=H+Og?|X6@JvL$h{l
zrlHwBpJQlNU(Yo(YsY38nzdu+8Je|YvklF*F~`uX9h+-t){dQDfi}<3tR1_+(5xN1
z(9o<MyU5V29pkgfYCfzTn{Q~g&zBgQ<;$gpX6@KzhGy;9<%VYM*aAbdZCqh!){asB
z0=cFgqwIzAl)G@AG8fKM-okmxS~yQR3+E|g;XLIloTqGs^OUP_o-!5AQ=Y<k%2GH_
zISS_~L*YE-C!DA3g!7b}aGo*~&Qo5(dCE#SPdN$aDI?)L<s+P@Y=rZai*TMY5zbQ{
z!g<O<I8QkU=P3h0{^x1OT+}-;9$RQ=){fN}nw85%hGy;9Vnefb?8*wXC5C40*iu7t
zjUB5sG;7D|49(iHs|-!qG3o@z7VwODjw_t!n8JCEC!FV4!g-D(oaY$AdG<e?XWzql
z_B)(spM(4taFTj2@-I}}16Wrz*ZG|(2jI)vkb>6>)22HwOv7{UBwUW4r>TAfbBUcK
zoR;9K`m1>AVT$}x9Orlp|M;n1)%XtS6_VeK=O0LaoqPkM8aqm_y()76(}O)T#luPX
zoQ(8i{PNROd~yz^>-<dp$zL3M?e(j}fgdB7tfpg|@FL~pUGMqM4|WXB{<R0DIae?J
z?JqTfM*W|KPu?FzqU?+zQ3vluq6|HTL^*mt(iEiUkSI@|K%z{&fJC|aCnRbn<9Esz
z<KT0U=!eh6C;i#^_@vEaJF}NMa`w%(ol}oBEx^amay=J#{M2HBKgZaKpMx!=@e{w!
z(QmR|j;DFK)6|B~v9{K4x8hT-Fa5#SKJdLy#pL?CO}BS{OI<q`9e<$n`=5#hwg|;v
z(d%sduloF-eg28~rL!M;6#TFM!EccN<Ry8?ac@Do2&o&1{M&;x8)-ih`8J4jAyOWR
z{N#L*zxN;o{A7P)v6+I$<rzO0pklVkdF&bVykfI+4oIwq>|*mb@`p}&%NOp+%z%9M
zR}9vs3N=%sTz-)cujBIWY`O%WUFl>Y=jZy1?neB`ZpJl@h?QEkY3l|`quh&2zv8PD
z`nC60!~uS9hrh9!$9FUO<ay**1T%%^LIlFr(A+r71$CaX`ty3d+V+25?FRMyGPnQ#
E0QNBs1ONa4

diff --git a/docs/DeveloperHowTo.html b/docs/DeveloperHowTo.html
deleted file mode 100644
index 209b85738..000000000
--- a/docs/DeveloperHowTo.html
+++ /dev/null
@@ -1,6 +0,0 @@
-<html>
-
-<head>
-<meta http-equiv="refresh" content="0; url=http://github.com/UV-CDAT/uvcdat/wiki/Development"/>
-</head>
-</html>
diff --git a/docs/HELP.html b/docs/HELP.html
deleted file mode 100644
index 7430ec110..000000000
--- a/docs/HELP.html
+++ /dev/null
@@ -1,6 +0,0 @@
-<html>
-
-<head>
-<meta http-equiv="refresh" content="0; url=http://github.com/UV-CDAT/uvcdat/wiki/Building-UVCDAT"/>
-</head>
-</html>
diff --git a/docs/README.txt b/docs/README.txt
deleted file mode 100644
index 99bf6afef..000000000
--- a/docs/README.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-The documentation of UV-CDAT has moved to github. Please visit
-this page for the latest documentation:  https://github.com/UV-CDAT/uvcdat/wiki
diff --git a/docs/ReleaseNotes.txt b/docs/ReleaseNotes.txt
deleted file mode 100644
index ca81f7443..000000000
--- a/docs/ReleaseNotes.txt
+++ /dev/null
@@ -1,226 +0,0 @@
-********************* Changes in the next release  ****
-********************* Release 4.3 *************************************
-New build system, CDAT is now installed under CDAT/VERSION/bin 
-New Fonts system: most ttf fonts work now!
-New direct postscript output
-Hatch/pattern output working (see known bug though)
-Improved VCDAT
-New Package: Thermo
-EZTemplate
-genutil.filters
-going away from pyfort, using f2py instead
-pydebug included
-mpeg output
-generalized grid support
-improved web documentation
-
-!!!!!!!!!!!!!!! WARNING !!!!!!!!!!!!!!!!!
-There is a known bug in cairo (the postscript rendering library) that will
-cause the output to be wrong if you are using both
-pattern/hatches and dashed lines
-
-
-********************* Release 4.0 *************************************
-
-----------------------------
-MAJOR CHANGES TO THE RELEASE
-----------------------------
-
-*- Support for Generalized Grids, regridding possible using the SCRIP regridder
-(source code provided but not installed)
-
-*- Template Editor, let you resize, drag, drop object on your VCS Canvas
-
-*- Major Overhaull of the VCDAT GUI, with template and graphic methods
-editors. Various mode of use (variable browsing, computation mode, graphics
-mode). 
-
-*- Page Layout Editor available in VCDAT
-
-*- More projection available (GCTP package), total of 28 new types of
-projection, each with full parameters control
-
-*- Improved install process
-
-*- New packages: filter, vertical interpolation, IaGraph, f2py, R/Rpy, VTK
-(Linux only, and not supported), thermo (thermodynamic diagrams)
-
-*- Full list of Changes available in files Changes_3.3_to_4.* (Open Office, csv and
-pdf formats)
-
--------------------------
-KNOWN BUGS
--------------------------
-
-*- There is apparently a bug on the Mac X server, this forces the user to move
-the VCS canvas window in order for the threads to start. If not the 
-application might hang.
-
-*- Beta version does not have support for inline scripting from VCDAT, final
-version and some future beta wiull have full support.
-
-
-********************* Release 3.3 *************************************
-01. vcs plotting now as at-plotting-time overwritting capabilities, i.e, you can change a graphic method or template attribute as you plot by passing it to the plot command
-02. vcs text function let you know draw string on the fly, also accepts colors argument passed a colornames
-03 vcs canvas object have a new function "match_color" which allows the user to get the color in the colormap (in use or passed) that is closest from its desirted color
-04. rank function now available in genutil.statstics
-
-********************* Release 3.3 (10/01/2002)  ************************
-01. Meshfill support is now complete (for graphical output)
-       all is done at the C level, projection are authorized as well as
-       axes transformations. MUCH faster than the old python based support
-02. CompasrisonsStatistics contrib package added (PCMDI's compall)
-03. VariablesMatcher, VariableConditioner objects added, allows easy regrid/masking 
-       of datasets for comparison, module: cdutil
-04. Build changes have removed the need for users to set LD_LIBRARY_PATH.
-05. Now available on AIX.
-06. genutil.statistics now has percentiles and median functions
-07. genutil now has grower function (add dimensions to MV so they have matching shapes)
-08. genutil.statistics: fixed a bug when input datasets had different masks 
-       (now uses the union of both masks)
-09. pyclimate package added to the contrib, which is a Python package designed
-       to accomplish some usual tasks during the analysis of climate variability
-       using Python. It provides functions to perform some simple IO operations,
-       operations with COARDS-compliant netCDF files, EOF analysis, SVD and CCA
-       analysis of coupled data sets, some linear digital filters, kernel based
-       probabilitydensity function estimation and access to DCDFLIB.C library from
-       Python.
-10. Fixed the Read Script File file browser dialog "File of type". The
-       text said "Search for netCDF" and "Search for GrADS". This has
-       been replaced with "Search fo VCS file" and "Search for Python files".
-11. Sync'ed the continents to work with the Page Editor. In the process,
-       I fixed the canvas update to work with threads
-12. Ran Insure++ on code to clean up memory leaks in XGKS and VCS.
-13. Fixed "open" bug. That is, fixed bug for when x.open() called. It no
-       longer exists out of python from the command line.
-14. Can now print a VCS Canvas plot, produced in background mode,
-       to an output file (i.e., postscript, cgm, gif, or eps.).
-15. Reordered the include files to find the VCS include files first.
-       This solves the problem of VCS not compiling on Solaris 2.8
-       platforms. Solaris 2.8 platforms have a project.h file located in
-       the /usr/lib directory. project.h is also an include file for VCS.
-       In any case, forcing the VCS include files to be seen first, solves
-       this problem and should have no effect on other platforms.
-16. Bug fixes for the DEC platform. The low-level primative attribute settings
-       now work for the DEC platform.
-17. Fixed the legend label to be positioned at the bottom right corner of
-       the plot. It will also move with the legend when the legend is
-       altered by the user.
-18. Now the user does not have to specify the DISPLAY variable in order to run
-       VCS or VCDAT. That is the user can now run the VCS module from the
-       command-line, script, background mode, or from VCDAT without specifying
-       the environment variable DISPLAY. That is, XOpenDisplay is now set
-       to ":0.0" or "localhost:0.0" for VCS and for Tkinter.
-19. This function displays graphics segments, which are currently stored in
-       the frame buffer, on the VCS Canvas. That is, if the plot function
-       was called with the option bg = 1 (i.e., background mode), then the
-       plot is produced in the frame buffer and not visible to the user.
-       In order to view  the graphics segments, this function will copy
-       the contents of the frame buffer to the VCS Canvas, where the
-       graphics can be viewed by the user.
-20. Added outfill and outline to the browser interface. They are now part of the
-       graphics method pulldown selection. Also add a graphics method attribute
-       settings interface for the two graphics method so the user can
-       change their attributes.
-21. Added the capability for low-level primatives to accept Numeric arrays for
-       their list of x and y axis point values.
-22. Bug fix for generating more than 140 plots. That is, commented out
-        Py_DECREF(dict);
----
-       This DECREF causes memory problems for Python if the
-       user chooses to generate more than 140 plots in a Python loop.
-       That is, needed memory is removed then overwritten, which causes
-       Python to give the undefined global name error for "range", "len",
-       or some other globally defined Python key word.
-23. vcdat_teaching_script_file.py now work with appropriate graphics methods.
-       That is, you can now run "python vcdat_teaching_script_file.py" to replay
-       what was done in the VCDAT session.
-
-       Note: vcdat_recording_script_file.py worked because it did not rely on the
-               ASD.scr file.
-24. Show the index values for 1D axis variables
-25. Updated the "Alter Plot" GUI to reflect the minor ticks settings and the
-       axis type (i.e., Linear or Log10). Currently, the user can only view the
-       axis representation. Later when time permits, I will change the "View"
-       text input window to an "Edit" text input window. This will let the user
-       specify (via a Python dictionary) the location of the axis label and the
-       label text string. For example, { -90: "south pole", 0: "equator",
-       90: "north pole"}.
-26. Modified the VCDAT module to track user directory, file, and variable requests.
-       It also logs the user's wall clock and cpu time. Examples: "vcdat -u",
-       "vcdat -h".
-27. Updated the VCS initial.attributes file to Mike Wehner's initial.attibutes file.
-28. The Boxfill legend labels can be set with a list or a dictionary. For
-       example: [0, 10, 20, 30, 40] or {40:'some text', 50:'some more text'}.
-29. boxfill.legend={.5:'hi'} will now work if the legend is vertical
-       or horizontal.
-30. Colormap bug fix. The default colormap can now be changed,
-       then saved as a new colormap.
-31. VCDAT option menu: -h, print out help menu; -n, turn splash screen off, -u, turn
-       user tracking mode on. See item 17.
-32. Corrected the legend when plotting log based boxfill plots.
-33. Updated the X and Y axes, so that the user can modify and
-       change the axes values.
-34. The graphics method's data world coordinates (i.e., datawc_x1, datawc_x2,
-       datawc_y1, and datawc_y2) will override the incoming variable's coordinates.
-35. VCDAT has a new icon to bookmark files, works just like the directories
-36. Beta DODS support on Linux, build using --dods=/path/to/DODS/directory
-37. gplot now builds automatically
-38. various utilies necessary to produce GIF output are also now provided as part
-       of the install process.
-39. VCS template object now have scale, reset, move and moveto associated function
-40. added cddump utility (ncdump like utility that works on any type of file that
-       cdms can open) 
-41. VCDAT has new icon functions: grower, mask, getmask, less, greater, equal, not, standard deviation
-
-********************* Release 3.2 (4/15/2002)  ************************
-1. Revised build system allows for installation into an existing Python.
-   Please see README.txt to learn the new installation system.
-   (Paul Dubois). DRS and QL support are not built by default, unless you
-   use the --PCMDI option.
-2. In genutil.statistics, the linearegression now can return std error
-   and probabilities for regression constant (in addition of the regression coef).
-   Power users that used this capability should be aware that the syntax slightly
-   changed, see the doc string. (Charles Doutriaux)
-3. Contributed package shgrid, cssgrid, dsgrid  added (Clyde Dease)
-   See "INTRODUCTION TO NGMATH" below for descriptions.
-   After installation, go to the Test subdirectory of each and run python shgridtest.py;
-   a documentation file will be produced as a byproduct.
-4. Updated averager so it now takes integer for single axis operation.
-   Updated averager so the weights options are now "unweighted" and "weighted" instead of 
-   "equal" and "generate"; old options still work.
-   Updated averager so the weights keyword now works.
-   Updated the doc strings of averager so they reflect the previous 2 changes.
-   Updated genutil.statistics so weights options are now compatible with cdutil.averager.
-
-5. Solution to the high CPU usage problem when displaying a VCS Canvas. 
-   Modification to the VCS Canvas event mainloop was done to avoid high CPU 
-   usage while the VCS Canvas sat idle. This modification has no other 
-   effects on the graphics or the VCS Canvas behavior.
-
-6. Extensive improvements to the VCDAT browser. These are detailed at:
-   http://esg.llnl.gov/cdat/GUI_Modifications.html
-
-   INTRODUCTION TO NGMATH
-
-        The ngmath library is a collection of interpolators and approximators
-for one-dimensional, two-dimensional and three-dimensional data. The packages, 
-which were obtained from NCAR, are:
-
-  natgrid -- a two-dimensional random data interpolation package based on 
-              Dave Watson's nngridr. NOT built by default in CDAT due to 
-             compile problems on some platforms. Works on linux.
-
-  dsgrid --  a three-dimensional random data interpolator based on a
-             simple inverse distance weighting algorithm.
-
-  fitgrid -- an interpolation package for one-dimensional and two-dimensional 
-             gridded data based on Alan Cline's Fitpack. Fitpack uses splines 
-             under tension to interpolate in one and two dimensions.  
-             NOT IN CDAT.
-
-  csagrid -- an approximation package for one-dimensional, two-dimensional and 
-             three-dimensional random data based on David Fulker's Splpack. 
-             csagrid uses cubic splines to calculate its approximation function.
-
-- 
GitLab


From c3ffa3a3423fe8dfab5cce289ca82712217add08 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 15:23:36 -0400
Subject: [PATCH 051/196] Initial VCS documentation

---
 docs/index.rst      |  60 ++++++++++++++++++++++++++
 docs/user-guide.rst | 103 ++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 163 insertions(+)
 create mode 100644 docs/index.rst
 create mode 100644 docs/user-guide.rst

diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 000000000..99c91e62a
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,60 @@
+VCS: Visualization Control System
+==================================
+
+What is VCS?
+---------------
+
+The PCMDI Visualization Control System (VCS) is expressly designed to meet the needs of the scientific community. VCS allows wide-ranging changes to be made to the data display, provides for hardcopy output, and includes a means for recovery of a previous display.
+
+In the VCS model, the data display is defined by a trio of named object sets, designated the “primary objects” (or “primary elements”). These include:
+
+* **Data Ingestion**: The data, which drives the visualization, is ingested into the system via cdms2 or numeric modules such as numpy.
+
+* **Graphics Method**: The graphics method, which specifies the display technique.
+
+* **Template**: The picture template, which determines the appearance of each segment of the display. Tables for manipulating these primary objects are stored in VCS for later recall and possible use.
+
+In addition, detailed specification of the primary objects’ attributes is provided by eight “secondary objects” (or “secondary elements”):
+
+* **colormap**: Specification of combinations of 256 available colors
+* **fill area**: Style, style index, and color index
+* **format**: Specifications for converting numbers to display strings
+* **line**: Line type, width and color index
+* **list**: A sequence of pairs of numerical and character values
+* **marker**: Marker type, size, and color index
+* **text**: Text font type, character spacing, expansion and color index
+* **text orientation**: Character height, angle, path, and horizontal/vertical alignment
+
+By combining primary and secondary objects in various ways (either at the command line or in a program), the VCS user can comprehensively diagnose and intercompare climate model simulations. VCS provides capabilities to:
+
+- View, select and modify attributes of data variables and of their dimensions
+- Create and modify existing template attributes and graphics methods
+- Save the state-of-the-system as a script to be run interactively or in a program
+- Save a display as a Computer Graphics Metafile (CGM), GIF, Postscript, Sun Raster, or Encapsulated Postscript file
+- Perform grid transformations and compute new data variables
+- Create and modify color maps
+- Zoom into a specified portion of a display
+- Change the orientation (portrait vs. landscape) or size (partial vs. full-screen) of a display
+- Animate a single data variable or more than one data variable simultaneously
+- Display data in various geospatial projections
+
+For an overview of the concepts present in VCS, we recommend checking out the :doc:`user-guide`.
+
+VCS is published under the Apache 2.0 License. Its source code can be found at
+https://github.com/UV-CDAT/uvcdat/Packages/vcs
+
+Table of contents
+-----------------
+.. toctree::
+   :maxdepth: 2
+
+   admin-docs
+   user-docs
+   developer-docs
+   plugins
+
+API index
+---------
+
+* :ref:`genindex`
+* :ref:`modindex`
\ No newline at end of file
diff --git a/docs/user-guide.rst b/docs/user-guide.rst
new file mode 100644
index 000000000..77f300f32
--- /dev/null
+++ b/docs/user-guide.rst
@@ -0,0 +1,103 @@
+User Guide
+**********
+
+Document Conventions
+====================
+
+This User Guide is written for end-users of vcs, rather than developers. If you
+have suggestions or questions about this documentation, feel free to contact us
+`on UV-CDAT <https://github.com/UV-CDAT/uvcdat>`_,
+`the mailing list <>`_.
+
+vcs specific entities will be ``formatted like this``.
+
+.. _concepts:
+
+Concepts
+========
+
+The VCS module can accept data from the CDMS module, the CU module, or the Numeric module. For information on
+how to use any of the mentioned modules, see their respective documentation. For examples on the
+direct use of these modules, see the VCS API Examples chapter and the examples located throughout this text.
+
+VCS Model
+---------
+
+The VCS model is defined by a trio of named attribute sets, designated the “Primary Objects” (also known as “Primary Elements”).
+These include: the data, which specifies what is to be displayed and are obtained from the cdms2 or Numeric modules;
+the graphics method, which specifies the display technique; and the picture template, which determines the appearance of
+each segment of the display.
+
+VCS Primary Objects (or Primary Elements)
+-----------------------------------------
+
+A description of each primary object is warranted before showing their use and usefulness in VCS. See descriptions below.
+
+**Graphics Method Objects**
+
+A graphics method simply defines how data is to be displayed on the screen. Currently, there are eleven different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. They also have a set of core attributes that are common in all graphics methods. The descriptions of the current set of graphics methods are as follows:
+
+* **boxfillobject** - The boxfill graphics method draws color grid cells to represent the data on the VCS Canvas. Its class symbol or alias is “Gfb”.
+* **continentsobject** - The continents graphics method draws a predefined, generic set of continental outlines in a longitude by latitude space. To draw continental outlines, no external data set is required. Its class symbol or alias is “Gcon”.
+* **isofillobject** - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is “Gfi”.
+* **isolineobject** - The isoline graphics method draws lines of constant value at specified levels in order to graphically represent a two-dimensional array. It also labels the values of these isolines on the VCS Canvas. Its class symbol or alias is “Gi”.
+* **outfillobject** - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Gfo”.
+* **outlineobject** - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Go”.
+* **scatterobject** - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). Its class symbol or alias is “GSp”.
+* **vectorobject** - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is “Gv”.
+* **xvsyobject** - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ‘t’ represents the 1D coordinate values. Its class symbol or alias is “GXY”.
+* **xyvsyobject** - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ‘y’ represents the 1D coordinate values. Its class symbol or alias is “GXy”.
+* **Yxvsxobject** - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ‘x’ represents the 1D coordinate values. Its class symbol or alias is “GYx”.
+* **3dscalarobject** - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is “3d_scalar”.
+* **3dvectorobject** - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is “3d_vector”.
+
+**Picture Template Object**
+
+A picture template determines the location of each picture segment, the space to be allocated to it, and related properties relevant to its display. The description of the picture template is as follows:
+
+* **templateobject** - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is “P”.
+
+**Data Object**
+
+Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 and Numeric module documentation for data extraction, creation and manipulation.
+
+VCS Secondary Objects (or Secondary Elements)
+---------------------------------------------
+A description of each secondary object is warranted before showing their use and usefulness in VCS. It is these secondary objects that defines the detailed specification of the primary objects’ attributes. Currently, there are five secondary objects with more to follow.
+
+**Colormap Object**
+
+The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). The description of the colormap object is as follows:
+
+*colormapobject* - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is “Cp”.
+Note: VCS colormaps are objects, but they are not referenced like other secondary objects.
+
+**Fillarea Object**
+
+The fillarea objects allows the user to edit fillarea attributes, including fillarea interior style, style index, and color index. The description of the fillarea object is as follows:
+
+*fillareaobject* - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is “Tf”.
+Line Object
+
+**Line Object**
+
+The line object allows the editing of line type, width, and color index. The description of the line object is as follows:
+
+*lineobject* - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. Its class symbol or alias is “Tl”.
+Marker Object
+
+The marker object allows the editing of the marker type, width, and color index. The description of the marker object is as follows:
+
+**Marker Object**
+
+*markerobject* - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is “Tm”.
+
+**Text Objects**
+
+Graphical displays often contain textual inscriptions, which provide further information. The text-table object attributes allow the generation of character strings on the VCS Canvas by defining the character font, precision, expansion, spacing, and color. The text-orientation object attributes allow the appearance of text character strings to be changed by defining the character height, up-angle, path, and horizontal and vertical alignment. The text-combined object is a combination of both text-table and text-orientation objects. The description of the text objects are as follows:
+
+*textcombinedobject* - The text-combined attributes combine the text-table attributes and the text-orientation attributes. By combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is “Tc”.
+
+*textorientationobject* - The text-orientation attributes set names that define the height, angel, path, horizontal alignment and vertical alignment. Its class symbol or alias is “To”.
+
+*texttableobject* - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is “Tt”.
-- 
GitLab


From eb6017fdb3dc17f4a5287f4c9f45f9bc2eaa66aa Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 21:28:46 -0400
Subject: [PATCH 052/196] Added vcs plotting section

---
 docs/user-guide.rst | 72 +++++++++++++++++++++++++++++++++++++++++----
 1 file changed, 67 insertions(+), 5 deletions(-)

diff --git a/docs/user-guide.rst b/docs/user-guide.rst
index 77f300f32..acf80e490 100644
--- a/docs/user-guide.rst
+++ b/docs/user-guide.rst
@@ -65,21 +65,24 @@ VCS Secondary Objects (or Secondary Elements)
 ---------------------------------------------
 A description of each secondary object is warranted before showing their use and usefulness in VCS. It is these secondary objects that defines the detailed specification of the primary objects’ attributes. Currently, there are five secondary objects with more to follow.
 
-**Colormap Object**
+Colormap Object
+^^^^^^^^^^^^^^^
 
 The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). The description of the colormap object is as follows:
 
 *colormapobject* - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is “Cp”.
 Note: VCS colormaps are objects, but they are not referenced like other secondary objects.
 
-**Fillarea Object**
+Fillarea Object
+^^^^^^^^^^^^^^^
 
 The fillarea objects allows the user to edit fillarea attributes, including fillarea interior style, style index, and color index. The description of the fillarea object is as follows:
 
 *fillareaobject* - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is “Tf”.
 Line Object
 
-**Line Object**
+Line Object
+^^^^^^^^^^^
 
 The line object allows the editing of line type, width, and color index. The description of the line object is as follows:
 
@@ -88,11 +91,13 @@ Marker Object
 
 The marker object allows the editing of the marker type, width, and color index. The description of the marker object is as follows:
 
-**Marker Object**
+Marker Object
+^^^^^^^^^^^^^
 
 *markerobject* - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is “Tm”.
 
-**Text Objects**
+Text Objects
+^^^^^^^^^^^^
 
 Graphical displays often contain textual inscriptions, which provide further information. The text-table object attributes allow the generation of character strings on the VCS Canvas by defining the character font, precision, expansion, spacing, and color. The text-orientation object attributes allow the appearance of text character strings to be changed by defining the character height, up-angle, path, and horizontal and vertical alignment. The text-combined object is a combination of both text-table and text-orientation objects. The description of the text objects are as follows:
 
@@ -101,3 +106,60 @@ Graphical displays often contain textual inscriptions, which provide further inf
 *textorientationobject* - The text-orientation attributes set names that define the height, angel, path, horizontal alignment and vertical alignment. Its class symbol or alias is “To”.
 
 *texttableobject* - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is “Tt”.
+
+
+Getting Started with VCS
+------------------------
+
+Import VCS
+^^^^^^^^^^
+
+In Python, before one can start using a module they must first load it.
+To load the VCS module, like all other Python modules, either type:
+
+``from vcs import *``
+
+or
+
+``import vcs``
+
+If you use ``import vcs``, then you must prepend "vcs" to certain calls
+(e.g., ``vcs.help()``). If you use ``from vcs import *``, then you must
+be aware of possible name clashes. That is, if two packages are imported
+using the form ``from name import *`` and both have a "help" function,
+then Python doesn't know which ``help`` function to call. For such
+cases, and indeed as an unspoken rule, it is best to use "import name"
+to avoid name clashing between packages.
+
+Create Canvas Object
+^^^^^^^^^^^^^^^^^^^^
+
+To construct a VCS Canvas object type the following:
+
+``a = vcs.init()``
+
+There can only be at most 8 VCS Canvas objects initialized at any given
+time.
+
+Plotting in VCS
+^^^^^^^^^^^^^^^
+There are several different ways to display data on the VCS Canvas. The
+most basic way is to use the plot() function. The simple plot() function
+command: plot(array1,[array2], [template object], [graphics\_method
+object]). The examples below are showing how to plot a simple array
+using default values for everything else.::
+
+    # Import vcs and io (cdms) modules
+    import vcs
+    import cdms2
+
+    # Open sample NetCDF data file
+    data = cdms2.open('clt.nc')
+
+    # Initialize vcs and then plot the variable
+    canvas = vcs.init()
+    clt = data['clt']
+    canvas.plot(clt)
+
+    # Close the canvas context
+    canvas.close()
\ No newline at end of file
-- 
GitLab


From 5543360ec377d82cde87172c3c6882f472dff238 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 21:39:43 -0400
Subject: [PATCH 053/196] Fixed link to user-guide

---
 docs/index.rst | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/docs/index.rst b/docs/index.rst
index 99c91e62a..0bb30b293 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -48,10 +48,8 @@ Table of contents
 .. toctree::
    :maxdepth: 2
 
-   admin-docs
-   user-docs
+   user-guide
    developer-docs
-   plugins
 
 API index
 ---------
-- 
GitLab


From e8300df1a48e3b870bbe7ff96fb4adcc3b2cfeb2 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 21:46:12 -0400
Subject: [PATCH 054/196] Added vcs plotting example

---
 docs/user-guide.rst | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/docs/user-guide.rst b/docs/user-guide.rst
index acf80e490..e04e8e9e3 100644
--- a/docs/user-guide.rst
+++ b/docs/user-guide.rst
@@ -6,8 +6,8 @@ Document Conventions
 
 This User Guide is written for end-users of vcs, rather than developers. If you
 have suggestions or questions about this documentation, feel free to contact us
-`on UV-CDAT <https://github.com/UV-CDAT/uvcdat>`_,
-`the mailing list <>`_.
+on `UV-CDAT <https://github.com/UV-CDAT/uvcdat>`_,
+`mailing list <uvcdat-users@lists.llnl.gov>`_.
 
 vcs specific entities will be ``formatted like this``.
 
@@ -33,7 +33,8 @@ VCS Primary Objects (or Primary Elements)
 
 A description of each primary object is warranted before showing their use and usefulness in VCS. See descriptions below.
 
-**Graphics Method Objects**
+Graphics Method Objects
+^^^^^^^^^^^^^^^^^^^^^^^
 
 A graphics method simply defines how data is to be displayed on the screen. Currently, there are eleven different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. They also have a set of core attributes that are common in all graphics methods. The descriptions of the current set of graphics methods are as follows:
 
@@ -147,7 +148,9 @@ There are several different ways to display data on the VCS Canvas. The
 most basic way is to use the plot() function. The simple plot() function
 command: plot(array1,[array2], [template object], [graphics\_method
 object]). The examples below are showing how to plot a simple array
-using default values for everything else.::
+using default values for everything else.
+
+.. code-block:: python
 
     # Import vcs and io (cdms) modules
     import vcs
-- 
GitLab


From 0a2c266c6f97f8b75ba572df4e9ebed837fd042a Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 21:52:07 -0400
Subject: [PATCH 055/196] Removed references to Numeric module

---
 docs/user-guide.rst | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/user-guide.rst b/docs/user-guide.rst
index e04e8e9e3..7e2c74437 100644
--- a/docs/user-guide.rst
+++ b/docs/user-guide.rst
@@ -16,7 +16,7 @@ vcs specific entities will be ``formatted like this``.
 Concepts
 ========
 
-The VCS module can accept data from the CDMS module, the CU module, or the Numeric module. For use on
+The VCS module can accept data from the CDMS module or can use the numpy array. For use on
 how to use either of the mentioned modules, see their respective documentation. For examples on the
 direct use of these modules, see the VCS API Examples chapter and the examples located throughout this texts.
 
@@ -24,7 +24,7 @@ VCS Model
 ---------
 
 The VCS model is defined by a trio of named attribute sets, designated the “Primary Objects” (also known as “Primary Elements”).
-These include: the data, which specifies what is to be displayed and are obtained from the cdms2 or Numeric modules;
+These include: the data, which specifies what is to be displayed and are obtained from the cdms2 or numpy array;
 the graphics method, which specifies the display technique; and the picture template, which determines the appearance of
 each segment of the display.
 
@@ -60,7 +60,7 @@ A picture template determines the location of each picture segment, the space to
 
 **Data Object**
 
-Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 and Numeric module documentation for data extraction, creation and manipulation.
+Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 module documentation for data extraction, creation and manipulation.
 
 VCS Secondary Objects (or Secondary Elements)
 ---------------------------------------------
-- 
GitLab


From 58eba47ec284ab8638f5ebeadf0935ded32225e3 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 13 May 2016 23:59:55 -0400
Subject: [PATCH 056/196] Added some more examples for plotting

---
 docs/user-guide.rst | 69 ++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 68 insertions(+), 1 deletion(-)

diff --git a/docs/user-guide.rst b/docs/user-guide.rst
index 7e2c74437..d894308a1 100644
--- a/docs/user-guide.rst
+++ b/docs/user-guide.rst
@@ -13,6 +13,17 @@ vcs specific entities will be ``formatted like this``.
 
 .. _concepts:
 
+Installation
+============
+While there are many ways a user can install vcs, installation using conda is
+preferred for the end user. Currently, to install vcs, you need to install the entire uvcdat
+package. ::
+
+    conda install -c uvcdat
+
+It is assumed that conda is installed on the user's system and is available in the shell.
+
+
 Concepts
 ========
 
@@ -165,4 +176,60 @@ using default values for everything else.
     canvas.plot(clt)
 
     # Close the canvas context
-    canvas.close()
\ No newline at end of file
+    canvas.close()
+
+As mentioned earlier, vcs can use numpy arrays directly. The example below shows how to plot numpy array data.
+
+.. code-block:: python
+
+    # Import necessary modules
+    import vcs
+    import cdms2
+    import numpy
+
+    # Manually create data
+    data = numpy.sin(numpy.arange(100))
+
+    # Reshape to make it useful for vcs
+    data = numpy.reshape(data, (10, 10))
+
+    # Initialize vcs and then plot the data
+    canvas = vcs.init()
+    canvas.plot(data)
+
+    # Close the canvas context
+    canvas.close()
+
+It should be noted that plot can take multiple arguments. For example, plot can take ``bg=1`` option to draw visualization in the background. Below is the plotting climate data example with few new options to plot method.
+
+.. code-block:: python
+
+    # Import vcs and io (cdms) modules
+    import vcs
+    import cdms2
+
+    # Open sample NetCDF data file
+    data = cdms2.open('clt.nc')
+
+    # Initialize vcs and then plot the variable
+    canvas = vcs.init()
+
+    # Create isofill graphics method
+    iso = canvas.createisofill()
+
+    # Create default template
+    template = canvas.createtemplate()
+
+    # Scale visualization by factor of 2
+    template.scale(2)
+
+    clt = data['clt']
+
+    # Plot isofill with continents outline and default template
+    canvas.plot(template, iso, clt, continents=1)
+
+    # Close the canvas context
+    canvas.close()
+
+
+.. note:: When using the plot() function, keep in mind that all keyword arguments must be last. Note that the order of the objects is not restrictive, just as long as they are before any keyword argument.
\ No newline at end of file
-- 
GitLab


From 11990623d5935de7a1e6bd426e63b33f26899117 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 14 May 2016 00:35:59 -0400
Subject: [PATCH 057/196] Moved vcs documentation to vcs directory

---
 {docs => Packages/vcs/docs}/index.rst      | 0
 {docs => Packages/vcs/docs}/user-guide.rst | 6 +++---
 2 files changed, 3 insertions(+), 3 deletions(-)
 rename {docs => Packages/vcs/docs}/index.rst (100%)
 rename {docs => Packages/vcs/docs}/user-guide.rst (99%)

diff --git a/docs/index.rst b/Packages/vcs/docs/index.rst
similarity index 100%
rename from docs/index.rst
rename to Packages/vcs/docs/index.rst
diff --git a/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
similarity index 99%
rename from docs/user-guide.rst
rename to Packages/vcs/docs/user-guide.rst
index d894308a1..476295e3c 100644
--- a/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -220,12 +220,12 @@ It should be noted that plot can take multiple arguments. For example, plot can
     # Create default template
     template = canvas.createtemplate()
 
-    # Scale visualization by factor of 2
-    template.scale(2)
+    # Scale down visualization space
+    template.scale(0.8)
 
     clt = data['clt']
 
-    # Plot isofill with continents outline and default template
+    # Plot isofill with continents outline and custom template
     canvas.plot(template, iso, clt, continents=1)
 
     # Close the canvas context
-- 
GitLab


From 4b51eff646d7c12a37158a724ac59ef332c3e365 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 14 May 2016 07:51:59 -0400
Subject: [PATCH 058/196] Added list of create and get methods

---
 Packages/vcs/docs/user-guide.rst | 191 ++++++++++++++++++++++++++++++-
 1 file changed, 190 insertions(+), 1 deletion(-)

diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index 476295e3c..de96d5e7f 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -232,4 +232,193 @@ It should be noted that plot can take multiple arguments. For example, plot can
     canvas.close()
 
 
-.. note:: When using the plot() function, keep in mind that all keyword arguments must be last. Note that the order of the objects is not restrictive, just as long as they are before any keyword argument.
\ No newline at end of file
+.. note:: When using the plot() function, keep in mind that all keyword arguments must be last. The order of the arguments is not restrictive, just as long as they are before any keyword argument.
+
+
+Other Plotting functions in VCS
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+There are other ways to plot data in VCS. These additional plotting
+routines utilize the same parameter format as the plot() function. What
+makes these plotting functions unique are their direct association with
+the graphics methods. That is, each graphics method has its own plot
+function. For example, if the user wishes to plot data using the isofill
+graphics method, then the function isofill() can be used instead of the
+plot() function. If the isofill object is not specified then the default
+isofill graphics method will be used. The user can also pass down the
+name of the graphics method to be used. In some ways, the graphics
+method plot functions can be thought of as short cuts to plotting data.
+
+Note, if a different graphics method object is specified and passed down
+to one of these alternate plot functions, then the alternate plot
+function will behave as the plot() function and plot the data in the
+specified graphics method format.
+
+See table below for additional plot functions.
+
++--------------------+--------------------------------------------------+
+| Plot Function      | Description                                      |
++====================+==================================================+
+| ``boxfill()``      | plot data using the boxfill graphics method      |
++--------------------+--------------------------------------------------+
+| ``continents()``   | plot to the screen continental graphics method   |
++--------------------+--------------------------------------------------+
+| ``isofill()``      | plot data using the isofill graphics method      |
++--------------------+--------------------------------------------------+
+| ``isoline()``      | plot data using the isoline graphics method      |
++--------------------+--------------------------------------------------+
+| ``outfill()``      | plot data using the outfill graphics method      |
++--------------------+--------------------------------------------------+
+| ``outline()``      | plot data using the outline graphics method      |
++--------------------+--------------------------------------------------+
+| ``scatter()``      | plot data using the scatter graphics method      |
++--------------------+--------------------------------------------------+
+| ``vector()``       | plot data using the vector graphics method       |
++--------------------+--------------------------------------------------+
+| ``xvsy()``         | plot data using the xvsy graphics method         |
++--------------------+--------------------------------------------------+
+| ``xyvsy()``        | plot data using the xyvsy graphics method        |
++--------------------+--------------------------------------------------+
+| ``yxvsy()``        | plot data using the yxvsy graphics method        |
++--------------------+--------------------------------------------------+
+| ``scalar3D()``     | plot data using the 3d\_scalar graphics method   |
++--------------------+--------------------------------------------------+
+| ``vector3D()``     | plot data using the 3d\_vector graphics method   |
++--------------------+--------------------------------------------------+
+
+
+Creating VCS Objects
+^^^^^^^^^^^^^^^^^^^^
+
+The create functions enables the user to create VCS objects which can be
+modified directly to produce the desired results. Since the VCS
+"default" objects do allow modifications, it is best to either create a
+new VCS object or get an existing one. When a VCS object is created, it
+is stored in an internal table for later use and/or recall.
+
+Create the following VCS objects:
+
++-------------------------------+---------------------------------------------------+
+| Create Function               | Description                                       |
++===============================+===================================================+
+| ``createboxfill()``           | creates a new boxfill graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createcontinents()``        | creates a new continents graphics method object   |
++-------------------------------+---------------------------------------------------+
+| ``createfillarea()``          | creates a new fillarea secondary object           |
++-------------------------------+---------------------------------------------------+
+| ``createisofill()``           | creates a new isofill graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createisoline()``           | creates a new isoline graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createline()``              | creates a new line secondary object               |
++-------------------------------+---------------------------------------------------+
+| ``createmarker()``            | creates a new marker secondary object             |
++-------------------------------+---------------------------------------------------+
+| ``createoutfill()``           | creates a new outfill graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createoutline()``           | creates a new outline graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createscatter()``           | creates a new scatter graphics method object      |
++-------------------------------+---------------------------------------------------+
+| ``createtextcombined()``      | creates a new text-combined secondary object      |
++-------------------------------+---------------------------------------------------+
+| ``createtextorientation()``   | creates a new text-orientation secondary object   |
++-------------------------------+---------------------------------------------------+
+| ``createtexttable()``         | creates a new text-table secondary object         |
++-------------------------------+---------------------------------------------------+
+| ``createvector()``            | creates a new vector graphics method object       |
++-------------------------------+---------------------------------------------------+
+| ``createxvsy()``              | creates a new xvsy graphics method object         |
++-------------------------------+---------------------------------------------------+
+| ``createxyvsy()``             | creates a new xyvsy graphics method object        |
++-------------------------------+---------------------------------------------------+
+| ``createyxvsx()``             | creates a new yxvsx graphics method object        |
++-------------------------------+---------------------------------------------------+
+| ``create3d_scalar()``         | creates a new 3d\_scalar graphics method object   |
++-------------------------------+---------------------------------------------------+
+| ``create3d_vector()``         | creates a new 3d\_vector graphics method object   |
++-------------------------------+---------------------------------------------------+
+
+
+Get Existing VCS Objects
+''''''''''''''''''''''''
+
+The get functions are used to obtain VCS objects that exist in the
+object memory tables. The get function directly manipulates the object's
+attributes in memory. If the object is used to display data on a plot
+and is manipulated by the user, then the plot will be automatically
+updated.
+
+Get the following VCS objects:
+
++----------------------------+--------------------------------------------------------------------------------------+
+| Get Function               | Description                                                                          |
++============================+======================================================================================+
+| ``getboxfill()``           | get specified boxfill graphics method and create boxfill object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getcontinents()``        | get specified continents graphics method and create continents object                |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getfillarea()``          | get specified fillarea secondary object and create fillarea object                   |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getisofill()``           | get specified isofill graphics method and create fillarea object                     |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getisoline()``           | get specified isoline graphics method and create isoline object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getline()``              | get specified line secondary object and create line object                           |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getmarker()``            | get specified marker secondary object and create marker object                       |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getoutfill()``           | get specified outfill graphics method and create outfill object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getoutline()``           | get specified outline graphics method and create outline object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getscatter()``           | get specified scatter graphics method and create scatter object                      |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``gettextcombined()``      | get specified text-combined secondary object and create text-combined object         |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``gettextorientation()``   | get specified text-orientation secondary object and create text-orientation object   |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``gettexttable()``         | get specified text-table secondary object and create text-table object               |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getvector()``            | get specified vector graphics method and create vector object                        |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getxvsy()``              | get specified xvsy graphics method and create xvsy object                            |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getxyvsy()``             | get specified xyvsy graphics method and create xyvsy object                          |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``getyxvsx()``             | get specified yxvsx graphics method and create yxvsx                                 |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``get3d_scalar()``         | get specified 3d\_scalar graphics method and create 3d\_scalar                       |
++----------------------------+--------------------------------------------------------------------------------------+
+| ``get3d_vector()``         | get specified 3d\_vector graphics method and create 3d\_vector                       |
++----------------------------+--------------------------------------------------------------------------------------+
+
+
+Removing VCS Objects
+^^^^^^^^^^^^^^^^^^^^
+
+Unwanted VCS objects can be removed from internal memory with the use of
+the remove function. The remove function will identify the VCS object
+type and remove it from the appropriate object table.
+
+Remove VCS objects:
+
++----------------------+----------------------------------------------------------------------+
+| Remove               | Description                                                          |
++======================+======================================================================+
+| ``removeobject()``   | allows the user to remove objects from the appropriate object list   |
++----------------------+----------------------------------------------------------------------+
+
+Show VCS Object List
+^^^^^^^^^^^^^^^^^^^^
+
+The show function is handy to list VCS objects tables.
+
+The show function is used to list the VCS objects in memory:
+
++-----------------+----------------------------------------------------------+
+| Show Function   | Description                                              |
++=================+==========================================================+
+| ``show()``      | list VCS primary and secondary class objects in memory   |
++-----------------+----------------------------------------------------------+
-- 
GitLab


From 917e581050bf6a70e04b3a17afaf474cdbba2f21 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 14 May 2016 14:39:13 -0400
Subject: [PATCH 059/196] Added reference documentation

---
 Packages/vcs/docs/user-guide.rst | 131 +++++++++++++++++++++++++++++++
 1 file changed, 131 insertions(+)

diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index de96d5e7f..9a9ae40fc 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -422,3 +422,134 @@ The show function is used to list the VCS objects in memory:
 +=================+==========================================================+
 | ``show()``      | list VCS primary and secondary class objects in memory   |
 +-----------------+----------------------------------------------------------+
+
+
+VCS Reference Guide
+-------------------
+
+``init``
+^^^^^^^^
+* Initialize, Construct a VCS Canvas Object
+
+.. code-block:: python
+
+    import vcs,cdms2
+
+    file = cdms2.open('clt.nc')
+
+    slab = file.getslab('clt')
+
+    # This example constructs 4 VCS Canvas objects
+    a = vcs.init()
+
+    # Plot slab using default settings
+    a.plot(slab)
+
+    b = vcs.init()
+
+    # Construct VCS object
+    template = b.gettemplate('AMIP')
+
+    # Get 'example' template object
+    b.plot(slab, template)
+
+    # Plot slab using template 'AMIP'
+    c = vcs.init()
+
+    # Construct new VCS object
+    isofill = c.getisofill('quick')
+
+    # Get 'quick' isofill graphics method
+    c.plot(slab,template,isofill)
+
+    # Plot slab using template and isofill objects
+    d = vcs.init()
+
+    # Construct new VCS object
+    isoline = c.getisoline('quick')
+
+    # Get 'quick' isoline graphics method
+    c.plot(isoline,slab,template)
+
+    # Plot slab using isoline and template objects
+
+``help``
+^^^^^^^^
+* Print out the object's doc string
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    ln = a.getline('red')
+
+    # Get a VCS line object
+    # This will print out information on how to use ln
+    a.objecthelp(ln)
+
+``open``
+^^^^^^^^
+* Open VCS Canvas object.
+* This routine really just manages the VCS canvas. It will popup the VCS Canvas for viewing. It can be used to display the VCS Canvas.
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    a.open()
+
+``close``
+^^^^^^^^^
+* Close the VCS Canvas. It will remove the VCS Canvas object from the screen, but not deallocate it.
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    a.plot(array, 'default', 'isofill', 'quick')
+    a.close()
+
+``mode``
+^^^^^^^^
+* ``Options <0 = manual, 1 = automatic>``
+* Update the VCS Canvas.
+* Updating of the graphical displays on the VCS Canvas can be deferred until a later time. This is helpful when generating templates or displaying numerous plots. If a series of commands are given to VCS and the Canvas Mode is set to manual (i.e., 0), then no updating of the VCS Canvas occurs until the 'update' function is executed.
+
+.. note:: By default the VCS Canvas Mode is set to ``1``, which means VCS will update the VCS Canvas as necessary without prompting from the user.
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    a.mode = 0
+    # Set updating to manual mode
+    a.plot(array, 'default', 'boxfill', 'quick')
+    box = a.getboxfill('quick')
+    box.color_1 = 100
+    box.xticlabels('lon30', 'lon30')
+    box.xticlabels('','')
+    box.datawc(1e20, 1e20, 1e20, 1e20)
+    box.datawc(-45.0, 45.0, -90.0, 90.0)
+
+    # Update the changes manually
+    a.update()
+
+``update``
+^^^^^^^^^^
+* Update the VCS Canvas manually when the ``mode`` is set to ``0`` (manual).
+
+.. code-block:: python
+
+    import vcs
+
+    a = vcs.init()
+    a.mode = 0
+
+    # Go to manual mode
+    a.plot(s, 'default', 'boxfill', 'quick')
+    box = a.getboxfill('quick')
+    box.color_1 = 100
+    box.xticlabels('lon30', 'lon30')
+    box.xticlabels('','')
+    box.datawc(1e20, 1e20, 1e20, 1e20)
+    box.datawc(-45.0, 45.0, -90.0, 90.0)
+
+    # Update the changes manually
+    a.update()
-- 
GitLab


From 853af67c62c4aa7e5e42400e3027bf497a5c49fd Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Mon, 16 May 2016 09:00:14 -0700
Subject: [PATCH 060/196] Update Canvas.py

Allow for deprecation warnings
---
 Packages/vcs/vcs/Canvas.py | 38 ++------------------------------------
 1 file changed, 2 insertions(+), 36 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 8de655fbc..79dcd0dcd 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -27,6 +27,7 @@ Normally, created by vcs.init()
 Contains the method plot.
 """
 import warnings
+warnings.filterwarnings("default","",DeprecationWarning,"",0)
 from pauser import pause
 import numpy.ma
 import MV2
@@ -5308,43 +5309,8 @@ Options:::
     ##########################################################################
     def gs(self, filename='noname.gs', device='png256',
            orientation=None, resolution='792x612'):
-        """
-        Function: gs
-
-        Description of Function:
-        This routine allows the user to save the VCS canvas in one of the many
-        GhostScript (gs) file types (also known as devices). To view other
-        GhostScript devices, issue the command "gs --help" at the terminal
-        prompt. Device names include: bmp256, epswrite, jpeg, jpeggray,
-        pdfwrite, png256, png16m, sgirgb, tiffpack, and tifflzw. By default
-        the device = 'png256'.
-
-        If no path/file name is given and no previously created gs file has been
-        designated, then file
-
-        /$HOME/%s/default.gs
-
-        will be used for storing gs images. However, if a previously created gs
-        file exist, then this output file will be used for storage.
 
-        By default, the page orientation is the canvas' orientation.
-        To translate the page orientation to portrait mode (p), set the parameter orientation = 'p'.
-        To translate the page orientation to landscape mode (l), set the parameter orientation = 'l'.
-
-        The gs command is used to create a single gs file at this point. The user
-        can use other tools to append separate image files.
-
-        Example of Use:
-        a=vcs.init()
-        a.plot(array)
-        a.gs('example') #defaults: device='png256', orientation='l' and resolution='792x612'
-        a.gs(filename='example.tif', device='tiffpack', orientation='l', resolution='800x600')
-        a.gs(filename='example.pdf', device='pdfwrite', orientation='l', resolution='200x200')
-        a.gs(filename='example.jpg', device='jpeg', orientation='p', resolution='1000x1000')
-
-        NOTE: This method is marked as deprecated
-        """ % (self._dotdir)
-        warnings.warn("Export to GhostScript is no longer supported", Warning)
+        warnings.warn("Export to GhostScript is no longer supported", DeprecationWarning)
 
     ##########################################################################
     #                                                                           #
-- 
GitLab


From 243863d7c57e1727ef3110610682910ad230026c Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Mon, 16 May 2016 14:36:56 -0400
Subject: [PATCH 061/196] Added note on deprecation warning and moved it
 further down

---
 Packages/vcs/vcs/Canvas.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 79dcd0dcd..06a8bebdc 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -27,7 +27,6 @@ Normally, created by vcs.init()
 Contains the method plot.
 """
 import warnings
-warnings.filterwarnings("default","",DeprecationWarning,"",0)
 from pauser import pause
 import numpy.ma
 import MV2
@@ -74,6 +73,8 @@ import vcs.manageElements  # noqa
 import configurator  # noqa
 from projection import round_projections  # noqa
 
+# Python < 3 DeprecationWarning ignored by default
+warnings.filterwarnings("default", "", DeprecationWarning, "", 0)
 
 class SIGNAL(object):
 
-- 
GitLab


From 4e5e43d954b7e8fd05c0873bc38df773e02e6743 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Mon, 16 May 2016 14:58:39 -0400
Subject: [PATCH 062/196] Using a simple approach to turn on deprecation
 warning

---
 Packages/vcs/vcs/Canvas.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 06a8bebdc..1599e0a03 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -74,7 +74,7 @@ import configurator  # noqa
 from projection import round_projections  # noqa
 
 # Python < 3 DeprecationWarning ignored by default
-warnings.filterwarnings("default", "", DeprecationWarning, "", 0)
+warnings.simplefilter('default')
 
 class SIGNAL(object):
 
-- 
GitLab


From ce13552f749e477e8fa55dc81b6b0d945d52497f Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Mon, 16 May 2016 13:18:22 -0700
Subject: [PATCH 063/196] Added some new tests

---
 Packages/vcsaddons/Lib/__init__.py            |   3 +
 Packages/vcsaddons/Lib/core.py                |   9 +-
 Packages/vcsaddons/Lib/histograms.py          |  53 ++-
 Packages/vcsaddons/Lib/polar.py               | 395 ++++++++++++++++++
 testing/vcsaddons/CMakeLists.txt              |  24 ++
 .../vcs_addons_test_convert_arrays.py         |  69 +++
 .../vcs_addons_test_histogram_defaults.py     |  22 +
 .../vcs_addons_test_histogram_inherit.py      |  62 +++
 testing/vcsaddons/vcs_addons_test_polar.py    |  28 ++
 .../vcs_addons_test_polar_inherit.py          |  50 +++
 10 files changed, 692 insertions(+), 23 deletions(-)
 create mode 100644 Packages/vcsaddons/Lib/polar.py
 create mode 100644 testing/vcsaddons/vcs_addons_test_convert_arrays.py
 create mode 100644 testing/vcsaddons/vcs_addons_test_histogram_defaults.py
 create mode 100644 testing/vcsaddons/vcs_addons_test_histogram_inherit.py
 create mode 100644 testing/vcsaddons/vcs_addons_test_polar.py
 create mode 100644 testing/vcsaddons/vcs_addons_test_polar_inherit.py

diff --git a/Packages/vcsaddons/Lib/__init__.py b/Packages/vcsaddons/Lib/__init__.py
index eadaa435c..1ba782945 100644
--- a/Packages/vcsaddons/Lib/__init__.py
+++ b/Packages/vcsaddons/Lib/__init__.py
@@ -1,5 +1,6 @@
 gms = {}
 import histograms
+import polar
 import EzTemplate
 import yxvsxfill
 import continents
@@ -10,3 +11,5 @@ def createhistogram(name=None,source='default',x=None,template=None):
     return histograms.Ghg(name,source=source,x=x,template=template)
 def createusercontinents(name=None,source="default",x=None,template=None):
     return continents.Guc(name,source=source,x=x,template=template)
+def createpolar(name=None, source="default", x=None, template=None):
+    return polar.Gpo(name, source=source, x=x, template=template)
diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py
index d1b49bf11..0a75da125 100644
--- a/Packages/vcsaddons/Lib/core.py
+++ b/Packages/vcsaddons/Lib/core.py
@@ -54,9 +54,12 @@ class VCSaddon(object):
             self.legend = None
             self.projection='linear'
         else:
-            gm =  vcsaddons.gms[self.g_type].get(source,None)
-            if gm is None:
-                raise "error could not find graphic method %s (of type %s)" % (source, self.g_type)
+            if isinstance(source, (str, unicode)):
+                gm = vcsaddons.gms[self.g_type].get(source,None)
+                if gm is None:
+                    raise "error could not find graphic method %s (of type %s)" % (source, self.g_type)
+            else:
+                gm = source
             self.datawc_x1=gm.datawc_x1
             self.datawc_x2=gm.datawc_x2
             self.datawc_y1=gm.datawc_y1
diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py
index 452ac671e..0273529cd 100644
--- a/Packages/vcsaddons/Lib/histograms.py
+++ b/Packages/vcsaddons/Lib/histograms.py
@@ -21,7 +21,10 @@ class Ghg(VCSaddon):
             self.fillareacolors = []
             self.bins = []
         else:
-            gm = vcsaddons.gms[self.g_name][source]
+            if isinstance(source, (str, unicode)):
+                gm = vcsaddons.gms[self.g_type][source]
+            else:
+                gm = source
             self.line = gm.line
             self.linewidth = gm.linewidth
             self.linecolors = gm.linecolors
@@ -31,16 +34,16 @@ class Ghg(VCSaddon):
             self.bins = gm.bins
 
     def list(self):
-        print '---------- Histogram (Ghg) member (attribute) listings ----------'
-        print 'Canvas Mode = ', self.x.mode
-        VCSaddon.list(self)
-        print 'fillareastyles = ', self.fillareastyles
-        print 'fillareaindices = ', self.fillareaindices
-        print 'fillareacolors = ', self.fillareacolors
-        print 'line = ', self.line
-        print 'linewidth = ', self.linewidth
-        print 'linecolors = ', self.linecolors
-        print 'bins = ', self.bins
+        print '---------- Histogram (Ghg) member (attribute) listings ----------'  # pragma: no cover
+        print 'Canvas Mode = ', self.x.mode  # pragma: no cover
+        VCSaddon.list(self)  # pragma: no cover
+        print 'fillareastyles = ', self.fillareastyles  # pragma: no cover
+        print 'fillareaindices = ', self.fillareaindices  # pragma: no cover
+        print 'fillareacolors = ', self.fillareacolors  # pragma: no cover
+        print 'line = ', self.line  # pragma: no cover
+        print 'linewidth = ', self.linewidth  # pragma: no cover
+        print 'linecolors = ', self.linecolors  # pragma: no cover
+        print 'bins = ', self.bins  # pragma: no cover
 
     def plot(self, data, template=None, bg=0, x=None, **kwargs):
         if x is None:
@@ -49,8 +52,8 @@ class Ghg(VCSaddon):
             template = self.template
         elif isinstance(template, str):
             template = x.gettemplate(template)
-        elif not vcs.istemplate(template):
-            raise ValueError("Error did not know what to do with template: %s" % template)
+        elif not vcs.istemplate(template):  # pragma: no cover
+            raise ValueError("Error did not know what to do with template: %s" % template)  # pragma: no cover
         try:
             data_name = data.title
         except AttributeError:
@@ -198,9 +201,9 @@ class Ghg(VCSaddon):
                         break
                     else:
                         # Shouldn't ever get here since level 0 is 0
-                        assert False
+                        assert False  # pragma: no cover
             else:
-                assert False
+                assert False  # pragma: no cover
             styles.append(self.fillareastyles[lev_ind])
             cols.append(self.fillareacolors[lev_ind])
             indices.append(self.fillareaindices[lev_ind])
@@ -235,11 +238,21 @@ class Ghg(VCSaddon):
         for d in dsp:
             if d is not None:
                 displays.append(d)
-
-        dsp = template.drawColorBar(self.fillareacolors, levels, legend={0: "No Variance", .1:"", .2: "", .3:"", .4:"", .5:"", .6:"", .7:"", .8:"", .9:"", 1: "High Variance"}, x=x)
-        for d in dsp:
-            if d is not None:
-                displays.append(d)
+        legend_labels = {0: "No Variance",
+                         .1: "",
+                         .2: "",
+                         .3: "",
+                         .4: "",
+                         .5: "",
+                         .6: "",
+                         .7: "",
+                         .8: "",
+                         .9: "",
+                         1: "High Variance"}
+        template.drawColorBar(self.fillareacolors, levels,
+                              legend=legend_labels, x=x,
+                              style=self.fillareastyles,
+                              index=self.fillareaindices)
 
         displays.append(x.plot(line, bg=bg))
 
diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py
new file mode 100644
index 000000000..27fb73882
--- /dev/null
+++ b/Packages/vcsaddons/Lib/polar.py
@@ -0,0 +1,395 @@
+import vcs
+import numpy
+import vcsaddons
+
+def circle_points(center, radius, points=75, ratio=1):
+    """
+    Generates the coordinates of a circle in x list and y list.
+    """
+    x = []
+    y = []
+    if ratio > 1:
+        ymul = ratio
+        xmul = 1
+    else:
+        xmul = ratio
+        ymul = 1
+    for i in range(points):
+        x.append(center[0] + xmul * radius * numpy.cos(float(i) / points * numpy.pi * 2))
+        y.append(center[1] + ymul * radius * numpy.sin(float(i) / points * numpy.pi * 2))
+    x.append(x[0])
+    y.append(y[0])
+    return x, y
+
+
+def text_orientation_for_angle(theta, source="default"):
+    """
+    Generates a text orientation that will align text to look good depending on quadrant.
+    """
+    # Normalize to [0, 2*pi)
+    while 0 > theta:
+        theta += 2 * numpy.pi
+    while 2 * numpy.pi <= theta:
+        theta -= 2 * numpy.pi
+
+    if 0 < theta < numpy.pi:
+        valign = "bottom"
+    elif 0 == theta or numpy.pi == theta:
+        valign = "half"
+    else:
+        valign = "top"
+
+    if numpy.pi / 2 > theta or numpy.pi * 3 / 2 < theta:
+        halign = "left"
+    elif numpy.allclose(numpy.pi / 2, theta) or numpy.allclose(numpy.pi * 3 / 2, theta):
+        halign = "center"
+    else:
+        halign = "right"
+
+    # Build new text table
+    to = vcs.createtextorientation(source=source)
+    to.valign = valign
+    to.halign = halign
+    return to
+
+
+def convert_arrays(var, theta):
+    """
+    Normalizes valid input options to two lists of lists of values and a list of names.
+
+    Handles:
+    list/tuple of list/tuples/arrays
+    (X,N,2) array
+    (N,2) array
+    list/tuple, list/tuple
+    """
+    magnitudes = []
+    thetas = []
+    names = []
+    if theta is None:
+        # var must be list/tuple of arrays or an array
+        if isinstance(var, (list, tuple)):
+            for arr in var:
+                if isinstance(arr, numpy.ndarray):
+                    if len(arr.shape) == 2 and arr.shape[1] == 2:
+                        magnitudes.append(arr[..., 0].tolist())
+                        thetas.append(arr[..., 1].tolist())
+                        try:
+                            names.append(arr.id)
+                        except AttributeError:
+                            names.append(None)
+                    else:
+                        raise ValueError("Array is wrong shape; expected 2d array of 2-long elements, got %dd array of %d-long elements." % (len(arr.shape), arr.shape[-1]))
+                else:
+                    if len(arr) == 2:
+                        # Might be just a pair
+                        if not isinstance(arr[0], (list, tuple)):
+                            magnitudes.append([arr[0]])
+                            thetas.append([arr[1]])
+                            names.append(None)
+                            continue
+                    mag_group = []
+                    theta_group = []
+                    for val in arr:
+                        if len(val) != 2:
+                            raise ValueError("List is wrong shape; expected list/tuple of 2 element list/tuples, got %s of %d elements." % (type(val).__name__, len(val)))
+                        mag_group.append(val[0])
+                        theta_group.append(val[1])
+                        names.append(None)
+                    magnitudes.append(mag_group)
+                    thetas.append(theta_group)
+        else:
+            if len(var.shape) == 3:
+                for i in range(var.shape[0]):
+                    magnitudes.append(var[i, ..., 0].tolist())
+                    thetas.append(var[i, ..., 1].tolist())
+                    try:
+                        names.append(var[i].id)
+                    except AttributeError:
+                        names.append(None)
+            else:
+                magnitudes = [var[..., 0].tolist()]
+                thetas = [var[..., 1].tolist()]
+                try:
+                    names.append(var.id)
+                except AttributeError:
+                    names.append(None)
+    else:
+        magnitudes = []
+        if isinstance(var, (list, tuple)):
+            if isinstance(var[0], (list, tuple, numpy.ndarray)):
+                magnitudes = [list(v) for v in var]
+            else:
+                magnitudes = [var]
+        elif isinstance(var, numpy.ndarray):
+            if len(var.shape) == 1:
+                magnitudes = [list(var)]
+            elif len(var.shape) == 2:
+                magnitudes = [list(var[i]) for i in range(var.shape[0])]
+            else:
+                raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape))
+
+        thetas = []
+        if isinstance(theta, (list, tuple)):
+            if isinstance(theta[0], (list, tuple, numpy.ndarray)):
+                thetas = [list(v) for v in theta]
+            else:
+                thetas = [theta]
+        elif isinstance(theta, numpy.ndarray):
+            if len(theta.shape) == 1:
+                thetas = [list(theta)]
+            elif len(theta.shape) == 2:
+                thetas = [list(theta[i]) for i in range(theta.shape[0])]
+            else:
+                raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape))
+        names = [None] * len(var)
+    return magnitudes, thetas, names
+
+
+class Gpo(vcsaddons.core.VCSaddon):
+    def __init__(self, name=None, source="default", x=None, template=None):
+        self.g_name = "Gpo"
+        self.g_type = "polar_oned"
+        super(Gpo, self).__init__(name, source, x, template)
+        if source == "default":
+            self.markersizes = [3]
+            self.markercolors = ["black"]
+            self.markers = ["dot"]
+            self.clockwise = False
+            self.theta_offset = 0
+            self.magnitude_ticks = "*"
+            self.magnitude_tick_angle = 0
+            self.theta_tick_count = 6
+            self.group_names = []
+            # Nice default labels
+            self.xticlabels1 = {
+                0: "0 (2pi)",
+                numpy.pi / 4: "pi/4",
+                numpy.pi / 2: "pi/2",
+                numpy.pi * 3 / 4.: "3pi/4",
+                numpy.pi: "pi",
+                numpy.pi * 5 / 4.: "5pi/4",
+                numpy.pi * 3 / 2.: "3pi/2",
+                numpy.pi * 7 / 4.: "7pi/4",
+            }
+        else:
+            if isinstance(source, (str, unicode)):
+                gm = vcsaddons.gms[self.g_type][source]
+            else:
+                gm = source
+            self.markersizes = gm.markersizes
+            self.markercolors = gm.markercolors
+            self.markers = gm.markers
+            self.clockwise = gm.clockwise
+            self.theta_offset = gm.theta_offset
+            self.magnitude_ticks = gm.magnitude_ticks
+            self.magnitude_tick_angle = gm.magnitude_tick_angle
+            self.theta_tick_count = gm.theta_tick_count
+            self.group_names = gm.group_names
+
+    def theta_from_value(self, value):
+        if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20):
+            # No scale specified, just use the value as theta
+            return value
+
+        minval = self.datawc_x1
+        maxval = self.datawc_x2
+
+        pct_val = (value - minval) / float(maxval - minval)
+        rad_val = numpy.pi * 2 * pct_val
+        if self.clockwise:
+            # Reflect the value
+            rad_val *= -1
+        # Adjust by theta_offset
+        rad_val += self.theta_offset
+        return rad_val
+
+    def plot(self, var, theta=None, template=None, bg=0, x=None):
+        """
+        Plots a polar plot of your data.
+
+        If var is an ndarray with the second dimension being 2, it will use the first value
+        as magnitude and the second as theta.
+
+        Otherwise, if theta is provided, it uses var as magnitude and the theta given.
+        """
+        if x is None:
+            x = self.x
+        if template is None:
+            template = self.template
+
+        magnitudes, thetas, names = convert_arrays(var, theta)
+
+        if self.group_names:
+            names = self.group_names
+            while len(names) < len(magnitudes):
+                names.append(None)
+
+        flat_magnitude = []
+        for i in magnitudes:
+            flat_magnitude.extend(i)
+        flat_theta = []
+        for i in thetas:
+            flat_theta.extend(i)
+
+        canvas = x
+
+        # Determine aspect ratio for plotting the circle
+        canvas_info = canvas.canvasinfo()
+        # Calculate aspect ratio of window
+        window_aspect = canvas_info["width"] / float(canvas_info["height"])
+        if window_aspect > 1:
+            ymul = window_aspect
+            xmul = 1
+        else:
+            ymul = 1
+            xmul = window_aspect
+        # Use window_aspect to adjust size of template.data
+        x0, x1 = template.data.x1, template.data.x2
+        y0, y1 = template.data.y1, template.data.y2
+
+        xdiff = abs(x1 - x0)
+        ydiff = abs(y1 - y0)
+
+        center = x0 + xdiff / 2., y0 + ydiff / 2.
+        diameter = min(xdiff, ydiff)
+        radius = diameter / 2.
+
+        # Outer line
+        if template.box1.priority > 0:
+            outer = vcs.createline(source=template.box1.line)
+            x, y = circle_points(center, radius, ratio=window_aspect)
+            outer.x = x
+            outer.y = y
+            canvas.plot(outer, render=False, bg=bg)
+
+        if numpy.allclose((self.datawc_y1, self.datawc_y2), 1e20):
+            if self.magnitude_ticks == "*":
+                m_scale = vcs.mkscale(*vcs.minmax(flat_magnitude))
+            else:
+                if isinstance(self.magnitude_ticks, (str, unicode)):
+                    ticks = vcs.elements["list"][self.magnitude_ticks]
+                else:
+                    ticks = self.magnitude_ticks
+                m_scale = ticks
+        else:
+            m_scale = vcs.mkscale(self.datawc_y1, self.datawc_y2)
+
+        if template.ytic1.priority > 0:
+            m_ticks = vcs.createline(source=template.ytic1.line)
+            m_ticks.x = []
+            m_ticks.y = []
+
+            if template.ylabel1.priority > 0:
+                to = text_orientation_for_angle(self.magnitude_tick_angle, source=template.ylabel1.textorientation)
+                m_labels = vcs.createtext(Tt_source=template.ylabel1.texttable, To_source=to)
+                m_labels.x = []
+                m_labels.y = []
+                m_labels.string = []
+                if self.yticlabels1 == "*":
+                    mag_labels = vcs.mklabels(m_scale)
+                else:
+                    mag_labels = self.yticlabels1
+            else:
+                m_labels = None
+
+            for lev in m_scale:
+                lev_radius = radius * float(lev) / m_scale[-1]
+                x, y = circle_points(center, lev_radius, ratio=window_aspect)
+                if m_labels is not None:
+                    if lev in mag_labels:
+                        m_labels.string.append(mag_labels[lev])
+                        m_labels.x.append(xmul * lev_radius * numpy.cos(self.magnitude_tick_angle) + center[0])
+                        m_labels.y.append(ymul * lev_radius * numpy.sin(self.magnitude_tick_angle) + center[1])
+                m_ticks.x.append(x)
+                m_ticks.y.append(y)
+
+            canvas.plot(m_ticks, render=False, bg=bg)
+            if m_labels is not None:
+                canvas.plot(m_labels, render=False, bg=bg)
+
+        if template.xtic1.priority > 0:
+            t_ticks = vcs.createline(source=template.xtic1.line)
+            t_ticks.x = []
+            t_ticks.y = []
+
+            if self.xticlabels1 == "*":
+                if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20):
+                    tick_thetas = list(numpy.arange(0, numpy.pi * 2, numpy.pi / 4))
+                    tick_labels = {t: str(t) for t in tick_thetas}
+                else:
+                    d_theta = (self.datawc_x2 - self.datawc_x1) / float(self.theta_tick_count)
+                    tick_thetas = numpy.arange(self.datawc_x1, self.datawc_x2 + .0001, d_theta)
+                    tick_labels = vcs.mklabels(tick_thetas)
+            else:
+                tick_thetas = self.xticlabels1.keys()
+                tick_labels = self.xticlabels1
+
+            if template.xlabel1.priority > 0:
+                t_labels = []
+                if self.xticlabels1 == "*":
+                    theta_labels = vcs.mklabels(tick_thetas)
+                else:
+                    theta_labels = self.xticlabels1
+            else:
+                t_labels = None
+
+            for t in tick_thetas:
+                angle = self.theta_from_value(t)
+                x0 = center[0] + (xmul * radius * numpy.cos(angle))
+                x1 = center[0]
+                y0 = center[1] + (ymul * radius * numpy.sin(angle))
+                y1 = center[1]
+                if t_labels is not None:
+                    label = vcs.createtext(Tt_source=template.xlabel1.texttable,
+                                           To_source=text_orientation_for_angle(angle, source=template.xlabel1.textorientation))
+                    label.string = [theta_labels[t]]
+                    label.x = [x0]
+                    label.y = [y0]
+                    t_labels.append(label)
+                t_ticks.x.append([x0, x1])
+                t_ticks.y.append([y0, y1])
+            canvas.plot(t_ticks, render=False, bg=bg)
+            if t_labels is not None:
+                for l in t_labels:
+                    canvas.plot(l, render=False, bg=bg)
+
+        values = vcs.createmarker()
+        values.type = self.markers
+        values.size = self.markersizes
+        values.color = self.markercolors
+        values.x = []
+        values.y = []
+
+        if template.legend.priority > 0:
+            # Only labels that are set will show up in the legend
+            label_count = len(names) - len([i for i in names if i is None])
+            labels = vcs.createtext(Tt_source=template.legend.texttable, To_source=template.legend.textorientation)
+            labels.x = []
+            labels.y = []
+            labels.string = []
+
+        for mag, theta, name in zip(magnitudes, thetas, names):
+            x = []
+            y = []
+            for m, t in zip(mag, theta):
+                t = self.theta_from_value(t)
+                r = (m - m_scale[0]) / float(m_scale[-1] - m_scale[0]) * radius
+                x.append(xmul * numpy.cos(t) * r + center[0])
+                y.append(ymul * numpy.sin(t) * r + center[1])
+
+            if template.legend.priority > 0 and name is not None:
+                lx, ly = template.legend.x1, template.legend.y1 + len(labels.x) / float(label_count) * (template.legend.y2 - template.legend.y1)
+                x.append(lx)
+                y.append(ly)
+                labels.x.append(lx + .01)
+                labels.y.append(ly)
+                labels.string.append(name)
+            values.x.append(x)
+            values.y.append(y)
+
+        if template.legend.priority > 0:
+            canvas.plot(labels, bg=bg, render=False)
+        canvas.plot(values, bg=bg)
+
+        return canvas
diff --git a/testing/vcsaddons/CMakeLists.txt b/testing/vcsaddons/CMakeLists.txt
index 64b8d1755..06ebff600 100644
--- a/testing/vcsaddons/CMakeLists.txt
+++ b/testing/vcsaddons/CMakeLists.txt
@@ -35,6 +35,30 @@ cdat_add_test(vcs_addons_test_EzTemplate_12_plots_spacing
   ${cdat_SOURCE_DIR}/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
   ${BASELINE_DIR}/test_EzTemplate_12_plots_spacing.png
 )
+cdat_add_test(vcs_addons_test_histogram_defaults
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_histogram_defaults.py
+  ${BASELINE_DIR}/vcs_addons_test_histogram_defaults.png
+)
+cdat_add_test(vcs_addons_test_histogram_inherit
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_histogram_inherit.py
+  ${BASELINE_DIR}/vcs_addons_test_histogram_inherit.png
+)
+cdat_add_test(vcs_addons_test_polar
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar.py
+  ${BASELINE_DIR}/vcs_addons_test_polar.png
+)
+cdat_add_test(vcs_addons_test_polar_inherit
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_inherit.py
+  ${BASELINE_DIR}/vcs_addons_test_polar_inherit.png
+)
+cdat_add_test(vcs_addons_test_convert_arrays
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_convert_arrays.py
+)
 
 if (CDAT_DOWNLOAD_SAMPLE_DATA)
   cdat_add_test(vcs_addons_EzTemplate_2x2
diff --git a/testing/vcsaddons/vcs_addons_test_convert_arrays.py b/testing/vcsaddons/vcs_addons_test_convert_arrays.py
new file mode 100644
index 000000000..c39bea513
--- /dev/null
+++ b/testing/vcsaddons/vcs_addons_test_convert_arrays.py
@@ -0,0 +1,69 @@
+import vcsaddons
+import numpy
+
+magnitudes = [1, 2, 3, 4]
+thetas = [5, 6, 7, 8]
+zipped_input = zip(magnitudes, thetas)
+grouped_zipped = [zipped_input[:2], zipped_input[2:]]
+
+one_array = numpy.array(zip(magnitudes, thetas))
+three_d_array = numpy.array(grouped_zipped)
+two_arrays = numpy.array(magnitudes), numpy.array(thetas)
+two_array_groups = numpy.array([magnitudes[:2], magnitudes[2:]]), numpy.array([thetas[:2], thetas[2:]])
+list_and_array = two_arrays[0], thetas
+two_lists = magnitudes, thetas
+lists_of_arrays = [two_arrays[0]], [two_arrays[1]]
+array_and_list = magnitudes, two_arrays[1]
+one_list_tuples = zip(magnitudes, thetas)
+one_list_grouped_tuples = [zip(magnitudes[:2], thetas[:2]), zip(magnitudes[2:], thetas[2:])]
+one_list_of_arrays = [numpy.array(zip(magnitudes[:2], thetas[:2])), numpy.array(zip(magnitudes[2:], thetas[2:]))]
+
+def compare(input, expected):
+    result = vcsaddons.polar.convert_arrays(*input)
+    print "Checking", result[0:2], "vs", expected
+    assert result[0] == expected[0]
+    assert result[1] == expected[1]
+
+grouped = ([magnitudes[:2], magnitudes[2:]],[thetas[:2], thetas[2:]])
+
+compare((one_array, None), ([magnitudes],[thetas]))
+compare(two_arrays, ([magnitudes],[thetas]))
+compare(two_array_groups, grouped)
+three_d_expected = ([[1, 2], [3, 4]], [[5, 6], [7, 8]])
+compare((three_d_array, None), three_d_expected)
+compare(list_and_array, ([magnitudes],[thetas]))
+compare(two_lists, ([magnitudes],[thetas]))
+compare(lists_of_arrays, ([magnitudes],[thetas]))
+compare(array_and_list, ([magnitudes],[thetas]))
+compare((one_list_tuples, None), ([[i] for i in magnitudes], [[i] for i in thetas]))
+compare((one_list_grouped_tuples, None), grouped)
+compare((one_list_of_arrays, None), grouped)
+
+
+def test_error(input, error):
+    try:
+        vcsaddons.polar.convert_arrays(*input)
+    except:
+        print "Got", error
+    else:
+        assert False, "Should have raised a %s" % error
+
+# Test error conditions
+
+# Single arg:
+
+# List of 3d arrays
+test_error(([numpy.array([[[1, 2]]])], None), "ValueError for list of 3d arrays")
+# >2 element arrays
+test_error(([numpy.array([[1, 2, 3]])], None), "ValueError for list of 3-element arrays")
+# <2 element arrays
+test_error(([numpy.array([[1]])], None), "ValueError for list of 1-element arrays")
+# Wrong-sized lists
+test_error(([[(1, 2, 3)]], None), "ValueError for wrong sized lists.")
+
+
+# Two args:
+
+# Too many dimensions
+test_error((numpy.array([[[1, 2]]]), numpy.array([[1, 2]])), "ValueError for too many dimensions for magnitude.")
+test_error((numpy.array([[1, 2]]), numpy.array([[[1, 2]]])), "ValueError for too many dimensions for magnitude.")
diff --git a/testing/vcsaddons/vcs_addons_test_histogram_defaults.py b/testing/vcsaddons/vcs_addons_test_histogram_defaults.py
new file mode 100644
index 000000000..b2b19e499
--- /dev/null
+++ b/testing/vcsaddons/vcs_addons_test_histogram_defaults.py
@@ -0,0 +1,22 @@
+import sys,os
+src = sys.argv[1]
+pth = os.path.join(os.path.dirname(__file__),"..")
+sys.path.append(pth)
+import checkimage
+import vcs
+import vcsaddons, numpy
+
+x=vcs.init()
+x.setantialiasing(0)
+x.drawlogooff()
+x.setbgoutputdimensions(1200,1091,units="pixels")
+
+numpy.random.seed(seed=12345)
+vals = numpy.random.random_sample(2000) * 100
+histo = vcsaddons.histograms.Ghg()
+histo.plot(vals, bg=True, x=x)
+
+fnm = "vcs_addons_test_histogram_defaults.png"
+x.png(fnm)
+ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+sys.exit(ret)
diff --git a/testing/vcsaddons/vcs_addons_test_histogram_inherit.py b/testing/vcsaddons/vcs_addons_test_histogram_inherit.py
new file mode 100644
index 000000000..8ce19e0c2
--- /dev/null
+++ b/testing/vcsaddons/vcs_addons_test_histogram_inherit.py
@@ -0,0 +1,62 @@
+import sys,os
+src = sys.argv[1]
+pth = os.path.join(os.path.dirname(__file__),"..")
+sys.path.append(pth)
+import checkimage
+import vcs, cdms2
+import vcsaddons, numpy
+
+x=vcs.init()
+x.setantialiasing(0)
+x.drawlogooff()
+x.setbgoutputdimensions(1200,1091,units="pixels")
+cdmsfile = cdms2.open(vcs.sample_data + "/clt.nc")
+clt = cdmsfile("clt")
+
+levels = [10, 20, 30, 40, 60, 70, 80, 90, 100]
+histo = vcsaddons.histograms.Ghg()
+histo.bins = levels
+histo.line = ["solid", "dash", "dash-dot"]
+histo.linewidth = [1, 2, 3]
+histo.linecolors = ["red", "green", "blue"]
+histo.fillareastyles = ["solid", "hatch", "pattern", "solid"]
+histo.fillareaindices = [1, 2, 3, 4]
+histo.fillareacolors = ["blue", "green", "red", "orange"]
+
+histo2 = vcsaddons.createhistogram(source=histo)
+
+print "Checking all inherited attributes..."
+assert histo2.bins == histo.bins
+assert histo2.line == histo.line
+assert histo2.linewidth == histo.linewidth
+assert histo2.linecolors == histo.linecolors
+assert histo2.fillareastyles == histo.fillareastyles
+assert histo2.fillareacolors == histo.fillareacolors
+assert histo2.fillareaindices == histo.fillareaindices
+print "Inherited all values."
+
+histo2.levels = [10, 20, 10, 100, 110, 50, 20]
+histo3 = vcsaddons.createhistogram(source=histo2.name, x=x)
+
+print "Checking name-based inheritance"
+assert histo3.bins == histo2.bins
+assert histo3.line == histo2.line
+assert histo3.linewidth == histo2.linewidth
+assert histo3.linecolors == histo2.linecolors
+assert histo3.fillareastyles == histo2.fillareastyles
+assert histo3.fillareacolors == histo2.fillareacolors
+assert histo3.fillareaindices == histo2.fillareaindices
+print "Inherited all values."
+
+histo3.datawc_y1 = -1
+histo3.datawc_y2 = 200000
+histo3.datawc_x1 = 0
+histo3.datawc_x2 = 100
+
+histo3.bins = None
+histo3.plot(clt, template="default", bg=True)
+
+fnm = "vcs_addons_test_histogram_inherit.png"
+x.png(fnm)
+ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+sys.exit(ret)
diff --git a/testing/vcsaddons/vcs_addons_test_polar.py b/testing/vcsaddons/vcs_addons_test_polar.py
new file mode 100644
index 000000000..8a848e7a1
--- /dev/null
+++ b/testing/vcsaddons/vcs_addons_test_polar.py
@@ -0,0 +1,28 @@
+import sys,os
+src = sys.argv[1]
+pth = os.path.join(os.path.dirname(__file__),"..")
+sys.path.append(pth)
+import checkimage
+import vcs
+import vcsaddons, numpy
+
+x=vcs.init()
+x.setantialiasing(0)
+x.drawlogooff()
+x.setbgoutputdimensions(1200,1091,units="pixels")
+
+polar = vcsaddons.polar.Gpo()
+polar.markers = ["dot", "circle"]
+polar.markersizes = [3, 5]
+
+polar.magnitude_tick_angle = numpy.pi / 6
+
+theta = list(numpy.arange(0, 4 * numpy.pi + .01, numpy.pi / 24))
+magnitude = list(numpy.sin(theta))
+
+polar.plot(magnitude, theta, bg=True, x=x)
+
+fnm = "vcs_addons_test_polar.png"
+x.png(fnm)
+ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+sys.exit(ret)
diff --git a/testing/vcsaddons/vcs_addons_test_polar_inherit.py b/testing/vcsaddons/vcs_addons_test_polar_inherit.py
new file mode 100644
index 000000000..4eb946359
--- /dev/null
+++ b/testing/vcsaddons/vcs_addons_test_polar_inherit.py
@@ -0,0 +1,50 @@
+import sys,os
+src = sys.argv[1]
+pth = os.path.join(os.path.dirname(__file__),"..")
+sys.path.append(pth)
+import checkimage
+import vcs
+import vcsaddons, numpy
+
+x=vcs.init()
+x.setantialiasing(0)
+x.drawlogooff()
+x.setbgoutputdimensions(1200,1091,units="pixels")
+
+gm = vcsaddons.polar.Gpo()
+gm.markers = ["dot", "circle"]
+gm.markersizes = [3, 5]
+gm.markercolors = ["red", "blue"]
+gm.clockwise = True
+gm.theta_offset = numpy.pi / 4
+gm.magnitude_ticks = [.2 * i for i in range(6)]
+gm.magnitude_tick_angle = numpy.pi / 10
+gm.theta_tick_count = 10
+gm.group_names = ["First", "Second"]
+
+polar = vcsaddons.polar.Gpo(source=gm)
+
+assert polar.markersizes == gm.markersizes
+assert polar.markercolors == gm.markercolors
+assert polar.markers == gm.markers
+assert polar.clockwise == gm.clockwise
+assert polar.theta_offset == gm.theta_offset
+assert polar.magnitude_ticks == gm.magnitude_ticks
+assert polar.magnitude_tick_angle == gm.magnitude_tick_angle
+assert polar.theta_tick_count == gm.theta_tick_count
+assert polar.group_names == gm.group_names
+
+polar.magnitude_tick_angle = numpy.pi / 6
+
+theta = list(numpy.arange(0, 4 * numpy.pi + .01, numpy.pi / 24))
+magnitude = list(numpy.sin(theta))
+
+theta = [theta[:len(theta) / 2], theta[len(theta) / 2:]]
+magnitude = [magnitude[:len(magnitude)/ 2], magnitude[len(magnitude) / 2:]]
+
+polar.plot(magnitude, theta, bg=True, x=x)
+
+fnm = "vcs_addons_test_polar_inherit.png"
+x.png(fnm)
+ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+sys.exit(ret)
-- 
GitLab


From 2dbbdec142265dc8199f3d990201ad660c551392 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Tue, 17 May 2016 16:34:12 -0400
Subject: [PATCH 064/196] Fixed vtk ui tests

---
 testing/vcs/vtk_ui/vtk_ui_test.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/testing/vcs/vtk_ui/vtk_ui_test.py b/testing/vcs/vtk_ui/vtk_ui_test.py
index 40bbeaef1..d5cbe18bf 100644
--- a/testing/vcs/vtk_ui/vtk_ui_test.py
+++ b/testing/vcs/vtk_ui/vtk_ui_test.py
@@ -1,4 +1,4 @@
-import, os, sys, time, vcs.vtk_ui, vtk
+import os, sys, time, vtk, vcs.vtk_ui
 
 
 def init():
@@ -102,7 +102,7 @@ class vtk_ui_test(object):
         generate_png(self.win, self.test_file)
         pth = os.path.join(os.path.dirname(__file__), "../..")
         sys.path.append(pth)
-        import regression
+        import testing.regression as regression
         return regression.check_result_image(self.test_file, compare_against)
 
     def test(self):
-- 
GitLab


From 29d182bd5442ea6b71f95f9856d6ec9a9983a6ac Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 07:31:37 -0400
Subject: [PATCH 065/196] Updated test name to match the naming convention

---
 testing/vcs/test_vcs_boxfill_lev1_lev2.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2.py b/testing/vcs/test_vcs_boxfill_lev1_lev2.py
index c5b5cbcbe..6c1d985d7 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2.py
@@ -7,4 +7,4 @@ b = x.createboxfill()
 b.level_1 = .5
 b.level_2 = 14.5
 x.plot(s, b, bg=1)
-regression.run(x, "test_boxfill_lev1_lev2.png")
+regression.run(x, "test_vcs_boxfill_lev1_lev2.png")
-- 
GitLab


From 88df13b95b1a5000db8d5e58c9c3152865dc5baa Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 09:57:38 -0400
Subject: [PATCH 066/196] Updated dv3d package to use regression module

---
 testing/dv3d/TestManager.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/testing/dv3d/TestManager.py b/testing/dv3d/TestManager.py
index 51ed57183..94e7e365e 100644
--- a/testing/dv3d/TestManager.py
+++ b/testing/dv3d/TestManager.py
@@ -10,7 +10,7 @@ import vcs, os, sys, shutil, collections, subprocess
 TestingDir=os.path.dirname(__file__)
 pth = os.path.join(TestingDir,"..")
 sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
 DefaultSampleFile = "geos5-sample.nc"
 DefaultSampleVar = "uwnd"
@@ -128,8 +128,8 @@ class vcsTest:
         test_image = '.'.join( [ self.name, 'test', 'png' ] )
         self.canvas.png( test_image, width = 900, height = 600 )
 
-        ret = checkimage.check_result_image( test_image, self.image_name,\
-                checkimage.defaultThreshold+3. )
+        ret = regression.check_result_image( test_image, self.image_name,\
+                regression.defaultThreshold+3. )
 
         if  interactive:
             print "Type <Enter> to continue and update ref image ( type 'n' to skip update )."
-- 
GitLab


From 9e86fb3b9be60ea6e08558034a56c2a22bc2c9db Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 09:59:10 -0400
Subject: [PATCH 067/196] Fixed flake8 test

---
 Packages/vcs/vcs/Canvas.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 49fdbf978..0e9d23426 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -76,6 +76,7 @@ from projection import round_projections  # noqa
 # Python < 3 DeprecationWarning ignored by default
 warnings.simplefilter('default')
 
+
 class SIGNAL(object):
 
     def __init__(self, name=None):
-- 
GitLab


From dc508774b3b0d886a7908213c73f8ac21b64a55d Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 10:01:36 -0400
Subject: [PATCH 068/196] Updated esmf test to use regression module

---
 .../regrid/testEsmfRegridPeriodictyRegional.py  | 17 +++--------------
 1 file changed, 3 insertions(+), 14 deletions(-)

diff --git a/testing/regrid/testEsmfRegridPeriodictyRegional.py b/testing/regrid/testEsmfRegridPeriodictyRegional.py
index 2b9279525..53a1a706b 100644
--- a/testing/regrid/testEsmfRegridPeriodictyRegional.py
+++ b/testing/regrid/testEsmfRegridPeriodictyRegional.py
@@ -1,9 +1,4 @@
-import vcs,cdms2
-import os,sys
-import EzTemplate
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, vcs, cdms2, EzTemplate, testing.regression as regression
 
 data = sys.argv[1]
 png = sys.argv[2]
@@ -38,11 +33,7 @@ s_esmf_lin.id = "ESMF Linear"
 s_esmf_con = s.regrid(grid_dest,regridTool="esmf",regridMethod="conservative")
 s_esmf_lin.id = "ESMF Conservative"
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x=regression.init()
 t=x.createtemplate()
 t.blank()
 t.data.priority=1
@@ -60,7 +51,5 @@ x.plot(s,M.get(),gm,bg=1)
 x.plot(s_regrid2,M.get(),gm,bg=1)
 x.plot(s_esmf_lin,M.get(),gm,bg=1)
 x.plot(s_esmf_con,M.get(),gm,bg=1)
-x.png("esmf_issue_1125")
 
-ret = checkimage.check_result_image("esmf_issue_1125.png",png,checkimage.defaultThreshold)
-sys.exit(ret)
+ret = regression.run(x, "esmf_issue_1125.png", png)
-- 
GitLab


From 395c7459b2a3915c0c7a6eb4f37561ad9af972b8 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 10:10:53 -0400
Subject: [PATCH 069/196] Updated diagnostics tests to use regression module

---
 testing/metrics/diags_test.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/testing/metrics/diags_test.py b/testing/metrics/diags_test.py
index ff71c8d66..2d4131cb8 100755
--- a/testing/metrics/diags_test.py
+++ b/testing/metrics/diags_test.py
@@ -12,7 +12,7 @@ import sys, os, shutil, tempfile, subprocess
 import cdms2, numpy
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import argparse, pdb
 
 class DiagTest(object):
@@ -95,7 +95,7 @@ class DiagTest(object):
     def execute(self, test_str, imagefilename, imagethreshold, ncfiles, rtol, atol):
         print test_str
         if imagethreshold is None:  # user didn't specify a value
-     	    imagethreshold = checkimage.defaultThreshold
+	    imagethreshold = regression.defaultThreshold
         # Silence annoying messages about how to set the NetCDF file type.  Anything will do.
         cdms2.setNetcdfShuffleFlag(0)
         cdms2.setNetcdfDeflateFlag(0)
@@ -118,7 +118,7 @@ class DiagTest(object):
             imagebaselinefname = os.path.join( self.baselinepath, imagefilename )
             #pdb.set_trace()
             print "OK THRESHOLD IS:",imagethreshold
-            graphics_result = checkimage.check_result_image( imagefname, imagebaselinefname, imagethreshold )
+            graphics_result = regression.check_result_image( imagefname, imagebaselinefname, imagethreshold )
             print "Graphics file", imagefname, "match difference:", graphics_result
             
             #initialize to successful graphics check
-- 
GitLab


From 314fc2bf70fb9c410ba7c1200a1fcaec9dfbfdd5 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 11:53:42 -0400
Subject: [PATCH 070/196] Fixed numpy test

---
 testing/vcs/test_vcs_boxfill_10x10_numpy.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/testing/vcs/test_vcs_boxfill_10x10_numpy.py b/testing/vcs/test_vcs_boxfill_10x10_numpy.py
index a45aa5889..765917942 100644
--- a/testing/vcs/test_vcs_boxfill_10x10_numpy.py
+++ b/testing/vcs/test_vcs_boxfill_10x10_numpy.py
@@ -3,5 +3,5 @@ import vcs, numpy, os, sys, testing.regression as regression
 s = numpy.sin(numpy.arange(100))
 s = numpy.reshape(s,(10,10))
 x = regression.init()
-x.plot(s)
+x.plot(s, bg=1)
 regression.run(x, "test_vcs_boxfill_10x10_numpy.png")
\ No newline at end of file
-- 
GitLab


From f19b372e0ccc187eef150f1ce7af83a56021616b Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 11:59:38 -0400
Subject: [PATCH 071/196] Fixed canvas update test

---
 testing/vcs/test_vcs_boxfill_decreasing_latitude.py | 2 +-
 testing/vcs/test_vcs_canvas_background_update.py    | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/testing/vcs/test_vcs_boxfill_decreasing_latitude.py b/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
index 1f727e3b5..6cbc0f016 100755
--- a/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
+++ b/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
@@ -51,5 +51,5 @@ gm.fillareacolors = cols
 gm.datawc_y2 = 30
 gm.datawc_y1 = 90
 
-x.plot(ice, gm, tmpl, bg = 1)
+x.plot(ice, gm, tmpl, bg=1)
 regression.run(x, "test_boxfill_decreasing_latitude.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_canvas_background_update.py b/testing/vcs/test_vcs_canvas_background_update.py
index a28c08713..80f79d2aa 100644
--- a/testing/vcs/test_vcs_canvas_background_update.py
+++ b/testing/vcs/test_vcs_canvas_background_update.py
@@ -5,4 +5,4 @@ x.backgroundcolor = (255, 255, 255)
 x.open()
 x.backgroundcolor = (255, 255, 0)
 x.update()
-regression.run(x, "test_backgroundcolor_yellow.png")
\ No newline at end of file
+regression.check_result_image(x, "test_backgroundcolor_yellow.png")
\ No newline at end of file
-- 
GitLab


From a14200869100199487b89ea08dd37ad2b7d97731 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 16:06:31 -0400
Subject: [PATCH 072/196] Using a fixed size for click test

---
 Packages/testing/regression.py     | 8 +++++---
 testing/vcs/test_vcs_click_info.py | 2 +-
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index b272d592f..68ded76d9 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -16,14 +16,16 @@ import vcs
 
 defaultThreshold=10.0
 
-def init():
+def init(*args, **kwargs):
     testingDir = os.path.join(os.path.dirname(__file__), "..")
     sys.path.append(testingDir)
 
-    vcsinst = vcs.init()
+    vcsinst = vcs.init(*args, **kwargs)
     vcsinst.setantialiasing(0)
     vcsinst.drawlogooff()
-    vcsinst.setbgoutputdimensions(1200,1091,units="pixels")
+
+    if ('bg' in kwargs and kwargs['bg']) or ('bg' not in kwargs):
+        vcsinst.setbgoutputdimensions(1200, 1091, units="pixels")
     return vcsinst
 
 def run(vcsinst, fname, baseline=sys.argv[1], threshold=defaultThreshold):
diff --git a/testing/vcs/test_vcs_click_info.py b/testing/vcs/test_vcs_click_info.py
index c16a6c0d4..f37ee651a 100644
--- a/testing/vcs/test_vcs_click_info.py
+++ b/testing/vcs/test_vcs_click_info.py
@@ -8,7 +8,7 @@ testConfig = {'a_boxfill': ('clt.nc', 'clt', (200, 200)),
 # Tests if the info produced when clicking on a map is correct.
 src = sys.argv[1]
 plot = sys.argv[2]
-x = regression.init()
+x = regression.init(bg=False, geometry=(800, 600))
 
 # data
 f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
-- 
GitLab


From 9c121a20f2121a0fcca665123d50b27553f176a0 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 21:19:46 -0400
Subject: [PATCH 073/196] Fixed failing test because of incorrect capture
 window size

---
 testing/vcs/test_vcs_configurator_resize.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/testing/vcs/test_vcs_configurator_resize.py b/testing/vcs/test_vcs_configurator_resize.py
index 7692e6218..bd3490afb 100644
--- a/testing/vcs/test_vcs_configurator_resize.py
+++ b/testing/vcs/test_vcs_configurator_resize.py
@@ -4,15 +4,16 @@ x = vcs.init()
 x.open()
 x.configure()
 
-x.backend.renWin.SetSize(814, 303)
-
 fnm = "test_vcs_configurator_resize.png"
 
 win = x.backend.renWin
-win.Render()
+win.SetSize(814, 303)
+
 out_filter = vtk.vtkWindowToImageFilter()
 out_filter.SetInput(win)
 
+win.Render()
+
 png_writer = vtk.vtkPNGWriter()
 png_writer.SetFileName(fnm)
 png_writer.SetInputConnection(out_filter.GetOutputPort())
-- 
GitLab


From 2c3407c4a791c301930ea1c634918f16a163d148 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 21:38:46 -0400
Subject: [PATCH 074/196] Set size on render window only if it has been created

---
 Packages/vcs/vcs/VTKPlots.py | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)

diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 8cfe83a28..da5810614 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -73,7 +73,7 @@ class VTKVCSBackend(object):
         # Initially set to 16x Multi-Sampled Anti-Aliasing
         self.antialiasing = 8
         self._rasterPropsInVectorFormats = False
-        self._initialGeometry = geometry
+        self._geometry = geometry
 
         if renWin is not None:
             self.renWin = renWin
@@ -383,9 +383,9 @@ class VTKVCSBackend(object):
             # turning off antialiasing by default
             # mostly so that pngs are same accross platforms
             self.renWin.SetMultiSamples(self.antialiasing)
-            if self._initialGeometry is not None:
-                width = self._initialGeometry["width"]
-                height = self._initialGeometry["height"]
+            if self._geometry is not None:
+                width = self._geometry["width"]
+                height = self._geometry["height"]
             else:
                 width = None
                 height = None
@@ -444,9 +444,9 @@ class VTKVCSBackend(object):
             if (self.bg):
                 height = self.canvas.bgY
                 width = self.canvas.bgX
-            elif (self._initialGeometry):
-                height = self._initialGeometry['height']
-                width = self._initialGeometry['width']
+            elif (self._geometry):
+                height = self._geometry['height']
+                width = self._geometry['width']
             else:
                 height = self.canvas.bgY
                 width = self.canvas.bgX
@@ -554,7 +554,9 @@ class VTKVCSBackend(object):
             return True
 
     def geometry(self, x, y, *args):
-        self.renWin.SetSize(x, y)
+        if self.renWin is not None:
+            self.renWin.SetSize(x, y)
+        self._geometry = {'width': x, 'height': y}
         self._lastSize = (x, y)
 
     def flush(self):
-- 
GitLab


From a36eba425fa8451c4628957e6af434f010736398 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 18 May 2016 21:48:57 -0400
Subject: [PATCH 075/196] Return geometry object if no argument is given

---
 Packages/vcs/vcs/Canvas.py   |  3 +++
 Packages/vcs/vcs/VTKPlots.py | 10 +++++++++-
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 0e9d23426..49361248c 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -4116,6 +4116,9 @@ Options:::
     a.geometry(450,337)
 
 """
+        if len(args) == 0:
+            return self.backend.geometry()
+
         if (args[0] <= 0) or (args[1] <= 0):
             raise ValueError(
                 'Error -  The width and height values must be an integer greater than 0.')
diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index da5810614..7e32798d1 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -553,7 +553,15 @@ class VTKVCSBackend(object):
         else:
             return True
 
-    def geometry(self, x, y, *args):
+    def geometry(self, *args):
+        if len(args) == 0:
+            return self._geometry
+        if len(args) < 2:
+            raise TypeError("Function takes zero or two <width, height> " \
+                            "or more than two arguments. Got " + str(len(args)))
+        x = args[0]
+        y = args[1]
+
         if self.renWin is not None:
             self.renWin.SetSize(x, y)
         self._geometry = {'width': x, 'height': y}
-- 
GitLab


From a5bc559a3c3e56daafe0a210071ef25a1c87005b Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Thu, 19 May 2016 13:18:50 -0700
Subject: [PATCH 076/196] Fixed some group name retrieval bugs, added colormap
 support, better secondary management

---
 Packages/vcsaddons/Lib/polar.py | 57 +++++++++++++++++++++------------
 1 file changed, 37 insertions(+), 20 deletions(-)

diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py
index 27fb73882..8aff589a7 100644
--- a/Packages/vcsaddons/Lib/polar.py
+++ b/Packages/vcsaddons/Lib/polar.py
@@ -118,14 +118,29 @@ def convert_arrays(var, theta):
         magnitudes = []
         if isinstance(var, (list, tuple)):
             if isinstance(var[0], (list, tuple, numpy.ndarray)):
-                magnitudes = [list(v) for v in var]
+                for v in var:
+                    magnitudes.append(list(v))
+                    try:
+                        names.append(v.id)
+                    except AttributeError:
+                        names.append(None)
             else:
                 magnitudes = [var]
+                names.append(None)
         elif isinstance(var, numpy.ndarray):
             if len(var.shape) == 1:
                 magnitudes = [list(var)]
+                try:
+                    names.append(var.id)
+                except AttributeError:
+                    names.append(None)
             elif len(var.shape) == 2:
-                magnitudes = [list(var[i]) for i in range(var.shape[0])]
+                for i in range(var.shape[0]):
+                    magnitudes.append(list(var[i]))
+                    try:
+                        names.append(var[i].id)
+                    except AttributeError:
+                        names.append(None)
             else:
                 raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape))
 
@@ -142,7 +157,8 @@ def convert_arrays(var, theta):
                 thetas = [list(theta[i]) for i in range(theta.shape[0])]
             else:
                 raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape))
-        names = [None] * len(var)
+        if not names:
+            names = [None] * len(var)
     return magnitudes, thetas, names
 
 
@@ -219,7 +235,6 @@ class Gpo(vcsaddons.core.VCSaddon):
             template = self.template
 
         magnitudes, thetas, names = convert_arrays(var, theta)
-
         if self.group_names:
             names = self.group_names
             while len(names) < len(magnitudes):
@@ -233,7 +248,6 @@ class Gpo(vcsaddons.core.VCSaddon):
             flat_theta.extend(i)
 
         canvas = x
-
         # Determine aspect ratio for plotting the circle
         canvas_info = canvas.canvasinfo()
         # Calculate aspect ratio of window
@@ -254,14 +268,15 @@ class Gpo(vcsaddons.core.VCSaddon):
         center = x0 + xdiff / 2., y0 + ydiff / 2.
         diameter = min(xdiff, ydiff)
         radius = diameter / 2.
-
+        plot_kwargs = {"render": False, "bg": bg, "donotstoredisplay": True}
         # Outer line
         if template.box1.priority > 0:
             outer = vcs.createline(source=template.box1.line)
             x, y = circle_points(center, radius, ratio=window_aspect)
             outer.x = x
             outer.y = y
-            canvas.plot(outer, render=False, bg=bg)
+            canvas.plot(outer, **plot_kwargs)
+            del vcs.elements["line"][outer.name]
 
         if numpy.allclose((self.datawc_y1, self.datawc_y2), 1e20):
             if self.magnitude_ticks == "*":
@@ -294,7 +309,7 @@ class Gpo(vcsaddons.core.VCSaddon):
                 m_labels = None
 
             for lev in m_scale:
-                lev_radius = radius * float(lev) / m_scale[-1]
+                lev_radius = radius * float(lev - m_scale[0]) / (m_scale[-1] - m_scale[0])
                 x, y = circle_points(center, lev_radius, ratio=window_aspect)
                 if m_labels is not None:
                     if lev in mag_labels:
@@ -303,10 +318,11 @@ class Gpo(vcsaddons.core.VCSaddon):
                         m_labels.y.append(ymul * lev_radius * numpy.sin(self.magnitude_tick_angle) + center[1])
                 m_ticks.x.append(x)
                 m_ticks.y.append(y)
-
-            canvas.plot(m_ticks, render=False, bg=bg)
+            canvas.plot(m_ticks, **plot_kwargs)
+            del vcs.elements["line"][m_ticks.name]
             if m_labels is not None:
-                canvas.plot(m_labels, render=False, bg=bg)
+                canvas.plot(m_labels, **plot_kwargs)
+                del vcs.elements["textcombined"][m_labels.name]
 
         if template.xtic1.priority > 0:
             t_ticks = vcs.createline(source=template.xtic1.line)
@@ -327,10 +343,7 @@ class Gpo(vcsaddons.core.VCSaddon):
 
             if template.xlabel1.priority > 0:
                 t_labels = []
-                if self.xticlabels1 == "*":
-                    theta_labels = vcs.mklabels(tick_thetas)
-                else:
-                    theta_labels = self.xticlabels1
+                theta_labels = tick_labels
             else:
                 t_labels = None
 
@@ -349,15 +362,18 @@ class Gpo(vcsaddons.core.VCSaddon):
                     t_labels.append(label)
                 t_ticks.x.append([x0, x1])
                 t_ticks.y.append([y0, y1])
-            canvas.plot(t_ticks, render=False, bg=bg)
+            canvas.plot(t_ticks, **plot_kwargs)
+            del vcs.elements["line"][t_ticks.name]
             if t_labels is not None:
                 for l in t_labels:
-                    canvas.plot(l, render=False, bg=bg)
+                    canvas.plot(l, **plot_kwargs)
+                    del vcs.elements["textcombined"][l.name]
 
         values = vcs.createmarker()
         values.type = self.markers
         values.size = self.markersizes
         values.color = self.markercolors
+        values.colormap = self.colormap
         values.x = []
         values.y = []
 
@@ -389,7 +405,8 @@ class Gpo(vcsaddons.core.VCSaddon):
             values.y.append(y)
 
         if template.legend.priority > 0:
-            canvas.plot(labels, bg=bg, render=False)
-        canvas.plot(values, bg=bg)
-
+            canvas.plot(labels, **plot_kwargs)
+            del vcs.elements["textcombined"][labels.name]
+        canvas.plot(values, bg=bg, donotstoredisplay=True)
+        del vcs.elements["marker"][values.name]
         return canvas
-- 
GitLab


From dc86269c4bf0eb8fe579234fa7bf386f55e1b242 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Thu, 19 May 2016 11:37:09 -0400
Subject: [PATCH 077/196] Added test to validate geometry function

---
 testing/vcs/CMakeLists.txt       |  4 ++++
 testing/vcs/test_vcs_geometry.py | 32 ++++++++++++++++++++++++++++++++
 2 files changed, 36 insertions(+)
 create mode 100644 testing/vcs/test_vcs_geometry.py

diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index 9e8cf78e3..05928a0c8 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -381,6 +381,10 @@ cdat_add_test(test_vcs_missing_colorname
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_missing_colorname.py
   "${BASELINE_DIR}/test_vcs_missing_colorname.png"
   )
+cdat_add_test(test_vcs_geometry
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_geometry.py
+  )
 ##############################################################################
 #
 # These tests perform plotting and need sample data
diff --git a/testing/vcs/test_vcs_geometry.py b/testing/vcs/test_vcs_geometry.py
new file mode 100644
index 000000000..578bfb67d
--- /dev/null
+++ b/testing/vcs/test_vcs_geometry.py
@@ -0,0 +1,32 @@
+import sys, vcs
+
+# This will check if we can set the geometry
+# at the initialization of canvas
+canvas = vcs.init(geometry=(600, 400))
+canvas.open()
+
+if dict(width=600, height=400) != canvas.geometry():
+    canvas.close()
+    sys.exit(1)
+
+canvas.close()
+
+canvas2 = vcs.init()
+
+# This will check if we can safely set the geometry even
+# though the canvas window has not been created yet
+canvas2.geometry(400, 400)
+canvas2.open()
+if dict(width=400, height=400) != canvas2.geometry():
+    canvas2.close()
+    sys.exit(1)
+
+# This will check if we can dynamically change the geometry
+canvas2.geometry(500, 400)
+canvas2.geometry(500, 500)
+if dict(width=500, height=500) != canvas2.geometry():
+    canvas2.close()
+    sys.exit(1)
+
+canvas2.close()
+sys.exit(0)
\ No newline at end of file
-- 
GitLab


From 8d4184228fb68955da38207e85807417d5794794 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Thu, 19 May 2016 22:35:11 -0400
Subject: [PATCH 078/196] Added plot script output

---
 Packages/vcs/docs/static/clt.png | Bin 0 -> 58300 bytes
 Packages/vcs/docs/user-guide.rst |  12 +++++++++---
 2 files changed, 9 insertions(+), 3 deletions(-)
 create mode 100644 Packages/vcs/docs/static/clt.png

diff --git a/Packages/vcs/docs/static/clt.png b/Packages/vcs/docs/static/clt.png
new file mode 100644
index 0000000000000000000000000000000000000000..3d721ffdb6ef9be9c54cbff606c06497f33adf8d
GIT binary patch
literal 58300
zcmd3NV{m3o808z=wr$(CGqID2tqCSJCbpf4-id8<V%xSR+V|V7{jpX1V}EW}-KtxC
zZ`JJw=k(KkPe-XL%OJty!vg>SBsp10bpQY?;lB$O9MprdS(ps~kOJf+#WcN*E^>eP
zX)bp4_B2?qCtKEIq_LV3Ve6tpM8m_&LE>zjNte|2Maw(H$m`{+M{AR4{;7G9G%oTm
zofU4<TY!ga-NA=YLvslf6$i%?Z<b?EwrGAHnY5VBL=~4+Eq;EJ5q6svdOl)4{=DMr
zxvc0kVu1&>4=@ieBur5I$w&*XASPD0T>#4n1r_!)gd7O!{f89YtFEeg!t?*#!-vw|
zEOtxzk!=kbfBz4l=|KO$Km-zjwuS~q{bR;gSCt0;twt`p<v0|M!Cym$uzYH-tslou
zKku^-C(^f_2avF1N;{5d1{S{==O!hA``wMv9OnmohT#~!oK@BR{C8SD5ROE^ZJ(iB
zj&bhaF`2_RfBxus;hi&H>;3t9?9_IcrtjExID}(-p)~az-GZa3shQ*dw2B``&B4*o
z-7QQ(vA?A6w0P>CZv69ySI1LYY%F5kvc8eg&)38Bucwxtsj)E|dwa|;Un8SDqq=-f
zo4E`&bHBK*5&qo-3in3A`$-N%=H)4#?dthctEnt5m%YK**jP9W>XNcDWPHw(0TdBI
z=bhf`fUh?$yCp`0R$x)lUSR}r`(8AIcCEqcZE+RHw-c`%k0o6a|A*NMwc^^Yr?soK
zJi+en^xRxSL&MFrwXD3ntE%lT-)F960}S=A*W>(c|L4uUctb7Xh{i_Fmi5b9?`hCb
zrm}gy4p{>DekvVz#Rt4=h}B8!Or$Y&oR(#0Wo0?`d^`#HJ^^MxZw^Lb7zcbjUU^;;
z5c}Qqb#``+kOaKfPcZ9u`FM;3d_Byo#6(9wU2lIaXsD#7r+YPC`}LE_prU;KJFnBP
zSvosA+y1!nYHDf{RZy51jw9Iq`F8ns)#3DdcW6}C0`e?7yX*N<O$0Jg7PrHi63^zs
zEksy8{MXBfNVC(nu;<0f!oor-!?(4|r>iI4A34h9+Eqvh2=_i+-B}_Zrz$9dH=?p1
z40WBZmrcvz7rv9@`vtSs3${)2rQr1q4cBeQdHc)mTjy^w>xLowySLt&^r`%!UQ=(x
z<@1&Er+}7qrmepA_I3p3R+F9HAP5*_LSa8|8UJGpWIl0C&4uY)fkj&nB_$<u^ZkuB
zxBb06zkr~ZmlsgvUY_o}1;6L}Kb58%wR>LbHLT|3<rx_o`VS!SR2#I-J@4mwZHevD
z1g+Y~$H)IXt1K5Jo@rV(dVB8q`T$-2UIMw4UZ4o2elyC<$nf3`!vQz``C2Mpn#F1=
zF8lFxx&C#R-V^%2j8d=k+nd?HeJDG6EBOX~9t1<&dX#17*m2p6$6?iZJ4~?2b?uwR
zZ0Iv1^c)N7`uhA(5&3YJ@wc)%nqcX9Uoro+xUit9t(`SxW&A(Q>j&BMeJab671zwn
z?B{Ka@dhY1#J|2jUu_~sNYpKR5pJJ9ZUuaO@NBxzE3kBxEZWw!9c68QKDE@;)M%6`
z>NZ%#UBkeOkP!#G-@=gi-DG~xdtL7N^cW%Xa$kNwXgf$!(4^NgHQkj!5rTX_RSEEU
z*^Q7m6M4I6;FuBQuoyAdCeB}X>iH<kc64nsXxa4CH#If2v9VFHYkx!W|MlDapt7Lg
zFH7rx8PmEOK9)@5rT;oFfu{2x*@csc-j);;6ga#7$7h8BZs@`WWCFzHcarZw<&v_Q
zlkx3cNnCb|QP7Bawy$aO{ai;A$VQw36p29yTCDo!+qES_{No^A;HvdyFGfK@!9QF0
z^&lCPa3DwJb3OmAs^xDBnEjTs@#vZJB=ULtE8D62^(fovCn!{d+nYo?Fva(e)0rIQ
zZVf+Om_!_W4h9x~jO=TEuJ|?iPywkGGgvLhG0W#qJv+*kbNOxNN>IJz>n?if_>KO>
ztE#FdWM;0K(m(lhNl8f&O*;wu@oYZ7Np9>a6Xn(|GZ^m#Lm_ZGZTWpXE<0_uYi-PW
zt?g+DdZ;v_oceW<Z8daTeeCJ|TzlbP`Ei`<^>CurvYwlkc2qSpHuegF-_So$+y%L~
zJV4yH7e7*s__}%PtWsN6cao|v;qCnbV!QWohPo5?c0RWw`>oC&AWX{1$q`+pST(F3
z_v&hEr=_NHS^q7FBj9N_sO`9FKMz3>2ITo(wY6;dFwoQ6j;GMAw>UXDI{v))GvWum
ziR>5eW|-6eEPs4{d87JhxC255xA}1F{j`A7(PU<>*Yl4JkI#?d()-7D1Lpka>KD|v
zp{932UNjQs{A5Ykk=qAhG?8kI%2NiA`vL{7JMTeMGk@w{*LgR7G?gu}L1$-cD+x0l
z85xN@qOg6nySwYzVOY0ZrTH5~T-lE8HmmiPr>CbxBiF5`BcIRP0%XesdxdA79YM{_
z&E{7O!b2L*l5c4V=Tc93k4>j6%Xi7gl76l20@AcXkCXh1-FjVEIv^*2a*#RQt~U@&
zG*X~+{ZhCLqMT<(;g9`lJ%LlFTKW8@?{(L4j@$I=_xz8mj%#jC&I--nb8~Yb-<0o{
zeaXVMZK0wpf?j%d3V2(;pAqTtI9~$cCG3JhSSnTCBRbD|8{M^woCxK1;ZZpZ;~<8m
z`{pA<!0QSGO8*{X-^-?WXtNJCuZ!Pv<pS5VR^PwB(fex7N6`NKpSce$^$5!RzfP)<
zX~DtbJ!1b05zta5bhNY&1eigk|E~|pXn})_#%7;2DJdzX3hDpW6tlT&iv2<MgWAdM
zzeo`GdM+PO($UlZuBbSvT^uuMSY4{oUpA~tK3K^01ZA$8nwnRQa+1a~Vd@o5Ki%a|
z5T7@!+HW@f6NZHrVKHof01;$#T%6E(bwj6-lbl?Xd?`QZJ}EgFWbW%Qfs=vfs^wl@
zcj-T6DDLb_%$hm`otmZ4P{`!51;YX2`M-Zvb#+%kA1o;;v3NL{-E88=15GT|+IIha
z7c?hPDWthCpIMxgZ(c75jcBZWG~2JbPOh9@=NI&%f3wBpAgt8<4JrtP1q3=MmV-g9
zA4Oc%Bucj%1TRJ_-e?d#-wxr_^?d%TWfuaukk$_c5-Kg@&~BkZjR-G3&*zM;*PdJC
zrWe9%!$l^dySSv}U?t*$s~c~ePQ4mLs-Tt^4Hvd(+oZ!-e$vQ<J<;s24l=OAZpFM>
z`#g<FUwJ6eKc|RbPNid;&Og4q_6>}H$59ZgJU2CU(GVkw_;>&vN*m)xGDrNjftZ*W
z*=RbIWCJ<H01K}VJ<un|Me`lzk+t~ym$I1H?y)i<o=~a=(Swdj6lb5QbC5*ELvY5b
z03!Ke*WN*q0yFoRREGiL7#{JS)KOZDX+dI3bjSI@<^ZWU?U|6IKv+F~e{d<cwWK%|
zj)Zg#g!puaE_1k|ibP(NH@;|w-`3P$W*%5REVNg8(QXPfR2GNo^n8OzIQh;Ncm5me
zSF!fu(jw^#wm@b64K-^tZtk)46R}GyW_7A(-c&zd&Rpe-7ojs=9+Vq)`C48^Y|S}c
zh+U&yUFrEU4E_$WpVL!?%d}lM#BDeg{EvwV-_g_+6JpFnXb$lCFZF3~JX~j)ygZux
z?v+S|Jq2_M>ysf*(7ModjDs=gcl8VBkY&QmpiAI}bZ{9kyXt{%;}PK9Nik$*<a*s=
zGM1TK_=w_vjIJ<34mxatgUtS56|Xcss*i}!iyS2&AEf$b_f1rws_9YT@JtM?>>DkL
z!s0~hALwI~PcPgt@L~>5UNFDLD+V<LmtqUW1^4p97bX9~j}=!#kdw6-y%x&`a|tWE
zq1N1!qLO79N9Luqe+L&vi`M2m6t|;b4k7gnAHN<D#ihs0-tjM7f!>Cbs(E+xB^w-G
z7$t2JPa2nNtPG>s7S+uD%R)SyN@~v~Rt>C!8V@T%It$w)TVckG7{kG0CXr`AK`Sz8
z@AP0rzaEo%o#<z{YmiMB0Yn`9x;BYKkB&LK(uO7pf}(c4qs_beZ$A9Lqo3>~;t^DK
zZh#66=nyg<p69=X64yOn!mM^n)gte^C@mYV6L*IbK{oSc{-8R!`w4_-gRI<Am=Jrx
z$Oy?oT(fev(P-YdYKlkCdi?2>7&3X4kP^);5y<j_)lC)#sRn~VIO;eRE<dWs^C(tv
zYeGvJcF9xb3Jd~KM3cDM?=LXot6(spW(4mDz1XT8Jcy8yND$V}@;(zQtg*-A&kY9L
zXsZ@Vufi3`X&cCbmz@1z6=BVR!wcnLUmPQ(E&BmO01Ik%A+TO|=67-HZ%92w3hu0u
zwNZ&NSa&uuV@rVvF}A6fh-!C8a!E*r%xyE0<N89mu_NRm7RjoRIb@gvq*{*oEU9`<
z>CwB$nnhEJEWcn3lB8<nC2D|+WZQwe9wm`w3^n#$3=v+TX07&H4&g-NY7|adV`a4b
znQ9V(42Sb&^4;rX)CKgb2M&o$;>bX?1$ZayryzX^Ik~ZgN)4G`#9bhU2dzrV*LS{$
z?VH<`_Vwiz{}q?R`oVPoEt%Jb)H?@h5}#(TmWB;43>eKtOyEm8;FgXshrh6M3r>cn
zc_|@S5ln&F8pB6v*tN=l8`}wH1LVN<UQ<NQ!F=KD7K|K$SvyPcBATgi5d0opM_C`;
zjN(iE;l>&TtqB!I5?|1ZFX(*(nn+sx-=1Qa+hyT?3d7lZMl+>cgkW0_tB3GK#bQQU
z7?CRsg2|40>3iyqix9Z4=+V_$e57Og$vM&==`bQ;rX*)0n#6+xIVF%p<B_XiOkyMO
zO`YcvYurQBrpEE_{E9pM=gNY4=-x@piYqFpm!x$Gnj8ZmkwmjzY(C^n>`E{6r4ff{
z@sF{7DnddnWpUZ%fmUFq1L3(Y!+7+m@}Oaf2{Y~8N3fxgnNyyNkRBUI)1di678C0;
zh)M{FN)d?WA{^+-!gAvbZ3hKi-~^{0=^VukVoC`YHqfjK$L5xW3R#+hBVdq@i|8A7
zCI{nxm0bvoc44~Fi-r^Qo>}>GKrw6M>dX<~^j_oWo9Ng^_Z~sALYIki!wkUy*D}5Y
zMNPqqHNc)U%|9|;d6IlNUR6l$D`->{DDoZ4m!g}JZt-F><XcZx%Kk!$1$ms;wER~G
z&(6-aZ2#<#FFm7wd6h{TIgjWMcns=(9D&M79J@TX){%H(4-94+*_xGc#N;aqW(k~<
za)CHU0sExejb>yIXXQsjHif@s!9Gb=c6>l88~E5Xopp8%@70n1QqMrt)@YAc^1LIg
zW>v)<(sc1$_0r_+n<9<~n9QgiK6}d7sZWEvydCuamcpIzX=-h`i)VpU^gvqG0>Jb;
zs$+Q#zcBc^ZklP>GquVmX?J3wTkAgya5Y7y7;*dO-t4r$BTcPB({|m^_<{RJuZGBM
z(E36b=gQ^g9#m%fiGUbHJHm8nXK@#eGk72_7LCa1u^#>5#j$Nm*>eI_%Q2Fq_MMt%
zY-@>ob;dWcdRpXnsnm-x8~;N;0WLI?sx}O&6Dc>eIlJhruf_KG*Vc#@j%l6g>42o+
zP|_G8G7ON<8ra}KqrX*P0%Mn+NH}vsY1U$j=`ooPzS5cUWA(7GKCjaiJ~<-`@@Bh(
zHY9yFgpqpC#Jw18q3CI#5iURWW4jUs4kz>APSQ6Rg%e(~5}hjzR*m2=ZSmFTRJRqz
z9HCQWN>y_**@<0cA`>k2()~6XkrfQ>9IbLivulp(WH-Y2L(hA#L-dqb+bM{9(o`o0
zvoTp96i5#!4`=1+{h^oC2w1H-IBT;+z3tZ;`XP%wBgrE3>P>s^yrArX(OYW^T>?JI
zVHzE%LoawyY4(77cDAY|=89fCqSliCvO9=8k`X+pU3rklgvFIR(rlLQ_HRFs{<mz`
zukC75iZDESvp%D-mS3uK!*6`=TnIbXLK1px9tPh=71UtNYNMT32WR2;3djcojEVl0
zP}2DD(``;zHU%q0pIIY}&?&ZO+5SPmE(w><3ft|OFSM~+w)gPCcWnPt%r0WgZQ(BS
z3rZUXMBp<P-+uwhH!fLX55U!+P$?8DSXX_GnbsLKbBj_X_2F{K`@|7N6Yq+V*yg=0
zeItau&mEK_%gNwLwSTFhK+rZNc0-yBdFbH3GkyEW3b%T?AS=v!fi*+2-xzzS%z!+f
zYxm0RK_vBJuKF3mhA2ruenZ(CtmjHDtExeeAVY=R!+fgODu}Aab^<PEvUpMEpaOis
zUsD$|$&K&@qL#BOHup&@Qiv;}qN79MQ%ALM3vgwFW$maWP>noO^|MHIXqlZzF5lH^
zR_`gLlOoUfTU)k%8k%h40Njv13%Is99hQ0Q|D|n1;U7a2o5jahniAUX7m(slqLoCe
zw;8gVX?oM5^GDI8Rws(@C&+$ldOwPQjGwLK316v}-~}K-R-zD39TiS0)TBR70ghPJ
z8!7GRY$#@w9dR=gHGP1Me{5N$kyZ{R!ZL*8>JK+b6)F;dm9iPpgI{Aqs6mg=k*D8g
z(yOE6I-IYNv1{&Zu0kZl{{#VxL$&rgTW)fNf#{!tbTtqwno?WSCYG;eiV(5{)A6Y#
zEPbx9_s%_(hVPJ~ed_=QW~*I9`g7fGJU3Emrt5&wT!aLH^wKumVsVH$di<4r0IS_7
zyXpaHN4Noz716z!Zjz)2R&CySp)ygJGY4Y0Mxu?)738?B?mN5{MQ_sl`=f_5S|<_?
zSVFORBX%<MfmcDknPzesR8xT7=1$qPLQS@yKp<bNJWd6jwRt6hxh(J9Om=>HZ#^Gf
zEd;PxZ8g3W&kZq6da~ckW8^XeT%up8^kCZJlnWJE`iYQ|3XYT}<N{{QH>R@%+*e4Y
z6V2o)vt)ZTS|G#!h^-SW`YGuHJghsN1KY%wH%u;t7ueWP7!*d9_$UHCc|-W#-eE-(
zp{BBisy$_OOU(|^a!y3h>+dXVAMTQ{Ij%>v9L9_4-?Ngl)4x*g6szKEt!b_v6iuXO
zYnajq726El7%!5PZv7{%`-)MZ;`H?kD?f+6a=?sYB(y0`zT<07gFHhB(&WkeOVlSh
zYz?~}8ZfC$;Y7s8fh!ctPe8Q|Rzk_<!6<M=^}*5?@aL4nDqqflV$~~AAU*caAi*Cl
z4bMn#B9DkD9)LwpZ!}|l=xmOfk~XBM5l3SKC7t`y8-zO4uYn1G!M4)6ktw42BIbCO
z?hTx%4ptAO3E^NbFVSJp?&5QJ4N>P#NnxB2x4i9#`3=^9F8l9}T;Jdslxd#W-yWtO
z5Er<TdX(p9mG1TbXsvJpheC9fqN6Qv;|g~F@y#j(VIlo7TFZq@H>~vdW1Img)s#d@
zFXCQL%zIE)#ZN^GuSPx!mmOa?hwiof@7M*&Xo;k-Ahs!)u98OOFPJ5=$+8;#rH=c7
zqSZ%w1_oWF*#~r>6i$M`f1%3_`T==A4YAUY&vmZz!mu>V{?(&%YkcsLIhcrP({{`p
z7#xRbU&7J$UBeqk1BRFaN=+Lx4nk+XH<&(aos5(k@F{U6>dG9ZUS=fqqT1Ns>)@L0
z70OVNso?27#A)<TPf$E7sZ1Lgjvu-Xp@r9Qu?T&kswS=xiakgW42g7u8xRiH^H`4$
zWp+TLzw6)zmwV~_`r}`PNgA3~Q#ke`X{#4+haKMJ)6#FRAXr?eC}KonTX2=7-L@?X
zTAK^vxF(^B7k2@IbwRP`Gm0(PxPp;+z$%LerW%x>9fXuVAAzyuXGC=?nt}k-*T|&7
z)>})!n@3xYn{W--Y-O6ztUHUm6#9Yh9JsRm6enkWc`!N-HV+iD>taD!FVu}9Zq&`W
z4kt9LYc5(rzmy@tEwPyvr*~zD9t}Y}>rCKSx$@jRX*Vo=qZxh~t9uc33sHK)Xc=D8
zHuR_xwHrV`-lRF%%)*DAIv*kK+gy5^t>v<(zO^g2+QhP=L3Tm1=bqZG>LW8DFdf_u
zFvxFXjRm=s?yvw`*Ox+_B)R<sRzJ&1iCyltAu8)C@W&S{u09lAjtFZPC%%j}P1*Ex
zXrXVzn;ltQU2cn7JNBMuQr2WwhkFT3Cq9WIGuC=3V5k-%113=b0su3SPjSyle?Hop
zR1bUfl^*acwEn|~qs3^X;`U+_inSuz!o{@(!Hy#JQIRNwO_C=13q2M{iLDP$IlE&e
z5yAe#0-BpsH3TBKjM+P)Is1a1JFQwQ?yS=!{U}zTf*u{{NZ=gw4@(sER(IIoYJ=%+
zcsCHzNik@SQDP9Aj;SX$UCY;*&kj95t|@QLOQn`)e^C@<;;U_FHEYmaY$&(u!xsOx
zMykV9?o9KSfn1NyYqbszhkk_eQ1`6h?HQZ2GM~b~yBnP%`;Hd(ikY6|#l+#3nK%$j
z2pvZ~7FtK)E*pZ{gM0~7Ar-S8YVxh6NljbnpO34IX*`8FHJ-1I>k7%4`Tb1>yl=11
zqN>CLsXJI#D4Ze~QwN%v;lh&Xt$6iUP*bCu=*`QbCp%@NG|Ml0!5>6dK`~$qRNBLR
z1=7vDm6{wgqQ8;AZ3huEAe2ZHSaNhfjk#QDl{tpBcOAUUzRlOdBXytSbz$m!K<Eqj
z;)}_Qp{bhz*wtxsB!Ur49vuVGaem|{31sGseRg3_kujytxnf(HbDFrRcA}X0!Qc8s
zmfl#_*A1zM9|GQ(Hjba4P5MzcrZlx7bdW{4j>HbS<aal5JLdawn^42meqzPWmM8%-
zvZk|ORxNkiuc=OYp<%!wswDXm*Hz&18|CHt&ik8x#bxVu#^y}8=&aJB5U3#Q6R`c#
z@TN0jdjfLt{^kBH!J$E%9IgG-n9zk_;&6A{d9R#f?@eF@=4?qCWUkDs>k~MvL+nrh
zP0v5&@RRl@Hn`nBSfE8lx^!;H8aBXAfNHTs5@^jR`mUO0fVnY*t?WX?@TD~DJf{v8
z{j63`GGQ7RjAHYSOv)26>1NG4=*;o!=(dh%aInFLuMYd=SA!WF_WPXe2Tk=CBx!k2
zB^ym~H<xq+Yc7|uncc_=&c`Ef>kd&XR9;v^AF0g3rv-2|GO7@mP|X<qIq(l>v@Pd-
zq~N@wjGExkvLs$~JB+&Xgm#IWRn=xdU7?TJbtE};jSl!V2yCOZqc(g>TQj53Rfr-Y
zC60$278B*b6!$QJ6vS3PKrAJnb%rwl-|jyI!Ep>_r6AUpdH%0Y&dKd{Ze9=PN5VL^
z6$+ID<q6x6sa@#{={<yU;AfO+OaQDYpZt;RQIE7KG2Q^(>dZo-{l@e=SGD12k^+zq
zfXza2Pw2`_LzX9@1dNbb2Cp`n*6K0QLatmr_zaGn&+tN1m?3{Oa-A6#gq<+0_fq4{
zn@!B{xngu&VD+UXK*_liTQWA`@dHl|R3?@cL87OZ%^i&=ipA|~_t4b-C$;FZqC4JD
zm6<o@o89V3+&g|Kn24f!Wx=b-JNg+-q(Pc<iV?Y(96%>(-T#bH)*AE(GZsg|5!5CJ
zK=QP<Qen8?14C|6*PLMos236r_iywrc<}fk`<luvn<3!Bf9tOb%vKbb_W+cU<a@u|
z=BgPtOb3?+QJsDk)ud*KfMHIXHsWhgib^4Pz{Rr8{)Gn~)5iTWCG2hgHHsTQE9#0U
zYDvTmBMqV8Wb%rgluY{?-6?XvWT%)v^%qUBGb7?Y%KhfkHn7yo1RwEog?v<KJpi2V
zd*Y8!bOI{wz%U7jL^c}5ce;4~=Dr9Z0WrL6RX$}icAALX4?b#)^Cd9GHUbNn57aDs
zi*k+2(y`&AxX$LF$yGprKHSYn57+@Uc(t(TH0+$X>qQQxZj{B4!nm$EZzD{3C>$#C
zG5BA|vtA!4;nF?G$grM1VO)4_pcg#uE3RX>?+)MJQ!s8`z+Dair|&hlQz?Mf=kT=q
z^V_QEv~~R;Y<QHj5Ab1%G!4$mq$Q{6Cufc!Bx-)|3umKac<4jM*QgX3C9X+i4vlrQ
zeLtX}C>&{BR*rNvd~y8!3%H+N(j&Ql*?03TP>$7rR=eNGzNVVaY7FtO*}bY?!sA^h
ztsCb3m)vgM7(YaI2v^u-o}?b5z13k0BVicZg-#3}WXX3mymPO9wE%4TWEC+yGr0`P
zSL~huku+K(gGo}32&VU2T3zugnDZ7+$1(==a#R+2OZ=UE6C0k{swl^i`N;X8b@_E?
zc9sE>pL60LJdl@mYO#DV`u;taD?yQe7j;TF6>11?VXt$rL*&)lWh%wf?ckC|mt=_@
z!l`i~ErA+9eaTzV64Gx2tf67}j=_}DI`jK=_aC*+Nw%f$eu`)*+`y{D^YHOZ4n$UA
z9r~uEPxP>sWG-xzLuz$t4P6C!@cXYbCyh@TGbGG(4fVbY4q1dXXnrQxy%Z!#wtD0*
zyA@JQih~;4wsis2IJzk#Pq>c}1Q}Ci`#=j??OURZKJ=Hj8XObB7vahwSxVPh3{Uf>
zSzAdbYBEA42Ic~+bo>bI*4W*JA=syu-TH2kAB0i2n2UWF{)s5(l(e)IUXXA?Im@l~
zjVZH@F;2$~c(Dw;Ufd-1dqwW9ZU9$i7}-b~;WQnY=jbz3d(T)j%>~tE#WiO|ImR4`
zTO%NwFxvIz${KA?>X1|hMD$>a^-=uw9FNmxdMw$HCiVZ;0+1+nCRWcX63K@^Xb>{A
zu=^s~gk%cj4LONJ>SE|*ObuUF|4b^y&|!~v9K#n|g0;`r*ywH6qp!*kDtOy53Q0Ww
zcDrXL%t%V0t!N&Fio2lDU2^M2_}v6|=)j|Mkqj6=@ZIkxez^y_j(;H0n<?^Fa&g=Z
zzX8}lD3<$hp9SCFDw#ps6}B9t!_PqrsEAe91g(8*XS1_~Lwkq+(l7wV*E~EbNsm!$
zQXrkEYDG)zkk||f7s(=y%Fdnys_Ypm5eW7P)_ZECZDWHfIi|pTVv&P0TTG<DuKLTL
ziUa~93IaW{y3CU&{tqs5T8@D-8ECUM%<bUMBAn`<_-H0dEA+g{S0m{Galml-A7?lv
zZi9RceHh@m06mI1oOKqwF!i%N`!9Ck=oePAfna$%eF3D)!86(|WjWH7Ina8^#5NmO
zC{idwF48I;i(!y}6^|{vfj)aE3{wZ-IyFg!sm~xSgix-oJ;Tg!zYH|vR<HuoTu4SA
zICEgkQeHDj%Tqan?_@FKWPNrY9%<P98oej_fqEo+lYpiW9Iqq;U^g2@@9r9Atxp2-
z(KR%*$Cg5LEISa<L3o!Xrg)r)Ll<`ShBpu&<KYUA6pnm&eXeX00~#_`4U_XsM?UfL
zg~p3}l{VawQg3faU!Zjxb~s)*{NQ;c*x$+?osVqqO*G9lldUKnGDPkt$vJNMofH1y
zwZU0XC*+6*r#4yu%y}863&mdis3pZXp!9W`02=5v6ebUHzZZL8lE_f>NY%pBsK(|n
z@K8=>!9!Sud)v$;9SPu8dExo2i-{`AH;<2T;1X}(TFD^fcQ`V1luD3da&yeTk}-<D
zflv7KE3YzPabEVB4r$BjE$^<nBqH)5MI{2bVO|k{e6$QE-#Aih0BP8+oH|7Im3PN2
z6Kz31=A+_D3ZXR6xdkk>x$W-}>Q?tFYD@;08JjXNdL@Fw3^+umd1LkLnE*&r7nOtm
zko21&EResy1Cl7ll)mrkrVx{Vz%3a~y$95raaXncdGIqU)^$Nb$TOWK^weVa;#C+<
zL<eIP4p)j8Tc23F1vJ30k_X2zRli66z+i>rM>fZ`VPnMayK@>tap5_av=HpAGaKc4
z8;RN*yeXM2##316vB8b=gbzXQG1hNZQ3fkbF&Zy*+WqA#Z(>IoCW<L3hL^EZrwTEJ
z3YYHYLCp~)r<kMoFX1m_6uK0X#c~6AFx4%9n`W3b)-;2>H)KleHIs2$!IEKreZ?bX
z(HM|{F^W5S*P`o#M!j2mU{!zXR-64}GT^aT!U(s;308(93=GD`v8XFr+c@|aKQE(&
z{1@U?;Gp-O`6_j|%yW(rTs4!HeI>aY>^zJq{Omp)uR~|@_=7m&SVk>^;nR!cjsH?C
z^jvHSR5vm=4gnG1^%!a`?)DrQi}u-LGaz^VSJ5BFwm#g{uDCjA%?h5ScrdyYxr%5J
z-1Qic5s{Rx;@FGi`<V?0;{l$j_xNf$^3O*!JlWC{fPwH^tm6tNC9tXmr}~c64fpQ_
zcg`$o+0+JAQ<HUb@a|dnQ}_td>Q~B9V0OF-x9TiBp;Jz(_1?GFN!`JZQ@|mQ$LigS
zrYO;ZINZT3C;o^|k)ve+vWEpv73nh-Q{#o{_QwXw#(>a$rU>bvOE(7eO&5V$>^sg^
zqq$RIY1Gm4k1y24Z#OZCL0VZn!0x0FQEJ@=IlLMQ9z>JexYyJJ;yyy8px8yCF|_YA
zNok5}-beA^w8DyVTu?)R*B(e&1yc{W-n6<5-|(J^4?#I<@=Vxw{_2ALJ7;H6I0M8G
zQeBy%yo0T_{RepyrC(G~XWf<qEjqD`_|jR(<djn_l}REf2U*5I3!!gQrhVFULkgIb
z@GkUvG>*D?M2xA?{Ct`LKIFu_z-41XLWWn{^{)DQeOM;BFc-c4CY>rE?uz<!G{dNG
z?na3ydPjq-o0*;IscJJ3<r|di{MbVmxJ;B#e;Nv1h1CdRthO~ciY|l+pxFw$t}kme
zO~QZ&K=@a@4=oKDF^fQb+p;F;6?vTI&%0Dw84Qy!!ag1Gba=52enE~@xoJC8c>;yi
zvxy;{UP0}`<2bd-t6PS!bC9uqwWsXbiZutc*vPz(knwlupCpH;KQIrJsoW69X4~M@
zs(9a#of`LqMG;ubxXJkitYMaubNbK=aZeM9Q90|oBq-j3BCJ;>G%q?{yoIWQY$TDT
ziH4Yow%zTU)ad_dwI2|&x?IHIBxU7H1VRY*q<fKAP7^BUL*4HL{CMWXe$S)A;l+73
z#(+|m=EH+<LJEV-V}NS?JK5{pzXohhsxB|foU#pvo;HKUHDOT1lW=2uxq5zMY>!bg
zHhZx7eV!0`(HP&Mg9t|vlTd@%aiBHbb=OUHYPm+)qMRG8*1!2E-gXmxdK<yu2{fKQ
z!5qQ-Qz1>Er$D&S5%!}lLOK=r$U4Q<pUkx**MIINT@z+Skme~|SImRIY@`d<V)0!i
z_(5}$ZDoT|7|^TA9>L~?DQyh1^!j}a3&~!fH_gAgUFYV&F^nd7<iW>z?24XjI-1N}
zFXH|0d{HT|G7EbR{J9y9zJRzpnB5z5Zv-6;&cw}pLA9_S6UM}Cc4rWi>Ts?e3@wEV
zEH-kuqJN1J7tfp$!F*O&zM0J_!PrZG*JR+EMd({djsx-1f~iY#@poHZyR6Jy`7CeK
zeX*FD+<Izmt-DvuTI0diUe=3%D4&I8n&>s4&$zx|Sxq0ZyE}<eKFqHacYd|tcC&4E
z&pa5Dmc*V6Ifa1NZyJB~h~z9W`@F9d7_bd;*GilFLK>8Hjts-(85lMP#ACvhgNnuE
zf9!8}CdHDa%9>!ym_xNt>6bSe7B#*R?vYUd!F@+M{BXCz*9(w{gV9u>DR$g&C;q5S
zJbjM#?hz3|R(JRTzJVuHnc7(N8jsN$7fEQ_w8b+3#qA^YUdb^m_JcLziylW6t}04g
zBIiS-V2L$7@Jp&w@-=@~C%Vt>VJSD}eK__NYsV;2y<cl{D(^P&!90_iZcA5$Y&!qg
zMX{iImz6cOT1XH<&nAG@AROBhPn+(w7(6OyjlRGc%~kA62OLSJxhw7|3n>LV{L{k)
zV;PuGy?6+a_Y1ls3MVUm!WBM|CJIelx+QWOV@6}w9n*Pa+$N|VgOg?Gt+0wh>pdbX
zi|?1!)$1E~l8v&cjcSCPL)Tea6v3|<THEi`^(06$6V{p8kP6~k{Ns#y+q3%W*cz%(
z#pf3Y;WDTT)O2MHE4=q9<rad@*O(=%;ok=cG>-l_;g1Yb*&znhLZalrX?qja28uwo
zzc{EuP05b&s$?w#bkK(TM~ORT=^nyCT>lJ4D<ZFs&|oebGg#3FeI7FKV+k|)87=PF
zbfJMWh9AaeWM<wQ{sl#6hZ!&qZf57Vj9&h1UkSx31I|bo`-i->vN29csl>X%9l~Nv
z8b3R`vrgBSJ4S@;JLdT_x|fWzH<jreV1mT`FlM%Mq^Kh8F@m-tEYsr1)e+h25F*86
zc1tUWY5nUBp4mUTd_OXV><FD1s@wb-ddYY|%kW~;+*8%|aGs-Nh%v+-+b@=S2So%s
z6Ks`r1E&wc5)~rK8Oi{0zL1I45r5CvyaMHg;T;RTV`-f}ha4owxe7(Z#Tj_vUD8=S
zH0calBx!iyR}9J=?XN7MY;fTjQ`5<oi#I!bo8%4%Fk`^?3m>uDb$!9vAgth%s$Q27
zV-isQE(tjlH|QsCS{)FowM*HRMkKA>VzLA*POgeq8;HNxbs%$t)rL{vaNcGvHBV@r
zR#iRK)w<xHcAhyh`ykVfqcA4OK_@2hVBBS}o*0OI_1KEVgcfFM97NR^>q|?H=S+O1
zsRRGcN))Qu0rn({#2BvU-95inP4r@=00S5eV*ppypAS?v+Jyhe0!1uQu7oP-_eh25
zuIl*O>cVId0ckuuYa9gYTE=<7q`U*vSi%ljX|59XoF9n4esuG2?r{#PQY#LBMEU?a
zFw$9tmwjfTqrdm!g^11M^lHcbo}V;UMyA@#*3k_(N{mt4De4~%cN+Emg_W#pYL*^^
z7E+kF0a<T1=3up$I@llWqa7fw46luny{7A*A~job3ZTzfu#qJg{QakTb>d<-!(f4H
z57V7wKmOo=3+7mQ;K_IS;^)CIS-=gJ-Q{|=cZ<2*r1c&K-HP<51wd<p(7}cko-968
zaLh&Q@>ZpWG-M8|G)+)J=y4y<T=bhX)eUzYGWo03&&s|UY{vxnCYfoAqa?>#6)F<V
zxdpdg<ox>L#NP}V>%9S*{mCTgN3n~l=VdEnO+NIw8+%9jLGKSpuE0?23ukP>m~9kt
z?15zKAb8dNIry0-bn#%a&fw`41lTs~Kt0_a4QduL-F4>5x+dhRisyxvLab(i4cMrO
z8=s`Eyn<%^8f8_uai?F)SFQ+^3+sXv!Piksa;-0U-oO}^#OObJKiVQOrsbh*`O0>p
zb;f=$7Af5c+%z^vv#geRrt5px%e4;S_4jHzg$l3m(#;VtbxI=BI4GU8N7IS0Q3gGN
z^p2;G|NfJZfn>1Po|Qf(bEtF4?{;AXWJq&+rh*7t#XDMoL|^1g$ottPVm1uf@x5Y<
zOa~d@+ydp4T!NweLZN=d#>p{s^|r2xRR>V%B~%6rEQm>m#>IpYYH>Bt1%2h~aw$?t
zYM<RzhXEHuOM`dh*h;2jn+3(A@=py1Wyu_=S`uS?vT{^Nl^J_F!6^q5+>vqHs#R8v
z;X;n}>!U)lR-AOe%8W*fJrsrG%sdgh6Z+HAHPI;%&*5BUXlG2xpi%<SpL}*s4=8k8
zUt@DZiL-w^B69}DCF(Q2+cF*v_^l7lwyd~hWHn?KNw*{rfUzhcXEn`bUwe#0h_yVB
ziH8(Y?57gF1$Z~4JX81Lt_-C$ShU<6ICD3e-X>rKEe<I2=tJ;(^g@GOn*_20_h-1Q
zp$TSvioC#p2nLNU2;YmS6=_CES&V)GwWL}z+-Bnc3A8~trknQ)P0dk_@T@Nc|1hcE
z2<c94_|=3#Q#+0}WRhF(&8gsDMZ)xmogK!E8<0?H3A`h8j}yy0Y?&VsI>uiM3%}db
zC8rv$31L?+P#Pce!tFKP_}(-v4lFrIaBMFGF^UINRS+gQw2eU4U4TS({%@Bp$w^5d
z0d2#K@OpOX**%HXqCe*fE7B7}*_^a?`X6^h59TyNecjsu)m;mHg#E__ejOC3K>JBP
zwCJaXPX<@zdtWIC-C~<}3Q=7mwt%7=?6k?E2h=~%<7X?Ha?#V*2}!8K2B7MQyAiDV
zZ^ANs0^U?VMu%Fu^QnelruaDEhdY2VAh^oupdD3zIVw@MGoGv$au`6*c{UxC1$M#f
z+3=lmfIR-3*J0$7&)9EhIjYmkW_e0In=aNQDi0oq3i)bwer5A}8@y+KDQXBcrL)Ga
zuemV>M32cOZf$Y{-xHZw(Bp>NOxNR*K1fstQrMn;s6P(V+H_q%sL<sH(SgtZnk=b*
zhqWR~Rc4&ZFdcD8w-lyk5b0M}_#7&H6kd+=h6rm(;L3{j>4|wy>tc+CsPITam99x%
zMApU|oin`${>jBsvye>i=>MHAOYXEYjN|?3d4;%&2{SK4;#Mv!qb2=Hn)HhWxV5{P
z7~2^m&;to`P1G<&tW)(&lOQ0xEv9LbcQRj*DuyC4gsGg@<qV)FgYvubXYrG9B;W5i
zY3Jq{YN{65U?6Z)H3^G|Sikh_GOAOr{O4EsZv*VY2Cm(QBx2bFtrO7(h-XHs9G!X1
zVNY!K1W_Glp@0xmBzxY5@>7aJ^pnaEIx%za{mKv&pmdXH^9iH9ri{?OaciL%q6{s@
z1-D${Bz~rxRU!!4Y07iDJVLnJ1W4$Lus`YYE|lf*un*}jVqP_XNidj)<YK(v5N=hM
z&isC08oh{bD#C{;;xpD^Gk_N;(5`RaGLSq$@4Ms<S4&d}v^g#}%@CJGkIJq$yTF}&
z4yIU8)Bhp+^z7)WDm??K-u*5W<mH3U5^5`NfPOH&|Kz1*g5mqyQw#(;8G{$WY@i1F
zB5nk8kfNM1`^h@zU8`t<)RfHWEiVV{TA8u41Wsp^0}rno9w09q1J$lc+%l6ZsuCd6
z<Ob*=EFGf_Eufg~4Tbg}!R80yocZz0aXYekm{cLZi}M*UwT=eNthik4_eu^&rNU8a
zniG-Pn26&clJM!S4klTfEdson|7kpLL3|5lOVDqHl!kc-+J&5=gZGbN(e4b<_QnN$
zk`<O5qIhl9*#^8rpU$y&4O;c|v8N(U+Ew~5FF|ogk)FJ~Jb(O>2Fe_<Z+i^*M0PoU
zE+PuE+5rDWtpC2u)}mS?*B=*}dGn~ndT*3KK3&<az3`9E{Bzq%`|sa3J>V!o7&vAh
zn5=;92M9db#;;i04!fkRNutNA`?BHu9B$wy3z}Kv#DhfIrDU2ddcINo@e$*8nCfe|
zQ*=Tgk8Hv>Ip+qUkn|#p+1M@GB$AwBE3`%EJoXu>R?po*#0{;$aGrr=lS_yupL^l@
zn!d&0QC~?E6G2E6{=}m%IPz3+5sj_aZSIB0Msc_s8{yev0iS9QL%gx4%|e`R(ssXx
z9pjiI^ZL6OS6VX~*d9XEp=ETbr-WNUUuiSUhCR>9ZMSL{arAf56Y$$a;4`&5@LBC}
z;AiK68kl(@*0m%q%2YxS=D8o_@6aMF50~Z4X7XEPwDwvsYK*}sR1!#c`7Fj(=(mK<
z*SL(v{s^w<zM@QViB>&cHY|J>zD(I93P_0BEY>*Gw*7*_JpcY~`dPE{g4e(VX=eNx
z#6H{k_+5_hSty6rs><3SILQL&t4|fXThA6tl-{{7vmbk3n1WqM6J&SSkivV9WYtx_
zcbq2LY|yq016sX5DZZxU3yR;G{*wC)+m}l=^=BvUl3YG$nmn%5h`qxY13!3~{BgMq
zc>aC~1Q3Qc^ah_Hxxyr7Dq0dDP`q~^>DzB43(l+Y%*j3|27pU}G&zxIh!U_bD>AF|
z2NAcos{Pr{5yf+l?!=2xbFKl?kCV3xP2=EI2upc=!43x{2_9x+zoPYabmqptwN>U8
zaicF(J6hU6{-MWiVV%iODu9th|KW2Iy<bi!@B~Y4TT!`p1LNl%#WEAF2+nq`#*d8w
zgxCt<v=+B*NBP9Kxr1W7LEHLp#?sq97e?|LX`whBtS_d`frSBf$uAB`QlmnH6!3ds
zR{1mi`Zm@UulaPUJ>o}_?7=G4zk}V>Un>aB88PNb#89<_$ps#_K*w+L#w(G17=b=B
zzuoU<KoYSPSj)IUJ6$8Gpxoe(>zciyeHA>vSc^diMxCi<bUk_JL)67LxxGj*M*i)d
zgHVwhy59B~Sz@`RlD~#WSs8hgcQd2%$C4Sgy=P?=an<H{8`$`?p7rgiT0w!oq<r##
zPN(;(85&~3Ghh<9@8st{C+g0D!(Ml*oCVo(Yt-G|F<sBS4u28tcD=;2J9d~}xD%3F
z(6++wXF2zj#E`62A2$-J{vvIwE=#_VPdXNE%M2{Urd8kq%n@HzLY|JmwM_EV)igSf
z|Bjr$R^*UTH)AD^kc6|@TU;-sjFh(&CnfhDr#T=Nx5mGgF0|_yh3TqX!Breckw?nz
zZ_qCE9<Rh$ly@ihTEogR;OR5ycMtni3knajDymr^n{SF~UllgavP5?DS6De>o_D~4
zasD2IrxD?=V3$Rp&~VsNcAow!wnWDWLdd+=|B2K&fKx-b&#xhy)mP#X8y!|v2&Wz@
zdG@wLT>&2nw0$8mfaI$G_FH`Wj&4z@QN`TEdwKp?)!s9$Cn=+M+%G%>K&@gKa76(@
zC8V%@4*|<q&>w$>R1d*i@j8%6BvF>=6fq;1xT^(Y%9`VBLXszU;#<opq6Xh>)m|c;
zmLU*oA=Cf^oEYBA#f(A@1}}BMYyxe>YQN#oilAkk>^Y0DPs5f_-6|c!pjKV?bDD`h
zvvKxr7lJFJK$a2*T_(UDiPC?C+%Qf`b6`FOUbBfT&qQ!<<f{uMMB2=?x6<_P9qYKy
z-f*flOu%w17EWY6vvp;m`#87Tc<f_y91-&60IK+-w>(?Q$#~CrtKVFw-&o#$8qYNK
z5cBk_eJ#Q?0qB4|E~2S9YSiL++Joc&`knjHaXoPei(bE0wxO3@WXyVTzNa6HW}{Bd
zYrCzSqVPiCc651&DmufW9;I&XEsC-Dn&TdR#mU8rEXaTA4QuzTYvNCOfc_wildD5>
z7^+m`Uz|7qoK|rW-!=}~T05A$CpgKrOJK+1e(G6FO{A`04*#<W0RtVs@_tw^2&sT+
z$@q|9I(HP}9r}ww6YtrF`FjYJnwQ?!+9^GmwURQ`PamVt-okMW1~w{c{f6^CEb)2E
z^yZb4a3_i)7FUwBRWd|7g~u4^1#2I+48fK97y7#dv?gSI`2}dkL073|MOxU$@^$Fy
zTmvGF?QadeVD^FU^(|5l;^Df)qhg-#$|mD|D@XiB_Gg}Km7?D{T8I`pn-)m$cL9$5
zn^H!FI}d6VMrS|Q$q-Dr_+=QoZ3HSt{E}1^F3JAzBBOz(FU>XbEXo#GF?jkSBEcW!
z!z2r_Dfd?ir^vS@NB@#JG{n1%BBd;F82aZ+TG$?Pw_wk2u~<Es?~|`Bhqo{;p;&W#
zS`~{l!olakTkwo1$}GfcCR{`d>nP9^f`Tt0zvN$i@O}X@b9|YPGw|@lB!^lYU?8jF
zampd+Ec3tRp~%QJ^ShsPN`{AIj<$L8xE7@59-}7@OBM`jtdnQx{Z{zJ8|w7auTn!Z
zr7B`f!A~)TbF&Pres7)Xc`41LF%qr5Kf|`PH5iGT(Gwp3qw4Jgxw$`t`zcr^gULqx
zK{s)(m&d{RGcw4&+VA;Q!h&+AsRm=+^9rNL;AZa$_PQ=+$&)TINqXVa*QF!0yH>my
z%ZQBUvZi0Zkcyn$8DX3k_=_d~;Lrwe-rSGJ8dlFjn~h~nVrGR4+kG-}3q){)gvfyq
zh46#*aqh)237x-1PP~g$P<A>5D$z=h&XPit$BL6U&(~rbc~I08?MH|iZMW8FSY*&n
zCpt_Af~9-koAXf`d0;hD?|LA;Hd^B{z;}4so5c&W<b@Fa$Z7dGG9)2;vx83Ik+VV&
zwqaow0I;uLiR*X4N8SOv6Cd>v|BgVAdmAo`9vgXiiTcAt0^QNAvP!HZjwB&7jpBsZ
znI{9BwwmUgzN=h${?n2;Jf?_F6;j4BJPP{nL?ep_Bh6}0V%c*T&%yhl$yf)DNJ^jV
zcI+w^#xnSrVrqBwQwXy-t(v!f58!%M`GLGcn$rK}?&I4rggfjp@P|*=yiZ^2hfg_b
z4hibE*U8T-Z~VLvO<-TPGmjs%Kxl~<xil^ch6zn<#IY&#Afz@C0X8nC1&R4;b3O&~
z8p$yeX`zanN)Ai}8GQ6Ak<y@!q6cSc8i{@~1G=1MDe9Rw<T-Y82}0EylEq&-b*o&x
zpr(+y=a+&6rZ2P2)EsNty)l>2Bc_X-!O*%DK8+i=GYR_NU}+*Dd_h#X92_2E;<y66
zOCFvn7=yUO;vBzLqPaD7nRn-=m-a|P{ZncvL<E7H58mD|fv~(7dEsRB<%^yb5m?Sh
zv9O5hKcOPX@M~cpQivw_&c7=#T$0>gLMg^m)iX5h$gv_ihIRon|J{~w@2?EvxJcIg
zOn*1_XmFxTq(gk<(`ov<hk$?~P;%YUaXu(ZGexJ_zW%3Y1SZ{iw<p1htvr&A68z6#
z&IDh;_c+6>w+FCht27ab-%fl$T6$&P6Ti?r{Q}a21kxANGrnQgTDGkWN%aXi@56j{
z;qL-LjKf%}aREs2u(Badkp6@uVN9%OS0nj2W~7Lruo-~LXbmkIJ)3~kUt3nbNJtw0
z)(lC(rTbX;M8Zn`WW=j}5y4D_R3x~nyGPYwd}i}-jPWPVd?(9(_V8@JB<sRAZF5<x
zuq*kK)jp4V%he&n%9%vVMK6lf;#Q2)(r}K;LgP?WG#EppJl%Sks<i)j=dKb`HZZar
z_=HnZ{eAIrgCkk)wKG9UKHJ1BW+07`a4JL}(?$;otzUDBFDb1dMCu!1L=o?CI%cPw
z54Auua2GY51<u}ah|!AFHocK?mYBOuM_)cBQwW>WYC$Fe_RptE!kjM&{ae#Z%7qdV
z*Mg)?H}P*raOAefxV)Vo=aHHU9LjRJh0;|RN0_$#lFObvm-YW7sO&t~2g2Bju>Cn?
zvTSPiZXhQGRz6YR6RQ|j1rmq`+SbNI`b7S2qEPH=@gjvw5L%3njqD*>huIPC6d1M!
z$<^!qHd><MJ0QGtsJHFB50rer=&-@K?t*UYpK=>uef_XbT@dKU9|3S;F#Y_ZVD*K(
zj372FvKX{Z|CZEC=V!K9vF5Tc`;YfpIb2_sXeKniQ4&J)%G6ek%f^m?eoMKfMT>CF
zyR$#)j8xInY+mVM8>nPF0;YC6mn5#?C3c{Z`H#zZL@iTvgOa+{8iF%4D!L8z@`=%^
zQq|$waBUg1GpBu~7pc7)TQ;TqC~H<&876(p_{$0ZW)IDn45{6FRpMX5MQ~$8`j8pq
zt=Yo>Ymw3k=&TW{#Jx;PMyS)1?z@Pu6ETV9kH@c`?_(Be7WFI%38I0J{b{IXQerm2
zF2!1B>fmd?`^v3nwTY?x-*rh8^s#LJ7?;|<p?1)QwlDa{P&(?UMuswpf(Ie4J}ZI_
z;4f{Hj9Jv$fm52!LVk4mz3yeb<&91H8!xxbJn|j;#jny<i?%3A+kTW<&U*4SA%v1;
z^s=SB@@Zhg3IA^`z`GrZ&2v&mLiq755BMZ!GVNV&t~y#o|1=hMiXOQa@XeJP3gyJ$
znu0@E?jJSG^Ykg>5`=cO!mG`7%<~J|5K6&PdpiV+OErxi;(MhkT^QU!M4t*S|LG}O
z(sGV_M+wydCQ=%Bn3yV?r)k}Om7{$F?0Y0nIr<w4F@)`|hi8>d4>SNl5DcXsgLTQg
zr`@#MK4bxm7E)n$zn9KHCC0CB%`~4iHg%|wzO%`QHIUhT7yVKkN2sX$w`LJqcbc?V
zz5NZpWyqmM>wI({oq-Wja$|oNEEZ-TKtTKUu35tYD;UT;NmKDjGZ}10mLRXz$u2hL
zm*39DM$<>vQsFF5y#Yo;DKHl5QU{=BNLMPUPOe2A-&6Y$ydxBKg9m{oyW`l6uki55
zc=k)dmNoKFhMDjV1Fd&g;4LDefn6t%I2WVosaoVXr#NM4;6bs-moSS?;^tcfcy1zz
z(4_yZ;w0e2@ti|PLd=Kuayh5gBP>$fX%YrQW(}5e{+2Gk=KChSHbmjE3xf!v_3R)j
zwnPRzl1b_R0INV$zq^HY;1vp9p)cIM1U`o!Fr56l?*Nlv&0y!8LzH)t7DDb(UQcFQ
z!717ZcwoVD3rnBnFR?_O52ETc7;%Q?Tp^8cXfonHf$apgB(!q}d&^J|9><fFID+}_
z_F-5n7^<l<yIA$VWZVR>Y)kKJzx^fm1QVRi%?CKRRz;bcZFZ7fB%Ezh%Q=RXu26y=
z;Ew4`HuFEd8HA6*)104((RVo4$cZ8-lw)OdnxzJDKCsaFIO`@dspqWAONNUJ)?h`V
zyNuu?n1P+xj$lb2zBu1W5gv#6)ZunhJ==B=)kfy<8PdXT7WO*x<>VE!X$e(r<i5`|
zrbZCaNaJ4i{mOrVPVw>EHcFk8I!Bfv2q#33u7VSl)b!>hgu#|U-t-+t1nPU37e-z^
zZEKj2ILOJ7L=BX;{dzDmwif3H8kAEoBzcI>s<Y0r9TbJdtoaAY6RGcHa|!0bgndQy
zTC25w?7okcmyweOuzDd|)BX`b1n?=_E@Qriu|_3JwL~c?lF5jXfC^3=qpAZ7kae=I
zT;E{?!enLHzCM+@xf$HK`gHEqH;c3Ku&gw93@9D)2@UjTy7xkXN~YzU9YWyGfqe&0
zARtuUA^W5<CCESE3j(cz33gy{<8Z?Sm;@%AVhu+E0$B|q$EDr|cBe#KR>H4^VU*ll
zCE7^C9HN4_Er**Qz+q)k7ZuH{`jCycK+ganr4`ah@v9(APU1V+s?E*x&*e94-oulB
z%PacRGNbNy2e)gBtw1TCm-6`|*2*~~Bob3bH4sBfoNzrA;3p8xPMl(I2p3kFad3%&
zDkoi5p~->^hgjs{#MUJobzncvyZyre(xaFGjANXMdK2}5GWnf54ukk`?2!c*+9Y1x
zUoEladbf14*@Z?J)+5DbLLmuaJ(*k$RP7P2ejpR4lMPtuw$c^BvCBE!zcv+mS?R{<
z=^f^Q4;AVY{WlP+5aweCmEwL$OQ@)6rADthbq4V?`HPIZmN9YO;lV{Lzs5TZhw0+U
zO<ew#cj)e-OOVd)&Y^fCBK9cWT+9z&Wb=pbMrE<CblJF>%e?9!4tm5^R@HnVmP2R>
z5`TpXji}7!Q-;xtE~$mcuyIxfYg9l3K@fDFdlB|pE8wMkRC~!tqN<g=ojiOEaYJGJ
zw{){r>lK|@dNEb4yuF7gV>6aojslCBt3|U6bt|349O<`NBMRY!UPqRdt~YrnWI&g;
zbBhKzAhfTpg)%4v;Mr5Bo?7WHWIGT_&|xTq$MG#)D6!CNCtWvqNrPGFrx;F?nVxx)
zp5Q3J4z@qey522b0Uu;><x!5DR3hdmNA0xezR~J~3*TS?5m0xY4OUqtKu|D%`2>&O
zPTo#7-NKr8$=gXzB#8m67=LQ&ef0pVmNkvk-vdv=Rm>9e215zFf>dAz<3l++iR|+O
zJ-PsRAFdzl%wCi@Nf=-?PLHE-FWe9JLM8-3JIsP0xJY(SL>!^6fa6)ri)Sn_mew99
zhW?sIvy<oQ0CF2?Keawb-^uWIxY*-vJ7Lm@yd9@OqlvXVRzM+O!IH(58DtKus(G20
zXES>u6RT+wJ*=-E-wK1(#c;JVhK+H8$G-6$GLlG)r06if+G~kDMo|;1_Oo#)I)5d{
z*zg5;JJ@tPQAYB=Ag}8($?NaKIV{vT=a>kahi0%gcT(k`46;2b@<YL!@G|Hj7(N1^
z5DFm+p`=YlJ3Tu&dN23u(E|G^FXH)s!1Ka6Z?}*YOo{=Z%{e3+I>lpZ4TLL3)q-j?
zad$i~%7rMg#P+AHH&9=~SHT4LTC0<N#@CGtnRM^RH&w>6@fNb@kQNS*Cug^xoR2TD
zv%G_XN`OtbQ`JiT7kB0v<}o#9v^~1JVa91sDD1-I0Ekkt1Tvt0Knm@t@DZeV>JY=B
z3H}W~fx7@GJVpDQZXD^%@3mHM@zHj6U&cMP@MGbeDqEkm8VZbX+b|u&kprulyW@GL
zlKyyKuas30MPE^D!E%%%MHDsBluGIdu~wX=hvmlY!fC@|6@*8eIP3z0s>G?5dKsu4
zLiV`}Tc?nRYsadRcgQnk(%swk0066;%IuO8yC<RS6^uC!7r}fD5yH=Hve7D}Q@b4!
z?`zz{WI<xICy5Bb#c|8^9|Kc&cZ4zPW<HwEfA3|^d*uB<x#`auK=J)<fY@0%1~TsL
ztNnR+vjf;#w1Yer^1ZH2V?WB>yDM+U3a&J4%AB0%6nRaB{EX?Oxs!_iVK?1cp>jY&
zfS<r4_0VY*()-=GMbf>S&US$mbK!IeeT)OK&k`)Z6KjvhAL^o~5ydukPE?SpqQMO?
zKAm3&lHxJ5<@>3uLIZkegX`PKB5a!4e!0J2qr>kr>MjB$tb%)C@~_Z|X{KL#I@|&f
zsi4u#eI7L+J+pp9?2kN&Ch2Qi52yX-*@fJ?6<rwPO55jnC}$_OpS9Je)272ehF}+t
z*c?UpO7ABt736*ZPrzbwClY%ldbx__`#+P0nlGn#bN?ZXnPQz;CpA`qV7Aa=6BdFJ
ze^K@<kfunX;%KbFJjt6RPxDo=n-_ZjRGsqHvZ{>!VUwn%J@S$MVYI%dI`hl^q05GS
zw>MgArYB+Z5)0<7MV~&oc4q80@7AE5_EctH%-pwlvxve}X8(X6y0LZ%bH*jtky&Vx
zBQq|iz_JGrrWFDeHmp`!8_uqr(AQvX+_>@Azy9^(k3T+?R`qUNZd~28cL;R@4!XOA
z3O)_pI-%&wiP0+Xp>CX=LNrN%QYi~)h!X<2<N&bAu_gsdaXpXq=ox(kD<Mfkh>+OO
z;}AQ><914)6|jW>LGdai2c2NBdgQVmpX`G2GF;hw3N;hBc0E<uP^?^*pr+hGj*>(r
zgy$`pl^55izqN6aeLH);$Bb!A1T4$yZ{X4!xKxXVWpz3xD^LAgsb;U)OZ(!&sudpj
zLBzX-$f|_~v)BL~m#7kFrM-pyc;NuA%?_3oz-+_>dZMiFu|1UucWPd2c2nynSw*Rf
z;6ScS!037U{KCeeHiF<~xC!nd&H@J<mIDrw3AhqtA?7>b7FYv!g_1d0_+!VU;>rTJ
z3HpyW5N;6MPg`2|^i0^wU-P*no13OtlKj0Z>Mlc5rR8D`LCGXkQfo@wZB4-FxnO2s
z0=Y||1yWcIKFP+{%Ta2z95xIfLM#;P6iRIXGnF<Wq%KE336VpPVLC#EfW_`)<FYBj
zx@a4RNysS-tP-`{lzOX1ASm2K_w*kvtY5e=AO+JC{OKvyPwcBe2PkA~BaP{#8!#l2
zSjCADLJSyMXz(#oB08wD36sP^i&(uI!{wp|Rws4s9PK(g1zl++6c-n7+qNwxCgx9n
z`qSC8svl9X3Y6;ChR%ATl0AbkC1oz+I{{B9@bZCgi56DfNG7Lz(2X6qR1uaUOr;cH
z0Arov>4~d_8h9SlT#V7^zo6k}0N#Q$cn~TNvwSP-@4<Mo-C2>F2ZW)Gu4<Y)=}r=q
zMW<&nF`XAyann80dz1Zmnon-$hrj~n{3o|;LK^AmUYmsxj6&$~LPB9Zu(e&YASn1{
zHjA=!7A$Ld4Cqz4N@e!_^jG$eEO`Bq=H}*y9(riQh7CY(t9pe(fu~jdqmMp%@SvE+
zG-{xSu>-_y&ns@u=4cTi2>{(~{4>=>o{JnMl>xrpa?IV7)E?ky9&hfDN_s~PwD?6}
z>^L<~XzpS2K|U+%^T+!Ah9vwa5ExHDH2QyIo(GfQ9oP$3Eo9{ly#4vfKkzOEkHKCz
z0Z+m&sP3dY0Bt^>9HzR5t}Zi8CK@ULdgNTGSk?URF};)uK!<-T=KOqCt-?G+@@(|N
zWTm*3%&3KoNhM{Vh-Ma7W^v_D_~}phX(kI#`WYhu-pAs*@4j17QZjw|^xE3mapT6F
z+gA18ENTYGZ=Y98%j5=6xGOl02itkncna~pphp2SKinwgrH(QLq3=FL3BJip{NLGn
z9V-)QDD%GFt7u<L)76ZNMwf>11<Z>e8$N=2{zkU2WX!PBMZOClL%}i^czo1M$YRo`
z!~lz#+e}AM{t^FTfY4Ze^f3Md_8$|7`Iub+=;MgKCTF4{<Fle7+S&vsYxKfoWz7My
z1AU{dW;<I<EIFm$*dG?`pyRwY={kFO4c4u<-g@h;w*qtK%(?sSyHC}se);m{z31oU
z<^7DSJ881e)GZKiZhDjgxu~W`bSj(>B~ZC;oFb8=0FBJDk4J(}{Zg|mc%-FA?3)l9
z;|$jV%xNK5k4`PgLj}2s5%%Umelm-ol6VsZK6O3XE`e)v!Cs+Lx(RB9ENF`npYYc&
zOZwQKKfy1zJ~3N<o+<J7(dUt6NxDRtb~iSW;j+stpN4w3aBBjopCT{_3Z+Y-@?H@~
zq}diBvK4lbZ3u&4FHwmN#(56EMBOr?eFDyn4FFq?v;7Lj>evP<L7%S!<pe^G(W*wL
z27fGfV*MBHW}NrK(svbm$LSB8u%t`~Z*SLJ^1GUQ6ydqbumkSy#jeVGBEn+>nttHn
z>MHGB=^frl<jM|i&-8v#O`E5adf#Cpn-gJI4ATQK)Z`hzfZHCXD~`i`yrfix&?@CT
zfKy^~!esqtEum^5JmOD?D6SHOze}(MI^skMYzC#_f;N5B_26aIVUk^pb>~=5WE?K>
z9t}DC^9gO%k{(;?m%&@3+w;qNgGuwl=FK3vsoA`F;g(<K{E`*}Erc`&%8F^IHXQ*9
zmVJ=F>3=?4wliE-QS6~jtW@ct(IxySEzNp~bhp@rwckmT$iZ7_bm4U2$j{Hu&(H6D
zapOig?|v|xgw5TxYZq|G9e4B&b8>Qe|FLM%qD6}qnLXZfc6Ro&3x67bv4_qsfxq_0
zp%Z?gzibBrFvkn4*NIfxqrjz;ms~Oue4Y`-G7Uclix662S~SsG0Jni=J6eCKp-P0E
z5W$T21%miywVd*y@F}ARJRubJJjirU-_Kt3$l`7;pDpRMRMn?!>uor?U}3zEl~JQs
z2*$FD7Lgk7rcLa8<Hn{EjCMCoRzW<_#V&v0o21FD*p(_&Vk&wRj1z`->1p5)A4=e$
z+DbK02A9ADMYWGbKd^;*_y{h7?eH$#B<u$R(eI@$mGK%3z=!zT&?w;zY~au{vGy<e
zcG~ZM*FK%_MfkP4gEl+E+vu8!<(>&)lY_j^gUOx$nJnC!Duq-Mq*EgHE%CD^>4>qS
zpF}F4SzEt7jemMl6MC5MX@vn$O;<Jfh_FirD@M-T&=D#=FAdZxgc}zq-_|gu=>*m?
z4PwM*8cTd~(oTrw_8E=8Ui#pF_YX@lp#j@6;@vc@tMh~0g%1F~<qvmr=cjzWo^=U%
z6M7@93Y03j0KsqR&j6MYyy!8%wN&Oxzfpx+d~{p*$R$Lj<K$uno4g*Ic~^A2^~7rl
za^c9x$XK*!5od&9=V2K=80^FmgajSMW=yjB+Bb-5YOARMdRzF71eDl^ngitnBSfAe
z?+H(P!6;ZR%O3}$n?ZDf$LP1y)q$%6S2fQ0r~^?0V+rWMwx6abf@)~5#oUD*aQaqe
z4iASplpz=cT^J_>LopiB{08<a;@tqcG;YqgaVFEpcyguZkh+_a#?!L6<t03{iGw?d
z7))sNoFFPH>Wt0EunpE|gi+0sXx4ZG`rkZ?__()2@(4kSU!Rvz=%f2P7@}d<$zHFA
zLip>+4gvnC6A0<S0rcS5Ow%rUcF_Zj@p%3IsG>0(r#+9oMN|S~FeVUIZ#%2fkXT~N
zD6<GQ*6_^;rJOTNz8-rS-6eFFpc3+n7clQN-ZEfFCFN62Zj~)q>P@`5flC*$HJ_}d
zCp~Kf;j9uzM-hV6045u8BOqZQhB1sJ_ew;%*qLu=m;>)XUI#0+B&gVF2PlM0hyn|A
zLN$B^$zTC9Sg3xL+GzyMAUcM?9}}~aniy1}C<CQTx_|>TO~r5}GlS6qQ;2$@`mh5s
zBPeMHNKpZlSZQ`JSv&X>IFhJD5_hmYgb+(vYw!MX{LpKHMj$2;btfl+2;PLfit4gD
zE7l0YZ)e#NDjI1{U_!5x6(I+o8mRv#|0sy(<w^`j-p%7z!y}Zw0eo8wmQl^hacpa6
z=?Exs=}iI{IDtk2hNz4x;wsR>cmr?uP+%iX4Uh=|unP)d9E^o@uz&#~p$g-F6Vr|3
zV~!;d5{mLtv<*0nG85*HDxK&y(0V9DE$Xo(<yRjrZU>0fQfh%7s%(^Xvog{*)A*#A
zO_~Ku9kmu(lb2u|{aIH^Bk^u5h1jczT^X&<(*t_+M>#fD?maAGANv;bqlerd;Mo<9
ze4LfRx|!<vRQt*GF`7VhVzRnTYI4%*<|DDAjDp*JzJko%T4lb%$9aEZ23q-i33&*)
zge3fff{m0!k`e--Ie|v(95Gc^BxwL|c>_t=yeu5t#~t%UQnWRkLLp#elcbVz4CamV
zrL>0U_zR#TSWhTmk}HnPREvFm%lX766cQklpn4oCA!YjR=_HAvg8(^<nkFl&x5!=I
zEs`P_8Y#x1?jcCL%}T<AyKJ5v6!eEAmLDcjM_M32=d-+;&kflu$OH&HhS2B*+Q5am
zljmMLu=>`@buYv{^bF7&7Wud1&;ieH3IrSM<|tr*1h`hiOap*X1@KBw%>UIJgU|!d
zR|Z?$S<edz+!s$jHt9`o4y1*RMR>gkCzo~dAYRzyU931i0Cs#T4YM`B5QiHab86k*
z*&NIKP$4Yn!XYNNH*rAJ7fUr(ig)V3b$l*td@sGY+a2Xy363_bLVZiuM{6tnR<m<6
ztyN^Lw!PkH=at?=!5GZ6#4(|kx?qC+@H?7gx;Nt)mK6eQ%4Dhe9mP8hG}Kb9g^p#Z
zN4XXO)8y5Coy@X#NO3IUW#zXdxl%<*rOKoKbH*6dgT8QJRI((Vf?7;x#etkHOf~Fe
zXBJl)q^RWRM@c8ykz|De)Y}LP<M-?Ptkr3d`wZs6i_;^(4Ev!DBGnjGeb&nC<fi)S
z0uD|fvK^~#^i&9(d$d*&m*(?YGp$wBJ;_rGxJ-|JHzoJx-Aj>)O_}6OX5LsfA9|7}
zZ|C+S97!VSq#rz@a8@4Hs6(8Av{1HG0<2Jf19kQ#2ifof>7*~gE9!Uwpv?&zs(Cn`
zIL`yPeFy^jZ-36#r!Wp~xT8-Y5M)&GdCb&;>O?Jm1st5qU~1$SFy^n|v*(DMEXHFm
z<)T{Z@8e!zHzg182tZyrdFA9xX3Y*V;+C=OKJGikhmJUWi#)7eWJa?(nXEB9CnR2f
zJuuZ!u#NQ(GQyLIhl>(3YhGw)WsKKa4H5HZkzYXJ&XzKhN!CT^N4HpkeKP0?)8bb{
z>rJF-89oDd!9ZsuB$kG9@wIS5XE7Z(O2tDwazDT9JCHM(j5xBVuyjh~DaBbwBEDT7
zRzFfgC`iCmN8HzS0eXF_hgS~8pBPOrJJ@iX^|uA(r40Xv#YMCJ7gW@2pilB*qi{RA
z;T-_>z+D66{C!YITeP59@GZr}dAz%Z1t$q2j3Ig<8NhAa`uDAEWRBKA!#P;5Z6J!%
zMRrN~=ne;VL4KvnEoD<1pi>rS36N)<M%J)DWRrq?mxZ=V%y}({*`a%&R2UwSDCZ?z
zY$<1jhMf=wp^Mpmh4-l2d6moU6&!E|&Fryz_c%2*I`#-(v|THS{P@c$<9fEeb^rYP
zpE&TuQ=P?>7E{hA!eL_ABMw{Xwo0J~t4b(Uy-w|zsPz71rAw0ej|q@hwnA=JUxOs6
z<3th;nFPWWKrdY`vm)Vbtt`p+>qmehQvs7=pVLg^O`((t`9=UbIee!I6Cwwx8U8c5
z|F=b*&C7S?XDnFx>U;;|T>mcEni(4tlsq*Ao-Np#Tvk3I@9(Vr{ku)+yDB08stbU<
zE5}z{^2Ey<cHguztq${%ga8CpuN$kIul~yWXB*Wy?-;!bSS~*`5hCs7VuC2==k{qG
z{eg7(sQB_~aX9CN=FmM}N6}Gpv|EU27;uRGr!%?5ZS<0Ntxfd>-fvC4?J9pk??R=m
zC2&jsuqtEiN59MJAJ$kx`!aNi!<Esqdw;8t?XN<uM%{a{d+3PGiGH*`Nfa3!->2Bx
zhkFGXB|s+!9wM`($`$<qc+)#{sBsB<w!lDC!qx5QrnZ%GK^o``a1IPsGeWLDC>@Ly
zx`o)~Y<-u!pp<U9g}s1E?b}>jEq}oZo2+(Vkk9Xu)kWWw%YtI12uGX_oK$r66<=I2
zgGZ-%kG3<eQVcOVCn6XuT9BfKE@9={%}g{BYewHkkyTnWpWGk(6i*$$e`4<|C-{f(
zW+qE*U+7^@k8od<sNBMjoUOKvRSHJPF4a8ZFGq3gvRKz)xu`K*4!auV(oLf@v)a_b
z%M60i=}1760?@dDIV&hPV&b%rWX=eb*obBEVQ8Vt4CMVGm6qGs_$;Ld3zMgkG7}(W
zW+{}eyo=I<@BHOo-gm}i(ETB)JK3szIi+`#FKzY5wA+(k@eWPFiMENteGHZjy4EeL
zF)DoePd*}5TzPW9JZOLQxG}^#n;UD6i=P$Y^I1k|B@~z`5KMdw<06FVr||C--`qAf
z_Kq3Cuc*P>dCFSV#dwR*iAs&mMl_Jf*aqt6=FJq<5P=XL5{F}a9AzoC3FD;+raK^?
ztduQ^Rti$LIXEGu44ethfx#NR$bte$gfz$*Y}0W{*vz=V4xJSo+5oT%>JkCwEdek@
z0_HO;eHrnH7K?Px4mu-KGI_k6kR7aBPgyxjMSllUW|A_Kpj&uNh0Tn{h|SCdaEnim
z!BADg#~WC%lGq`H1x6QIOE{RzJjr9~$2_Urb+qDJZLkuc4Dw(rH02XCSP0%1HZv~;
z5uZb49>Bb<7!wIg<DyLF!m$!wTEo~x!jfW$`GikW<>LNP4707zt+SgcUdDt|5awqF
zosq>0h#0I4Kx6Ft|BseY89=atHaBsn$;$KK52-w`o7z(Jp#+asD(B<~Nnzerw%s$B
zp8hDtw=PO3!U*dD$cH6)v=1w%t<%K8=lK3s=5J)Fo9o)?swXUsXph;OkiyS5a^(v4
zZSQ@->Z|ZP+F<GcFxy!5Ic*LsCf*3*YB8zxo51X~Z0Tl|g>v6K&y%2|jPx90Qy7!X
zyK4qJdlaIpo_S9XeA(*DDUMd0m51eb4f@%HIZzAvtzj*ww+bgI(F=dX34X#;PzhQg
zyIQG|IRynR@+7<_NfwH0lQzk94vB&YPDLwnG)n2it0&NQ13=C)N;XkhMN{$G_^>i{
zN(Nv-FDCKEDD2THuL6<P-Ryk*n)r+Vjy9a;J-8U-a1)#i&{{$mZGyzfn>5{yE0};2
z98F*XkfD@PSL@`!LNFcv0%<ib#wiM51qQmLo7ODL%g>e6RM;!@`2@Np%Fkh^TX?Uk
z7C$j`5Q<t%BvcYqexzqh8(EY6GiZOQnfKzkx)OuONs)smzVLB<y5(nOyDa!{L`j28
z;fttMNa@vCS>)ytlS9S}9C#*2Ta^*~oBo5zoertVUO*y8;7}okbZVfYkZ{4T)<LQG
zMHRj@fxv14R7-5o&Wi{e?;W;eNMAE-KWdTdO%`?7X&F-#p!a^79c!f!$_leZbya%{
zXG(3!(rvBqT;5I--BpCo49C0MRim+CHE}$e$X=mQoit<6i;_c7FQ@TF$v5u!afug>
zw4=X7INT#ogDgooQw!r1G>Fv{!^c?}EaO6g<`C2+hlDCY5Es*^<?0ZHJX4(mnsABR
z3pYn3aUQOTk-{__ktqGctyqNXb;+e@#nsy=nX{eK*%XQ0%o}j^JnFXOkd;-rp^Wv5
zFdCszl<JCB9-5%^&g^4{m`qgUj{oT$8nDL*9S7`%(u@dzN<tF|JH-BIbij2gak_E0
zY_R5VSX?FxLeYtnCRC0y^vD-*Hd4B>6+RS(!II`zcRP)rd55|Wq)b}X?@$Z<&{2LB
zm5DPQm9e4=sCugXM&VYu<r<BFP|Cr9PWeXrFtMe>nU+UVn7Wp+S^O@M@$<5ORGv9q
zm=kSCotQfybOmp&V&-J#tY_mY8X!I$pmHY@Gbs@+u5my+fE(rEc1he|;VhUbj5YF9
z*T@PxsZuG}12c(@@*1o|?0!~^VwA68&jRnT7GCp?P<ob_X$x^#G5&F}%;`VqpyxP+
z(FB0iid98VARB}?6C`hozZBZs1D8$2UzmP7{z|jpo!tB*^kO-zRSsIrL(DjgQWB-0
z`ahfvzSUboz|Y9ll^pL485b1r@WVX!+)yOGwZ1;XDG>(m^RXV?ehzPA=NlBd_n)o+
z03ZNKL_t(+KD{$Cf}t|vG}E|&knEhn&ixuOHjAES#%6hL<Z0n&Wah&h--4f!%hv<^
zDu`bN(NWLS%LjY&2u5`wK$veN!9wRh`V!hM1bo{K7GQ`YaRo;5$jt?ajb-P~)4xR*
zMfV_EATJ_GpH9+;d^Vk=*-XEJ`SX;R&nZv+b;A5M5i3!Rs3gsR1dinskbpiGyObkx
zav0(;m+^`Mzc!|1^8egCu$#k0ROYc~9nB?ldb}H}x$nPY!meRtqN$izZ*o>Y)M#%W
z!A&KGi{bd*7cTsa=wl%#hw^e3E#kT7m~0@<x3fq;j^#6LICZwMjA^EI3KOk#9pY%3
zn=LL@kIrk~xQvNsnM-rd;b+8{NM{9+^NRP!XAUJ8J7pz+qXpt<G4Y2~(sOuVH~XQr
z6iXHRask@b_MIOV%ex!6@%N|vz;6~dGk||6d)7g$Sb6D>S|?|KjcSa}MROjYo41yt
zGKz~yPiM&z*8GC?51guwraBM*;W(!O=&xYD9$gbH`#B6`DKWW;J2kUBGSRx3h)t-B
zIECEc5rOeb*t>=~>&u$f7VgZ=95m%9B!%b<>Py*K%HbmZw-Rd$FKys^>-%<9vsU|Z
zSj|<Ks)+kqRD_3xP7^1#Fm(k7pBwDdQ4g=ddi?RnPn<YWSy}n}-~Ya(q+}?apLyUU
zdrFrJr?6X6s-^UNBIHgIS`gIeEjHG^fQU6dX^B3Lxxd8dUmh$|xu1impE*;K=ehn9
zt{0QD7v`GR6BNW+a#ydWaOLEMt8QaspO3w2U*l4b>c81R6Z7VCXa8H(_#1?9h#gCd
zkZ;|Ixr|+U{6h!;(ujP|Nf?xRT5Ndx?eq?3qZOYc*T^Z-Ei$j^C(C%P0l}11wbLY&
z4p&uPc|_<T(?wgeP`i}%Q)|}uyg3S{*tPL?p_^tUJPfZWxY6AxGQLB^`R)&Ah@(1>
zxf|d6aoU!5iM<a0H~<SA6;G?i+x~IlA`CmZDvY}p5nzNkCS_mzv!JPPF#ugTA-}v)
zC{($qn?*2nMPCXS@WM?<m?{TE7D@9jAR2$+My9_<qL*6w1M@*t+u8#!c}LCgv3K;t
z#=Wy%=_@d5qx)%gzR2dteY>8ZD3#f91lWa0o)+s({IP$i3L!-9?D9vMW&dv<y}w*3
zR<PtFxme<1q#*1;B<Z#ja@|1{9Dqi6f-{D9Wwf`q|M|~<E-ET|;e{6p3kxrp&d)=K
z(a6^Rgp8CaWLPO_B;_Qcpkq%ic5)KD#nzX2A6UXA<y0?bZVX|8&;<doLvCX;($QK%
z+6Y#qoVu2}Lcd7Dv*|AHGgzmC#>nJMrg&etczXI0*aN1M_I{4jGIa%CtOEGndav0U
zF4|3WNr?q#fHu0DvD9F0#`0TMMiSMlQookR{>>RYPeu*A4$@<fJ=WRT`Nu#0aoMtE
z=ezT>*}?MtlyqP^-w)nlA~TcB{_KTC32fLs@EoI>8tP7y1zP9HhxnXbz1Hd~W|dPt
znV4-H%;%FX&_RT!R<D9ll2LkS1F?cXJkpT3f@AqOiq&VKf~|wG2q`K)&=^U`?)yh|
zAteLaw4Mh0il{50qL7c*aHIg6AZR-jm_6G8oB7S={rji|W-#V@ruQ}(GGJKDj~-{8
zK`NlV(BSp38XFsLyY03|9(g1+HC3TdoY&6JMm6iF(B@#9nRy+=$%U~a1x5`SahPow
zefjk}r*3OMxY(!Pw*|`%w(sVvSjKfy^#c|rG4Zeb?ko29Zo?{RET?)llRl<0#Ix@>
z2JsAN96kc!HUs#bsax?iz|Y8(mG+ucZ368D!!NY*;P>$8X%1_{06I-n<Wo@7r?)*Q
z3b*lDHO5u~qPT7&+tyKE%=AH>Kmg1RKE0J4R|3RkU@BS=GIhSBb=`+voi+E6<JY4R
zUJvWiOD~Oxi1^@x4?g_x!@KUf>$cl&yXBT!e)hAU^&ai*{M^#gV)oz^A2?JP)x>F8
z8bNtGo69iE38EtfQTl<_3W)cgcaXzAFZMQKxu4aSGq3lRa6)uw=Q2x+MvEqin8nQf
ziX$OlgulTth(b^xAIVS{F(zUeM3dngh9=Zjg*uq7GJ4MFo4yCP!|7bKuoM!qIl6^}
zY|4hb3i99N?q4u-DG8YX=_|-TG}KA|b|WGGBx@rfiI|GW+Grdv$^eZ%)YsRau^H*}
zq=pW$RsB{KnJSvx91&S|H!9&HLlshXzj7j4QE<nP`f8#-BX2g>8tAa#mUl;!eZrVN
zKDbXQK$u(tGEY7#D^?$^qDn}c51dP=K*^urA}fKPEGrzdDKil|qp$0UnBRZ2%6%gD
z{lDB3dnw5wfCfhhH5)cOwsPhF0J{pl8kg%g#?vL#N%ztiuhvnOvpTn~atGv$WAPX^
ze#z@_D@=*xx>!OqjQx?Uier=`38gZPTq*I{huU-%?Oxv3X6<&_ysM&`*(ZdLo~`Rj
z)t~4eHa;@Pw8$$lT%}CU?G$vJ>)_E6dYZk(k|Pin(}J-#UY)VPUB$b(AJ5v9<W5fA
zkRrtOI$}8?L=Y7U%)$&SD|za{)E|1Lfo*=snx`b&^<t?&P&Zcb#l5T+G>OO*edSHZ
zG|4sfrJWSL_n(aBm#XggS8j|DqI%y+_j(dNLPP2Ph(|dL+W`s>P#<vfxa&iIqj}Es
z3gvk#GT%$?P(*y`6_D{mrxpHgH<n|9V3D5YoH76GA1bx@$@aKHej*$9AtLyvawfqS
zQhi&;of4a2H{lT2Ie#mDf=xEpP}j&OO&tB9H|zKuw5mTn`u{+7b~aZnL`l7ay~1zs
zsX&N#6DrgLs<qN;bl9l=j3=P=E84*?kO|%;1IR%K@ZFx9<%$rUY_JOCnxVH5QDtGf
zaCGt(sYgl#Uj{%N{AO8U1cYq5<Higzhd2mV^3xk$W6=#5b(76i(G`CPOyrA{$ACix
zgc8@_iC>QMcP8k(wS@_=-D3yLcG}^8l_cod*~XJqte=iK3ZtlY=OakeA@{xSmKv2*
zSpD8^^G?8V*u^be6JL4zv_OEGBq40o6&AQ`yihFH5g=Xs-e+OgF#D{2&Cmq9-oIpA
zUMz4G3-&B4owaE^DMEQjpq^ymA=aUk;?8BI$wuNm1VyK+HOl7+ne4Uim@QQMY~@2l
zv8Z_mUyVER!iE=Fxqy^()=ht!2w`QE{*ec7V48>XpWH=^=x~Mhh2QEcnKyYtxv9PI
z^WQOMFIOcMZW~LYQP>i?uyuRBXCqDiP|oxHM<-}p{-Q*h(zh8<f~=5yOiow{E|AyM
zacNDNi=`USE;$-(l;hi#Pzp7Wot>STd4`X@VI&isCj`I`;0J@9_KhGA{udhIlcFL_
z@*X20jO;l)_ZiDyEMN7pxs<pi09F$nW%!?>aL4Rq`3aV9qof71omF43>`R($Ja?S@
zDuA7Bta<^(wU|dB53d=1lhv;oenz6_KgWweBw@8QbPO?{!eVCAx}4^!%%KzZMgz+R
z$*NyhKt>XxhbLkD5)Q08#W4?($Fi*)2qN$yo(v^8oKv!9AG7UKKK~8Bv`G>I7Zir=
zVSU>W2n%6rVj^3%ke22RA{upMP9Z%Wm~8BdqpzCB)<P1-QyDCyitG^T#}KLkcsPZX
z$uu{T7R-wAeN9X!|A6cT7)KxvZ#&yy{MUd>b%_;39;a$Nv6nLE^Q!$I?%Ga)&)+5@
zmU)ZVpF7y80@hB&WFhY4QMok*)fpiyJxvO$CK&TztxysoC^$e>({z$v7%YFOndZy5
zw3=FtEV_Lm;hXocMm_=|F*cSJD=02z`Ep8xU4&7GQQQGzE=A+BtCf)%z|x7t1z_|e
zE`XF!G9$>0@U~ozi)Kj0C0{>!WT}jiic6VOMJ+Iqcyd3)<W=49MVpyT>v()QK$axE
zaJDEc$!IP*owY&;xz-i3=M)KUMjctx{*(K5m(-&4lrW%=Va#_P)+rDSp#&(IDdhL6
z+^B`i6=KIv*cGe&W#uEAAeRsRFhQ=<v_2~h^*-LUTh`Cg%lmDctZX9-oX0AeE9~v{
zcDe<fmR2PB?)s{#pHx(kl|@<_sj2_WNhc!-py=?A;qsM9`6+3hGwUMhsHD?^9k}mL
z+7N})w~>-hU5iiHqfVmgN-8O>Vd-4Y*db{-yUn--PGLfzeUZRgwX`_91QI4bfOO%#
z(Tk|Q+&iqFG=UCbg|){@DmjhNRo|!iHKH&9gfLEcm2FOE!z(3+b5h0t)%B5|wj3m`
zpXm%-BK)c?zj^kF>r_nb%^EL>>GrkAS#n0!AID0R>h)>7zOCW*rG16qf8z)kFvHuG
zRP9TiqNYlqi|m~mq34HVa*Hdwob7O69XU#h_A+0vn0hy1o<L2Cs&iAw)@9o=>6>E?
z96f^la?xX_vxN>bwa56XhFAX!P5)dE!zFEtm2d+RB&{O?#r`Qku<X^0mllyu<>_w$
zfW8@rz>@l_@Dtt}rNZA2&FN)`2E*AItVjqW-k*5UF8COcF4=0>q#iq64yl<#LOKiP
zBBEt2BJ@5iib9S92FeD@Eq8^nNOa>we<li+;#emK1%p)qUwKDF=H_0M>)k9|nYS}6
zZzm83dmuZplN8UsBTaX$k(KXmYi?ok1h%|IRwlmH@;>_jpVHJ9r4v#DNr@8re5j-X
zBj*X+0uh`vvukRg*MJqnLBU|9hm$0zEL4A=?Sf|dv5x7~3c|!~NUeYx%@i-=x{+4*
z43{B_mCyRJZSI%PKCI<X_s*s9Y_$T0{$Fwx<~h6o94_RmeD2BM2R{H$R|Y>h$R1Ej
zmXB=5bwareEJ#1dbA9=Az(MxzK*`&k0ILH+|DfV;-U+8zSt7dPUDJ-*rYV_o6)k2`
zCbRAcuQn0x$;?XcB<%J6lP5(5wNZT(MF<AKPOEi5%lCF@f%p0J2FMU5y2>s`Y373{
zxrf|B2Eu(FuA#EQ(n-(PP(S&oQkFLJm)2vrKWONZ#4Cr5@3w?zf>4+S(;yIL!Be>h
z+3fKYGouq{2BsvFIghv)vM!@kT=r*#!lRgnv>`a3P`5F4?fFU^wVTgvhF5whRaVv=
zBrO!0C3WC#Gx^H_7O&@fXSWONZCzUGV>+rF%o}n;*|;cDCIiGqU^yj47>IuoV~<Jg
z^nB6#CqDz5&_n0K!*>SjY!L_-<-`!BW8DD?>o7|$Z)q9HLR9_CkU0_Kke9&uf-0DU
zx!8xD={F>FR`6atb-rH18N=wu>ZzoKl3xkMp3S|<+_?GhYpb|yB}tiR&xP%sJdaAB
zP+_c^mf`1`hkTE{1prMaH{3cjM>w6ju)n7ac{QZM{?nW~n(>{%Iu}SZk{Qj4M1WPh
za<%W7gr=Am2vkO5&YQ|;Kmx$Ad|J<?PF{L1fl$^;p$WyG*;YeTVD?60gp9gQGc{-7
z+v$U;^V#4>sc0UY^LA~16_Y@d1|?Bj#^;;4FP{A;G2FT#=&$u|wvu5P1L`m?6ntl}
z&K-<?WJa@bF1s_={J*}}iiV(Iz4>(tw^33`;kLm}{bpfIq<Ii6)-yz}QuTyvLe6Ec
zK%*ZkAG!!2Oh@~jES(aO=@n1uHUsS4$@fLk_hCY3Bs!61(c~5PkUqFzo~!6xf(5lT
ze@}K*?pE6CdE;iE^SiJ`Fo;jsOMv|xeha|U1EM<@0ES(f(~rW!LN+hVoD4NGi`*k$
zCz%ta6<!sF8mS*TcG!yq9|b+1M^~~OHfWG&^~wWu3IY=oZb2_pSr;9e^Vvt<88DBg
zWZ`e`M*D_9hKFMxD-<E5hH;6QzLx{{3d1T2e@J}~OQTtSko7Y#%2h%Akx^3@TWZLy
zWC_&5beIA;Iq1Zv=~kH*Jpf`HXO|SjsQr`{p~+Kh3PECFd9vL1Wg4Ei6F^D8pHjoF
z_l}#jyeBjrvO|rTZO<_^i<lJr<mL_m;FG%SBNa6)EReg}PL<*egdf5n1RB@_UJ<pw
z(SC`Lq3=GyA6BsOSAD}H<(bdr!~ombGluZHn3dVr>CFQ%Xmc$*EFI+klZT%GlsGAH
zvRXrH4^B4`9*#RG7C%uP>k6s+wdpW%V(_jV#g_o>Q!uJnSIgm+>Cv9qA3|#>hw&85
zCxK|!)w3~p@?gw1u6%*1FxFGWJxRe$BHQm1iDE!R=oWeS4v4YSW;sz_EBicB=N8aO
z%hTS4-ONXz-MV!YXS!dINDri{2eX4jJ#BI&L@&$^)*WSA3roiGbEt)@VH~^y?^jUL
z2vBHZ-7X4`vU%_w#@7O^k$EflbVHwWG8z!9BXJysyVzX7x=lR(YTs__3NEQdtW!F3
zBq+%hZqTw*pD7~l6yB0i1#qch%om)f^E5<y3v=sW(rJo3OdYiK0JK7{mh;f__D2hp
zJ9qB1+s`gF;yVn0QB7_wZ#R)<WQ&kkVDk8$Z`YF*!O|GwbnJ)eFdib|Mlk9rZD#q0
z)LTi5B;Q5R89Kq|0a_!ovw3aRVCRh>thk=~7Lq5hWFe2=o;i(4)0hM#GjXuD4hcpD
zYn7N>;L!{{gN>Nk1+dAZ3+az>0TX&{)*Bp5gsbHeT^|%yk&{5MKXHL9p56E2^>Qig
zg#qQ^!-sFb{r165`wqiK`y8VD$*&`}hms}=t5D<-{}P+aFgqw}Cc#sDGsY9s(L)4W
z63x;nY`lV{Q%H~I;YlPaFpV6Wm6XZk3~J6LJAPhaG_W*_l*z;i8{BH@V-UOSo)yxR
zl)I>cf5RsD^<d`@M4QJ)GkXGzL1<?X{XF}_&?lXiz1q{p!Vh?OPM;D|tby0zdZ?3h
zl`jw|Z@J|bo6Toa`yI{!jA}AN$OxgThtd}6tpIChQ)MCOQ&x`0=-J+94QSBm{SXat
z3Swc4o05@ZvnI~tNWowyjc9D--;t-TA|5U>)#N*Q6Yd9K;u&QXtPLf~kGBO4{xEcl
z7(Y)U3r~XQD%g6ZR|PXAw9i^aLmfk_=ovjwzwyQ!_4Q|}8PWhQsFgKk{UoNXv5azo
zTr{>bT_ixXa+X^Uo$_}H@VVXsDGbmfGl*j((lj`QVByK6uScu{3j2BEjP+jUJ*KQV
z_!;lf(8Z21mwTCsnFn37*LaV*yQva3da5!4|K%MjYtei1eHW={Q?un5xm9evgfKN5
z_OVGrg4L^woF%sx);T0z^UI!i2kL0p=&HL$5w7}=#VwF8?%g`K(D-2Tf$rD{?-kLz
z<m*!>3yPVgVZx;U>=tTVoHqZq%e|Mc)up`2DHLSiS@y!mo4qSOg1clE^-a0@d&5fp
zDcFa<B3X;@Y79a)cEL;@5;F3g0k|~Y0+46?Pb3QEHC62URJb6qruH%0z3){c2@vU<
znldjg*>7%tx_j25$U}N}yKlp4QEN;IAIV4EavhbxDy_8P+}+pV<5qYCCR?OLPqRs?
zNmI0-5VnpT?N=OVp|-mc7|(2r*?hgXKS-TnRjc7r@5x<9sSw(O+?zm$m5)4uuZ+*Q
zPtRN}KMvww?IikReHRf@R}Pr*m^}8$AsZ`4JdVnG-dZ{8)g@E%J<?F6MG=PP?6#@u
z*I$4AZ-4vSV5gk|N1+wk5W8v<CH+E6CA)>bvlBy@E;ewmO8ab=lWti6E5I9u2JmhE
zV3NZIfzmK2QP$5gNo9<Nq{;|keFk)(6BdbXwj)r8#6&0=FML!R96#<IYX63xa0&hN
z-fk&IFlty4MXZ)ED+xM)MgACtM6DR14+21s)YW~vJD}RF0_?;-=5C&TN0%5vbl~q6
zrp&z&_8>8&?vf`^xm?I;lH!lXYhFqj?=3yCYiSX}kyXr)7U+Z3Z!XNc*AsQ@%<*Md
zQsi)sA|93SX*1>U?>IL);r#{UQ3(Mx7r|orQX-wxrJo^9ir}LJiVDZ6FvC02ckGD^
z+OwvDhm(lY@4+SOsX64XH^m_MJ0Kqgaif+3y#c};m;{?(MJr-9-huZel3yjy+_8ks
zAm~$T$6M@hSf?(<Zvw~rkLstPpXxoiPPlIkP+ft`Gw%JqKZ#;PJx2`Wwov>t?7~d$
z@h!z+hEGniPlP8}zC`|~t={#M?VU~fk;-6{dXy)NK@A<sojZ44amDCXTpjr^s#!XQ
zlt7GXvd2&{{6uIV2>)mdMOe&WmYXeo{kSPT;vsknJ|FDN!WtC$JfBa`avbL@mHo_N
zvXa+unkKnti$qulXV6?ohkr8TFCx*5wTz)QeAxtTf?W-US3^ng2Pn3XqbKF;%7sT7
zl>7GWyY|{^2RrRMoJ$NL5&GnqK+qh*tDzeH1Mi>zc7LZqAUp(b;`Jk(X1Yq~D8yz0
zkEsIO+1zm#n)SGfah*k4`8mOsYBrru$%z$^216`T?13-gARMI`FrbYhbOs4u$W01Q
zg^yqXOoKgg{*zI~!!bbL|1Tsc6$-_aDL#9W?=W%^2~%MzTm@6%6FNFLJ16@r&}*}v
zf<ovjp?VeFr8w$o-NN#10PQ!@pIfdlqD;fN4VQ_J3eTz_=8REdCRWQ*9|e_j3gdtj
z+S6kP=%IqjLsa(ml-J?Eh^hY`?2Ru&6C{CwlVp^|X;?jmxWVgrMj6!4KKtxvpM7@s
z-FFXm=E>j!2NnnQ!Y(&JL8Rb?8bU}FNae9AI^>G9K#_EJqrFW!DydZrgjL>P&y)7d
z%`sR7W+Xflogl2y6)ed;X+owo{nVRuUl(moukk+QVw$A<az8iWf=ZtTv;y_!r<AE8
zKbiaR1BJv=`DV*>Nk`{i*V*RqUQky>7ebo%{-(sgG?p#)WIj84>TN<4L+yuBWGE)d
zoj5w7+&gmCWApZb#->Lnd<_F^s`aKkhctHw{uUq**1<to3yO{2^JgZ35KviigzpDK
zF#MgW&5Wt$#d>ahk<>^4zr%QPBmvU&<cV80Kf6ejL|Ppx;iiWwSRj1Q*jl*9JERMN
z3!>m4S&N~c7<JS2yl}D|oj9E|@I55><XH>sK5$gZKc}90tKj#{2*n_n4}}_q0^kaG
z@C3ILx{rfl5J-!I$^gH5>_T~VGi|~i(Rq};N|vhn2*e5(m$_N&xkTes@?8!S=L_AC
z;KRC$MFk+VFbC+^CB<z5d!)EyjYC?Xu~spepEeiWLX1MIB-o!J)I^>RYK=zo%rioG
z41oCf_}(t6uf6tKYU-)wFMFVeBRy;te#1pdB8Bf15kiEZu+w!hCzg1Jes%(dJE2q(
zApT-xPZ#iMY}ziHhG`O)yFZK*!Z*1(St5pg<euJ{KI8wUC-e`u9<8`cESqbTdI1|W
z7{t%A%E2Qa1tPWBngIN$cgs`XM>eiqVlFSbzEb+$a-gcaJ2s`!-%7w(f&quw*(De(
z-9$7ZxqX?$KyzH^+YYujXcJ0L{Ff6dG|e$K?3?nx!r8OHqYoAi<NYNSJ>XWkP53jL
z2<-it=>X=>sj{;7F0Sh3lFibp$Q&<Uv3QHPsJeDyd(k^gPR3tM=)NDagy8{rvhSaT
zop3oRKk**4)zBu$o~ab*M8+o-=5RX5l-$R*vGu~E$q%bin7-GBN$UbXFI_#+z%z$-
z{16H$Gr$4LzOO?I0Um?Z@EA3ocy1?r3gi8_!gGmgZQQRSuHU97gYY!%4yVXFBFw@v
zQGyO%3+2=Ex~T^H>)xSm8fxL;@qP{41n=$vB~yimM~#!?!d#uIC0KX1$*jspAAR()
z%P#YIH6kM7`0?Y%k00-~R!=|uw83DQJ$v@-*|Yf$XMvJ7@=vhnoNC2H5mY~jBRb;H
zEB}(~qjPXRVX~9o&gyfM+<q=$c2dyFq$o~6Cj1`&+Z_O=lU1P1heQ=i<tbEtMq!-M
z2<2OEy*03%VPj+C`0?Y%j~`!IS-E%b-uK^szpAQg-MV#+jiTr99fpm9de+axcup%g
zLou{t?e<M58tEu$W3Au_oL3ZfvZjHDeY7-328=3J$1)WPAprggCt!sYV2hBbRSS>6
zBk+5;Se`$VRpfhazS$GcF||`t2e5q`FD~bTfzs)8TCKh<rTRUr`1tred-g0?uplHP
zBqJjuG<3u_dK$qfX#>2?Ce9VK_yJv`&0qm(U(0Oj^{qC>5O=Px{JDbJ$q(xR4#H$u
z33mdpLP!iW!A0;hxMslHgB1WJc1r97Sy<;{^FHFArQj$)aV<OZs2VkuRD(c$&pr1P
z6^WZ%@OW6jYp=a_!won5*MI%j{rBJB(9qD?*;!cFw{b5nE*7*&zU}DM89j|LGTTZ0
z5AK1}?X1y?wed{qZ7&3v;m@_G&8!&bEp-xMa<is^jUzj{Z&dMS6m|9d8>WH*RvLL<
zj{U|#sYg@P2XEWRPyuYPu~tRd=afw$Cz%y<0Mf<)#HP??rmKyx5g_1DT<jHT9j?yI
z@bs|e%$c)x?b_ad7Z(?&jnB=TPhnvA!T@g?_@)!xOFt3nYwV{jXVSw(!q5V1<r92N
zfDnfQ{-L8uAo*6<rOftYYC43Yt#X-4A^Nq#r+=TG7~zXnqy0!~4v|1#-334}Z9#n2
zKXgLFW&qvE(r=>7%n#~g!h1hrqZ}vo@r_P=f<uTozR*sCkWqclI--Tvc@FpmZ1M;<
zU6Rb)Xu)nSmc_Heq>7lzAU+W4e`7%ub2hXP^MQ9*7sJy6)j0-M3MZTKtt9rPRy(8&
z>6h<e^9x+$(LXWB1s+%2L#W_fytgOpoj1MrVr6p2lz=`1z5FoQeuQ;X>R@SaU@-=S
z_bNW4{W8Ip*%4#I9d}S72v6uN%z4f`qz67kf=>bEPvzkQa%IKyutgZE<<PKRn%jq-
z=g@c#@1(V!%%$`c-j~1d_+t7JBxflPL@AzJ<z!<o;T<Yk{n`cq03ZNKL_t)A_LiSO
z*F!DT!Ns6iNag}>r#MpsYo1}_Etp!^vI~G(vc{2Mq{2)}xKytOeIj38TqAu?`wpoN
zPo087VAHnNP5<b)H_G$V`|1*7j(ZQ6!>bf_QT*`Qw6s2NXSfDSJ!j4wVArl)gPjQj
zPxI}H^zDUa9u6TzU?9pts0KU7SO$wB75>r9s{O1?AjKzEJ@W86EEw#ZL6N9qvLD+z
zi8>iAxVe^1NhkNbMj+}cC@ZF_jLf9~Da`mY&tJpB3B&<L$WTz|CR4O}P}qqU{NVtM
zhd%;a362$7TiQvC1c(cztbvu65Ncq}E>_N<-~<Uq+Pa84jdIO-Kw4VbaP3q-l)HBA
z8ltuO4yS=C4*$V!!FkF`|1JC$Vxa<l9Lf6W0PD(qx?zksTtDE^er9NKQ)DN7Kz5I*
zn`K8>Bo^+DYy^gqo<(dT4JLpI#Em6tHEUj@uNHTng5m+&p?W(@JYMr8xB-5?kkvO~
zYM`(;2gr{sPiBAi95%c~c{_O(q=d6#8mm5MTMNtgQrLufKmzgz5(x2NcplbwIActP
zT_Ec+oErqn|1kQIpr@>pRr^6n_5$kLNRA_Aghlihz!sQrT3xz95$n%}DiW0^3B0a3
zLS_1>aj$^LI3nW+4CS9|xF;LnN-}^N>i^4Cf2V>)O3GXmx=D9~vrn6}%1(-wdKeE!
z;J9Z)92ZGkBxWnY{*-sJ?*C`+%md`As{H?{ed+4z0M0-$zkN%0Iw4DE0YVmb!jc36
z3gWIL8Pu7eqN0vC!pKMl{W)M5MMqH?hv0%)+!X~C(_ss12x$_sb=F?8^xjq7)wRF(
z`=h(M&Z*AU>8w!Sf0EnpRn>d<-Fx1>-*e9QJk6|XCYDk&g_T=bI+NOFuKha=AECOP
zo61<%&7E7ATf|Kwr+b`O&D9<R$P9ow<*;W>)u}<BqE=2W{0<&E55I%pJa)U8Sjjh*
zbH~k0KLZHYL3m&utb*fEk8F&vBh0)47M8Kl!_aztbv?{2qWXl!yPX=8R`dQlczQ<g
zD)8bN&)6aguebOsEid_DiIp9Y*Ur)yK$Qcaz&2p?_d*dYTg{_i8u+$5|CgFVs+`of
za`%(0{tQ(Gkk70V*0-_Yy;SEyJAOO!in%DCb=^E-P;c^(4Zl*O;G!ZP*+F7Ib2*(D
z*$QhLgqi3HZ?L3SlZD-JRbwcw1auP6ErZ<hamdluMqWl5z0Np`>y`iP7A%5DZ!cA$
z?dvQ#&4Mtzx+6R}nBJB(mBg;}r7!!#2NPFsPJcF0k}xXMEb$h1%O`Wv%{Y;Ous<I%
zjL-FnST>6a)V3w)TE;J)<jadve-eS(z*FB;%^8<dAc*0QJg>YK=e!D^NH@(ICEV@U
z2AjdeZv&IA-8V;lr3c{sRea+MJ8${sv$DQnv4`u05p`<G7psT9E7Q$huS{{l3M)t-
zZiXqs3xUoe07h@bC8S!Sb%dY@rovkIfX)xuHlKEpTvGh*zB@jje#}){!aJX5<!a_#
z3qXY`htX8ZE2H@I_qlf=zbR(YcBXwNeUz9Y-JLs_B1)pTIx`bTR(@YK7ihJar$u4_
zaQQZTA~7TSJK`dq*Hwf=dT|!0@+-~7Ikl;=+BS~I7XQ2quDOGQTejvkm(8r+TJhfi
z)ldy)5|T8=S=Gs1)$~r`%m0t3Uggdo@wuD0xtd>Y<fESe`1c<`l677t#VPBfHIIF9
zN}H%_q5fX3_!nB{lKcLFYp}y*(18DWl6NLU@3rLEu+{7fxl@T3FKuHRTwFq4YTX(P
z5fXipe}BikDd`+5>#JQY&!&%hbIJWODp6?o7}*NzA5czpcc@w#9f!`Vh^Qfwh|?pI
z2Q9izyKUT%U6a=HOX?N=xOOap8G5Olkk}eLi~BL*eQIb6mx<0T@F!%MrStYNRr*IR
zPv{)0{f?ZpcO*_ux0u}%lZoll6KdDv-b3RUYQ{2UP|`@pr9Y*`1FqVW&dC5K|57LV
zoCngTPtoteR*|@4((+vCTU!TQ;d3ci_O%X^O_ixnyvxId(yNtJG+W|73B5nvjOmfK
zhw^U<lU+buXXa$4RE^%K{uE37)M$7atlGWy^fb1M1d*?nh9}EN!5%1JS%4*Ws`FtY
zj7=ZhRn7ga<kawoxd5LxnI;{UQMDX%t;Qwah<>%_36}CznaKnTg!dMB`drn#{4o%T
zK9Lx4G2UEdS~}_+<&<r8JC+Vk1wZ#cWW09gvXTyZVElaS0sWNY5Te@7U6la7DkjzN
zr&j@P`!F*;&VMZ>Sq@NDP7uBdfE&IFm%-O-0Os7pGk@W`-{IEn%KPy;&T8nvai`gH
z9R;_(5%NGUY=%F<@&asLdKuV*cORfvB!T*-9Po>LN@EKF;U{%`6~>HI7s)zx>Quj9
zc19K_9{FcL2YYbO<@`$6HqtS3BH@Or%w_b^QRo5aJWN(v?-u-Ld5B<<pRYW`UsrO^
zB3^o$=UzVqBc6d-lUaTr(`#6*SA~bfOSNoV$9ooX&qMs~P>UWH*3md$C-{4q0awE9
z@CCTZkKf889n=atbt+-qvSrJ0N;PD0%<%iD-%H)Gin5(V_@PYeB%gK^!?vTHlY#1d
zfGtC)tA6;V64MLwvk^W&Uw@SQm(x&BUL}t-vSL3g_H)}N8vD|<Dh4s{A{Jf`Fy}h%
z6avbJ#9vlYPz2!f^T8pa$$%5<I6S2|c947o-V0xV8-#~sW#IJKEGtl2dPXK$y}j_z
zzw_{N8m0LCX+lBfKKnGo13%QF8^?mB*RbX=VG*8jbY$jkoUNESn~UcXdx81Il({H#
zamN_G(MaAV>W-nyHtLL=P%UO7z#U7NlG?isO%IM?^pJ33?Za3Am%x4S&mzwL_9E^m
zVr78N7^m23+3&sg-p0n^F3D3Uf{BnTgTUbc{7N(pO{gaEXb#IQm|dH9$advRNZ8&~
z&CWeo1iMj@AS@`cX~aJ#gj$@tl9K)}k0{Eybc5zy_9+#Y!W@1ioqV<g&NSaMH5OO2
zl%2X+_obUw=$nX@nwwes#-geD%clnyA@FXg%LQ!{h>EdTbe^8gqZIAeTSZjtH{Cw$
z;!4a&tIQOrwC}&b!`=kj!n`Y&ijUGE7(<tdPrkR$!y)-;_`DDjF=28|nxU~06&?#1
zvdhT37`GsFW`+oe=ic=e4bSHe-p9n9;b~pzEl{$TgJSpRY`}JakB(yOv)p+;WApKf
z?UFS@c|I>MqxcR0$5tJ5AJOS8Ihs$j>M@Yy0U@XZT1055b26@;l*5Nju#>hj&DVA)
zAIDT{e7WeK$%$xT1fUSksi4cs(>(z7oy;wwCWoal9zMX_JZ5_#!P>tw`&>Hv0LDz@
z%{mGT*|3&REaLmCc_7u*<{k>Bk++>Z81#&N621;?u+RoDy*m8qJ)x!N@Y_Akd#5;-
zZRJanVSEo*@WXU?6rTKF{!q;OVMY;vbvqUrUy=_ZbHGy##kz*Y@L|8~vdjMCKeV^w
z*fANV!Ud?3l$+oOqFJbFQaHnkJ+zF&BUAPhLkc7b-WPCaeWHB_hzKv?xti>5Cn(Dk
zIEz#Q$k<-|(k|GvM%B;M=&T;gLit@|ql^;-bwt(vX`J)f85dulZYSJyP6x;f^88XJ
z-ehXC`)<8bs5_q)aT~d9IAx|_p-Po#o<z4$5G#9~+}G00Bu~gWGQ&rWmlZvHK9?JE
z>6pt7p?FnhD1&7WVHaD43EfOsoo@DxI@n!vaIhHxsU8OWRW~=zQjGIgBlyIdX-h}$
z@3B><yb1T*o*3m!8=);7ypjGXic|CFOR#?hr7z$WL&_PXu!#v!J_Eq%)PY_;E&mh+
z(Ou-J-Ll`pf+tY;SJirAnrc2ARcxGhvi3fe>e0Ce#7#~2hNjDR%U*q9xm+?CAQ-1R
zPG=8WnyEJ_3o|B}R$*Lx4a=Sdn7f#p?%>Wv08Ov*`N3K9VF;xpY*N_ZpwPk&gNF^~
z+vp|~n1kO=l{?l$>^&u{YD<@|H>an4KEXaJujiov-Ek^krr|QGf1vZs{u@3hn*Aa9
z`t>|qSeGnWGJE#yqn(z;i9n8@nptemnA)crYr@<leR5|2!h}7_pdAwyr&deBBz5Aw
z?Sz8kD!+W@!|$Nl%XAkDZ9Hmco|O$YR$oKIk63&SK*Nvde31o9xN`=;eXF_t-;el>
zFx^JAm5Xf5va-dX$~_Qmds8{h5v*;LWX9nP!0%=HD1b39v$&WY$FM~mIYt)Nki~FO
zHIZL@<7g+HL_FBV0^bQ1<Qh3Vc=pm)qP9%Kz`77CJ9t!tHI0GoN0SSEdtka1MiI<m
z$w&Ccuj%Y17xwJ{_`e29s)v;AIt=_4=JoGCQuL1B&h2BlvzKLU03{%H4GJ4Bq$9@q
z02jHBSfctw;0znNZ+nzhQ&n@i>dtp2sj@RKBQ*C6fEy<eJI1*+1QXpX>!VSS10_Sm
zIo{szd#Ntrp?_xjIRG^m@kf&qEp`kv9xZ6IHo=1;<kxTKGw?Tf?C`1kemm3sT$InO
zJi1SQk7o_mDaS?gSDhw7+yCid_8FP2V#nGwVgbS(TE%GR;maK~o}@JN(+z*>RpdR?
zI_I{}Gv<6Ae3aXsq;@ZjL4rg2VP%r~A#Oh()aWdTq3BQUsXqBs!^m<&{{eNdM++%+
z!Uxi=Kr786+BLb9zL(NXx50Et!dZq~(N}d-t1gvylN3hXy%?Jqq*vuBe!AteGzj9n
zcpVjj;P2hc0g=Oz>%b?+`$6p!x)?-gVz`_pQC?*~`(b0c?MiT+AOS9k(oEy~sIDCN
z!>@F8>x6bjg#*~d_?uo!u6PGd{<C5+!}V%eaiO2fQ+H7t<~?7h`aS8Rwyn5Dc5765
z4sc~uC&*FG6R(#~=@JZI$l##9qFQ@qs<f)ujmkTBc~jd}zu_l211$8DyFc>lWZISB
zq~0#_e$KsxF)|LPo5n`iLD6W)r?{1Cn*f4vIaHZ?PZXf8l^JmwoK#z34BU`D*mEvl
zdLZ2_%5?c~>CLKn?~NamP1{$V<5MCQ{uPj}(^f6+7F30T@M-({5x7S*--4Ct*8Y20
z_k6n94Z<D5)%&>Z!=jxSDBW^L1f1OcpKS2a(a5UQ3*xSn5KS-vyQmpOV=mPx`!wq(
z+E#rD{w$i0g7kP7>6C?prr6iR&!G(-<hXI-S6Fq23FpgV0Djl3X{>$qXs4Y}1P#{5
znSG{etxUL^B_mk+<sVAub-7Mq8pP8{7F$?*nijdI5f689M*(Iv)1Az7b5}029Ly43
z*PWoo!>nR<=+v<@3MbfL1^ak<9k>1MXy?7NF>xHt8M$^R8SA3VaxjF`I3uvBj^z$f
zrE2)_J6L``TeM&B3x^7ctV>X3VTpq^$4Qr+WvJ_7#SR+dG{%|hWx9SVT-(Py@t4j7
zRW2sxv0en+ooP712Fvf|t{Zt&EVHv1it0+LDrnqywDV3Jibh}#PByk}IkaBruy9j3
zQ*}klp=!86gBcNK+sL=D>@@j3P9+}POh=Td<*W*@D!^3jgKbQ({7|dfd?&MtdGJkw
zBV*7T_Yph^p(P_8DOnsBW?#ZL9->w$vYbS8oY^~|akn1rjHAKtV7k^D+pp=ApuwGC
zRy(M+0xY!>d|TVB#t^`~a;E1p%R`leaoPt_ZKpX&<B*(czms{D%qnL0$jr^!b*$5g
zK_>(wBi%ia2LTXiy}@o;L^7*4PL-&>YVIQ`3eURYn$ojmuHyqbFE&rRA#y*Um8T4?
zLmX4|weK~2;-%5Jb;AC}^l!Vi^cD&cr-KIV;^ZWjgh)EDZZz#?dQ;&o;ex&5<kExr
zb6!ceiH_Cby({wHOFna#aqZ_fEq7p~Y7O+mg3~HqB;#Vw=ETymrk&&1JTZOpgeb*=
z*>(7_iIU59Bh>?)&<9qPJCYM&tWc7&R$&)uE4B!Ti2SI@9!YJO_@vf~g{lmn+_S3X
zo{B@9@I3dnr=(Ipx#+HKOBeP=Rtz4kNFFE?IWoDQ-RBm$UnSLA3$aZl?-n(4>0A6X
z#)mA-b>J}Ri|{*xJxRV|quAt`IL{iqXmU}4yfS9WV695N{Ks_54gL&6ujlZ)wd$8^
zdxJc^?^QJz+ifcAEZ#>#9G+56sazpeC`VS~=$UJ8N>>vJ6m;$sUI;h5OMJA*;H5n*
zThGl?*tQ2E10wNhEF1Xu77MFFcpACL1@Kfi)t-UQEDF(KLN}dW;^M-J+}Pz!7C~jY
z)tyIgYCfKG1Izw{i!LRt1(OS9ndwc#g)&&*`qsCanwlCK8vg6Q{;RI8?z!il1Ln+`
z<8ry4eDcZG*4Cv<mrk1|@)6wNh89&?J=)2>f25n0HB?G3*qe~9@dG|xH75#U!~lJ|
zIB=ehUGr3F&r1WyH%3C$Dam<UC64WH_C2>X{V2{OCTHC4q@1lpumpq=an%|*!bdi%
zgxTV;<$UZFmoQjvp<>*RMV`e2|9j|)MZ1HuS-Xgvy(!ac$72!KU7ElnZt9IO>=n!6
z2@#dbY#*nElad2IvX)M!RzgA<bVr<g$xHjSUofPSS|Tx;M6#Q$n?jL>;&KcroAVdd
za(q@T=6}P46@n8FLqtS1RPutPnDVRej1}!Fot<h?`KPu{o=xJg@L4ZFEKW<Dk3lz(
zqz%@>i*O07O>%BNGsP|E_A*7%uO^wPy*dwW48q|J#uKV6PLF<j)(6!tHK=nOtDKa|
z6rw^9MH_eImMrq6JtjH&E&l;s8fp1T{&zh;2JcpyAZLb7pNypA)2iWi3rh~N2CjjB
zz%7A+&MY)|ILbT^0XK26mn=eiT?^EB3;F0woorU{wIArhcTUI|QU`08PzEay2>kGe
zKU}kB&Et<hURztce*Joj#d6tYm(|qNeDRB4EG;dKM0BjnNkd(jEfE$L9PPZb9eyX%
zi{H<rT3(*VWnQ^lJfUa~anqSext~xpcCu<6^K~WnhRozS&R|aY1fZ#&9}js)9u9sB
zQ^5zTpj^D6s;$hmjcC{G^OJ8qVu`=wg5t0qc;JEF-rnzg=R1oQEwb5cR;$%wu>kkq
zfB$cO^P440mRPN)i1Qlfd<R&o6pAUB#-t5wIogTGkGtQ*f4DHp(~hMJxjVfdJZ=QT
zeB&u@DPy{Rho283XbIp=PTHH{bR!f57!SizjH-rP^u7{k9)Yzwc~AzZt*z~&AN}Zk
z_uV&R#te(aVw$FDnucLK`|Pv#-+%wdKK8NifBzWN3{M37XRuf6WFQAmXNGX{i9;~P
zV!>6Nakypnw~uzpvEY#xSn_VFhb$z6I<EPEifevbAr$0~kMRDvM?3FO7+SmN76*4m
zXwWkDryXibc*PZ0Tz>iGfB*a6ySuwT^O?_l;uD`pBod$g^rt&JJ3sl!PliIFJMOsS
zw%cy&KY7`*WvgdH036C6<8o3dIMIAV%Q+cpbB>#K8IcsGSi3E}lHmg8>t;s&g<Vs6
z=d0i9xP(5zF%;GIUcEQnEKLS;1<M{%aozYaE5zQt1Kto_mw?7l`nq=*d{+=)7WmNx
zXeg=hhUMKoSPm{~YgiryW-e-%FOqsYIpQh#45kSZ!JAOT%KlDgFqSs<RmCx1PVz5X
zSTG~CNsuf|OsS#c(c(}+JvvNAOaBIh#cJP*;xPfnsn*tv<FQHURZjWD<^T0>_w4(2
zb!t_R+^T98c1|E6^ov4%Vrqe8A<z2sQ%3y+I<K=Px0s_IOLtH>d3}PJ@W5`qnh$Nz
z#si~LcV>m{BE2paCNGi>^2QvpP!WRVpyYP|t50!Qo)|(Shk_m~qQ~+y-RkPmb)j38
zBa7;CCZxyAyV!r3I9LVAw^;QQi5Ize4p&bic}x1J#|KgdCl2pMV%pzTGn%@rGPvK)
zXTQO1i<mK(B9gCj%46D%(}tN~r@;g(fTK^-cv~R?5mv5TxpL(|<o=_NKFV;Q3|4);
zqPYL%U;c$57$n^nELgxlUrQI%sF-4Z7{BC1BU-Y%Ol_}Yakh&KcH1e|LnKcsZ2?#7
z<`z_ip7B~E?J~s#zm-Rm*eqNf+d2KW;^eSOmW}CDoEpp-V#aO7{(`WSo7#*-TYC6a
z-^YR<rav2;&yR$R834j>3QHFLiC~n);k65UfHiBL%vqLpj;?XiD~$4|qLj(09|hs9
ztJ~rJOCo*t7j9#eH@xT>%zJ^%f*_d&D*(Gi(@-yiJXArWxIw)%SO%H7fWnmt{MLr_
zn{JOz!AP^U%g_CLU7V@Xqav36Tc@&7lzVsuekx9+$_92`jPYUHvgx+pMH0D@w883P
zW-VhIxUHRQ+o6w^Jup3WXIq}&nI`~RQ||7&Z=`zguI)n%)}^ZD6cu*>^3`P*gjAo5
zijgaNEJJ=RcZ+WC*N%mUE?`1>%v?(DO#t);x&Kanw~WO%v*c4$&jX-G+mw9m*Xq+=
zFSu04*0hTeVuk<f;s5@P8)l^csGe-Ms;rU!4Aws}XD!QXSU3v$G>26Jc@Spq+_^X1
zbQ8ygVRKUc6pnUc7H0(3?PIylfI9;*wVXAt6C8=!?k#qnK6J3zI@Zk<#q`!6ioojk
z-(5K7%qrb@Wb_7k<}ciO2hF`ax`b+xCwl^s=b|cyTA5{fMELEjKA%PP)DDf@O`)u;
z?6}RyunkrgXAwI3xON3MO*-0nryGS{7R+beNQ!jiTc|%YOZ*s*mZnzP2j~&Ir8K5+
zyjonR30v#9|4wGjPD`_&Jj||O<&I(4ta(MOKA&qhjWkyNkjNUWGXuN3nLGAqCk+Fy
z6Z{8VvsZ*0K9B!waEV<sh4nf}czEz1I^8Bh+k7EB7a(wq{{Ax@JGXG_9gI39p(^<<
z9-YI=9UMF66%m+yQXJD+gLP)0BTU(e5ff@8{5rugxUZvN>usKDov?DoqPv%>VY%Xu
z4Q@)R7~Pr*uC1s3B>c$R9Q}5uCZD?D1hDx>mPB(nSWNRc!!2rSYq{szd1cV1b$^nY
zf3<5zo<$o;yNa%kCV5`JX0KD_mopLXQE|6F)<sXgSVXz(wPGD*`yWt~ZY3&6h%2%0
z!Y<2Ul&N~?Y=O^<W?W?y8A&37j<GDl^F1Ui#s8z)e3AaR`FQ}1I#~TnutqdLq}kX{
z{g^UQ?_tG$aIrAu#_!rmNCac&jU`vwFPDyG{~Uo2rJK%f3S=O6Tod+T_+Qa<>p~>J
zb*lCEkJ2MJ`yVp%S?Tau`CDGfWA&Zf_$W~S<p)Ps+&y^E@hfcKN}s#2jh!NIJP6&e
z9U9>}m|Mv-*?loq?5E7iJP&}S9aA`t6N!PqrGc+nTu}XJx`OrM-o&H-@iG#VRSWH3
zW40dZ8Nz1klzZg4gY#bAn+`SXq^VbA7=3JF_|k&3`>lB?d#*`;HVEImjhmNHR1MHU
z`8`63V`)$1uDY}wThMb+-i7}(*!&3L2l;mdG9q67B%PJ8i}%KG01@I(@cK&j$*6j_
z537tovT9%Fqq@g-!zOBXz{1t@=FQ`{kX>QDeemEmniFYjb(X>92Z+jiyJJECmc#2x
zQBmP04=W?A2yt5%t53`-_VlB?hS12$&kPrC9M>^T`qfVhPR_b5i{rq^{8Np60C!C}
z+IeR(ifV{<4_K~a#yWVHj_u2U-^xRI>`t(--&(ybl&}h_*!(orC!cYbC?Kba$|k+Y
zI>pGYu+9vuKjNHw=b&>q)NTq#EzRk-GW{eFz^51IW&}~cGmte{X9nuEWXkZ7@1TBU
z0!F7F=N!)z9NEJ3Y{kUcY<~J^C!GXbj+WFp<rvwPuo|X74WyY$+PAP%=&=<Tl#75q
zn-8CiaI?}STn~{L(?oS|7^0q|eLS|EDj@xa$%<AlkLyZLAJj4&-LN*@3i@dl*1Kdc
z6;hdNqE0es))U16A{5vX#wotpYK13J_^>Di0bfdd@w?0TQYzwqqu#aDX^oRTl*q`(
zT~`LXt_%R%H&fHZhIbJatY_8J6bd(lt3YS*M^y1)M=g1xyKEt?+8Ni0J3U&9#tC~-
z(BahQDakMX=I(e=^mcOQ3&u=HHuKQGGx3+U)~`CAO&_(ss+OOgh+2$(A6~bm|56rK
zIWIj4T4jw@QyaKKJYwi3{A~Dby1xqBBAMk_Gj^*PM|UgAc~39<M1M}&ONn66Y_9zt
z(s_Cw+6<B8($W*#AU^BtWs2zhM39=TJW<c}>j4h>!c0nQ4AncYUoqw9gUx1_gD0el
zQ2dWKKPQ@;qpNgJ+#p-{9&S!7OE)9zds@6(Cz%wRsYz;qb95J(dIAREEhS!d92l9w
zDuH6SK;_1TV569D<FpEz%b`3|$s{2$XY6^DWt0=jh+fvq5UWLH2Gr!~8_LMfnxgI9
z`hjX9{Rr%*Sv<$w41dQb^^<7P*`m6ljHL&=3`1P2(|myUouit2xMg`-2kjMYuo5~K
z^N-{h!WZ>{^MWA=1^My!_{>9W^Q%czu?epns~#QY9RuNRv#r1;O0zIty@xUl7P`hm
zLgz@K)t;a-rhLa1cY|~;B`5v!z+(R)Cy(4}{|B~*#o;kZ|H<J<|MD+jxs_RRpAgXM
zY|19%(~r^a!6`;m(YCIlH$|;_n@%WYNOWn&UbJ5AShnnBhscPWbftQmT5Vh9P<865
z-FQ&tw9M$ESpHTg<!yZ4LD%1yJ_gI^0F%<sW|?dJGM6B?dO%2K{(3^suWwdHJMBRP
z;M<xPOR`>0!L`r_gBg62OtGXeAy@mb4V^IA19eKg>^P9!5}pz0KISmuL*kXcFn0kt
z!(*$@Ncb%P-#sQ`ynouv(Qf#64yIPoamq=6@8oK6001BWNkl<Zqtb;Z8(D*O`f*?j
zqYvkkKQ=5~#H5RkcGB5``L?ykHZ6WVM7-l2g~A+GD#g=NfnFtvd>W87SZ6)nsO9eE
zZ;RzsKgL<t^btJT8tXN<5Kd0Q@7aOsVrnXAJjUqr-bOm==%^z(B7v)o!;}U*;TRc*
z6^3?#5h-3o#L2YV(#?WiyuwxObm?$&gB&5XVzaYDr|gf>1J{}1oE`^NqUlkTR%?J>
zLI0F>lOwkeT`F|j)5Tc9A{SWIb%p{Q6k*D_EtKkt42_bZ@1$BX^v+E7XL&-E6LB`P
ztBe_`znM<OVZAX`5#_l=9fG9UR2#Ld;+9)*zX?$6A}V?yr&aS|aZN30`Pzs<r##tU
zlqB6_I`G0muPjp*!zUjEndD>p8|ve?_?aW#m5DKinVaH5{OtIDmKPo{XQv1wZ!Zqv
zd+CXc*#iGZ=$jO^fX|HhjP$dy+wcf-%~egg@U-MWvruifL!O+!CgoD+)|wf#)h7o!
zcr#XlQxxwxx{o};;6x(q7c5?PL_La0TBhA^!y|@zaXV6vHr!5o13(Eh9wdH%<wDg9
zSTuS&KrqR=1GK>lD@(3jRy-GA>kGH@;G4{^qT=@clRL_k342R(@Ka|blmC`({^{G?
zBf7|I(QdJb+BA0VCM=ja*H-*}#6o0D;$dNA25TF1vF<umt@^t@7F;gct(tf<RnrCq
z!c2tXJpLLIUsf%g8G3$Iy1#TX#H$U|3zx*LTgZ`|{brR~QC^`3t3w&AgRk=&K}cU_
zp;XMbc9U8$va8W0OS?gjr-d;oFT^W%;}KFzo?EbpNBYvk>=YKOj01zrZ=jK-*J7Rn
zgB3BZ)5RdR>NKf<c5G>b)tTTG@t2A>86`8WW+>s#M2t3}qhv3|r|wB9@Bs}UMb-nj
z^4&N^_r@Mh7~&qEgSzgtR997kJi&H1_PD=G$6O}9fd{Y}?JEB&R~f9_vE+*EkhUPt
zijzINC>J9nr;_rF4UZQ2>by%ey&0i%?@_G$v`*E-FE?^PlnZIpKG=6_nJKFqoAm1Z
zw{r5d_wif<z-zD_-t`d{iaFId`Uyt2QrE(aU$XjxlrU%G@9e1sc==6_JhNg0GiLCI
zRpj^y1^+tpX{ysy`CnDtdjjVf{&NN<)q}rbbqN<s%No^hrhsy$H+-LZ@mA~i;l~(}
z!OCt4Pd~OEZ!poORjih=1#cJZ9patqN6?_Qk9&GL<}xw=5Td^CICSbTm0*~<2G$;c
z6`R@8PT&sajV+m1VV87)Ycb=e^WzQt_)dU7Kf<a_90Tfh0nE!-F`RN_4b~X}xwbnT
zVnN<~kbSiT8i@wkxB`C_=jwO<+XK^JGrR)Z;9kh@;K42)?BbCCz*jwEv(}2#=Dwd%
z3WlkDi#s1@OEWz8BEU@(@E=~|t9%?)HLQA&$L^-SgP^1^4vj61%vVCW$AgiDG{8u=
zgjtLj!5*H2j~?x`L9Du#tCrGHPf-oC?}8Ijzu^o+IeZj;1y{f#_-T>a60S*RhJ3v<
z;epHGai}@K<Bzl8avuE@K;!5uxLQa`9}+o!E}GBYEzDocvSr+H3Db|_z*zKc%#8Tw
zVdB@&1QXzjqn&vKWDVA7#$W!CsZ)PAIQ>ky9WTs&JFcoS;Dg0SJ12``xE1o@!=F0Z
zsY5Y-I_2Y7x`@T+^6;xHxkfTs8CdZQ%LVm*e7Fwg0BDkIm^g$U)}xO;dhfmWe(PJ`
zI&$Ub+S=OhUi;o^JRchyzBE7WjcW{1E8K$PKw5i8C3#~-I|(voGH-}Vh}42Y>s`Ut
zq)M16)LY!=b@_{G9p;^<n`6hTVm}AEX%cj-T?aWgVE?_K4llrXzu;F}@jNCrdDS__
zUE^iAwNjFL@Nw#Yq-e+o-;GZ?QWvBzG}y-@zh!<U(?_L_cIup={i+Z&PW?&>YNl7~
zVG&m`_2C#zd(%y~=9MfiJTlM8GNe$HcDHx6k`%;nNc-O^K7(6KmhCSR5RCphm6AZU
zj#e+%3}B6&G4hrxbPAD8JxC)=<kfW3V!<QsB4$#j<MlHL!wk+n*N#<q2{$I_6m#oE
ze2v0~J%ezm2yPGPn9Qpea-P%|Yl1gKE5vhRaF*(#CXR2C_a)H|!9$`MgH%RGFv(v!
zn0*tSVYalf@O*&C14y_&iRCvu7-GJY=?=I+kEq=$S%Qh$*{PB2V)YJgfYqyyUk@vD
zg|&YD`n7A<R#sMi?|a`njg_C@A*ic|;W$nFUVy?Zfs@6FfZxW#(t%`}HG2jO*5NQt
z=29ImX2JbOJLkwyq<cq4$Jf60wTB;m7&yI^pWiu{l_6qzL^QTRFV)9F(PxpxF{3(<
z1?TbLi_{$?7^JC@U#$QL23cxi2~cIle<G%S-iQO`)m1$8)KhhJb?2RT-lk2Py1Tni
zW98>}4qA?;8(F)K+BzQm9hUd8>UnNIM#d|PEQW)5<Cs65jy~?bll#9xX&Ilnjj}v$
zSc#d(!V|Qp51``Ujst2-c<ZgV-g@h;z=aoH_}R~X_K=mIgTaxzCbLMR?1;L3!8j{7
zu;41@&)}{P0??%8X0piQ1fkzgctwiCDk@^>LzH}3i9L)Ed*P>WZA;6b85wSA!b5ur
zUw{2|4paGAma6}Lj`KR7k(Z3^6^SXmF)GCQF0GD2AFYyrj?pL$pbLlA2g}g-mQ~t;
zHZ97b#b$`=ETBRyPSdL6&tts=#CNrJE7mxQEiAsfTdzz_9qwxqFBB2wc_g|t16`2v
zBy=30$jTQqm+Om&yGpX`8NN0Ry4l+ZP~3tqLrq2I8?}PaN5P@j&`A)IOyMX&!O(Xn
zX%_sw8B;X6TlcLMJo3?4*`^*d)mliPDWT%(i}a&uYg9+yd_mUqphdInJ=$Uy>7y=F
z^Z8>{fIcN4(l9K2I7KwS)dGU-2PWu}C{FD=aq9G=n4x|-W~x!)ASRt~T_j|OMH!V8
z{e576*wedzu$i}yV&N?*Ho02lGbEF276UAQ2N(Q0-K0%t9fWl*1bPZVqJJ|8Q-LjX
z{X*STeiwE*-b!`xk&H~8PBj`ky4VN*3~Sb`S+izf4veT&{jp&12No<?z%AEKc5@&^
ztLQI7N8dWM7c169ySRrayeN)ZgXC&`uZ+~rj0zt?-P<`sCY5aF=?mMd4VKxWa=QY$
zMoa+9?!)(0#vHMb@Ct)vh6w4&T=4sW|EzCjegTs+;`<_C<H0@Dhp5Tn>(DBS?i7#L
zEPV#E(5(%YRhzR>%1$Jr1Vn=5zO?%Ypa3R`W<2Au##9tw)TBwr9?F*%(4xIyCHlhm
zKdFvxSM_D^YZq-$Cq!EHQ7d}Rc(oI;TBX2Pdaz657gZs=&cqP^Mn-RIgVm!AmZ3+5
zi<tCE2Xw}o1&>9%Oj@2zNVPGyevTeLqlJ-fkF#5xT)iCrDB2#<Sx$g`NXodp4oEHn
z2I}fefd7QQ^cyb#@;h`VT%f(fG5u|c^c-t68DsE1D1ZeE7Mw6IVHT$n^?lU$vBXQY
zTyOLr3Ng2csXnYBBGLwBk;O?y|C^{kv}>Y%10FY4ncNFO_#ag2@o;e>sQ)aE3&A8c
zUX~q3gKcgRYh=t+7FnDs_+bKcz!>-|Ogq~7M@5|p@KGy4c;C@ZJ~B>>!O}aPERG4?
z3GQwI4zaufnj-+?PLOvdi!6qP55Sx7Q<$l#fa5@|$<rpcTiIs95LRcn7}=K4fCRQU
z--WwVH`c3TUvu?NJ~~9;Btxk!sBLIeA8*jT*GW%CB&)oiUZy04@OUd70U`U}qpLZ@
z^wPH%OI%!uiJ2>?5rcR_Xk6Jb4+v_v9Wuz<{ah)VeW4eCBrMM5<sRBJ`%@AJgS(qC
zF9nk#O#(V1iY92c&?ndpBdU%jtcqr_MzIRYCKOT>VTSHJ`biC?CIjNDBW7m)c1Edz
zu;#FKGfEOqT?7*1trx**74)NlBO#uJn{uaka8bgI;@9G)88FgU=IFsAneu#cw~A&L
zUU@W3ryK+^3iYE2=)sziBeF*i=0<(XL5h=teKa+@=ptbgJrF0^BA6$;8SjylVO$B<
z9Ef5U&&mc{<6;syarKJpauDrOp~`N%V%_`i*q$Q3&eO>eJz=Xu%(1+H>5inkp*K!U
z48M*r&l$9t{Hv8sCOb_Q>mu>Tfsq-k2!J!rH^pdf*P#|;w9rpRDT9P2E@N2Q6;X~=
z8>~NRgJnb2Nb%@F;A5;!5ScGRRE)7)DqJFupjFRUD>jQVokpR!(l~u`EFPdFF7DFM
z)l>qEdx;wPmzHfW00-e`a3gG%E-{!|$m$-J77vW~QtiaC%eMi%snd@#NY4(PAD^>V
zF{Fu12a!Ur&Vw4SQ*+`bdxTb$bqJFy`~(2i+5xhOf`s@(Au)%Tu2H;pF*1|5Owr6^
zlv@mg?X<om+$Vk`?h{X{@_EKRKiI;j3n#h8;T5AZ7H6kO1#?>{)c%lW&9ZB;ES}Jg
znus3Tev0h6h=m-h9@^c))3A?8JH;#DC9^joQ|Nk?qqK<lR59dsBrLy3r2E#{XHINO
zPof0Y-8zeEiXH`nPq}~e3*A6~SG%ksap50sOGuZ-c3nGZ3H$&qgkKue!4jA-;+UKh
zvn3qiqVb)HKA5V)3<mJ=?UXsVYZO^zarWXlcqo;9SvN9;28ofau(B8d-h%6g6pq>f
zbBdXF*tGd9vN&t73;Ls94h+c~KW?0u6;>9*MF6&l`fh{h-=M5G#>xU&WO1hA64;gs
z!}`oo&K(A_2J7^|ErRhf@QEUtW7K6muUVYUaKn4x$`cV4A#n&jtYbuNZS7C4{q#8U
z<AmdaF-;PeO)CzYaE^GX9wh{_43a&9HMZi+;~WHRDusi93V|k%u~xCGb>OiGM$nF1
z66YDs444>(AW7TVy<b$a&!eV9_yGg7G>8foKE{t0wdzewLvY^~EOt$Ej$&3Fn|87L
zDy}G@cs)OsN4N2ojjjt*%_%yIrAKqU2X)wYn8~q2&|(9~xJN5?`Dm=z?aGa%?jlB{
zm2|Nm3wDoSg-wjG@+M)`l(d0evdm+-&Mt22AY1#ya&s&n@El%ODM?ITScZGViQg$c
zYr$&OZXAgOk)Y6HOA<f8y1}Lei&b+bA=-K{a#KGyLDv;+#1j+uSk5Ib@!XyG<dxX*
z%FcJ-l|wy7Z&dh8T>7RQc-^AHa+qKf&kQ!bjH*DDZ<gO|Vnnr5$gKtR<GRFCGNZDK
zhM|udBu(KZ%Xl;)+I!i*gO-0<{mpEo`&kA*x#JmEGNjnT0eX7qy7rKt$>I#auZaNf
z#aW!4JEsHLE#c`wke*%a?$fX62#Hr|7+DeMEKV-oi}QLMS!;E=kTqDR2bEMj&aXn+
zVEv74*DxXPm}=-`ae7dUy-L#*S)58_4c6&F4qjk0TD}DGmC*bELhx-^1*^1Tdlp%|
z(=eiyt&hqg#@Na82ni1ZBcZ~rlcs3euvi2wXOavHHqoXTdcdLS>Qe1<a8o2%J(HN^
z{47{Z!3f&%XpY$>DOHms>a%XYNPBYN_K5IUKjVvq@V_0KU7LRSyF*F$UJe59=bHZi
zhv<x9^k8hF>jIk3<pPTc&+4S$O}gFmgz4A~Gnw2gj9P*|t4Q6k5qAi-+r*Gp0_g5a
zYUhC4uIOq-IlD1)M2A?ggh^+$n3zRefPhCw-q`V41m#>nh5T|qQ=~V*i(58h^d^LB
z!9k8>udO&_1i1sBQ{-cr81fzsk`a-fWx<vwGGLM<a)rE;1%kSOwNrDBIl6DhVV6E1
z4_@u_QQ~9<ad}}ml)1zJOA<HLG%=E*2kdyA;$#bs3N8~zUE0rJv5EdMG2D9A_h}6$
zLuG>DrPL}`lukVIrXqAF#4aJJZz@Slew&HmB-c5zSoaYbEH4Ef3Ve*p5f9S9NQ#R#
zNf;vb-yj|lZU}>fHX@D;zf6Xg(5pv`1HT*u34-!5XSguzxaDuz@LJWE8#0x`jw>l<
zeuhI0tbQ>jObo40r(nTq*XyPrJAq3%|89QpB^GX^v6h~R*yoX3&%tw<Wr_<6QZ|mt
zaS$eYB`%XhL|MatMRc!?9#agUBYSO<#FOH)7Oe6fE!d<XuwavSVURS1FW7?BrSlzp
zS|-j#uKZ~qWzv3m@JqG31X0;Eh#TTrWH{99xV6E`aR`HDkdT#?5=3=}*qy?un(=Be
zNF=q(EH^U&Nx`E%p;m3btomq-UO8r5<Vwe124eKeq*^;J`7A8hat8{k9s)ysof%Hk
zm`bO~go7M8P2Bo9#t9k91*t4}f`}YoR;+ftN;nQ=w}ht+Ufheh`Ma#Br3ctS!iy~z
z`z~~DVG5qisQ6*y0Ksk*1(8J-XBn~v>$JgxdpeVra2qg>-1m@QjdvV5O?1?<X`f1@
zJu%op+xK`mYqPRAdoiNrSr#LN2X{3UAE9UifdU+TB!OBE&SdoDk=8>x1g>Iy4Q1}E
zlPim}79%oP7Hl@07LYsDB#F2pbQCu<ludMt0=ic08FWufQ*?GgPcXn?rD4UX845Ks
zP0WO1)6m;#)2jHLgy!!vWV=ks3TF`ZfK`z}mW-ob%{W}hKV1lM=)(j)M4nCX3=TXN
zMLQ%UQ{bXVHWS3<g&D+kB?O98T(Sy$w_vfTrqxgkm*T%Hm|nqBS+M44mffitnIwI(
z4-E{>?>H>tVOcUws?(`>I;(tARvbFL!K1mhgggq?T=6EbV6%xaV8J5Cj6tGMl`Y4i
zcjXDaR?Tn_0g^(sU_*7F(;=u{J03k6Et2rE;c=;ZbW5(yf<1#glE}CzKt5K$t;1+^
zF?FoEw+#|{7+5v8Yq5yCaN%(1(U>IW(ceE@WE!mReCIoExBGLS``pie{_`W3st4@2
z9e7NVaY6LP32R4;%8fAfRJUNwRpUO>q1LVR^~u=7dMMFjR~b9IW<4YHN*9KfSHp(W
znmJF>&?W<eWKtOd6LnJ=3_TgrNqI_?3GWf9+#$$Fl~7b}1~Rf(7-Co@;)EkY#4y7>
zk^vhIpRiC??75jsLe-hw?2>P_1PQA$yFQy5it46@m;rSc2FavoI&e#~Vvvxsd9EQ2
zkR!l?)voiL^qV--zvj_|bb^SfdizLE%7)FV#<sF<He7OmS#j9JU6>g5!{=iSff3iK
zIT@Y=Ie7sa4lSmrN><o#N;_*MAq#QXG{xR;u-qrmOZeDhk3IF&Qza!OhG9JR*kjpJ
z^;u+*#Tf-<uzvc}pFa8IlRlsC!V53#FIC_F_xta^|KW!pZfR-x+Sk5zs4vMPi!8D@
z6;L}Ym&?`H*Oy2nEEbE!V#$`O&mxN~&M2rA)_wQg_wkQ^-05_F<ttz5>+3sYsruU5
z+WwQ*uU~(tFU%r~EV4Kfr0E#0&WtixvuDr#``^<tPlqg3KWMN3H$`45Y}h0%=fEr7
zB?bw}w%T#YJRTE6>i(D*G0g}TGetHX+Dq6=ljJoEnRtXaS~vOH^OBLlkOAe~JQ^&L
zV>U?ksR9m`jDGfN4_cp&6m{T`*V(D*YBzZ@&O3+ljM4^mY<iv-zrgK41F!DmAA^U+
z^4@aMpBdr%HvQl-0`x7|hcLU+lVHX(U1sV@Svkj0{h1LI?mG66z@*M7vSD?o&X4s8
zYCI#--NZ1&(IHXb>PcC!dbAtbp&wkvb4(CEg10Wje(6>0WKWNf;)>BDb6g6UnkSqX
zUP|3!)+C8Y;@pbUdH5)K3l_V&DNDwqiD!DB)X}8&X$*n7UoQnR4Jbxf`sHH;`)KW>
zC7&ve7sC!~kQT94^mrC$2r8*qz=w`@Ru);DHq4qeYq$%mK^*IAEsN8OFws}p{Bxc>
z+KH4<JX9Aji!9zA$QrD(08O-fnTP%4T+F#Z1N+-m)z>Vtc&A|4Tf!_(J#z4TlTWtL
z`2&6jznjgBa!TIAoTGg%i!9!57?HuUVz=S4Vb?L@wxkM&wPa+6NLt;*kQCrh&N8EH
zgjGk7+wn-+&(QfaR%|jt-Oy@T1~C~2Z_#&QlE?_^l`t{or6!4ogtDFmY>LjeWMn^N
z)N3HKe`5VNwS_&vYMy>o==?2$J1KFJ2X6oeDUk)Mi%`WIl5EWrNz@Y@yT-fH&HV}6
z8<F$?$YFGzP`m<5M(67^DKjGO!o<vY%9>*DFTs%RUI!i-*loda#KnG6K4mA(R#BVY
zU_w$9*3RcZAsYsp77SSq!^Dujf-sg%`bo8_oGSZN-X)F_)1m({RacvR?2HNm+X8Zc
z6)`GLq-0I@G!;711*^?3_L*bT{Ip|luJymuPpUIHV^=&)y2l9wd1J8Y#*u%Bte`<y
zwyJxyPT^X4*sF7@3_S;%sgwB+VWN_QpF~oAILNpl`Gd_2wI~NUd5q5;hA-<77^%Us
zVz=VZ8CZrzc_mCO8mFYA`m+&9!!!?XCV-3!w&KVzSn{_roPZeyD+4y%(r;xDPwIS{
zBOpOvh<$@i8%~$-E?Kc>B*esolGIQniS%2;=Xe3=w^knkFBHIsp_-=$Pi{n|W;t~!
z={zldRO$FlrReMkYGY~TUkk;|&wxQfJ~)%a5VNb0k=9eh6ls#|IJ7l9QrZe2JJ7)a
zJ3kq0#`&g{56h>cSRAzuk$;lt(w@!)u7e_L%Ey$9Y^{WLs73U~d=IsK!d5!SmlL6a
zB@Xd$svL}RZc2Z6Y^o+@KLSnimt&@?36~@iX4PQRo{<Kn5k-`x0v~hH2Fr2y0!2Uv
z+xmoHRx9;MvEZ0R!Yd|VfX{@5J#teZUrSDMtXj$II4~lEmBmRyACcWOl~Xc}No(2g
z8PZ<s^!qD|k)a1(g1<7b`klz(Ljz7NYG0>iHAE=0GSNo0wzwO4YcntRzjwwn4E_IT
z>fN?0m7kkSZs`dt$$W~CHCSgB_R?HP&IYysh2$;a2Ol|OCeqsp&%^KGI=C1@&<jsM
zE{rDT&rDn&5n>ooj4t9;9Ehi)^STMV7<#K%B<5DVM)PX`^-a`_X4YtmY;>5cic)V<
zz{^nr`Q*L8%U5#QY%c3yZ7ci@fElUvXaAaZ4a|T7sE5a(2Bts(yaZ!l8*7|QYp0`_
zqINod#^dw3zKYQ$lmKHHn@cWGss&@u9ApjF8H9y=vHw3GV*Wwe$|?Cd5A5RCF|pCe
zVz`ij$N>gc7r%$^K^Hv3Kt!ewHWPi3mO@;)IC5~5k=Vcnz>hzdTpvE5kFfVhgwX(c
zpq0L7iA}&(fUOiqfWTm&>A6gv>Fy|Y0CXA@T4555Cf|p@f7kf>z&3B56X3bX)YptE
z;3K@hgyIkl1{A;~h(L&eJzM{3>tg^k!(6xpLhv*A8N3(9fRDLXa1AMZm=DIVhUo2~
z2iVJAU;}T=WnLb6<y3HnVMGSYip`EQPBh4V@#m;Tu(Kv{O|vNG){4!mPBtw1<P3e2
zQTl>M5tSTPlrG7#M{p*@7CJ^WDNgRx`Z7*j9zg{ph{wfl+@sgtQ54-8ClDabeOqza
zRqbngToZF)9nIfQA%y7t1Q!86W8EcGy$bJUQj+~P@@z~RJh_9)xR@{@O;@+EyO-^Q
z%>c1JaTf)KB^FEf&%T6dRjN3qp}96Kc!HJ+vGi#U%hW1MWA!~&UhKBYQV6f90u52@
zl8N<dQl<f;Q~6T5tc)6LCJ7+|n2Tu|Tp%^U?_qz69IJsU3l0O|axlPvuGCHC!Yo|p
zGOLGVfTSC17ab<A0teU{B+@}>6J6I(btNAjLje%h9F`rLp$*=EOW}ODLQt94+VYC5
zgR=L%RhLtc2T<MrhjUOR&kNz=bnBcRoUir&Hw2*o&4VHHIWT63!y*plkbwOVhI6@m
zP}VDll90}<$f49pP60VUHRU0?Co!symo~5~hiSP)Tj=uQ$b)hSlAjdAuY*unOxQ3j
zasUPBH3Vbj#_yvb!&5O_jL2X)a69p|uyY$fAN*qu;R3}#tHBr16rh((6{QU7HT^{Z
zoTr=Tqn7Vo+TV6SmvIlF%ti-n70oh)kRbGFb2bjfie@KD2`QX<#id{|M9135E!AXp
zyZU9O5a_ot{bgq^1wW4^;3qtN2^4dcLv&Id=gF!tcHDC2Z^haoW^V!v(Fd<Z6hB)?
zqD*y|kyQ4}q8)k4Jz@}PM{vd@k~}Y(%tv8dHFhx$!Ur(ZE;71M)~*6Y%(kFJ(ISkG
zi^y0vul$*xm5^IW;Y7;D)8c_3RKUmKa~3WVV?1Wkk(yFT2xHqJZpumNIaKB`kU3(I
z3~>M`O)WIu2N4o~<Y(6~As=@}Qp;=X^ujcl4L~(9x45lyT@RE<HfjQD`BH&Wbu2{=
znc~Ku$b6Yr{UYy!deMxd)U}<AnaG5}rUh%OdT^BKN>qEP1*)Km@m;jE@#jj|3juf)
zUIJhWIag8@A@OITPRLD-_ObjmUyQdVYzM>;8qb1JOo)ywgm6R#D~sbs45Nj(8*2%U
zW_Ud{Twez}gc(@1?}2A%xJuNX$|6c4PO_UwABls6dTA>m_fjToW&dw@t)2t7LoIBB
z(O^Ha?{Ng!MMr`t5F!Tbp?e}dGAiV4fC7Bu8EvI$aDg_5b7R<A2sP1@gQJ^7fW9u0
zqj63ihKS(tBWtiu5B3mkCIMWDt$(#J1)c-o6JnWp4s3!01S@nF?iq-PSQfniVYrd>
z&Mgq7u8s8_0QCWuH357au`<_K+`oeZAPhSJ_yqt{XfGm^i!%quREqK_&Ziid!I&wG
znn@YdO-GBmo@hMF&T}YogBOPv2XH>6Vd_o25q`U(jGQuZfG|c9dkkYEy^VzbLF`8G
zK`%#)l^6lC2J18+hS5x71e$eu001BWNkl<Z*8ndQ`WajZ#ZUt);0hRwy79nFN}gu#
z2lTrAOuzwxKnp!STsb)VHw)(j@GSf*JPdchUf2v>bu1XiE$0AK`}o1yfi2;WfJ1Ew
zW6+)2+WaB)_iq3Yp*${L=5Dd4bK|JMeLg1<ZtOQOxr=ZGxfxr+cPyM(r{UX8`2Elh
zLGZ(7coU-%vj7WFe5AnOVIXU;hJjxKPy!y<ILs&*iD-)Ze;4jD9CxH%%N4K?8ic{>
z$41&ez!`^^*;`D`L`p)$US{t#5QbLp!RKK#+zRWV2xh~LmnaVFsPKb7MM!tS+M}F)
zO5w&{p{vGb;lw(boMM=e;;^Q|M6BDO38M~k(NO|}j};>_SP7DG;-!o);@-hOcGIwn
zC4<d#VTSlfP)8moU^No93?q=ChxLkbrSK1$Wli7=L82%@tt6E%{aoc-e3{^GO<h8!
zn0%+$g*DUk9sZ}0*m@Gc4KNc-SPe5Fl;ZyT-6vsa!|Zp71u<<z`e<^e?t3fUoy1G=
z*@<@J?1lyadZ7!Z!*+728S6@&Uqa0++5&hgDFR&hWUTN$-UyMVuVaK{ICWANYWGs+
z7BkI7nIVYZ0QJ56V6bV|LGz_>G4wzXx*-4?;V-n$p#f-0?Pmw^SZX`C2Y|PJdy$m!
zYG!?0P{XB!oZ>n^WydXs_sk@dh4U%SF6k-gH*JIHmM%j$VDPf|Y?6vTk$w~>ig2oW
zIXIn_gUu*Bi^5#pyhb-Y)3M0GU8wy{x1)kIL)v-%yVQ;sfMgg$IMXunGm_dP6s9;S
z+@=Lgnl-@t!+`5Rrf-=#bB>(@;ame~=eM$%&Nu_0o9%`$Jr{9Tu9&bP+GU2rC0ubi
z{_Sj=N&PJ>Fo-3I?mv>c*3mH{gLM|+HQF6S&Z8s`*Fi!a>@Gr4>_w^2@i0->;d`T(
z-an)U$#|@P!vN;N9k3rF@T_v&m{E*<m_Q!*h<J(g5YNT&G#v}IePvV}!MEoO4DJL8
z5F|i=1Q>!2?ivyZ5Fog_Cj^HK?(S~EA-KB-cS&%U4DP<2_dn;I{qXkee%boaGt;NL
z=62P!s=oJki_#R7;GdxpRO=m_>w%c7?~1hti&exQ&su8XC47{-dawiERDE!mFD8>W
za+k94mUY!HQAQ!fQ{K0}$wzD3x#?b|E_W2AOntU)@YkE|QVUo$i@$bhDoL+<4Q<<4
zOe!)AmcV~(*KT1&N3<@)>hYm4XZE%d2Vt4g@6TYV=2(>zQOM7-(eg|5Y7qTANo1zI
zeAgr?DYuHRGn55!&fO})8PWIR6-T22C#A1clL;spDGBDW+}E6sJH7MtZfVPmVNx0|
zNJeUGve%?Z-jfB*@EP!G;i&mNuSh)vbv|YZXOZi1ySmbfR(g*EF($`~FUP{~FpGa?
zVFtx`c$0{<MSLQzL$1;3axJ+QIL6dsaIl5s1}-mg-G?xuwpYaWm}{$4>Sb-vN8%YB
zM-!-(5WH%Puo0aos>OSPt2K~A{VBjqBbtRQ5#R;$dv?XgXJ3CBYvw_pdx+bUJTJAS
zHCXrTkqu5LUC+}912aUcgxD#EP?-OCVEqF>A#$le3$Dd)Tl=_&0=2$6JK<R`ufQ<R
z-q3_ney|{u5-?Be?==t~@hf@$4ZPEqNb$$Q%{LI4{wyNy{`-2R?@z(LjH(fRMuZQs
zy+BK-3ZZ&z?*h72A#>LyD~Y>;xp00@5OxA(V(g1&65IW}$8I+(o%wjGa&?IxQAGrj
zMFzGRXgj=OZ0Nm}$uoX9P&MjWcSwdvoO_1<6>5OLrHnCQ>pm9~U~M+X@J?JmW2!H>
zg$7PQE8iW*Xu+Eqde#0o2%M<K=nXulk>e1r$UH>kzkDb2Qq?#6IQ_hpyCWX?d~bdo
zqV~Giu2*T0#o<Ndwg-PaCNWhAKL%oF6?3^I+*P%qiKvV!5xmRuB5(KwP#T@osPpwu
zTWSv>Ex{XUJGiGPc6IzU!)17Yb0;Gxh|Y_+DNBLNhQIfXX#VVbX~~#)Q;R(|v&=9P
z=h!F(yY7$L<ujkU+cl>4q4)VNr9wU3>yw1^FAQ0&^Tq}{JM)pXqM4*Ku@+E7C<8oB
z;>g5jOx_RL2d>917`kn?XP-h5(R;I!{3T1L`Ad_j8DSIV>V9*msSas!YZk$Tc)@J)
z_)vmmQelsqQVQ#oo`*MgBg0H@9Q;S89ZZacWQcd{8nDSSSLG2gjfC}1PY1Z$j`@Vo
zpeaUV>TzxT`Z9=BPC04{0%7kpakA2pRM^VbF(Il*vy=7SyQ?FS0OgC(uozbC=Pb!v
zIfCR$O{kgfj6$g{txrXy;>4Q<=X1QlMm_8x=FZTQ_X^>%B2FS+LH)tV3;|r93I5Q0
zOuGe(NS&Ux5b^isbRta#?$WEzh6PW)VP5IkjmBX2OcIdo9_JG!VNnhllG2DcjM7fW
zq@MVhwDbE>eHro?%!h_<tI$~a#Q{Xj<?euE>n`JvEZ?)MQ91CQ5>2;gmnavOLVjhC
z^P#3r`3VhIp^H1-2QPbi-|ZWNO8uzxyAG0x8l2=|AUfU6BBuDl+Icsw6rMt~xfd&t
zXM`v2;@;~UY7|^@PB+-2!h`YI&$$Q4d4KEV5Z3zUK(1(t-FP7Rr0&Q<Iar}N^;6`*
z{0~H8q+r1~=3wC(5e==L_iB`96k4xgj|RrW)8(iTe+cc$#)_9Q!;zmKEC}q4?n6i@
z8U1$#;=Hv-j6xK_%#a2DtBUJFjBAfi)OZ!t#3Ty|*3flRs-9DAK?iSCg*M}3k4X`^
zLR?=#IOG^&llOqi8wG*I$?vH+rw%VKuK4)IXH*_a_EYwchCe-csaC#6Sh9@va|MiZ
zdH1!Kfu1Zd=x<on^G;&p*xy{KHHzr#Es+PR?>qg~1;>jP{8?fz+Ku;<qyQCORKWN#
zhGh!vZFDs?`Hg=?347immRs*a+1lQ^m)v50dIF2I=?|CBfEKdTB{p^xxa~1DDJ?m@
z42DV;!^0NXKdzMQGdMP**Q2R}8?rzRVNIM3=rqKY!4(#T2%o|vcx1T#bnY4#w8cL%
zaD6+&9B-A6ZoKqaK@()cGzado`XUPQ0{qy3=c*p<iOjIFI2m6S@T~NwPP6TN#af3q
za;|hZhf&FG?DZPQs7^;CuIxK8asH;ruGOa=Fz5+M5cS-@0*eri+JU=>+-dhPtB1s1
z(ND|Jd%fu_e%XQA9hfDsZYPn5$^>=!yT;mGN%#V3oUG^|D#;H5HuO2+789>IeE8(6
z30m3MWSqairqc9Y>Fdz-9mE<wqVqu~3k>nZnV*4YVvjX5!pEqk+hs-C()7zTsu@H?
zL|9o5E*I?P>Wk4<7mEKC^4S`UZ@*hQ@=3BxruCS_gYm5jhK6`GWDESc>c$bW*T(^^
zf}$|FKp6PL<W+Z1K9n((%7h4ChXTrzTS98$hJ8t7cxIUQ^A)^s0B?-u5qFW_=j+C+
zAoP#gk*o@W<e_bZ9Ieku-Zqhnk*ms?0l<#_{{9#k7)|=|ad9rK%LoLbsj0~q4R2p-
zrv2&h&Tac@+5ITp&-IGFKy2Nyyw6`{II9p<j8mHaw6cMuu5H%|NyKD5-_bE~wGAcp
zd?i`k`&|}<7ac4LLqDLQ&(;yFj}f^FZKfojXJ-L#VRDKqg;V@ZN~x9j?j$Q8z{D=`
zYC_h)v*)jU@RG5ZcNcWDT&y(%^a-wGBn!`a+s(qW7zKj1v1Vi*mhIF92S5Y_a8)C%
z5cz{AyKgupcwX-g;I=ldeN2j<llp}~GNAh8p6ozOTKs}{SN_ubFUfX-RR`;rd@yP9
zs}#WwGE;;Wl3^pl{y4pkaS>r=#P|#GF_^LlaY<Xb%fJqc55k>zf68m=Wvnf>zxval
z_`rLr_q}&dXZo)&O4pk#DKN|}x%`-%$a_IWvl14d1N(w@^k*Vev1vS-vYX#O0-FEF
z%)T*UFHK3-^hn9S?~;k}P0J!72VwKJDt3#*Vsn_J={^mW?52)M%dl21n7kciAo2*c
zPFJWJaKt-G5l*7E4MU>0ZVOnWb)#QDBIX^K=Fbb9hVHo?hHwg{-!(lE*KWDa^>@yd
zX$97GV5nKy^O8%TULGt27D#W>BP(g6cMXtRn{3Cn>>lvEz|uZ`S0N{hGLCx!B3x}7
z`>fE8+JNn0BFOk1t&KelD3tCHb7McHb-fyB?DeX;{dCCUCLD2FM#y+FSA0LIIq{ri
zjO7h|XP@n5ZdP`}jZI$Ho+(d|dIuIE{t!eNwA<5|o&%mi;BpP@_DU1g%7{2!uS8rT
zI8@b7=$a`5d{DiPGQV~>L9(&h{n9WikQ2T=w-1ZfAzo%)oUhMWXNr}?5jmYl<K~z;
zX_tAAd}d3(k<YXZRlebFCmg3ssoWNi4@|x+?%g6c<>MfIp|IU8;V-`i<1+lF;=O9D
zARA;CsXJUdSlhP(3%|zs*mKH>6TI7rn^uqoTabI>;G?RHw@)OkW~sDiSWFoRW$#Qp
zQqQiKlg?F<VelET%OoXUwls}>3C<Z-q`F?}bu|qj=%BqDaPj`lX@c~0@tP`1$%8y`
z|LqOQsvzmm*^Odd&e2bpow{*-KTf}c({=uUQD4}EA_GI64Ot9NGj~lbD8c*}N6toy
zrfUvr1C9&0!5VuO8zlg#z@S=&X*87vo%5Gm_jc~Q2T}JcBk+vr-gJ@5FOEsmPL4=Y
zc+qNN8N~7_*-{o?GB#{4+>^9olj&oyOmmLn3pe|A0lGes#20?Ge>lbPpJ)gVfS&L%
zL>_~TxS7iOg8AoH`M}&LIgfNjxT6gf$bY&BOD8qYgT*52O1~0?LG(ctluxD2SOky>
z!eBzwSB4*HLA)*V_R{=%uhqoLd<oIkh(_}z<n{Y5#Ir_x4~0WQL+e|FbB=IaUQ&Zj
z!zTI;MbmvoLGVwa-9iNu0=VqA;!w;*1=E~&;*Rb{KHb5$$h<Y={z4S$CSusfEbS>e
zW0kA+$F>cW*U@Kg%O%c)?PEQ9H{aHXgsVf(#>>n5xlKWOcnxP07_q2pV7*CXGh)u>
zO{wmyYr0s<<om_g%v`Y*UP*T6%3NITVIkek*8P&}c?(iQ4Ur@I>FPTg$5m2_ug9G*
zFQy=eFTc5+_A1TK8}is$G{+ESNPq7mQK6a`v<O6+jL^>tEsweE$)Rh@D<8UmqRVnu
zJA@n875f3s@uum?@d9kxC;1i1C*d%banC0rYD40sQu(i3%JXl1nXfColSjg2U_6Jk
zn3V`O1KVkyx8H&#5|jFtet|k7e(>g7B0-!qrKKHToHbJO4lWDMXs<p4u~E1<qf<8~
z@%0n%W)2IEwo3h|c&4RzOks15+LYTs^t^?WkrC}B!7Qv)MuvtF>&bz$f(Umb`*t8)
zezeclftWXxS>$CUW`ukLQWjfNbMk8{3PK~LRAZLSn2?^z4%IA@a4K^FIIJTGEQa5%
z-$lrhAh{v$O(nCFT7MLp@x9*$w7>SHOV#&}+haA1SjP$*qZz4Rl?F8k%nw3_7rbBo
z0H;C6>cp5`ZgnSR)ubOft+AK_lC`w3y=<Sc2StcEau}vB8X`{|kV#pJw(1F{uIz6^
z#O`C(AXXDOQWP-T_X-MaXQKk#A2<cBlWEX%KG^$gowyilM1+J)a+Ps+j*+&fI<;7$
z<JxabQTdYzqbvqljtyfKj(=XUBH*CVFbw>wmt0!|@+zXrT4TvHl}Boi!nvNX00jc}
zw*{@KD{=(}Co>FKUt4Tz?MQz=8S$Bd^YIN`9UbTJo<A&l<sBWyq9mHYIMyn?$Pa{p
z^IgCbc|`gReLaA=au$p4^!N}VVQ;n%H<7vz690sKRaZP9YsOh2;0D6K_HQD3RBb0t
zxeo{`xmI#wqxM@3)Wf>YKF(jf&#D-NmH+A&2L<hTCP@kJ1@?$&>s;HDVZ{8Z2?j;m
z&=QCA)dc%7%?9fF(Vs;tB5G;ro%ndNf4@Urujh%=W0UQdu%5(?_xv@;t!=9Hgl4~>
zr;Qx3afe)Rr>)mfHezqXZ#J41Naa@Od3Q-C6R}unBoo>>wsU&tWQ?TdGjVJ2UVxDB
zdKb+}0Zo)!plC{M9wn`-Pw}-&j*W+h+S%WY>w(f(f_D%>&S)LW{vS(FI{9y+upk;6
z&8tS7Set}b#lLrI=~hhc^R3wKJk>n>MT?cfcT#y*-tl&l3pkb1paYot9yeQw?pKFG
z05Deh+}*T_qN=LuhfHA|JTE8|I?I`ukdU!zR)yh1ld=O*K+l`=FDrY*vAOIvM8Og@
zW!On3d@UXJyz=YLXu=o&t^`)Cs_JTiZy_zd`#>5$fbn^;n;Tgd$4)d{QYW<{6RNh*
zay2o1Xr}91qgwF?F}tcRsx@SVgBwW4#x{mM1cK@`O}Br467y|9A*2ORam~ik1<aId
z<>loA@u-(`WvmS47o;sM_bZM1-xu=?@4XZrers!a#d+k(ReQI!wS_~*LBAQag+5sa
z=JTNm`c6at86x%>m5{}UhDD+TV%6n`vDEC0W`Djrfd-_LN6S$C2D$>0xb;4Mj7m*y
zHs}rjfcynsL;x5C4Gj$dP(fffyN63j9o3lzl$kl!gpZs18<1q33bS)+)HcvWHS)MQ
z-2#wo&y(1VaH#}$?Rj<G4{DFYkCvLR+U^c1_`YoRQ-9a~{5dH#Rp9&M{f{3%?5(Rx
zKUW&g6sv+9Hvs>O`SojzmFs9a86X1nc=79)Com&TTXVYZW%>CG9z)(lBqaYz#JQ=|
z$JLwMTk3xN*D3%?DePL<S^u?z1-o{e0qKwPvl-l02}4^UaVG6vRdy?Fy`iLmc7hdl
ziHV7f3=G@b+xs|F5f2>&RXWX1Qc_X`>{JY8<c=#%->lTsNPLcvc`T<3DFvK@a+2cW
zK51(sF5JFfl<3tO6YH})ZLxiC1pGmPZ~JjEF)^8$)NLkj`$|em)XOx1aPFTt6g)(v
zq%u7&6@_qn>)X%mUJrFP3;d3|S;hOe7k}x@Jv!iEz!eAHw$3&<Y%F`6ji77d`qe%G
zuhVPeT2XvvS|kE-fyJWT-u^cF?4LC)>0>X#{F9D0oll>7E)VPxXu|P8ny!lRf}pYH
ztIxtd2gp38!%3<oY6Dd*#se{IZP#4~P9gvn^hoL{V3Nr=O^5LzFPWK{s}jI1xBDfv
zwYAJ@r7pnewgbO{p5u`a<41g;MnOTz{Ql_b;n6}?#%?`dQKnTd{B(atP>(>f5~1bN
zI1330sj1-t0q^@^%e(c=<|Y449l-|ray^})$h-;}!AAB3PbX#_-u!B(FCHa7J$l#v
zOv%~V8TSVY!3LYdE2I7hoqAgxOUvSXiyS8DaSk}pZ;?s?RJVl}Km}Hw!Tk?8PR&bu
zj?ywRSU&hgn192lgl>WObU6iupFe*_lx~TBzGL08N!I7adzupdY!zrn)_Er<8Aj)_
z5)t&EuV^9}7AU;{vAR9q!y;jkm6c86`<hRKF0AeXA|oTaxVVU+&nqbC@2e9gFNg->
zcLh&+o>j2`GB&sMMG7DZC_`v9UtzFNZ`TgU?r<_EouT08PoS=fJtJTxA1U(KFk<Td
zc(aqPU<L^vONFt)VCqD@{#c|L!k+v-3CJf_`)@JS&HyBa-ONjX!N|z65lzoaYrE^i
zRv+kT`v_X>WO}?R3OH!{ByvYG4k3UO!$iy1BZ?0(c=NxBDl!I%7=D5`B-EU^iP3=h
zAKy((zOvzv#pkF0ENR6D;93E76M2D;fmlXBTD_6c^7Hb(9sKz=mLUYHv;G|u7v~5B
z;$QxqE3c|L9bn89^te6-ZrOd74}c{|D8(%<YBw)sq^BP^HIJq7aoR1n+~40PbDAY2
z!m4Fx#kj<Mq-qiX71nm#g=N=r(ua&Ec>HgnHZ$XS`P*!v&L&mlyO)$yFEGjNcZbc+
zE1v3!8NktbwZj{D@KXProE-T?s7{NE0|0NW{!?3{$~ZXRLcLr!JT!En)!jMD5M!O+
z=Ie`BUIQ+IRH-AVqDn+!_{z%4sMyc5GDQG&?B?dSGYTO7A1>CG*r~h&1w}@ReAx_Q
z3Lz#Yo|%~eSlnXnXZmA#adCG54q%BFfh>kwU>V?81nCt0N4``j$w;~YOi1Vfa4>If
zZm7TCS9<AA)x4AlDAjBC0_ckWuZEb6vunMW1@QSOo;{0=i{tXTzXpb)xY+If`sDWZ
z7GRY3>HOa|`ogPNfU2|XvXu}A%2P*1T6hi<0FQ6|^!PwQPX6Z28$kWfP7*^1F#j`^
z<c^H!T^v#R1P}u<*$Q-@3he)BifPb?0BZ65Owif63Mwipe*A!R{3GQt;p@m-gD$^T
z_p1y6=TQV<BAcGBuC5n=6{D`}npavnJY5y>g735wje2@^HiDdczsnyhKOwK$cBu*A
zNT^=h;&j#E;9yl%6`-$xyQa#tU@AjM6^6YbySuvunO=VoZ6_nJ6~WVAbfD2p5rE}2
zRfhQ8&Uf!`&WsEVPft&Qvv!smfO*uZF^^^$n#%uBQdY)YUdoVZsH!UQS_vyy%op`_
zNKWzoc(#PrAO$ozIXUI#RonG?Xi7dYDd|`;XFsBf>%-ky=99;QO+9ex3ESjgrTyuo
zpL%R~n5(%I&|l$HLi{k;6(HyA&85I<n@vE#ee#DKv-(f2W|njSEgcy22}Bi8MHXPF
zTi|(zQ(<F>D!}eKoUBVF4#h`D8#rlrD>|6J3b>B_xT30r@0Y+Kja@B&ZX?TQGDuWC
z<WtGGI~dPA0{sgd0o4GM>a92Mq8LqL-<VPq0Tk2HpIWOCXh~L9R$-wzkRT3dMbpH*
zGoVT-t9HXUqN)iP9A)KGAdu;De`W|#g#>fuE@z>viyME3tyFQ&O2Pdh^cPifSto*!
z`OO=K4=FvivKG1;xiXQ$0Qh+`W1*{w%|cySX{j|}1^`>$=5&x!WD-s(a5<SLPZcRM
z#Z`SIzw$8s_u|$`1n3!rhDSvNF#s+G$T(8HpOB#jc1-#ZRp%dc85Poazii4@4^0}#
zW0dva`(^@BQF_YC>lbd-)zxRGr<#LQBpMnTfH8qJ_q2K5;ZpFF0s!!UePLx|gVmP)
z!(-QN`ApPHs^@WXzBiShutmqr{4XISKcN=5Fj3b{E@2B$EzR0cm7za?7XlDz!?O_g
zS|v@PpvVOLT*T`h0Q0nMyD_c4TLWYc3k&NRA>sMR%m-Gi4xn5_9pDiCE1m*?t^@zq
zQNUZ%jHLvK0F9TI_`RGgKIG&~;=h=lrjr(s5x-sDVij%eMP(8n8uW;?w6ugO$ScRr
zH&7_m$@|y*#^OlEq7Z91HaZ+93WQMsMxvn@Fc;;DK%pUz0#z0bUC8Hvz(GL7mN<k1
z4GA*~L?wzM2v6iY2EK!-I&cd6$Yx~spNd71jbz5k8JFN8lS6RQ<O4XWDEFw9^lS3f
zZML~c?gs|RZ3d5-#Ovnh@VZ82OtRgy-!50QQ>IEExHH&(r}V`pozZ^01!`ZI229B6
z=CZf+wcjE_Gp3VLRl-x%lG89__3S`L7x#HvaGzo5@Kzg4h8<BcqmQ#M=b5i1Qci8&
zCy&SsnIs4O3bR<x`#XzNu6kqbHP`j@*)B|36pYFqzEje?GUDo+rk*5!KWf$T`-><_
z6DZ|G?K=6QRUHJvfTJtHnMCnlVubfsWkBe+Tj)gzY0#r2Ws&@_3ZKz@lL#?Sn`0w=
zN#W#JUFD!FZlsI^!HCg6_XPV)&)?C&oP?BY5vehd8~1y7dbX9?t`7>RD0w#q`S(Yr
z=95xM3cSLfA8$F|AhY**d(l;fz1>saWBVPBOz{NSSv2|O2PLzWup$Ss(Mi~DgelH>
zke#<F*|qFfZ(P(zO?ArNjy8w)kD7dv;25=nudC!S<+6LA#k>@a$)>s0K?*`@+OZEe
zgH`SE{q8&Cjz{kS17UY_F4hZosuFg^Y3|ScmBa7OPRIN#7c&QUqZRXh58+hLi3oUf
zZAvMybvGQfSSDRm=RQ}zp)!eAv<wr3JYsFqL(=9-cFblIU(Vk!$g^9P&0QVBHDO=B
zdhZbD&Uf5XZM2jh3i+m9yy6DoJ9<vMl9P;sGB@??4$_Lo;pdek9SO$j9r`MOcT6Ov
zjlnrR>2u{VVakppG5*r7F(7tKrP~7gPq{S%gvKvh3<@it4Cp@Bd@uitbN}D9``FaO
zI}4+lpfLlXw@p2yp>$+z%N&_R$hz^#f=$2e<X`$?G@P8a&EI-296!6{_})Zal{!$)
zkuB)8yhtS~wnyA|l60YX`RcbU<&3>f+hJ}gu%E?Ue8hod?!nGqb#gQ_$v{4qcy5KN
zceX2rZ~X&zA%Takp<~YFR&1|647qktU7cHS$rCzH{`G5VEVsKfGXCmR8HoJd7#Z39
z&r_Yk<C$aEv~Jmb)tND0XKAyiPjR#4FyiCMZ&u!O#U4EER-M!oS0}70o-@}*u^*PT
zS6$2U?o$X-ax+iJ6Co+86wHN4Dd->Iw4(THD3X=zm@<(+PVkCO{8xkU*|Ko)C!RNd
z72@i5Ze*hk44ytgIh1vijavSW5~OUTNR$-xVhK-|@Z|$MR^xftM+_^l2&NYUdQO?&
z3)!HzD4{gy>pOmZ|930;ABFONo)-2-vr7;NnN3bo?9+dItp7H^|F>KJe_y^<b?{mB
zFa<fVy!d-l>3cVKJ^9}fGUHrjd4j-}!<X0TX{n8_tzN*cktKB|^O743w6k7R8M=b@
z*5u+5wOsS<*I%mjY7<#H4YL(n-y>~O;<u`^;slXF8BLYyv7jKI<KNxof7$r<7pJfW
zh3^jz%7ss|*`BJIpICW1Bl5HCfVCo5Ak9!4@`nqa`?V`K<u+9mpl>fw6}(^lX@o)C
z2mhJT5Je_CaPF$VU8yZ4&*R}=_&!7_;gwthy%JHsCx<$Wqxv|CB?LsTasI%qgn`_c
zJO?QL&+7@+j?iRO(URtEA?xN@JG60JIDK&S;ojbPN0|kP>x0&ULHI%T3bURF<I+R^
z(Wzp%uin!Y=Z0|d-QM-wxp92=1GN<+=(>DQ>%v!m90sgu|Ga+)Y5ekZ>w9oG6<fY(
z@~!n{<6y~*OU-=1(Cd}bG+lONxZB9Y=GFpZ+m-;yOTp@skFmdz{)ui5jybg8@zoHI
zL2RKbb%YR?$uvAxbvdQDy8h6sOGWRf#<{d<V+HIY?5h1<!Fuvn<q)I(ecGpq00YN=
z%jrL&i&Z0S8)?ulCRf>aPiE?g>1P26Z8Rl^cX;oSk0mUU8G@8Hu)s`uBBbV<y+TL0
zwkyv%@-;MWkMBqA*w9R|{)?N<aX)l>uLxpv{4H(wQZv7ApJUx?wUv7^ei|u{Q@44i
zrIJ>O+F|n`;H034LIaG;>BZOZl&x>VE(f=yKYN`;gtmTpJv1zr5g~*GXmjZ_eHO{l
zeb7VKv2;OyLkG`|m%DufpDc=jfrYs%1^tr?0;pcb)jOnF(s&^r)77OV>&h6=*Z=VT
fc$UEb$mI9br)++!2UHU)AfU-fDM<bl*Z28fRK3#g

literal 0
HcmV?d00001

diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index 9a9ae40fc..96ccab213 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -167,17 +167,23 @@ using default values for everything else.
     import vcs
     import cdms2
 
-    # Open sample NetCDF data file
+    # Open sample NetCDF data file. Depending on the location of clt.nc
+    # a relative or absolute path might be needed.
     data = cdms2.open('clt.nc')
 
-    # Initialize vcs and then plot the variable
-    canvas = vcs.init()
+    # Initialize vcs with specific width and height
+    # and then plot the variable
+    canvas = vcs.init(geometry=(400, 400))
     clt = data['clt']
     canvas.plot(clt)
 
     # Close the canvas context
     canvas.close()
 
+The script should produce a plot as shown below:
+.. |clt| image:: static/clt.png
+   :width: 100%
+
 As mentioned earlier, vcs can use numpy array directly. The example below shows how to plot numpy array data.
 
 .. code-block:: python
-- 
GitLab


From cab7fdd08d9cb721e4e3c52316704cacb55089a5 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Thu, 19 May 2016 23:01:43 -0400
Subject: [PATCH 079/196] Added sphinx configuration and fixed style

---
 Packages/vcs/docs/conf.py        | 275 +++++++++++++++++++++++++++++++
 Packages/vcs/docs/user-guide.rst |  24 +--
 2 files changed, 287 insertions(+), 12 deletions(-)
 create mode 100755 Packages/vcs/docs/conf.py

diff --git a/Packages/vcs/docs/conf.py b/Packages/vcs/docs/conf.py
new file mode 100755
index 000000000..7147eae21
--- /dev/null
+++ b/Packages/vcs/docs/conf.py
@@ -0,0 +1,275 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Tangelo Web Framework documentation build configuration file, created by
+# sphinx-quickstart on Thu Apr 11 11:42:23 2013.
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys, os
+#import sphinx_bootstrap_theme
+
+# on_rtd is whether we are on readthedocs.org
+on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
+
+if not on_rtd:  # only import and set the theme if we're building docs locally
+    import sphinx_rtd_theme
+    html_theme = 'sphinx_rtd_theme'
+    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+
+# otherwise, readthedocs.org uses their theme by default, so no need to specify it
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath('..'))
+
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinx.ext.extlinks', 'sphinx.ext.doctest']
+
+# turn off doctests of autodoc included files (these are tested elsewhere)
+doctest_test_doctest_blocks = None
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = 'vcs'
+copyright = '2016, LLNL'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# These are set to None here, but this is overridden in CMakeLists.txt via -D
+# flags to set them explicitly using a variable defined there.
+#
+# The short X.Y version.
+version = '0.1'
+
+# The full version, including alpha/beta/rc tags.
+release = '0.1.0'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# Define an external link to refer to the base Tangelo installation - this is
+# the actual installation if the docs are built locally, or the default location
+# of localhost, port 80, for the documentation built on readthedocs.
+import os
+on_rtd = os.environ.get("READTHEDOCS", None) is not None
+extlinks = {"root": ("http://localhost:8080%s" if on_rtd else "%s", None)}
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+#html_theme = 'pyramid'
+#html_theme = 'bootstrap'
+#html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+#html_theme_options = {
+#   'bootswatch_theme': "readable"
+#}
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = "tangelo.ico"
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+html_domain_indices = False
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# Show "todo" notes.
+todo_include_todos = False
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'RomanescoDoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title, author, documentclass [howto/manual]).
+latex_documents = [
+  ('index', 'Romanesco.tex', 'Romanesco Documentation',
+   'Kitware, Inc.', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output --------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+    ('index', 'vcs', 'VCS Documentation',
+     ['LLNL'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output ------------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+  ('index', 'vcs', 'vcs Documentation',
+   'LLNL', 'vcs', 'Visualization library',
+   'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index 96ccab213..4c839c3d7 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -1,8 +1,8 @@
 User Guide
-**********
+==========
 
 Document Conventions
-====================
+--------------------
 
 This User Guide is written for end-users of vcs, rather than developers. If you
 have suggestions or questions about this documentation, feel free to contact us
@@ -14,7 +14,7 @@ vcs specific entities will be ``formatted like this``.
 .. _concepts:
 
 Installation
-============
+------------
 While there are many ways a user can install vcs, installation using conda is
 preferred for the end user. Currently, to install vcs, you need to install entire uvcdat
 pacakge. ::
@@ -25,14 +25,14 @@ It is assumed that conda is installed on user's system and is available on the s
 
 
 Concepts
-========
+--------
 
 The VCS module can accept data from the CDMS module or can use the numpy array. For use on
 how to use either of the mentioned modules, see their respective documentation. For examples on the
 direct use of these modules, see the VCS API Examples chapter and the examples located throughout this texts.
 
 VCS Model
----------
+^^^^^^^^^
 
 The VCS model is defined by a trio of named attribute sets, designated the “Primary Objects” (also known as “Primary Elements”).
 These include: the data, which specifies what is to be displayed and are obtained from the cdms2 or numpy array;
@@ -40,7 +40,7 @@ the graphics method, which specifies the display technique; and the picture temp
 each segment of the display.
 
 VCS Primary Objects (or Primary Elements)
------------------------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 A description of each primary object is warranted before showing their use and usefulness in VCS. See descriptions below.
 
@@ -63,14 +63,14 @@ A graphics method simply defines how data is to be displayed on the screen. Curr
 * **3dscalarobject** - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is “3d_scalar”.
 * **3dvectorobject** - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is “3d_vector”.
 
-**Picture Template Object**
-
+Picture Template Object
+^^^^^^^^^^^^^^^^^^^^^^^
 A picture template determines the location of each picture segment, the space to be allocated to it, and related properties relevant to its display. The description of the picture template is as follows:
 
 * **templateobject** - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is “P”.
 
-**Data Object**
-
+Data Object
+^^^^^^^^^^^
 Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 module documentation for data extraction, creation and manipulation.
 
 VCS Secondary Objects (or Secondary Elements)
@@ -348,7 +348,7 @@ Create the following VCS objects:
 
 
 Get Existing VCS Objects
-''''''''''''''''''''''''
+^^^^^^^^^^^^^^^^^^^^^^^^
 
 The get functions are used to obtain VCS objects that exist in the
 object memory tables. The get function directly manipulates the object's
@@ -431,7 +431,7 @@ The show function is used to list the VCS objects in memory:
 
 
 VCS Reference Guide
--------------------
+--------------------
 
 ``init``
 ^^^^^^^^
-- 
GitLab


From 2e3f6116b0d9463dab106a5667e00569839abdc6 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Thu, 19 May 2016 23:02:34 -0400
Subject: [PATCH 080/196] Added makefile to build sphinx documentation

---
 Packages/vcs/docs/Makefile       | 177 +++++++++++++++++++++++++++++++
 Packages/vcs/docs/conf.py        |   8 +-
 Packages/vcs/docs/user-guide.rst |   9 +-
 3 files changed, 187 insertions(+), 7 deletions(-)
 create mode 100644 Packages/vcs/docs/Makefile

diff --git a/Packages/vcs/docs/Makefile b/Packages/vcs/docs/Makefile
new file mode 100644
index 000000000..377f99e0d
--- /dev/null
+++ b/Packages/vcs/docs/Makefile
@@ -0,0 +1,177 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+PAPER         =
+BUILDDIR      = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+	@echo "Please use \`make <target>' where <target> is one of"
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  xml        to make Docutils-native XML files"
+	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
+	rm -rf $(BUILDDIR)/*
+
+html:
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Girder.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Girder.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/Girder"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Girder"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through platex and dvipdfmx..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+	@echo
+	@echo "Link check complete; look for any errors in the above output " \
+	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+	@echo
+	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+	@echo
+	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/Packages/vcs/docs/conf.py b/Packages/vcs/docs/conf.py
index 7147eae21..cc3fcd57b 100755
--- a/Packages/vcs/docs/conf.py
+++ b/Packages/vcs/docs/conf.py
@@ -56,7 +56,7 @@ source_suffix = '.rst'
 master_doc = 'index'
 
 # General information about the project.
-project = 'vcs'
+project = 'VCS'
 copyright = '2016, LLNL'
 
 # The version info for the project you're documenting, acts as replacement for
@@ -197,7 +197,7 @@ todo_include_todos = False
 #html_file_suffix = None
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'RomanescoDoc'
+htmlhelp_basename = 'VCSDoc'
 
 
 # -- Options for LaTeX output --------------------------------------------------
@@ -216,8 +216,8 @@ latex_elements = {
 # Grouping the document tree into LaTeX files. List of tuples
 # (source start file, target name, title, author, documentclass [howto/manual]).
 latex_documents = [
-  ('index', 'Romanesco.tex', 'Romanesco Documentation',
-   'Kitware, Inc.', 'manual'),
+  ('index', 'vcs.tex', 'VCS Documentation',
+   'LLNL', 'manual'),
 ]
 
 # The name of an image file (relative to this directory) to place at the top of
diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index 4c839c3d7..f4c783746 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -74,7 +74,7 @@ Data Object
 Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 module documentation for data extraction, creation and manipulation.
 
 VCS Secondary Objects (or Secondary Elements)
----------------------------------------------
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 A description of each secondary object is warranted before showing their use and usefulness in VCS. It is these secondary objects that defines the detailed specification of the primary objects’ attributes. Currently, there are five secondary objects with more to follow.
 
 Colormap Object
@@ -181,8 +181,11 @@ using default values for everything else.
     canvas.close()
 
 The script should produce a plot as shown below:
-.. |clt| image:: static/clt.png
-   :width: 100%
+
+.. image:: static/clt.png
+   :width: 400px
+   :height: 400px
+   :align: center
 
 As mentioned earlier, vcs can use numpy array directly. The example below shows how to plot numpy array data.
 
-- 
GitLab


From 13517c5029063f09992e1ea9377d8650dfd2189f Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 20 May 2016 02:32:50 -0400
Subject: [PATCH 081/196] Updated style

---
 Packages/vcs/docs/user-guide.rst | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index f4c783746..785b615ba 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -49,19 +49,19 @@ Graphics Method Objects
 
 A graphics method simply defines how data is to be displayed on the screen. Currently, there are eleven different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. They also have a set of core attributes that are common in all graphics methods. The descriptions of the current set of graphics methods are as follows:
 
-* **boxfillobject** - The boxfill graphics method draws color grid cells to represent the data on the VCS - Canvas. Its class symbol or alias is “Gfb”.
-* **continentsobject** - The continents graphics method draws a predefined, generic set of continental -outlines in a longitude by latitude space. To draw continental outlines, no external data set is required. Its class symbol or alias is “Gcon”.
-* **isofillobject** - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is “Gfi”.
-* **isolineobject** - The isoline graphics method draws lines of constant value at specified levels in order to graphically represent a two-dimensional array. It also labels the values of these isolines on the VCS Canvas. Its class symbol or alias is “Gi”.
-* **outfillobject** - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Gfo”.
-* **outlineobject** - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Go”.
-* **scatterobject** - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). Its class symbol or alias is “GSp”.
-* **vectorobject** - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is “Gv”.
-* **xvsyobject** - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ‘t’ represents the 1D coordinate values. Its class symbol or alias is “GXY”.
-* **xyvsyobject** - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ‘y’ represents the 1D coordinate values. Its class symbol or alias is “GXy”.
-* **Yxvsxobject** - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ‘x’ represents the 1D coordinate values. Its class symbol or alias is “GYx”.
-* **3dscalarobject** - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is “3d_scalar”.
-* **3dvectorobject** - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is “3d_vector”.
+* ``boxfillobject`` - The boxfill graphics method draws color grid cells to represent the data on the VCS - Canvas. Its class symbol or alias is “Gfb”.
+* ``continentsobject`` - The continents graphics method draws a predefined, generic set of continental outlines in a longitude by latitude space. To draw continental outlines, no external data set is required. Its class symbol or alias is “Gcon”.
+* ``isofillobject`` - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is “Gfi”.
+* ``isolineobject`` - The isoline graphics method draws lines of constant value at specified levels in order to graphically represent a two-dimensional array. It also labels the values of these isolines on the VCS Canvas. Its class symbol or alias is “Gi”.
+* ``outfillobject`` - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Gfo”.
+* ``outlineobject`` - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Go”.
+* ``scatterobject`` - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). Its class symbol or alias is “GSp”.
+* ``vectorobject`` - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is “Gv”.
+* ``xvsyobject`` - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ‘t’ represents the 1D coordinate values. Its class symbol or alias is “GXY”.
+* ``xyvsyobject`` - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ‘y’ represents the 1D coordinate values. Its class symbol or alias is “GXy”.
+* ``Yxvsxobject`` - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ‘x’ represents the 1D coordinate values. Its class symbol or alias is “GYx”.
+* ``3dscalarobject`` - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is “3d_scalar”.
+* ``3dvectorobject`` - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is “3d_vector”.
 
 Picture Template Object
 ^^^^^^^^^^^^^^^^^^^^^^^
-- 
GitLab


From b7a168e422325639f0b742a27c9c763932650c73 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 20 May 2016 02:44:38 -0400
Subject: [PATCH 082/196] Some more style fixes

---
 Packages/vcs/docs/user-guide.rst | 51 ++++++++++++++++----------------
 1 file changed, 25 insertions(+), 26 deletions(-)

diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index 785b615ba..6ab873e9e 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -44,8 +44,8 @@ VCS Primary Objects (or Primary Elements)
 
 A description of each primary object is warranted before showing their use and usefulness in VCS. See descriptions below.
 
-Graphics Method Objects
-^^^^^^^^^^^^^^^^^^^^^^^
+**Graphics Method Objects**
+
 
 A graphics method simply defines how data is to be displayed on the screen. Currently, there are eleven different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. They also have a set of core attributes that are common in all graphics methods. The descriptions of the current set of graphics methods are as follows:
 
@@ -63,61 +63,60 @@ A graphics method simply defines how data is to be displayed on the screen. Curr
 * ``3dscalarobject`` - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is “3d_scalar”.
 * ``3dvectorobject`` - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is “3d_vector”.
 
-Picture Template Object
-^^^^^^^^^^^^^^^^^^^^^^^
+
+**Picture Template Object**
+
 A picture template determines the location of each picture segment, the space to be allocated to it, and related properties relevant to its display. The description of the picture template is as follows:
 
-* **templateobject** - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is “P”.
+* ``templateobject`` - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is “P”.
+
+**Data Object**
 
-Data Object
-^^^^^^^^^^^
 Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 module documentation for data extraction, creation and manipulation.
 
 VCS Secondary Objects (or Secondary Elements)
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 A description of each secondary object is warranted before showing their use and usefulness in VCS. It is these secondary objects that defines the detailed specification of the primary objects’ attributes. Currently, there are five secondary objects with more to follow.
 
-Colormap Object
-^^^^^^^^^^^^^^^
+
+**Colormap Object**
 
 The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). The description of the colormap object is as follows:
 
-*colormapobject* - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is “Cp”.
+* ``colormapobject`` - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is “Cp”.
 Note: VCS colormaps are objects, but they are not referenced like other secondary objects.
 
-Fillarea Object
-^^^^^^^^^^^^^^^
+
+**Fillarea Object**
 
 The fillarea objects allows the user to edit fillarea attributes, including fillarea interior style, style index, and color index. The description of the fillarea object is as follows:
 
-*fillareaobject* - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is “Tf”.
-Line Object
+* ``fillareaobject`` - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is “Tf”.
+
 
-Line Object
-^^^^^^^^^^^
+**Line Object**
 
 The line object allows the editing of line type, width, and color index. The description of the line object is as follows:
 
-*lineobject* - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. Its class symbol or alias is “Tl”.
-Marker Object
+* ``lineobject`` - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. Its class symbol or alias is “Tl”.
+
+
+**Marker Object**
 
 The marker object allows the editing of the marker type, width, and color index. The description of the marker object is as follows:
 
-Marker Object
-^^^^^^^^^^^^^
+* ``markerobject`` - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is “Tm”.
 
-*markerobject* - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is “Tm”.
 
-Text Objects
-^^^^^^^^^^^^
+**Text Objects**
 
 Graphical displays often contain textual inscriptions, which provide further information. The text-table object attributes allow the generation of character strings on the VCS Canvas by defining the character font, precision, expansion, spacing, and color. The text-orientation object attributes allow the appearance of text character strings to be changed by defining the character height, up-angle, path, and horizontal and vertical alignment. The text-combined object is a combination of both text-table and text-orientation objects. The description of the text objects are as follows:
 
-*textcombinedobject* - The text-combined attributes combine the text-table attributes and a text-orientation attributes together. From combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is “Tc”.
+* ``textcombinedobject`` - The text-combined attributes combine the text-table attributes and a text-orientation attributes together. From combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is “Tc”.
 
-*textorientationobject* - The text-orientation attributes set names that define the height, angel, path, horizontal alignment and vertical alignment. Its class symbol or alias is “To”.
+* ``textorientationobject`` - The text-orientation attributes set names that define the height, angle, path, horizontal alignment and vertical alignment. Its class symbol or alias is “To”.
 
-*texttableobject* - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is “Tt”.
+* ``texttableobject`` - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is “Tt”.
 
 
 Getting Started with VCS
-- 
GitLab


From 10aa33613035e59edcb0affef993a6f56d27fdcf Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 20 May 2016 09:32:15 -0400
Subject: [PATCH 083/196] Fix minor grammatical issue

---
 Packages/vcs/docs/user-guide.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index 6ab873e9e..d8a9789f5 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -16,7 +16,7 @@ vcs specific entities will be ``formatted like this``.
 Installation
 ------------
 While there are many ways a user can install vcs, installation using conda is
-preferred for the end user. Currently, to install vcs, you need to install entire uvcdat
+preferred for the end user. Currently, to install vcs, you need to install the entire uvcdat
 pacakge. ::
 
     conda install -c uvcdat
-- 
GitLab


From 9356d962eceb9b6521273e89b4cef2f5f88e4e45 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 20 May 2016 09:43:22 -0400
Subject: [PATCH 084/196] Separated reference to a separate document

---
 Packages/vcs/docs/developer-guide.rst |   2 +
 Packages/vcs/docs/index.rst           |   3 +-
 Packages/vcs/docs/reference.rst       | 129 +++++++++++++++++++++++++
 Packages/vcs/docs/user-guide.rst      | 131 --------------------------
 4 files changed, 133 insertions(+), 132 deletions(-)
 create mode 100644 Packages/vcs/docs/developer-guide.rst
 create mode 100644 Packages/vcs/docs/reference.rst

diff --git a/Packages/vcs/docs/developer-guide.rst b/Packages/vcs/docs/developer-guide.rst
new file mode 100644
index 000000000..cf18c40ac
--- /dev/null
+++ b/Packages/vcs/docs/developer-guide.rst
@@ -0,0 +1,2 @@
+Developer Guide
+===============
diff --git a/Packages/vcs/docs/index.rst b/Packages/vcs/docs/index.rst
index 0bb30b293..1fcb27b4e 100644
--- a/Packages/vcs/docs/index.rst
+++ b/Packages/vcs/docs/index.rst
@@ -49,7 +49,8 @@ Table of contents
    :maxdepth: 2
 
    user-guide
-   developer-docs
+   developer-guide
+   reference
 
 API index
 ---------
diff --git a/Packages/vcs/docs/reference.rst b/Packages/vcs/docs/reference.rst
new file mode 100644
index 000000000..0af233efa
--- /dev/null
+++ b/Packages/vcs/docs/reference.rst
@@ -0,0 +1,129 @@
+VCS Reference Guide
+--------------------
+
+``init``
+^^^^^^^^
+* Initialize, Construct a VCS Canvas Object
+
+.. code-block:: python
+
+    import vcs,cdms2
+
+    file = cdms2.open('clt.nc')
+
+    slab = file.getslab('clt')
+
+    a = vcs.init()
+
+    # This example constructs 4 VCS Canvas objects; a.plot(slab)
+    # Plot slab using default settings
+    b = vcs.init()
+
+    # Construct VCS object
+    template = b.gettemplate('AMIP')
+
+    # Get 'example' template object
+    b.plot(slab, template)
+
+    # Plot slab using template 'AMIP'
+    c = vcs.init()
+
+    # Construct new VCS object
+    isofill = c.getisofill('quick')
+
+    # Get 'quick' isofill graphics method
+    c.plot(slab,template,isofill)
+
+    # Plot slab using template and isofill objects
+    d = vcs.init()
+
+    # Construct new VCS object
+    isoline = c.getisoline('quick')
+
+    # Get 'quick' isoline graphics method
+    c.plot(isoline,slab,template)
+
+    # Plot slab using isoline and template objects
+
+``help``
+^^^^^^^^
+* Print out the object's doc string
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    ln = a.getline('red')
+
+    # Get a VCS line object
+    # This will print out information on how to use ln
+    a.objecthelp(ln)
+
+``open``
+^^^^^^^^
+* Open VCS Canvas object.
+* This routine really just manages the VCS canvas. It will popup the VCS Canvas for viewing. It can be used to display the VCS Canvas.
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    a.open()
+
+``close``
+^^^^^^^^^
+* Close the VCS Canvas. It will remove the VCS Canvas object from the screen, but not deallocate it.
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    a.plot(array, 'default', 'isofill', 'quick')
+    a.close()
+
+``mode``
+^^^^^^^^
+* ``Options <0 = manual, 1 = automatic>``
+* Update the VCS Canvas.
+* Updating of the graphical displays on the VCS Canvas can be deferred until a later time. This is helpful when generating templates or displaying numerous plots. If a series of commands are given to VCS and the Canvas Mode is set to manual (i.e., 0), then no updating of the VCS Canvas occurs until the 'update' function is executed.
+
+.. note:: By default the VCS Canvas Mode is set to ``1``, which means VCS will update the VCS Canvas as necessary without prompting from the user.
+
+.. code-block:: python
+
+    import vcs
+    a = vcs.init()
+    a.mode = 0
+    # Set updating to manual mode
+    a.plot(array, 'default', 'boxfill', 'quick')
+    box = a.getboxfill('quick')
+    box.color_1 = 100
+    box.xticlabels('lon30', 'lon30')
+    box.xticlabels('','')
+    box.datawc(1e20, 1e20, 1e20, 1e20)
+    box.datawc(-45.0, 45.0, -90.0, 90.0)
+
+    # Update the changes manually
+    a.update()
+
+``update``
+^^^^^^^^^^
+* Update the VCS Canvas manually when the ``mode`` is set to ``0`` (manual).
+
+.. code-block:: python
+
+    import vcs
+
+    a = vcs.init()
+    a.mode = 0
+
+    # Go to manual mode a.plot(s,'default','boxfill','quick')
+    box = a.getboxfill('quick')
+    box.color_1 = 100
+    box.xticlabels('lon30', 'lon30')
+    box.xticlabels('','')
+    box.datawc(1e20, 1e20, 1e20, 1e20)
+    box.datawc(-45.0, 45.0, -90.0, 90.0)
+
+    # Update the changes manually
+    a.update()
\ No newline at end of file
diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index d8a9789f5..d74b3aec2 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -430,134 +430,3 @@ The show function is used to list the VCS objects in memory:
 +=================+==========================================================+
 | ``show()``      | list VCS primary and secondary class objects in memory   |
 +-----------------+----------------------------------------------------------+
-
-
-VCS Reference Guide
---------------------
-
-``init``
-^^^^^^^^
-* Initialize, Construct a VCS Canvas Object
-
-.. code-block:: python
-
-    import vcs,cdms2
-
-    file = cdms2.open('clt.nc')
-
-    slab = file.getslab('clt')
-
-    a = vcs.init()
-
-    # This examples constructs 4 VCS Canvas a.plot(slab)
-    # Plot slab using default settings
-    b = vcs.init()
-
-    # Construct VCS object
-    template = b.gettemplate('AMIP')
-
-    # Get 'example' template object
-    b.plot(slab, template)
-
-    # Plot slab using template 'AMIP'
-    c = vcs.init()
-
-    # Construct new VCS object
-    isofill = c.getisofill('quick')
-
-    # Get 'quick' isofill graphics method
-    c.plot(slab,template,isofill)
-
-    # Plot slab using template and isofill objects
-    d = vcs.init()
-
-    # Construct new VCS object
-    isoline = c.getisoline('quick')
-
-    # Get 'quick' isoline graphics method
-    c.plot(isoline,slab,template)
-
-    # Plot slab using isoline and template objects
-
-``help``
-^^^^^^^^
-* Print out the object's doc string
-
-.. code-block:: python
-
-    import vcs
-    a = vcs.init()
-    ln = a.getline('red')
-
-    # Get a VCS line object
-    # This will print out information on how to use ln
-    a.objecthelp(ln)
-
-``open``
-^^^^^^^^
-* Open VCS Canvas object.
-* This routine really just manages the VCS canvas. It will popup the VCS Canvas for viewing. It can be used to display the VCS Canvas.
-
-.. code-block:: python
-
-    import vcs
-    a = vcs.init()
-    a.open()
-
-``close``
-^^^^^^^^^
-* Close the VCS Canvas. It will remove the VCS Canvas object from the screen, but not deallocate it.
-
-.. code-block:: python
-
-    import vcs
-    a = vcs.init()
-    a.plot(array, 'default', 'isofill', 'quick')
-    a.close()
-
-``mode``
-^^^^^^^^
-* ``Options <0 = manual, 1 = automatic>``
-* Update the VCS Canvas.
-* Updating of the graphical displays on the VCS Canvas can be deferred until a later time. This is helpful when generating templates or displaying numerous plots. If a series of commands are given to VCS and the Canvas Mode is set to manual (i.e., 0), then no updating of the VCS Canvas occurs until the 'update' function is executed.
-
-.. note:: By default the VCS Canvas Mode is set to ``1``, which means VCS will update the VCS Canvas as necessary without prompting from the user.
-
-.. code-block:: python
-
-    import vcs
-    a = vcs.init()
-    a.mode = 0
-    # Set updating to manual mode
-    a.plot(array, 'default', 'boxfill', 'quick')
-    box = x.getboxfill('quick')
-    box.color_1 = 100
-    box.xticlabels('lon30', 'lon30')
-    box.xticlabels('','')
-    box.datawc(1e20, 1e20, 1e20, 1e20)
-    box.datawc(-45.0, 45.0, -90.0, 90.0)
-
-    # Update the changes manually
-    a.update()
-
-``update``
-^^^^^^^^^^
-* Update the VCS Canvas manually when the ``mode`` is set to ``0`` (manual).
-
-.. code-block:: python
-
-    import vcs
-
-    a = vcs.init()
-    a.mode = 0
-
-    # Go to manual mode a.plot(s,'default','boxfill','quick')
-    box = x.getboxfill('quick')
-    box.color_1 = 100
-    box.xticlabels('lon30', 'lon30')
-    box.xticlabels('','')
-    box.datawc(1e20, 1e20, 1e20, 1e20)
-    box.datawc(-45.0, 45.0, -90.0, 90.0)
-
-    # Update the changes manually
-    a.update()
-- 
GitLab


From 7a599261680593869bc2708a2a0697f5696f9f48 Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Tue, 26 Apr 2016 16:34:06 -0400
Subject: [PATCH 085/196] ENH #1885: Show info at clicked point for point
 datasets

---
 Packages/vcs/vcs/VTKPlots.py                | 197 ++++++++++----------
 Packages/vcs/vcs/vcs2vtk.py                 |   1 +
 Packages/vcs/vcs/vcsvtk/boxfillpipeline.py  |   3 -
 Packages/vcs/vcs/vcsvtk/isofillpipeline.py  |   3 -
 Packages/vcs/vcs/vcsvtk/isolinepipeline.py  |   3 -
 Packages/vcs/vcs/vcsvtk/meshfillpipeline.py |   3 -
 Packages/vcs/vcs/vcsvtk/pipeline2d.py       |  32 +++-
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py   | 124 ++++--------
 testing/vcs/CMakeLists.txt                  |  41 ++--
 testing/vcs/test_vcs_click_info.py          |  26 ++-
 10 files changed, 197 insertions(+), 236 deletions(-)

diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 8cfe83a28..37382c0e9 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -46,18 +46,16 @@ class VTKVCSBackend(object):
         self._renderers = {}
         self._plot_keywords = [
             'cdmsfile',
-            'cell_coordinates'
-            # used to render the continents
-            'continents_renderer',
+            'cell_coordinates',
             # dataset bounds in lon/lat coordinates
             'dataset_bounds',
             # This may be smaller than the data viewport. It is used
             # if autot is passed
             'ratio_autot_viewport',
-            # used to render the dataset
-            'dataset_renderer',
-            # dataset scale: (xScale, yScale)
-            'dataset_scale',
+            # used to render the dataset for clicked point info (hardware selection)
+            'surface_renderer',
+            # (xScale, yScale) - datasets can be scaled using the window ratio
+            'surface_scale',
             # the same as vcs.utils.getworldcoordinates for now. getworldcoordinates uses
             # gm.datawc_... or, if that is not set, it uses data axis margins (without bounds).
             'plotting_dataset_bounds',
@@ -138,99 +136,92 @@ class VTKVCSBackend(object):
             d = vcs.elements["display"][dnm]
             if d.array[0] is None:
                 continue
-            t = vcs.elements["template"][d.template]
-            gm = vcs.elements[d.g_type][d.g_name]
-            # for non-linear projection or for meshfill. Meshfill is wrapped at
-            # VTK level, so vcs calculations do not work.
-            if gm.projection != "linear" or gm.g_name == 'Gfm':
-                selector = vtk.vtkHardwareSelector()
-                datasetRenderer = d.backend['dataset_renderer']
-                continentsRenderer = d.backend.get('continents_renderer')
-                dataset = d.backend['vtk_backend_grid']
-                if (datasetRenderer and dataset):
-                    selector.SetRenderer(datasetRenderer)
-                    selector.SetArea(xy[0], xy[1], xy[0], xy[1])
-                    selector.SetFieldAssociation(vtk.vtkDataObject.FIELD_ASSOCIATION_CELLS)
-                    # We want to be able see information behind continents
-                    if (continentsRenderer):
-                        continentsRenderer.SetDraw(False)
-                    selection = selector.Select()
-                    if (continentsRenderer):
-                        continentsRenderer.SetDraw(True)
-                    if (selection.GetNumberOfNodes() > 0):
-                        selectionNode = selection.GetNode(0)
-                        prop = selectionNode.GetProperties().Get(selectionNode.PROP())
-                        if (prop):
-                            cellIds = prop.GetMapper().GetInput().GetCellData().GetGlobalIds()
-                            if (cellIds):
-                                # scalar value
-                                a = selectionNode.GetSelectionData().GetArray(0)
-                                geometryId = a.GetValue(0)
-                                cellId = cellIds.GetValue(geometryId)
-                                scalars = dataset.GetCellData().GetScalars()
-                                value = scalars.GetValue(cellId)
-                                geoTransform = d.backend['vtk_backend_geo']
-                                if (geoTransform):
-                                    geoTransform.Inverse()
-                                # Use the world picker to get world coordinates
-                                # we deform the dataset, so we need to fix the
-                                # world picker using xScale, yScale
-                                xScale, yScale = d.backend['dataset_scale']
-                                worldPicker = vtk.vtkWorldPointPicker()
-                                worldPicker.Pick(xy[0], xy[1], 0, datasetRenderer)
-                                worldPosition = list(worldPicker.GetPickPosition())
-                                if (xScale > yScale):
-                                    worldPosition[0] /= (xScale/yScale)
-                                else:
-                                    worldPosition[1] /= (yScale/xScale)
-                                lonLat = worldPosition
-                                if (geoTransform):
-                                    geoTransform.InternalTransformPoint(worldPosition, lonLat)
-                                    geoTransform.Inverse()
-                                st += "Var: %s\n" % d.array[0].id
-                                if (float("inf") not in lonLat):
-                                    st += "X=%4.1f\nY=%4.1f\n" % (lonLat[0], lonLat[1])
-                                st += "Value: %g" % value
-            else:
-                if t.data.x1 <= x <= t.data.x2 and t.data.y1 <= y <= t.data.y2:
-                    x1, x2, y1, y2 = vcs.utils.getworldcoordinates(gm,
-                                                                   d.array[0].getAxis(-1),
-                                                                   d.array[0].getAxis(-2))
-
-                    X = (x - t.data.x1) / (t.data.x2 - t.data.x1) * (x2 - x1) + x1
-                    Y = (y - t.data.y1) / (t.data.y2 - t.data.y1) * (y2 - y1) + y1
-
-                    # Ok we now have the X/Y values we need to figure out the
-                    # indices
-                    try:
-                        I = d.array[0].getAxis(-1).mapInterval((X, X, 'cob'))[0]
-                        try:
-                            J = d.array[
-                                0].getAxis(-2).mapInterval((Y, Y, 'cob'))[0]
-                            # Values at that point
-                            V = d.array[0][..., J, I]
-                        except:
-                            V = d.array[0][..., I]
-                        if isinstance(V, numpy.ndarray):
-                            # Grab the appropriate time slice
-                            if self.canvas.animate.created():
-                                t = self.canvas.animate.frame_num
-                                try:
-                                    taxis = V.getTime()
-                                    V = V(time=taxis[t % len(taxis)]).flat[0]
-                                except:
-                                    V = V.flat[0]
+            # Use the hardware selector to determine the cell id we clicked on
+            selector = vtk.vtkHardwareSelector()
+            surfaceRenderer = d.backend['surface_renderer']
+            dataset = d.backend['vtk_backend_grid']
+            if (surfaceRenderer and dataset):
+                selector.SetRenderer(surfaceRenderer)
+                selector.SetArea(xy[0], xy[1], xy[0], xy[1])
+                selector.SetFieldAssociation(vtk.vtkDataObject.FIELD_ASSOCIATION_CELLS)
+                # We only want to render the surface for selection
+                renderers = self.renWin.GetRenderers()
+                renderers.InitTraversal()
+                while(True):
+                    renderer = renderers.GetNextItem()
+                    if (renderer is None):
+                        break
+                    renderer.SetDraw(False)
+                surfaceRenderer.SetDraw(True)
+                selection = selector.Select()
+                renderers.InitTraversal()
+                while(True):
+                    renderer = renderers.GetNextItem()
+                    if (renderer is None):
+                        break
+                    renderer.SetDraw(True)
+                surfaceRenderer.SetDraw(False)
+                if (selection.GetNumberOfNodes() > 0):
+                    selectionNode = selection.GetNode(0)
+                    prop = selectionNode.GetProperties().Get(selectionNode.PROP())
+                    if (prop):
+                        cellIds = prop.GetMapper().GetInput().GetCellData().GetGlobalIds()
+                        if (cellIds):
+                            st += "Var: %s\n" % d.array[0].id
+                            # cell attribute
+                            a = selectionNode.GetSelectionData().GetArray(0)
+                            geometryId = a.GetValue(0)
+                            cellId = cellIds.GetValue(geometryId)
+                            attributes = dataset.GetCellData().GetScalars()
+                            if (attributes is None):
+                                attributes = dataset.GetCellData().GetVectors()
+                            elementId = cellId
+
+                            geoTransform = d.backend['vtk_backend_geo']
+                            if (geoTransform):
+                                geoTransform.Inverse()
+                            # Use the world picker to get world coordinates
+                            # we deform the dataset, so we need to fix the
+                            # world picker using xScale, yScale
+                            xScale, yScale = d.backend['surface_scale']
+                            worldPicker = vtk.vtkWorldPointPicker()
+                            worldPicker.Pick(xy[0], xy[1], 0, surfaceRenderer)
+                            worldPosition = list(worldPicker.GetPickPosition())
+                            if (xScale > yScale):
+                                worldPosition[0] /= (xScale/yScale)
                             else:
-                                V = V.flat[0]
-                        try:
-                            st += "Var: %s\nX[%i] = %4.1f\nY[%i] = %4.1f\nValue: %g" % (
-                                d.array[0].id, I, X, J, Y, V)
-                        except:
-                            st += "Var: %s\nX = %4.1f\nY[%i] = %4.1f\nValue: %g" % (
-                                d.array[0].id, X, I, Y, V)
-                    except:
-                        st += "Var: %s\nX=%g\nY=%g\nValue = N/A" % (
-                            d.array[0].id, X, Y)
+                                worldPosition[1] /= (yScale/xScale)
+                            lonLat = worldPosition
+                            if (attributes is None):
+                                # if point dataset, return the value for the closest point
+                                cell = dataset.GetCell(cellId)
+                                closestPoint = [0, 0, 0]
+                                subId = vtk.mutable(0)
+                                pcoords = [0, 0, 0]
+                                dist2 = vtk.mutable(0)
+                                weights = [0] * cell.GetNumberOfPoints()
+                                cell.EvaluatePosition(worldPosition, closestPoint,
+                                                      subId, pcoords, dist2, weights)
+                                indexMax = numpy.argmax(weights)
+                                pointId = cell.GetPointId(indexMax)
+                                attributes = dataset.GetPointData().GetScalars()
+                                if (attributes is None):
+                                    attributes = dataset.GetPointData().GetVectors()
+                                elementId = pointId
+                            if (geoTransform):
+                                geoTransform.InternalTransformPoint(worldPosition, lonLat)
+                                geoTransform.Inverse()
+                            if (float("inf") not in lonLat):
+                                st += "X=%4.1f\nY=%4.1f\n" % (lonLat[0], lonLat[1])
+                            # get the cell value or the closest point value
+                            if (attributes):
+                                if (attributes.GetNumberOfComponents() > 1):
+                                    v = attributes.GetTuple(elementId)
+                                    st += "Value: (%g, %g)" % (v[0], v[1])
+                                else:
+                                    value = attributes.GetValue(elementId)
+                                    st += "Value: %g" % value
+
         if st == "":
             return
         ren = vtk.vtkRenderer()
@@ -859,9 +850,9 @@ class VTKVCSBackend(object):
                     ren = self.createRenderer()
                     self.renWin.AddRenderer(ren)
                     self.setLayer(ren, 1)
-                    self._renderers[(None, None, None)] = ren
+                    self._renderers[(None, None, None)] = (ren, 1, 1)
                 else:
-                    ren = self._renderers[(None, None, None)]
+                    ren, xratio, yratio = self._renderers[(None, None, None)]
                 tt, to = crdate.name.split(":::")
                 tt = vcs.elements["texttable"][tt]
                 to = vcs.elements["textorientation"][to]
@@ -896,9 +887,9 @@ class VTKVCSBackend(object):
                     ren = self.createRenderer()
                     self.renWin.AddRenderer(ren)
                     self.setLayer(ren, 1)
-                    self._renderers[(None, None, None)] = ren
+                    self._renderers[(None, None, None)] = (ren, 1, 1)
                 else:
-                    ren = self._renderers[(None, None, None)]
+                    ren, xratio, yratio = self._renderers[(None, None, None)]
                 tt, to = zname.name.split(":::")
                 tt = vcs.elements["texttable"][tt]
                 to = vcs.elements["textorientation"][to]
diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py
index 86cfcfd0a..aa4a228ac 100644
--- a/Packages/vcs/vcs/vcs2vtk.py
+++ b/Packages/vcs/vcs/vcs2vtk.py
@@ -809,6 +809,7 @@ def doWrap(Act, wc, wrap=[0., 360], fastClip=True):
     if wrap is None:
         return Act
     Mapper = Act.GetMapper()
+    Mapper.Update()
     data = Mapper.GetInput()
     # insure that GLOBALIDS are not removed by the append filter
     attributes = data.GetCellData()
diff --git a/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py b/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
index 1ea81febb..f2a3ea602 100644
--- a/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
@@ -130,8 +130,6 @@ class BoxfillPipeline(Pipeline2D):
                 geo=self._vtkGeoTransform,
                 priority=self._template.data.priority,
                 create_renderer=(dataset_renderer is None))
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
 
         for act in patternActors:
             if self._vtkGeoTransform is None:
@@ -211,7 +209,6 @@ class BoxfillPipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
 
     def _plotInternalBoxfill(self):
         """Implements the logic to render a non-custom boxfill."""
diff --git a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
index af3b037a8..55098f9e5 100644
--- a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
@@ -158,8 +158,6 @@ class IsofillPipeline(Pipeline2D):
                 geo=self._vtkGeoTransform,
                 priority=self._template.data.priority,
                 create_renderer=(dataset_renderer is None))
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
         for act in patternActors:
             self._context().fitToViewport(
                 act, vp,
@@ -226,4 +224,3 @@ class IsofillPipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
diff --git a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
index 2d9b66472..1560de7c1 100644
--- a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
@@ -273,8 +273,6 @@ class IsolinePipeline(Pipeline2D):
                 create_renderer=(dataset_renderer is None))
 
             countLevels += len(l)
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
         if len(textprops) > 0:
             self._resultDict["vtk_backend_contours_labels_text_properties"] = \
                 textprops
@@ -332,4 +330,3 @@ class IsolinePipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
diff --git a/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py b/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
index 7101a4729..64a95c4e3 100644
--- a/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
@@ -195,8 +195,6 @@ class MeshfillPipeline(Pipeline2D):
                 geo=self._vtkGeoTransform,
                 priority=self._template.data.priority,
                 create_renderer=(dataset_renderer is None))
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
         for act in self._patternActors:
             if self._vtkGeoTransform is None:
                 # If using geofilter on wireframed does not get wrapped not sure
@@ -270,7 +268,6 @@ class MeshfillPipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
 
     def getPlottingBounds(self):
         """gm.datawc if it is set or dataset_bounds
diff --git a/Packages/vcs/vcs/vcsvtk/pipeline2d.py b/Packages/vcs/vcs/vcsvtk/pipeline2d.py
index baa2f8915..dc12f3f5c 100644
--- a/Packages/vcs/vcs/vcsvtk/pipeline2d.py
+++ b/Packages/vcs/vcs/vcsvtk/pipeline2d.py
@@ -50,6 +50,7 @@ class IPipeline2D(Pipeline):
             applied to points.
         - _needsCellData: True if the plot needs cell scalars, false if
             the plot needs point scalars
+        - _needsVectors: True if the plot needs vectors, false if it needs scalars
         - _scalarRange: The range of _data1 as tuple(float min, float max)
         - _maskedDataMapper: The mapper used to render masked data.
     """
@@ -79,6 +80,7 @@ class IPipeline2D(Pipeline):
         self._dataWrapModulo = None
         self._hasCellData = None
         self._needsCellData = None
+        self._needsVectors = False
         self._scalarRange = None
         self._maskedDataMapper = None
 
@@ -276,6 +278,8 @@ class Pipeline2D(IPipeline2D):
 
         # Preprocess the input scalar data:
         self._updateScalarData()
+        self._min = self._data1.min()
+        self._max = self._data1.max()
         self._scalarRange = vcs.minmax(self._data1)
 
         # Create/update the VTK dataset.
@@ -313,8 +317,6 @@ class Pipeline2D(IPipeline2D):
         """Overrides baseclass implementation."""
         self._data1 = self._context().trimData2D(self._originalData1)
         self._data2 = self._context().trimData2D(self._originalData2)
-        self._min = self._data1.min()
-        self._max = self._data1.max()
 
     def _updateVTKDataSet(self, plotBasedDualGrid):
         """
@@ -327,8 +329,10 @@ class Pipeline2D(IPipeline2D):
         genGridDict = vcs2vtk.genGrid(self._data1, self._data2, self._gm,
                                       deep=False,
                                       grid=self._vtkDataSet,
-                                      geo=self._vtkGeoTransform, dualGrid=dualGrid)
+                                      geo=self._vtkGeoTransform, genVectors=self._needsVectors,
+                                      dualGrid=dualGrid)
         self._data1 = genGridDict["data"]
+        self._data2 = genGridDict["data2"]
         self._updateFromGenGridDict(genGridDict)
 
     def _createPolyDataFilter(self):
@@ -339,6 +343,7 @@ class Pipeline2D(IPipeline2D):
         elif self._hasCellData:
             # use cells but needs points
             c2p = vtk.vtkCellDataToPointData()
+            c2p.PassCellDataOn()
             c2p.SetInputData(self._vtkDataSet)
             self._vtkPolyDataFilter.SetInputConnection(c2p.GetOutputPort())
         else:
@@ -349,6 +354,27 @@ class Pipeline2D(IPipeline2D):
             self._vtkPolyDataFilter.SetInputConnection(p2c.GetOutputPort())
         self._vtkPolyDataFilter.Update()
         self._resultDict["vtk_backend_filter"] = self._vtkPolyDataFilter
+        # create an actor and a renderer for the surface mesh.
+        # this is used for displaying point information using the hardware selection
+        mapper = vtk.vtkPolyDataMapper()
+        mapper.SetInputConnection(self._vtkPolyDataFilter.GetOutputPort())
+        act = vtk.vtkActor()
+        act.SetMapper(mapper)
+        vp = self._resultDict.get(
+            'ratio_autot_viewport',
+            [self._template.data.x1, self._template.data.x2,
+             self._template.data.y1, self._template.data.y2])
+        plotting_dataset_bounds = self.getPlottingBounds()
+        surface_renderer, xScale, yScale = self._context().fitToViewport(
+            act, vp,
+            wc=plotting_dataset_bounds, geoBounds=self._vtkDataSet.GetBounds(),
+            geo=self._vtkGeoTransform,
+            priority=self._template.data.priority,
+            create_renderer=True)
+        self._resultDict['surface_renderer'] = surface_renderer
+        self._resultDict['surface_scale'] = (xScale, yScale)
+        if (surface_renderer):
+            surface_renderer.SetDraw(False)
 
     def _updateFromGenGridDict(self, genGridDict):
         """Overrides baseclass implementation."""
diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index bc34e3c9e..0badc60b4 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -1,75 +1,47 @@
-from .pipeline import Pipeline
+from .pipeline2d import Pipeline2D
 
 import vcs
 from vcs import vcs2vtk
 import vtk
 
 
-class VectorPipeline(Pipeline):
+class VectorPipeline(Pipeline2D):
 
     """Implementation of the Pipeline interface for VCS vector plots."""
 
     def __init__(self, gm, context_):
         super(VectorPipeline, self).__init__(gm, context_)
+        self._needsCellData = False
+        self._needsVectors = True
 
-    def plot(self, data1, data2, tmpl, grid, transform, **kargs):
+    def _plotInternal(self):
         """Overrides baseclass implementation."""
         # Preserve time and z axis for plotting these inof in rendertemplate
-        geo = None  # to make flake8 happy
         projection = vcs.elements["projection"][self._gm.projection]
-        returned = {}
-        taxis = data1.getTime()
-        if data1.ndim > 2:
-            zaxis = data1.getAxis(-3)
+        taxis = self._originalData1.getTime()
+        if self._originalData1.ndim > 2:
+            zaxis = self._originalData1.getAxis(-3)
         else:
             zaxis = None
 
-        # Ok get3 only the last 2 dims
-        data1 = self._context().trimData2D(data1)
-        data2 = self._context().trimData2D(data2)
-
         scale = 1.0
         lat = None
         lon = None
 
-        latAccessor = data1.getLatitude()
-        lonAccessor = data1.getLongitude()
+        latAccessor = self._data1.getLatitude()
+        lonAccessor = self._data1.getLongitude()
         if latAccessor:
             lat = latAccessor[:]
         if lonAccessor:
             lon = lonAccessor[:]
 
-        plotBasedDualGrid = kargs.get('plot_based_dual_grid', True)
-        if (plotBasedDualGrid):
-            hasCellData = data1.hasCellData()
-            dualGrid = hasCellData
-        else:
-            dualGrid = False
-        gridGenDict = vcs2vtk.genGrid(data1, data2, self._gm, deep=False, grid=grid,
-                                      geo=transform, genVectors=True,
-                                      dualGrid=dualGrid)
-
-        data1 = gridGenDict["data"]
-        data2 = gridGenDict["data2"]
-        geo = gridGenDict["geo"]
-
-        grid = gridGenDict['vtk_backend_grid']
-        xm = gridGenDict['xm']
-        xM = gridGenDict['xM']
-        ym = gridGenDict['ym']
-        yM = gridGenDict['yM']
-        continents = gridGenDict['continents']
-        self._dataWrapModulo = gridGenDict['wrap']
-        geo = gridGenDict['geo']
-        cellData = gridGenDict['cellData']
-
-        if geo is not None:
+        if self._vtkGeoTransform is not None:
             newv = vtk.vtkDoubleArray()
             newv.SetNumberOfComponents(3)
             newv.InsertTupleValue(0, [lon.min(), lat.min(), 0])
             newv.InsertTupleValue(1, [lon.max(), lat.max(), 0])
 
-            vcs2vtk.projectArray(newv, projection, [xm, xM, ym, yM])
+            vcs2vtk.projectArray(newv, projection, self._vtkDataSetBounds)
             dimMin = [0, 0, 0]
             dimMax = [0, 0, 0]
 
@@ -89,22 +61,6 @@ class VectorPipeline(Pipeline):
         else:
             scale = 1.0
 
-        returned["vtk_backend_grid"] = grid
-        returned["vtk_backend_geo"] = geo
-        missingMapper = vcs2vtk.putMaskOnVTKGrid(data1, grid, actorColor=None,
-                                                 cellData=cellData, deep=False)
-
-        # None/False are for color and cellData
-        # (sent to vcs2vtk.putMaskOnVTKGrid)
-        returned["vtk_backend_missing_mapper"] = (missingMapper, None, False)
-
-        # convert to point data
-        if cellData:
-            c2p = vtk.vtkCellDataToPointData()
-            c2p.SetInputData(grid)
-            c2p.Update()
-            grid = c2p.GetOutput()
-
         # Vector attempt
         l = self._gm.line
         if l is None:
@@ -129,7 +85,7 @@ class VectorPipeline(Pipeline):
         arrow.FilledOff()
 
         glyphFilter = vtk.vtkGlyph2D()
-        glyphFilter.SetInputData(grid)
+        glyphFilter.SetInputConnection(self._vtkPolyDataFilter.GetOutputPort())
         glyphFilter.SetInputArrayToProcess(1, 0, 0, 0, "vector")
         glyphFilter.SetSourceConnection(arrow.GetOutputPort())
         glyphFilter.SetVectorModeToUseVector()
@@ -163,22 +119,20 @@ class VectorPipeline(Pipeline):
 
         plotting_dataset_bounds = vcs2vtk.getPlottingBounds(
             vcs.utils.getworldcoordinates(self._gm,
-                                          data1.getAxis(-1),
-                                          data1.getAxis(-2)),
-            [xm, xM, ym, yM], geo)
+                                          self._data1.getAxis(-1),
+                                          self._data1.getAxis(-2)),
+            self._vtkDataSetBounds, self._vtkGeoTransform)
         x1, x2, y1, y2 = plotting_dataset_bounds
-        if geo is None:
+        if self._vtkGeoTransform is None:
             wc = plotting_dataset_bounds
         else:
             xrange = list(act.GetXRange())
             yrange = list(act.GetYRange())
             wc = [xrange[0], xrange[1], yrange[0], yrange[1]]
 
-        if (transform and kargs.get('ratio', '0') == 'autot'):
-            returned['ratio_autot_viewport'] = self._processRatioAutot(tmpl, grid)
-
-        vp = returned.get('ratio_autot_viewport',
-                          [tmpl.data.x1, tmpl.data.x2, tmpl.data.y1, tmpl.data.y2])
+        vp = self._resultDict.get('ratio_autot_viewport',
+                                  [self._template.data.x1, self._template.data.x2,
+                                   self._template.data.y1, self._template.data.y2])
         # look for previous dataset_bounds different than ours and
         # modify the viewport so that the datasets are alligned
         # Hack to fix the case when the user does not specify gm.datawc_...
@@ -200,31 +154,29 @@ class VectorPipeline(Pipeline):
         dataset_renderer, xScale, yScale = self._context().fitToViewport(
             act, vp,
             wc=wc,
-            priority=tmpl.data.priority,
+            priority=self._template.data.priority,
             create_renderer=True)
-        returned['dataset_renderer'] = dataset_renderer
-        returned['dataset_scale'] = (xScale, yScale)
-        bounds = [min(xm, xM), max(xm, xM), min(ym, yM), max(ym, yM)]
-        kwargs = {'vtk_backend_grid': grid,
-                  'dataset_bounds': bounds,
+        kwargs = {'vtk_backend_grid': self._vtkDataSet,
+                  'dataset_bounds': self._vtkDataSetBounds,
                   'plotting_dataset_bounds': plotting_dataset_bounds}
-        if ('ratio_autot_viewport' in returned):
+        if ('ratio_autot_viewport' in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
-        returned.update(self._context().renderTemplate(
-            tmpl, data1,
+        self._resultDict.update(self._context().renderTemplate(
+            self._template, self._data1,
             self._gm, taxis, zaxis, **kwargs))
 
         if self._context().canvas._continents is None:
-            continents = False
-        if continents:
+            self._useContinents = False
+        if self._useContinents:
             continents_renderer, xScale, yScale = self._context().plotContinents(
                 plotting_dataset_bounds, projection,
-                self._dataWrapModulo, vp, tmpl.data.priority,
-                vtk_backend_grid=grid,
-                dataset_bounds=bounds)
-            returned["continents_renderer"] = continents_renderer
-        returned["vtk_backend_actors"] = [[act, plotting_dataset_bounds]]
-        returned["vtk_backend_glyphfilters"] = [glyphFilter]
-        returned["vtk_backend_luts"] = [[None, None]]
-
-        return returned
+                self._dataWrapModulo, vp, self._template.data.priority,
+                vtk_backend_grid=self._vtkDataSet,
+                dataset_bounds=self._vtkDataSetBounds)
+        self._resultDict["vtk_backend_actors"] = [[act, plotting_dataset_bounds]]
+        self._resultDict["vtk_backend_glyphfilters"] = [glyphFilter]
+        self._resultDict["vtk_backend_luts"] = [[None, None]]
+
+    def _updateContourLevelsAndColors(self):
+        """Overrides baseclass implementation."""
+        pass
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index 9e8cf78e3..fd253ac23 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -947,33 +947,22 @@ cdat_add_test(test_vcs_colorpicker_appearance
   ${BASELINE_DIR}/test_vcs_colorpicker_appearance.png
 )
 
-cdat_add_test(test_vcs_click_info
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info.png
-  a_boxfill
-)
-
-cdat_add_test(test_vcs_click_info_mollweide_boxfill
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info_mollweide_boxfill.png
-  a_mollweide_boxfill
-)
-
-cdat_add_test(test_vcs_click_info_meshfill
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info_meshfill.png
-  a_meshfill
-)
 
-cdat_add_test(test_vcs_click_info_robinson_meshfill
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info_robinson_meshfill.png
-  a_robinson_meshfill
-)
+foreach(plot a_boxfill a_mollweide_boxfill a_meshfill a_robinson_meshfill
+        a_isofill a_isoline vector_default)
+  string(SUBSTRING ${plot} 0 2 plot_prefix)
+  if (${plot_prefix} STREQUAL "a_")
+    string(SUBSTRING ${plot} 2 -1 plot_name)
+  else ()
+    string(REGEX MATCH "[^_]+" plot_name ${plot})
+  endif ()
+  cdat_add_test(test_vcs_click_info_${plot_name}
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
+    "${BASELINE_DIR}/test_vcs_click_info_${plot_name}.png"
+    ${plot}
+    )
+endforeach()
 
 
 cdat_add_test(test_vcs_mercator_edge
diff --git a/testing/vcs/test_vcs_click_info.py b/testing/vcs/test_vcs_click_info.py
index f37ee651a..8d55e77c6 100644
--- a/testing/vcs/test_vcs_click_info.py
+++ b/testing/vcs/test_vcs_click_info.py
@@ -2,6 +2,9 @@ import os, sys, cdms2, vcs, testing.regression as regression
 
 testConfig = {'a_boxfill': ('clt.nc', 'clt', (200, 200)),
               'a_mollweide_boxfill': ('clt.nc', 'clt', (222, 322)),
+              'a_isofill': ('clt.nc', 'clt', (200, 200)),
+              'a_isoline': ('clt.nc', 'clt', (200, 200)),
+              'vector_default': ('clt.nc', ('u', 'v'), (200, 200)),
               'a_meshfill': ('sampleCurveGrid4.nc', 'sample', (222, 322)),
               'a_robinson_meshfill': ('sampleCurveGrid4.nc', 'sample', (222, 322))}
 
@@ -10,21 +13,32 @@ src = sys.argv[1]
 plot = sys.argv[2]
 x = regression.init(bg=False, geometry=(800, 600))
 
-# data
-f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
-s = f(testConfig[plot][1])
-
+vector = False
 # graphics method
 if (plot.find('boxfill') != -1):
     gm = x.getboxfill(plot)
 elif (plot.find('meshfill') != -1):
     gm = x.getmeshfill(plot)
+elif (plot.find('isofill') != -1):
+    gm = x.getisofill(plot)
+elif (plot.find('isoline') != -1):
+    gm = x.getisoline(plot)
+elif (plot.find('vector') != -1):
+    gm = x.getvector(plot[plot.index('_') + 1:])
+    vector = True
 else:
     print "Invalid plot"
     sys.exit(13)
 
-# Has to plot in foreground to simulate a click
-x.plot(s, gm)
+# data
+f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
+if (vector):
+    u = f(testConfig[plot][1][0])
+    v = f(testConfig[plot][1][1])
+    x.plot(u, v, gm)
+else:
+    s = f(testConfig[plot][1])
+    x.plot(s, gm)
 
 # Simulate a click -- VTK Specific
 location = testConfig[plot][2]
-- 
GitLab


From 0376fde263b24867e86836c86450d97073e9090c Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Fri, 20 May 2016 12:09:12 -0700
Subject: [PATCH 086/196] Moved vcsaddons imports into functions, to prevent
 circular dependency issues

---
 Packages/vcs/vcs/Canvas.py      | 4 ++--
 Packages/vcs/vcs/displayplot.py | 2 +-
 Packages/vcs/vcs/queries.py     | 3 ++-
 Packages/vcs/vcs/utils.py       | 3 ++-
 4 files changed, 7 insertions(+), 5 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 3289cab04..a7df15f15 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -68,7 +68,6 @@ from xmldocs import plot_keywords_doc, graphics_method_core, axesconvert, xaxisc
     plot_2_1D_options
 gui_canvas_closed = 0
 canvas_closed = 0
-import vcsaddons  # noqa
 import vcs.manageElements  # noqa
 import configurator  # noqa
 from projection import round_projections  # noqa
@@ -124,7 +123,7 @@ def dictionarytovcslist(dictionary, name):
 
 def _determine_arg_list(g_name, actual_args):
     "Determine what is in the argument list for plotting graphics methods"
-
+    import vcsaddons
     itemplate_name = 2
     igraphics_method = 3
     igraphics_option = 4
@@ -2442,6 +2441,7 @@ Options:::
         return new
 
     def __plot(self, arglist, keyargs):
+        import vcsaddons
 
             # This routine has five arguments in arglist from _determine_arg_list
             # It adds one for bg and passes those on to Canvas.plot as its sixth
diff --git a/Packages/vcs/vcs/displayplot.py b/Packages/vcs/vcs/displayplot.py
index dd66fac1d..80638f226 100755
--- a/Packages/vcs/vcs/displayplot.py
+++ b/Packages/vcs/vcs/displayplot.py
@@ -25,7 +25,6 @@
 #
 import VCS_validation_functions
 import vcs
-import vcsaddons
 
 
 class Dp(object):
@@ -210,6 +209,7 @@ class Dp(object):
         return self._g_type
 
     def _setg_type(self, value):
+        import vcsaddons
         value = VCS_validation_functions.checkString(self, 'g_type', value)
         value = value.lower()
         if value not in vcs.elements and value != "text" and value not in vcsaddons.gms:
diff --git a/Packages/vcs/vcs/queries.py b/Packages/vcs/vcs/queries.py
index dc4ffa418..3974ff90a 100644
--- a/Packages/vcs/vcs/queries.py
+++ b/Packages/vcs/vcs/queries.py
@@ -44,7 +44,6 @@ import dv3d
 import displayplot
 import projection
 import vcs
-import vcsaddons
 
 from error import vcsError
 
@@ -69,6 +68,7 @@ box=a.getboxfill('quick')  # To Modify an existing boxfill use:
 if vcs.isgraphicsmethod(box):
    box.list()
 """
+    import vcsaddons
     if (isinstance(gobj, boxfill.Gfb)):
         return 1
     elif (isinstance(gobj, isofill.Gfi)):
@@ -134,6 +134,7 @@ print vcs.graphicsmethodtype(iso)         # Will print 'isofill'
 print vcs.graphicsmethodtype(ln)          # Will print None, because ln is not a
                                           #         graphics method
 """
+    import vcsaddons
     if (isinstance(gobj, boxfill.Gfb)):
         return 'boxfill'
     elif (isinstance(gobj, isofill.Gfi)):
diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py
index 03815f71a..5c2a64590 100644
--- a/Packages/vcs/vcs/utils.py
+++ b/Packages/vcs/vcs/utils.py
@@ -20,7 +20,6 @@ import colormap
 import json
 import os
 import tempfile
-import vcsaddons
 import cdms2
 import genutil
 import vtk
@@ -1621,6 +1620,7 @@ def monotonic(x):
 
 
 def getgraphicsmethod(type, name):
+    import vcsaddons
     if type == "default":
         type = "boxfill"
     if isinstance(type, vcsaddons.core.VCSaddon):
@@ -1635,6 +1635,7 @@ def getgraphicsmethod(type, name):
 
 
 def creategraphicsmethod(gtype, name):
+    import vcsaddons
     if gtype in ['isoline', 'Gi']:
         func = vcs.createisoline
     elif gtype in ['isofill', 'Gfi']:
-- 
GitLab


From 732c8766ffadfed0d8ba5a206293b903ac1cfd68 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Fri, 20 May 2016 12:09:42 -0700
Subject: [PATCH 087/196] Finished polar, added some convenient defaults for it

---
 Packages/vcsaddons/Lib/__init__.py | 111 +++++++++++++++++++++++++++++
 Packages/vcsaddons/Lib/polar.py    |  24 +++++--
 2 files changed, 128 insertions(+), 7 deletions(-)

diff --git a/Packages/vcsaddons/Lib/__init__.py b/Packages/vcsaddons/Lib/__init__.py
index 1ba782945..c38e5b29e 100644
--- a/Packages/vcsaddons/Lib/__init__.py
+++ b/Packages/vcsaddons/Lib/__init__.py
@@ -4,12 +4,123 @@ import polar
 import EzTemplate
 import yxvsxfill
 import continents
+import vcs
+
 
 def createyxvsxfill(name=None,source='default',x=None,template=None):
     return yxvsxfill.Gyf(name,source=source,x=x,template=template)
+
+
 def createhistogram(name=None,source='default',x=None,template=None):
     return histograms.Ghg(name,source=source,x=x,template=template)
+
+
 def createusercontinents(name=None,source="default",x=None,template=None):
     return continents.Guc(name,source=source,x=x,template=template)
+
+
 def createpolar(name=None, source="default", x=None, template=None):
     return polar.Gpo(name, source=source, x=x, template=template)
+
+
+def getpolar(name=None):
+    if name in gms["polar_oned"]:
+        return gms["polar_oned"][name]
+    raise KeyError("No Polar GM exists with name '%s'" % name)
+
+
+if "polar_oned" not in gms:
+    # Create nice polar template
+    try:
+        t = vcs.createtemplate("polar_oned")
+        t.data.x1 = .2
+        t.data.x2 = .8
+        t.data.y1 = .2
+        t.data.y2 = .8
+
+        t.legend.x1 = .85
+        t.legend.x2 = 1
+        t.legend.y1 = .15
+        t.legend.y2 = .85
+
+        dash = vcs.createline()
+        dash.type = "dash"
+        dot = vcs.createline()
+        dot.type = "dot"
+        t.xtic1.line = dash
+        t.ytic1.line = dot
+
+        left_aligned = vcs.createtextorientation()
+        left_aligned.halign = "left"
+        left_aligned.valign = "half"
+        t.legend.textorientation = left_aligned
+    except vcs.vcsError:
+        # Template already exists
+        pass
+    # Create some nice default polar GMs
+    degree_polar = createpolar("degrees", template="polar_oned")
+    degree_polar.datawc_x1 = 0
+    degree_polar.datawc_x2 = 360
+    degree_polar.xticlabels1 = {
+        i: str(i) for i in range(0, 360, 45)
+    }
+
+    clock_24 = createpolar("diurnal", template="polar_oned")
+    clock_24.datawc_x1 = 0
+    clock_24.datawc_x2 = 24
+    clock_24.clockwise = True
+    # 6 AM on the right
+    clock_24.theta_offset = -6
+    clock_24.xticlabels1 = {
+        i: str(i) for i in range(0, 24, 3)
+    }
+
+
+    clock_24_meridiem = createpolar("diurnal_12_hour", source="diurnal", template="polar_oned")
+    clock_24_meridiem.xticlabels1 = {
+        0: "12 AM",
+        3: "3 AM",
+        6: "6 AM",
+        9: "9 AM",
+        12: "12 PM",
+        15: "3 PM",
+        18: "6 PM",
+        21: "9 PM"
+    }
+
+    clock_12 = createpolar("semidiurnal", source="diurnal", template="polar_oned")
+    clock_12.datawc_x2 = 12
+    clock_12.xticlabels1 = {
+        i: str(i) for i in range(3, 13, 3)
+    }
+    # 3 on the right
+    clock_12.theta_offset = -3
+
+    annual_cycle = createpolar("annual_cycle", template="polar_oned")
+    annual_cycle.datawc_x1 = 1
+    annual_cycle.datawc_x2 = 13
+    annual_cycle.clockwise = True
+    annual_cycle.xticlabels1 = {
+        1: "Jan",
+        2: "Feb",
+        3: "Mar",
+        4: "Apr",
+        5: "May",
+        6: "Jun",
+        7: "Jul",
+        8: "Aug",
+        9: "Sep",
+        10: "Oct",
+        11: "Nov",
+        12: "Dec"
+    }
+    # Put December on the top
+    annual_cycle.theta_offset = -2
+
+    seasonal = createpolar("seasonal", template="polar_oned")
+    seasonal.datawc_x1 = 0
+    seasonal.datawc_x2 = 4
+    seasonal.xticlabels1 = {0: "DJF", 1: "MAM", 2: "JJA", 3: "SON"}
+    seasonal.clockwise = True
+    # DJF on top
+    seasonal.theta_offset = -1
diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py
index 8aff589a7..0b0f6a9fc 100644
--- a/Packages/vcsaddons/Lib/polar.py
+++ b/Packages/vcsaddons/Lib/polar.py
@@ -126,7 +126,7 @@ def convert_arrays(var, theta):
                         names.append(None)
             else:
                 magnitudes = [var]
-                names.appned(None)
+                names.append(None)
         elif isinstance(var, numpy.ndarray):
             if len(var.shape) == 1:
                 magnitudes = [list(var)]
@@ -167,6 +167,7 @@ class Gpo(vcsaddons.core.VCSaddon):
         self.g_name = "Gpo"
         self.g_type = "polar_oned"
         super(Gpo, self).__init__(name, source, x, template)
+        self.x = None
         if source == "default":
             self.markersizes = [3]
             self.markercolors = ["black"]
@@ -203,21 +204,28 @@ class Gpo(vcsaddons.core.VCSaddon):
             self.theta_tick_count = gm.theta_tick_count
             self.group_names = gm.group_names
 
+    def magnitude_from_value(self, value, minmax):
+        if numpy.allclose((self.datawc_y1, self.datawc_y2), 1e20):
+            min, max = minmax
+        else:
+            min, max = self.datawc_y1, self.datawc_y2
+
+        return (value - min) / float(max - min)
+
     def theta_from_value(self, value):
         if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20):
             # No scale specified, just use the value as theta
-            return value
+            return value + self.theta_offset
 
         minval = self.datawc_x1
         maxval = self.datawc_x2
+        offset = self.theta_offset / float(maxval - minval)
 
-        pct_val = (value - minval) / float(maxval - minval)
+        pct_val = (value - minval) / float(maxval - minval) + offset
         rad_val = numpy.pi * 2 * pct_val
         if self.clockwise:
             # Reflect the value
             rad_val *= -1
-        # Adjust by theta_offset
-        rad_val += self.theta_offset
         return rad_val
 
     def plot(self, var, theta=None, template=None, bg=0, x=None):
@@ -230,6 +238,8 @@ class Gpo(vcsaddons.core.VCSaddon):
         Otherwise, if theta is provided, it uses var as magnitude and the theta given.
         """
         if x is None:
+            if self.x is None:
+                self.x = vcs.init()
             x = self.x
         if template is None:
             template = self.template
@@ -309,7 +319,7 @@ class Gpo(vcsaddons.core.VCSaddon):
                 m_labels = None
 
             for lev in m_scale:
-                lev_radius = radius * float(lev - m_scale[0]) / (m_scale[-1] - m_scale[0])
+                lev_radius = radius * self.magnitude_from_value(lev, (m_scale[0], m_scale[-1]))
                 x, y = circle_points(center, lev_radius, ratio=window_aspect)
                 if m_labels is not None:
                     if lev in mag_labels:
@@ -390,7 +400,7 @@ class Gpo(vcsaddons.core.VCSaddon):
             y = []
             for m, t in zip(mag, theta):
                 t = self.theta_from_value(t)
-                r = (m - m_scale[0]) / float(m_scale[-1] - m_scale[0]) * radius
+                r = self.magnitude_from_value(m, (m_scale[0], m_scale[-1])) * radius
                 x.append(xmul * numpy.cos(t) * r + center[0])
                 y.append(ymul * numpy.sin(t) * r + center[1])
 
-- 
GitLab


From 022a3aa788d39cef37242b3e86cb436214e2b88e Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Fri, 20 May 2016 12:10:07 -0700
Subject: [PATCH 088/196] Added tests for polar

---
 testing/vcsaddons/CMakeLists.txt              | 25 ++++++++
 .../vcsaddons/vcs_addons_test_polar_annual.py | 40 +++++++++++++
 .../vcs_addons_test_polar_degrees.py          | 29 ++++++++++
 .../vcs_addons_test_polar_diurnal.py          | 44 ++++++++++++++
 .../vcs_addons_test_polar_seasonal.py         | 58 +++++++++++++++++++
 .../vcs_addons_test_polar_semidiurnal.py      | 44 ++++++++++++++
 6 files changed, 240 insertions(+)
 create mode 100644 testing/vcsaddons/vcs_addons_test_polar_annual.py
 create mode 100644 testing/vcsaddons/vcs_addons_test_polar_degrees.py
 create mode 100644 testing/vcsaddons/vcs_addons_test_polar_diurnal.py
 create mode 100644 testing/vcsaddons/vcs_addons_test_polar_seasonal.py
 create mode 100644 testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py

diff --git a/testing/vcsaddons/CMakeLists.txt b/testing/vcsaddons/CMakeLists.txt
index 06ebff600..a0fd4e90b 100644
--- a/testing/vcsaddons/CMakeLists.txt
+++ b/testing/vcsaddons/CMakeLists.txt
@@ -59,6 +59,31 @@ cdat_add_test(vcs_addons_test_convert_arrays
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_convert_arrays.py
 )
+cdat_add_test(vcs_addons_test_polar_degrees
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_degrees.py
+  ${BASELINE_DIR}/vcs_addons_test_polar_degrees.png
+)
+cdat_add_test(vcs_addons_test_polar_annual
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_annual.py
+  ${BASELINE_DIR}/vcs_addons_test_polar_annual.png
+)
+cdat_add_test(vcs_addons_test_polar_diurnal
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_diurnal.py
+  ${BASELINE_DIR}/vcs_addons_test_polar_diurnal.png
+)
+cdat_add_test(vcs_addons_test_polar_seasonal
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_seasonal.py
+  ${BASELINE_DIR}/vcs_addons_test_polar_seasonal.png
+)
+cdat_add_test(vcs_addons_test_polar_semidiurnal
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py
+  ${BASELINE_DIR}/vcs_addons_test_polar_semidiurnal.png
+)
 
 if (CDAT_DOWNLOAD_SAMPLE_DATA)
   cdat_add_test(vcs_addons_EzTemplate_2x2
diff --git a/testing/vcsaddons/vcs_addons_test_polar_annual.py b/testing/vcsaddons/vcs_addons_test_polar_annual.py
new file mode 100644
index 000000000..420b724cd
--- /dev/null
+++ b/testing/vcsaddons/vcs_addons_test_polar_annual.py
@@ -0,0 +1,40 @@
+import sys,os
+src = sys.argv[1]
+pth = os.path.join(os.path.dirname(__file__),"..")
+sys.path.append(pth)
+import checkimage
+import vcs
+import vcsaddons, numpy
+
+x=vcs.init()
+x.setantialiasing(0)
+x.drawlogooff()
+x.setbgoutputdimensions(1200,1091,units="pixels")
+
+polar = vcsaddons.getpolar("annual_cycle")
+polar.markers = ["dot"]
+polar.markersizes = [3]
+
+polar.magnitude_tick_angle = numpy.pi / 8
+
+import cdms2, cdutil
+
+f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
+clt = f("clt")
+cdutil.setAxisTimeBoundsMonthly(clt.getTime())
+averaged_time = cdutil.averager(clt, axis="t")
+averaged_time = averaged_time.reshape((1, averaged_time.shape[0], averaged_time.shape[1]))
+averaged_time_for_departures = numpy.repeat(averaged_time, len(clt), axis=0)
+
+clt_departures = clt - averaged_time_for_departures
+clt_departures.setAxisList(clt.getAxisList())
+avg_departures = cdutil.averager(clt_departures, axis="xy")
+
+theta = range(1, len(clt) + 1)
+magnitude = avg_departures
+polar.plot(magnitude, theta, bg=True, x=x)
+
+fnm = "vcs_addons_test_polar_annual.png"
+x.png(fnm)
+ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+sys.exit(ret)
diff --git a/testing/vcsaddons/vcs_addons_test_polar_degrees.py b/testing/vcsaddons/vcs_addons_test_polar_degrees.py
new file mode 100644
index 000000000..46d34168a
--- /dev/null
+++ b/testing/vcsaddons/vcs_addons_test_polar_degrees.py
@@ -0,0 +1,29 @@
+import sys,os
+src = sys.argv[1]
+pth = os.path.join(os.path.dirname(__file__),"..")
+sys.path.append(pth)
+import checkimage
+import vcs
+import vcsaddons, numpy
+
+x=vcs.init()
+x.setantialiasing(0)
+x.drawlogooff()
+x.setbgoutputdimensions(1200,1091,units="pixels")
+
+polar = vcsaddons.getpolar("degrees")
+polar.markers = ["dot", "circle"]
+polar.markersizes = [3, 5]
+
+polar.magnitude_tick_angle = numpy.pi / 6
+
+theta = numpy.array(range(0, 720, 2))
+magnitude = 9 * numpy.sin(5 * 2 * numpy.pi * theta / 360)
+polar.datawc_y1 = 0
+polar.datawc_y2 = max(magnitude)
+polar.plot(magnitude, theta, bg=True, x=x)
+
+fnm = "vcs_addons_test_polar_degrees.png"
+x.png(fnm)
+ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+sys.exit(ret)
diff --git a/testing/vcsaddons/vcs_addons_test_polar_diurnal.py b/testing/vcsaddons/vcs_addons_test_polar_diurnal.py
new file mode 100644
index 000000000..ac0664171
--- /dev/null
+++ b/testing/vcsaddons/vcs_addons_test_polar_diurnal.py
@@ -0,0 +1,44 @@
+import sys,os
+src = sys.argv[1]
+pth = os.path.join(os.path.dirname(__file__),"..")
+sys.path.append(pth)
+import checkimage
+import vcs
+import vcsaddons, numpy
+import cdms2, cdutil, cdtime
+
+x=vcs.init()
+x.setantialiasing(0)
+x.drawlogooff()
+x.setbgoutputdimensions(1200,1091,units="pixels")
+
+f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc"))
+temp = f('t')
+levels = temp.getLevel()
+time = temp.getTime()
+# Break up temp by level
+magnitudes = [temp[:,i] for i in range(temp.shape[1])]
+for i, mag in enumerate(magnitudes):
+    mag.id = "%0.f %s" % (levels[i], levels.units)
+
+times = []
+for t in time:
+    reltime = cdtime.relativetime(t, time.units)
+    comptime = reltime.tocomponent()
+    times.append(comptime.hour)
+
+thetas = [times] * len(magnitudes)
+
+polar = vcsaddons.getpolar("diurnal")
+polar.markers = ["dot"]
+polar.markersizes = [3]
+polar.markercolors = vcs.getcolors(list(levels))
+
+polar.magnitude_tick_angle = numpy.pi / 8
+
+polar.plot(magnitudes, thetas, bg=True, x=x)
+
+fnm = "vcs_addons_test_polar_diurnal.png"
+x.png(fnm)
+ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+sys.exit(ret)
diff --git a/testing/vcsaddons/vcs_addons_test_polar_seasonal.py b/testing/vcsaddons/vcs_addons_test_polar_seasonal.py
new file mode 100644
index 000000000..42612ddae
--- /dev/null
+++ b/testing/vcsaddons/vcs_addons_test_polar_seasonal.py
@@ -0,0 +1,58 @@
+import sys,os
+src = sys.argv[1]
+pth = os.path.join(os.path.dirname(__file__),"..")
+sys.path.append(pth)
+import checkimage
+import vcs
+import vcsaddons, numpy, MV2
+import cdms2, cdutil, cdtime
+
+x=vcs.init()
+x.setantialiasing(0)
+x.drawlogooff()
+x.setbgoutputdimensions(1200,1091,units="pixels")
+
+f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
+# Trim first few months and last month so we have even number of seasons
+cloudiness = f('clt', time=(11, 119))
+cdutil.setAxisTimeBoundsMonthly(cloudiness.getTime())
+cloudiness_time_axis = cloudiness.getTime()
+averaged_seasons = MV2.zeros((36, 46, 72))
+# Average the seasons in cloudiness
+for i in range(36):
+    averaged_seasons[i] = cdutil.averager(cloudiness(time=(cloudiness_time_axis[i * 3], cloudiness_time_axis[(i+1) * 3])), axis="t")
+
+averaged_seasons.setAxis(1, cloudiness.getLatitude())
+averaged_seasons.setAxis(2, cloudiness.getLongitude())
+
+regions = {
+    "north_polar": (66, 90),
+    "north_temperate": (22, 66),
+    "tropics": (-22, 22),
+    "south_temperate": (-66, -22),
+    "south_polar": (-90, -66)
+}
+
+def get_region_avg(var, r, axis="xy"):
+    avg = cdutil.averager(var(latitude=regions[r]), axis=axis)
+    avg.id = r
+    return avg
+
+magnitudes = [get_region_avg(averaged_seasons, region) for region in regions]
+thetas = [range(4) * 27] * 5
+
+polar = vcsaddons.getpolar("seasonal")
+polar.datawc_y1 = 0
+polar.datawc_y2 = 100
+polar.markers = ["dot"]
+polar.markersizes = [3]
+polar.markercolors = vcs.getcolors([-90, -66, -22, 22, 66, 90], split=False)
+
+polar.magnitude_tick_angle = numpy.pi / 4
+
+polar.plot(magnitudes, thetas, bg=True, x=x)
+
+fnm = "vcs_addons_test_polar_seasonal.png"
+x.png(fnm)
+ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+sys.exit(ret)
diff --git a/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py b/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py
new file mode 100644
index 000000000..900d570b4
--- /dev/null
+++ b/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py
@@ -0,0 +1,44 @@
+import sys,os
+src = sys.argv[1]
+pth = os.path.join(os.path.dirname(__file__),"..")
+sys.path.append(pth)
+import checkimage
+import vcs
+import vcsaddons, numpy
+import cdms2, cdutil, cdtime
+
+x=vcs.init()
+x.setantialiasing(0)
+x.drawlogooff()
+x.setbgoutputdimensions(1200,1091,units="pixels")
+
+f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc"))
+temp = f('t')
+levels = temp.getLevel()
+time = temp.getTime()
+# Break up temp by level
+magnitudes = [temp[:,i] for i in range(temp.shape[1])]
+for i, mag in enumerate(magnitudes):
+    mag.id = "%0.f %s" % (levels[i], levels.units)
+
+times = []
+for t in time:
+    reltime = cdtime.relativetime(t, time.units)
+    comptime = reltime.tocomponent()
+    times.append(comptime.hour % 12)
+
+thetas = [times] * len(magnitudes)
+
+polar = vcsaddons.getpolar("semidiurnal")
+polar.markers = ["dot"]
+polar.markersizes = [3]
+polar.markercolors = vcs.getcolors(list(levels))
+
+polar.magnitude_tick_angle = numpy.pi / 8
+
+polar.plot(magnitudes, thetas, bg=True, x=x)
+
+fnm = "vcs_addons_test_polar_semidiurnal.png"
+x.png(fnm)
+ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+sys.exit(ret)
-- 
GitLab


From 2dd2b3554fabbd1ec63e8e2c877f05cad4c61ecf Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Thu, 12 May 2016 11:34:31 -0400
Subject: [PATCH 089/196] Reenable autot_axis tests. Keep max Y to 500.

---
 Packages/testing/regression.py            |  3 +-
 testing/vcs/CMakeLists.txt                | 35 ++++++++++++-----------
 testing/vcs/test_vcs_autot_axis_titles.py |  9 ++++--
 3 files changed, 26 insertions(+), 21 deletions(-)

diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index 68ded76d9..b9cd2cdeb 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -24,7 +24,8 @@ def init(*args, **kwargs):
     vcsinst.setantialiasing(0)
     vcsinst.drawlogooff()
 
-    if ('bg' in kwargs and kwargs['bg']) or ('bg' not in kwargs):
+    if ((('bg' in kwargs and kwargs['bg']) or ('bg' not in kwargs)) and
+        ('geometry' not in kwargs)):
         vcsinst.setbgoutputdimensions(1200, 1091, units="pixels")
     return vcsinst
 
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index fd253ac23..c6ccf8517 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -983,23 +983,24 @@ cdat_add_test(test_vcs_large_pattern_hatch
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_large_pattern_hatch.py
   ${BASELINE_DIR}/test_vcs_large_pattern_hatch.png
 )
-# crashes on mac commenting out for release`
-#foreach(x_over_y 0.5 2)
-#  # a_meshfill does not work yet, as meshfills are wrapped which is not known to VCS
-#  foreach(plot a_boxfill a_mollweide_boxfill a_robinson_meshfill a_lambert_isofill a_robinson_isoline)
-#    foreach(mode foreground background)
-#      string(SUBSTRING ${plot} 2 -1 plot_name)
-#      cdat_add_test(test_vcs_autot_axis_titles_${mode}_${plot_name}_${x_over_y}
-#        "${PYTHON_EXECUTABLE}"
-#        ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_autot_axis_titles.py
-#        "${BASELINE_DIR}/test_vcs_autot_axis_titles_${plot_name}_${x_over_y}.png"
-#        ${mode}
-#        ${plot}
-#        ${x_over_y}
-#        )
-#    endforeach()
-#  endforeach()
-#endforeach()
+
+foreach(x_over_y 0.5 2)
+ # a_meshfill does not work yet, as meshfills are wrapped which is not known to VCS
+ foreach(plot a_boxfill a_mollweide_boxfill a_robinson_meshfill a_lambert_isofill a_robinson_isoline)
+   foreach(mode foreground background)
+     string(SUBSTRING ${plot} 2 -1 plot_name)
+     cdat_add_test(test_vcs_autot_axis_titles_${mode}_${plot_name}_${x_over_y}
+       "${PYTHON_EXECUTABLE}"
+       ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_autot_axis_titles.py
+       "${BASELINE_DIR}/test_vcs_autot_axis_titles_${plot_name}_${x_over_y}.png"
+       ${mode}
+       ${plot}
+       ${x_over_y}
+       )
+   endforeach()
+ endforeach()
+endforeach()
+
 cdat_add_test(test_vcs_boxfill_lambert_crash
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lambert_crash.py
diff --git a/testing/vcs/test_vcs_autot_axis_titles.py b/testing/vcs/test_vcs_autot_axis_titles.py
index dcc0f00b1..2728cbd6d 100644
--- a/testing/vcs/test_vcs_autot_axis_titles.py
+++ b/testing/vcs/test_vcs_autot_axis_titles.py
@@ -8,11 +8,14 @@ testConfig = {'a_boxfill': ('clt.nc', 'clt'),
               'a_robinson_isoline': ('clt.nc', 'clt')}
 
 # Tests if ratio=autot works correctly for background and foreground plots
+bg = 1
+if (sys.argv[2] == 'foreground'):
+    bg = 0
 plot = sys.argv[3]
 x_over_y = sys.argv[4]
 if (x_over_y == '0.5'):
-    xSize = 400
-    ySize = 800
+    xSize = 250
+    ySize = 500
 else:
     xSize = 800
     ySize = 400
@@ -39,5 +42,5 @@ else:
 x.setantialiasing(0)
 x.drawlogooff()
 x.plot(s, gm, ratio="autot")
-name = "test_autot_axis_titles_" + plot[2:] + "_" + x_over_y + "_" + str(bg) + ".png"
+name = "test_vcs_autot_axis_titles_" + plot[2:] + "_" + x_over_y + "_" + str(bg) + ".png"
 regression.run(x, name, sys.argv[1])
\ No newline at end of file
-- 
GitLab


From c98cc7376ed2b5ed33886af8177e03a7b942872b Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Mon, 23 May 2016 08:59:16 -0400
Subject: [PATCH 090/196] Removed redundant object suffix

---
 Packages/vcs/docs/user-guide.rst | 41 ++++++++++++++++----------------
 1 file changed, 20 insertions(+), 21 deletions(-)

diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index d74b3aec2..97a55b7d4 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -46,29 +46,28 @@ A description of each primary object is warranted before showing their use and u
 
 **Graphics Method Objects**
 
-
 A graphics method simply defines how data is to be displayed on the screen. Currently, there are eleven different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. They also have a set of core attributes that are common in all graphics methods. The descriptions of the current set of graphics methods are as follows:
 
-* ``boxfillobject`` - The boxfill graphics method draws color grid cells to represent the data on the VCS - Canvas. Its class symbol or alias is “Gfb”.
-* ``continentsobject`` - The continents graphics method draws a predefined, generic set of continental -outlines in a longitude by latitude space. To draw continental outlines, no external data set is required. Its class symbol or alias is “Gcon”.
-* ``isofillobject`` - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is “Gfi”.
+* ``boxfill`` - The boxfill graphics method draws color grid cells to represent the data on the VCS - Canvas. Its class symbol or alias is “Gfb”.
+* ``continents`` - The continents graphics method draws a predefined, generic set of continental -outlines in a longitude by latitude space. To draw continental outlines, no external data set is required. Its class symbol or alias is “Gcon”.
+* ``isofill`` - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is “Gfi”.
 * ``isolineobject`` - The isoline graphics method draws lines of constant value at specified levels in order to graphically represent a two-dimensional array. It also labels the values of these isolines on the VCS Canvas. Its class symbol or alias is “Gi”.
-* ``outfillobject`` - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Gfo”.
-* ``outlineobject`` - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Go”.
-* ``scatterobject`` - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). Its class symbol or alias is “GSp”.
-* ``vectorobject`` - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is “Gv”.
-* ``xvsyobject`` - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ‘t’ represents the 1D coordinate values. Its class symbol or alias is “GXY”.
-* ``xyvsyobject`` - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ‘y’ represents the 1D coordinate values. Its class symbol or alias is “GXy”.
-* ``Yxvsxobject`` - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ‘x’ represents the 1D coordinate values. Its class symbol or alias is “GYx”.
-* ``3dscalarobject`` - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is “3d_scalar”.
-* ``3dvectorobject`` - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is “3d_vector”.
+* ``outfill`` - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Gfo”.
+* ``outline`` - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is “Go”.
+* ``scatter`` - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). Its class symbol or alias is “GSp”.
+* ``vector`` - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is “Gv”.
+* ``xvsy`` - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ‘t’ represents the 1D coordinate values. Its class symbol or alias is “GXY”.
+* ``xyvsy`` - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ‘y’ represents the 1D coordinate values. Its class symbol or alias is “GXy”.
+* ``Yxvsx`` - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ‘x’ represents the 1D coordinate values. Its class symbol or alias is “GYx”.
+* ``3dscalar`` - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is “3d_scalar”.
+* ``3dvector`` - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is “3d_vector”.
 
 
 **Picture Template Object**
 
 A picture template determines the location of each picture segment, the space to be allocated to it, and related properties relevant to its display. The description of the picture template is as follows:
 
-* ``templateobject`` - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is “P”.
+* ``template`` - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is “P”.
 
 **Data Object**
 
@@ -83,7 +82,7 @@ A description of each secondary object is warranted before showing their use and
 
 The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). The description of the colormap object is as follows:
 
-* ``colormapobject`` - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is “Cp”.
+* ``colormap`` - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is “Cp”.
 Note: VCS colormaps are objects, but they are not referenced like other secondary objects.
 
 
@@ -91,32 +90,32 @@ Note: VCS colormaps are objects, but they are not referenced like other secondar
 
 The fillarea objects allows the user to edit fillarea attributes, including fillarea interior style, style index, and color index. The description of the fillarea object is as follows:
 
-* ``fillareaobject`` - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is “Tf”.
+* ``fillarea`` - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is “Tf”.
 
 
 **Line Object**
 
 The line object allows the editing of line type, width, and color index. The description of the line object is as follows:
 
-* ``lineobject`` - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. Its class symbol or alias is “Tl”.
+* ``line`` - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. Its class symbol or alias is “Tl”.
 
 
 **Marker Object**
 
 The marker object allows the editing of the marker type, width, and color index. The description of the marker object is as follows:
 
-* ``markerobject`` - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is “Tm”.
+* ``marker`` - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is “Tm”.
 
 
 **Text Objects**
 
 Graphical displays often contain textual inscriptions, which provide further information. The text-table object attributes allow the generation of character strings on the VCS Canvas by defining the character font, precision, expansion, spacing, and color. The text-orientation object attributes allow the appearance of text character strings to be changed by defining the character height, up-angle, path, and horizontal and vertical alignment. The text-combined object is a combination of both text-table and text-orientation objects. The description of the text objects are as follows:
 
-* ``textcombinedobject`` - The text-combined attributes combine the text-table attributes and a text-orientation attributes together. From combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is “Tc”.
+* ``textcombined`` - The text-combined attributes combine the text-table attributes and a text-orientation attributes together. From combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is “Tc”.
 
-* ``textorientationobject`` - The text-orientation attributes set names that define the height, angel, path, horizontal alignment and vertical alignment. Its class symbol or alias is “To”.
+* ``textorientation`` - The text-orientation attributes set names that define the height, angel, path, horizontal alignment and vertical alignment. Its class symbol or alias is “To”.
 
-* ``texttableobject`` - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is “Tt”.
+* ``texttable`` - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is “Tt”.
 
 
 Getting Started with VCS
-- 
GitLab


From 6f529d921fce1a14e9a04ea2d6b62b12c853962f Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Mon, 23 May 2016 09:36:53 -0400
Subject: [PATCH 091/196] Fixed styling of references

---
 Packages/vcs/docs/reference.rst | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/Packages/vcs/docs/reference.rst b/Packages/vcs/docs/reference.rst
index 0af233efa..73db692bf 100644
--- a/Packages/vcs/docs/reference.rst
+++ b/Packages/vcs/docs/reference.rst
@@ -1,8 +1,8 @@
 VCS Reference Guide
 --------------------
 
-``init``
-^^^^^^^^
+init
+^^^^
 * Initialize, Construct a VCS Canvas Object
 
 .. code-block:: python
@@ -45,8 +45,8 @@ VCS Reference Guide
 
     # Plot slab using isoline and template objects
 
-``help``
-^^^^^^^^
+help
+^^^^
 * Print out the object's doc string
 
 .. code-block:: python
@@ -59,8 +59,8 @@ VCS Reference Guide
     # This will print out information on how to use ln
     a.objecthelp(ln)
 
-``open``
-^^^^^^^^
+open
+^^^^
 * Open VCS Canvas object.
 * This routine really just manages the VCS canvas. It will popup the VCS Canvas for viewing. It can be used to display the VCS Canvas.
 
@@ -70,8 +70,8 @@ VCS Reference Guide
     a = vcs.init()
     a.open()
 
-``close``
-^^^^^^^^^
+close
+^^^^^
 * Close the VCS Canvas. It will remove the VCS Canvas object from the screen, but not deallocate it.
 
 .. code-block:: python
@@ -81,8 +81,8 @@ VCS Reference Guide
     a.plot(array, 'default', 'isofill', 'quick')
     a.close()
 
-``mode``
-^^^^^^^^
+mode
+^^^^
 * ``Options <0 = manual, 1 = automatic>``
 * Update the VCS Canvas.
 * Updating of the graphical displays on the VCS Canvas can be deferred until a later time. This is helpful when generating templates or displaying numerous plots. If a series of commands are given to VCS and the Canvas Mode is set to manual (i.e., 0), then no updating of the VCS Canvas occurs until the 'update' function is executed.
@@ -106,8 +106,8 @@ VCS Reference Guide
     # Update the changes manually
     a.update()
 
-``update``
-^^^^^^^^^^
+update
+^^^^^^
 * Update the VCS Canvas manually when the ``mode`` is set to ``0`` (manual).
 
 .. code-block:: python
-- 
GitLab


From f91b6b89d29293266c9de2deb367349bd4b57052 Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Thu, 19 May 2016 14:34:34 -0400
Subject: [PATCH 092/196] BUG #1959: Fix memory override for vtkContourFilter in
 isolinepipeline.

This caused vtkStripper to generate double coverage of isocontours, which resulted in
messed-up patterns.
Also adjusted plot patterns to be easier to discriminate.
---
 Packages/vcs/vcs/vcs2vtk.py                |  8 ++++----
 Packages/vcs/vcs/vcsvtk/isolinepipeline.py |  1 -
 testing/vcs/CMakeLists.txt                 |  7 +++++++
 testing/vcs/test_vcs_line_patterns.py      | 22 ++++++++++++++++++++++
 4 files changed, 33 insertions(+), 5 deletions(-)
 create mode 100755 testing/vcs/test_vcs_line_patterns.py

diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py
index aa4a228ac..81142492c 100644
--- a/Packages/vcs/vcs/vcs2vtk.py
+++ b/Packages/vcs/vcs/vcs2vtk.py
@@ -1581,16 +1581,16 @@ def __build_ld__():
 
 def stippleLine(prop, line_type):
     if line_type == 'long-dash':
-        prop.SetLineStipplePattern(int('1111111100000000', 2))
+        prop.SetLineStipplePattern(int('0000111111111111', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'dot':
-        prop.SetLineStipplePattern(int('1010101010101010', 2))
+        prop.SetLineStipplePattern(int('0101010101010101', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'dash':
-        prop.SetLineStipplePattern(int('1111000011110000', 2))
+        prop.SetLineStipplePattern(int('0001111100011111', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'dash-dot':
-        prop.SetLineStipplePattern(int('0011110000110011', 2))
+        prop.SetLineStipplePattern(int('0101111101011111', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'solid':
         prop.SetLineStipplePattern(int('1111111111111111', 2))
diff --git a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
index 1560de7c1..3406824f0 100644
--- a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
@@ -144,7 +144,6 @@ class IsolinePipeline(Pipeline2D):
 
             for n in range(numLevels):
                 cot.SetValue(n, l[n])
-            cot.SetValue(numLevels, l[-1])
             # TODO remove update
             cot.Update()
 
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index c6ccf8517..fbd89a10f 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -1007,6 +1007,13 @@ cdat_add_test(test_vcs_boxfill_lambert_crash
   "${BASELINE_DIR}/test_vcs_boxfill_lambert_crash.png"
   "${UVCDAT_GIT_TESTDATA_DIR}/data/NCEP_09_climo.nc"
 )
+
+cdat_add_test(test_vcs_line_patterns
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_line_patterns.py
+  "${BASELINE_DIR}/test_vcs_line_patterns.png"
+)
+
 cdat_add_test(test_vcs_init_open_sizing
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_init_open_sizing.py
diff --git a/testing/vcs/test_vcs_line_patterns.py b/testing/vcs/test_vcs_line_patterns.py
new file mode 100755
index 000000000..7597403fc
--- /dev/null
+++ b/testing/vcs/test_vcs_line_patterns.py
@@ -0,0 +1,22 @@
+import vcs
+import cdms2
+import sys
+import os
+import testing.regression as regression
+
+
+pth = os.path.join(os.path.dirname(__file__), "..")
+sys.path.append(pth)
+
+import checkimage
+
+x = regression.init(bg=1, geometry=(1620, 1080))
+
+f = cdms2.open(vcs.sample_data + "/clt.nc")
+s = f('clt')
+iso = x.createisoline()
+iso.level=[5, 50, 70, 95]
+iso.line = ['dot', 'dash', 'dash-dot', 'long-dash']
+x.plot(s,iso,continents=0)
+name = "test_vcs_line_patterns.png"
+regression.run(x, name)
-- 
GitLab


From a89570eb76580ac3453462fddd2b895d4fdc89f3 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Mon, 23 May 2016 09:07:45 -0700
Subject: [PATCH 093/196] updated crypto to latest version so it builds on
 Ubuntu 16

---
 CMake/cdat_modules/cryptography_pkg.cmake | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/CMake/cdat_modules/cryptography_pkg.cmake b/CMake/cdat_modules/cryptography_pkg.cmake
index cde5afa76..0b5671da1 100644
--- a/CMake/cdat_modules/cryptography_pkg.cmake
+++ b/CMake/cdat_modules/cryptography_pkg.cmake
@@ -1,10 +1,10 @@
 set(CRYPTOGRAPHY_MAJOR_SRC 1)
-set(CRYPTOGRAPHY_MINOR_SRC 2)
+set(CRYPTOGRAPHY_MINOR_SRC 3)
 set(CRYPTOGRAPHY_PATCH_SRC 2)
 
 set(CRYPTOGRAPHY_VERSION ${CRYPTOGRAPHY_MAJOR_SRC}.${CRYPTOGRAPHY_MINOR_SRC}.${CRYPTOGRAPHY_PATCH_SRC})
 set(CRYPTOGRAPHY_GZ cryptography-${CRYPTOGRAPHY_VERSION}.tar.gz)
 set(CRYPTOGRAPHY_SOURCE ${LLNL_URL}/${CRYPTOGRAPHY_GZ})
-set(CRYPTOGRAPHY_MD5 a8daf092d0558dac6700d7be93b555e5)
+set(CRYPTOGRAPHY_MD5 0359190f291824dc8ad9e6d477a607b2)
 
 add_cdat_package_dependent(CRYPTOGRAPHY "" "" OFF "CDAT_BUILD_LEAN" OFF)
-- 
GitLab


From 1129e5b4c949a305fb26f355d50252877ced9067 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Mon, 23 May 2016 14:42:12 -0400
Subject: [PATCH 094/196] Compute vector scaling correctly

VTK does clamping and not remapping of input range to desired
range which leads to undesirable visual effects.
---
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py | 39 ++++++++++++++++++-----
 1 file changed, 31 insertions(+), 8 deletions(-)

diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index 0badc60b4..0dee32da4 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -84,25 +84,48 @@ class VectorPipeline(Pipeline2D):
         arrow.SetOutputPointsPrecision(vtk.vtkAlgorithm.DOUBLE_PRECISION)
         arrow.FilledOff()
 
+        polydata = self._vtkPolyDataFilter.GetOutput()
+        vectors = polydata.GetPointData().GetVectors()
+        vectorsRangeX = vectors.GetRange(0)
+        vectorsRangeY = vectors.GetRange(1)
+        vectorsRange = []
+        vectorsRange.insert(0, vectorsRangeY[0] if
+            (vectorsRangeX[0] > vectorsRangeY[0])  else vectorsRangeX[0])
+        vectorsRange.insert(1, vectorsRangeY[1]
+            if (vectorsRangeX[1] > vectorsRangeY[1])  else vectorsRangeX[1])
+
+        scalarArray = vtk.vtkDoubleArray()
+        scalarArray.SetNumberOfComponents(1)
+        scalarArray.SetNumberOfValues(vectors.GetNumberOfTuples())
+
+        oldRange = vectorsRange[1] - vectorsRange[0]
+        newRange = 1.0 - 0.1
+
+        for i in range (0, vectors.GetNumberOfTuples()):
+            norm = vtk.vtkMath.Norm(vectors.GetTuple(i))
+            newValue = (((norm - vectorsRange[0]) * newRange) / oldRange) + 0.1
+            scalarArray.SetValue(i, newValue)
+
+        polydata.GetPointData().SetScalars(scalarArray)
+
         glyphFilter = vtk.vtkGlyph2D()
-        glyphFilter.SetInputConnection(self._vtkPolyDataFilter.GetOutputPort())
+        # glyphFilter.SetInputConnection(self._vtkPolyDataFilter.GetOutputPort())
+        glyphFilter.SetInputData(polydata)
         glyphFilter.SetInputArrayToProcess(1, 0, 0, 0, "vector")
         glyphFilter.SetSourceConnection(arrow.GetOutputPort())
         glyphFilter.SetVectorModeToUseVector()
 
         # Rotate arrows to match vector data:
         glyphFilter.OrientOn()
+        glyphFilter.ScalingOn()
 
         # Scale to vector magnitude:
-        glyphFilter.SetScaleModeToScaleByVector()
+        # NOTE: Currently we compute our own scaling factor since VTK does
+        # it by clamping the values > max to max  and values < min to min
+        # and not remap the range.
+        glyphFilter.SetScaleModeToScaleByScalar()
         glyphFilter.SetScaleFactor(scale * 2.0 * self._gm.scale)
 
-        # These are some unfortunately named methods. It does *not* clamp the
-        # scale range to [min, max], but rather remaps the range
-        # [min, max] --> [0, 1].
-        glyphFilter.ClampingOn()
-        glyphFilter.SetRange(0.01, 1.0)
-
         mapper = vtk.vtkPolyDataMapper()
 
         glyphFilter.Update()
-- 
GitLab


From 9589f126db667b2a836e0f8d1fe06e8f94ae3cfd Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Mon, 23 May 2016 15:51:10 -0400
Subject: [PATCH 095/196] Set default to 1 for range

---
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index 0dee32da4..d6b395da2 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -99,6 +99,7 @@ class VectorPipeline(Pipeline2D):
         scalarArray.SetNumberOfValues(vectors.GetNumberOfTuples())
 
         oldRange = vectorsRange[1] - vectorsRange[0]
+        oldRange = 1.0 if oldRange == 0.0 else oldRange
         newRange = 1.0 - 0.1
 
         for i in range (0, vectors.GetNumberOfTuples()):
-- 
GitLab


From 9648e967b9802360adac06c860d4790a93ad0e40 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Mon, 23 May 2016 15:58:18 -0400
Subject: [PATCH 096/196] Using descriptive variable names

---
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index d6b395da2..dc5a9701a 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -100,11 +100,15 @@ class VectorPipeline(Pipeline2D):
 
         oldRange = vectorsRange[1] - vectorsRange[0]
         oldRange = 1.0 if oldRange == 0.0 else oldRange
-        newRange = 1.0 - 0.1
+
+        # New range min, max.
+        newRangeValues = [0.0, 1.0]
+
+        newRange = newRangeValues[1] - newRangeValues[0]
 
         for i in range (0, vectors.GetNumberOfTuples()):
             norm = vtk.vtkMath.Norm(vectors.GetTuple(i))
-            newValue = (((norm - vectorsRange[0]) * newRange) / oldRange) + 0.1
+            newValue = (((norm - vectorsRange[0]) * newRange) / oldRange) + newRangeValues[0]
             scalarArray.SetValue(i, newValue)
 
         polydata.GetPointData().SetScalars(scalarArray)
-- 
GitLab


From 2a4089ea74d32fe96c49a065ee7bc6aab99ecc2a Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Mon, 23 May 2016 17:39:03 -0400
Subject: [PATCH 097/196] Fixed flake8 test

---
 Packages/vcs/vcs/VTKPlots.py              |  4 ++--
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py | 10 +++++-----
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index fb34a4c26..814719536 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -546,9 +546,9 @@ class VTKVCSBackend(object):
 
     def geometry(self, *args):
         if len(args) == 0:
-            return self._geometry;
+            return self._geometry
         if len(args) < 2:
-            raise TypeError("Function takes zero or two <width, height> " \
+            raise TypeError("Function takes zero or two <width, height> "
                             "or more than two arguments. Got " + len(*args))
         x = args[0]
         y = args[1]
diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index dc5a9701a..10161a52f 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -89,10 +89,10 @@ class VectorPipeline(Pipeline2D):
         vectorsRangeX = vectors.GetRange(0)
         vectorsRangeY = vectors.GetRange(1)
         vectorsRange = []
-        vectorsRange.insert(0, vectorsRangeY[0] if
-            (vectorsRangeX[0] > vectorsRangeY[0])  else vectorsRangeX[0])
-        vectorsRange.insert(1, vectorsRangeY[1]
-            if (vectorsRangeX[1] > vectorsRangeY[1])  else vectorsRangeX[1])
+        vectorsRange.insert(0, vectorsRangeY[0] if (vectorsRangeX[0] > vectorsRangeY[0])
+                            else vectorsRangeX[0])
+        vectorsRange.insert(1, vectorsRangeY[1] if (vectorsRangeX[1] > vectorsRangeY[1])
+                            else vectorsRangeX[1])
 
         scalarArray = vtk.vtkDoubleArray()
         scalarArray.SetNumberOfComponents(1)
@@ -106,7 +106,7 @@ class VectorPipeline(Pipeline2D):
 
         newRange = newRangeValues[1] - newRangeValues[0]
 
-        for i in range (0, vectors.GetNumberOfTuples()):
+        for i in range(0, vectors.GetNumberOfTuples()):
             norm = vtk.vtkMath.Norm(vectors.GetTuple(i))
             newValue = (((norm - vectorsRange[0]) * newRange) / oldRange) + newRangeValues[0]
             scalarArray.SetValue(i, newValue)
-- 
GitLab


From 7102a1da891048c5d755579e44ec50f1cc2bb44f Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Tue, 24 May 2016 15:12:30 -0400
Subject: [PATCH 098/196] Fix flake8 warnings and a test generated file

---
 Packages/vcs/vcs/VTKPlots.py                  | 4 ++--
 testing/vcs/test_vcs_isoline_width_stipple.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index fb34a4c26..814719536 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -546,9 +546,9 @@ class VTKVCSBackend(object):
 
     def geometry(self, *args):
         if len(args) == 0:
-            return self._geometry;
+            return self._geometry
         if len(args) < 2:
-            raise TypeError("Function takes zero or two <width, height> " \
+            raise TypeError("Function takes zero or two <width, height> "
                             "or more than two arguments. Got " + len(*args))
         x = args[0]
         y = args[1]
diff --git a/testing/vcs/test_vcs_isoline_width_stipple.py b/testing/vcs/test_vcs_isoline_width_stipple.py
index 5da8f91f4..20a7e5c4f 100644
--- a/testing/vcs/test_vcs_isoline_width_stipple.py
+++ b/testing/vcs/test_vcs_isoline_width_stipple.py
@@ -25,4 +25,4 @@ isoline.linewidths = (1, 2, 3, 4, 1)
 isoline.line = ('dot', 'dash', 'solid', 'dash-dot', 'long-dash', 'dot', 'dash')
 # Next plot the isolines with labels
 canvas.plot(data, isoline, bg=1)
-regression.run(canvas, "test_isoline_width_stipple.png")
+regression.run(canvas, "test_vcs_isoline_width_stipple.png")
-- 
GitLab


From 2513a0c8327c93e4c0c49e6cb3a0b69d3d2c929f Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Tue, 24 May 2016 15:00:40 -0400
Subject: [PATCH 099/196] Added scale options to vector graphic method

---
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py |  87 +++++++++++-------
 Packages/vcs/vcs/vector.py                | 105 ++++++++++++++--------
 2 files changed, 127 insertions(+), 65 deletions(-)

diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index 10161a52f..dd3b7ba04 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -19,6 +19,8 @@ class VectorPipeline(Pipeline2D):
         # Preserve time and z axis for plotting these inof in rendertemplate
         projection = vcs.elements["projection"][self._gm.projection]
         taxis = self._originalData1.getTime()
+        scaleFactor = 1.0
+
         if self._originalData1.ndim > 2:
             zaxis = self._originalData1.getAxis(-3)
         else:
@@ -84,37 +86,20 @@ class VectorPipeline(Pipeline2D):
         arrow.SetOutputPointsPrecision(vtk.vtkAlgorithm.DOUBLE_PRECISION)
         arrow.FilledOff()
 
+
         polydata = self._vtkPolyDataFilter.GetOutput()
         vectors = polydata.GetPointData().GetVectors()
         vectorsRangeX = vectors.GetRange(0)
         vectorsRangeY = vectors.GetRange(1)
-        vectorsRange = []
-        vectorsRange.insert(0, vectorsRangeY[0] if (vectorsRangeX[0] > vectorsRangeY[0])
-                            else vectorsRangeX[0])
-        vectorsRange.insert(1, vectorsRangeY[1] if (vectorsRangeX[1] > vectorsRangeY[1])
-                            else vectorsRangeX[1])
-
-        scalarArray = vtk.vtkDoubleArray()
-        scalarArray.SetNumberOfComponents(1)
-        scalarArray.SetNumberOfValues(vectors.GetNumberOfTuples())
-
-        oldRange = vectorsRange[1] - vectorsRange[0]
-        oldRange = 1.0 if oldRange == 0.0 else oldRange
-
-        # New range min, max.
-        newRangeValues = [0.0, 1.0]
 
-        newRange = newRangeValues[1] - newRangeValues[0]
-
-        for i in range(0, vectors.GetNumberOfTuples()):
-            norm = vtk.vtkMath.Norm(vectors.GetTuple(i))
-            newValue = (((norm - vectorsRange[0]) * newRange) / oldRange) + newRangeValues[0]
-            scalarArray.SetValue(i, newValue)
-
-        polydata.GetPointData().SetScalars(scalarArray)
+        if self._gm.scaletype == 'constant' or\
+           self._gm.scaletype == 'constantNNormalize' or\
+           self._gm.scaletype == 'constantNLinear':
+            scaleFactor = scale * 2.0 * self._gm.scale
+        else:
+            scaleFactor = 1.0
 
         glyphFilter = vtk.vtkGlyph2D()
-        # glyphFilter.SetInputConnection(self._vtkPolyDataFilter.GetOutputPort())
         glyphFilter.SetInputData(polydata)
         glyphFilter.SetInputArrayToProcess(1, 0, 0, 0, "vector")
         glyphFilter.SetSourceConnection(arrow.GetOutputPort())
@@ -124,12 +109,54 @@ class VectorPipeline(Pipeline2D):
         glyphFilter.OrientOn()
         glyphFilter.ScalingOn()
 
-        # Scale to vector magnitude:
-        # NOTE: Currently we compute our own scaling factor since VTK does
-        # it by clamping the values > max to max  and values < min to min
-        # and not remap the range.
-        glyphFilter.SetScaleModeToScaleByScalar()
-        glyphFilter.SetScaleFactor(scale * 2.0 * self._gm.scale)
+        glyphFilter.SetScaleModeToScaleByVector()
+
+        if self._gm.scaletype == 'normalize' or self._gm.scaletype == 'linear' or\
+           self._gm.scaletype == 'constantNNormalize' or self._gm.scaletype == 'constantNLinear':
+
+            # Find the min and max vector magnitudes
+            minNorm = None
+            maxNorm = None
+
+            for i in range(0, vectors.GetNumberOfTuples()):
+                norm = vtk.vtkMath.Norm(vectors.GetTuple(i))
+
+                if (minNorm is None or norm < minNorm):
+                    minNorm = norm
+                if (maxNorm is None or norm > maxNorm):
+                    maxNorm = norm
+
+            if maxNorm == 0:
+                maxNorm = 1.0
+
+            if self._gm.scaletype == 'normalize' or self._gm.scaletype == 'constantNNormalize':
+                scaleFactor /= maxNorm
+
+            if self._gm.scaletype == 'linear' or self._gm.scaletype == 'constantNLinear':
+                scalarArray = vtk.vtkDoubleArray()
+                scalarArray.SetNumberOfComponents(1)
+                scalarArray.SetNumberOfValues(vectors.GetNumberOfTuples())
+
+                oldRange = maxNorm - minNorm
+                oldRange = 1.0 if oldRange == 0.0 else oldRange
+
+                # New range min, max.
+                newRangeValues = self._gm.scalerange
+                newRange = newRangeValues[1] - newRangeValues[0]
+
+                for i in range(0, vectors.GetNumberOfTuples()):
+                    norm = vtk.vtkMath.Norm(vectors.GetTuple(i))
+                    newValue = (((norm - minNorm) * newRange) / oldRange) + newRangeValues[0]
+                    scalarArray.SetValue(i, newValue)
+                    polydata.GetPointData().SetScalars(scalarArray)
+
+                # Scale to vector magnitude:
+                # NOTE: Currently we compute our own scaling factor since VTK does
+                # it by clamping the values > max to max  and values < min to min
+                # and not remap the range.
+                glyphFilter.SetScaleModeToScaleByScalar()
+
+        glyphFilter.SetScaleFactor(scaleFactor)
 
         mapper = vtk.vtkPolyDataMapper()
 
diff --git a/Packages/vcs/vcs/vector.py b/Packages/vcs/vcs/vector.py
index acea94c04..b48d74116 100755
--- a/Packages/vcs/vcs/vector.py
+++ b/Packages/vcs/vcs/vector.py
@@ -131,7 +131,7 @@ def process_src(nm, code):
 class Gv(object):
 
     """
- Class:	Gv				# Vector
+ Class: Gv              # Vector
 
  Description of Gv Class:
     The vector graphics method displays a vector plot of a 2D vector field. Vectors
@@ -145,76 +145,76 @@ class Gv(object):
     entry.
 
  Other Useful Functions:
-         a=vcs.init()			# Constructor
-         a.show('vector')		# Show predefined vector graphics methods
-         a.show('line')			# Show predefined VCS line objects
-         a.setcolormap("AMIP")		# Change the VCS color Map
-         a.vector(s1, s2, v,'default')	# Plot data 's1', and 's2' with vector 'v'
+         a=vcs.init()           # Constructor
+         a.show('vector')       # Show predefined vector graphics methods
+         a.show('line')         # Show predefined VCS line objects
+         a.setcolormap("AMIP")      # Change the VCS color Map
+         a.vector(s1, s2, v,'default')  # Plot data 's1', and 's2' with vector 'v'
                                          and 'default' template
-         a.update()		 	# Updates the VCS Canvas at user's request
-         a.mode=1, or 0 	 	# If 1, then automatic update, else if
+         a.update()         # Updates the VCS Canvas at user's request
+         a.mode=1, or 0         # If 1, then automatic update, else if
                                           0, then use update function to
                                           update the VCS Canvas.
 
  Example of Use:
     a=vcs.init()
     To Create a new instance of vector use:
-     vc=a.createvector('new','quick')	# Copies content of 'quick' to 'new'
-     vc=a.createvector('new') 		# Copies content of 'default' to 'new'
+     vc=a.createvector('new','quick')   # Copies content of 'quick' to 'new'
+     vc=a.createvector('new')       # Copies content of 'default' to 'new'
 
     To Modify an existing vector use:
      vc=a.getvector('AMIP_psl')
 
-    vc.list()  				# Will list all the vector attribute values
-    vc.projection='linear'   		# Can only be 'linear'
+    vc.list()               # Will list all the vector attribute values
+    vc.projection='linear'          # Can only be 'linear'
     lon30={-180:'180W',-150:'150W',0:'Eq'}
     vc.xticlabels1=lon30
     vc.xticlabels2=lon30
-    vc.xticlabels(lon30, lon30)  	# Will set them both
+    vc.xticlabels(lon30, lon30)     # Will set them both
     vc.xmtics1=''
     vc.xmtics2=''
-    vc.xmtics(lon30, lon30)  		# Will set them both
+    vc.xmtics(lon30, lon30)         # Will set them both
     vc.yticlabels1=lat10
     vc.yticlabels2=lat10
-    vc.yticlabels(lat10, lat10)  	# Will set them both
+    vc.yticlabels(lat10, lat10)     # Will set them both
     vc.ymtics1=''
     vc.ymtics2=''
-    vc.ymtics(lat10, lat10)  		# Will set them both
+    vc.ymtics(lat10, lat10)         # Will set them both
     vc.datawc_y1=-90.0
     vc.datawc_y2=90.0
     vc.datawc_x1=-180.0
     vc.datawc_x2=180.0
-    vc.datawc(-90, 90, -180, 180)  	# Will set them all
+    vc.datawc(-90, 90, -180, 180)   # Will set them all
     xaxisconvert='linear'
     yaxisconvert='linear'
-    vc.xyscale('linear', 'area_wt')  	# Will set them both
+    vc.xyscale('linear', 'area_wt')     # Will set them both
 
     Specify the line style:
-     vc.line=0 				# Same as vc.line='solid'
-     vc.line=1 				# Same as vc.line='dash'
-     vc.line=2 				# Same as vc.line='dot'
-     vc.line=3 				# Same as vc.line='dash-dot'
-     vc.line=4 				# Same as vc.line='long-dot'
+     vc.line=0              # Same as vc.line='solid'
+     vc.line=1              # Same as vc.line='dash'
+     vc.line=2              # Same as vc.line='dot'
+     vc.line=3              # Same as vc.line='dash-dot'
+     vc.line=4              # Same as vc.line='long-dot'
 
     Specify the line color of the vectors:
-     vc.linecolor=16   			# Color range: 16 to 230, default line color is black
-     vc.linewidth=1   			# Width range: 1 to 100, default size is 1
+     vc.linecolor=16            # Color range: 16 to 230, default line color is black
+     vc.linewidth=1             # Width range: 1 to 100, default size is 1
 
     Specify the vector scale factor:
-     vc.scale=2.0   			# Can be an integer or float
+     vc.scale=2.0               # Can be an integer or float
 
     Specify the vector alignment:
-     vc.alignment=0			# Same as vc.alignment='head'
-     vc.alignment=1			# Same as vc.alignment='center'
-     vc.alignment=2			# Same as vc.alignment='tail'
+     vc.alignment=0         # Same as vc.alignment='head'
+     vc.alignment=1         # Same as vc.alignment='center'
+     vc.alignment=2         # Same as vc.alignment='tail'
 
     Specify the vector type:
-      vc.type=0   			# Same as vc.type='arrow head'
-      vc.type=1   			# Same as vc.type='wind barbs'
-      vc.type=2   			# Same as vc.type='solid arrow head'
+      vc.type=0             # Same as vc.type='arrow head'
+      vc.type=1             # Same as vc.type='wind barbs'
+      vc.type=2             # Same as vc.type='solid arrow head'
 
     Specify the vector reference:
-      vc.reference=4    		# Can be an integer or float
+      vc.reference=4            # Can be an integer or float
 """
     __slots__ = [
         'name',
@@ -244,6 +244,9 @@ class Gv(object):
         'type',
         'reference',
         'colormap',
+        'scaleoptions',
+        'scaletype',
+        'scalerange',
         '_name',
         '_xaxisconvert',
         '_yaxisconvert',
@@ -270,9 +273,13 @@ class Gv(object):
         '_type',
         '_reference',
         '_colormap',
+        '_scaleoptions',
+        '_scaletype',
+        '_scalerange',
     ]
 
     colormap = VCS_validation_functions.colormap
+    scaleoptions = ('off', 'constant', 'normalize', 'linear', 'constantNNormalize', 'constantNLinear')
 
     def _getname(self):
         return self._name
@@ -528,6 +535,25 @@ class Gv(object):
         self._alignment = value
     alignment = property(_getalignment, _setalignment)
 
+
+    def _getscaletype(self):
+        return self._scaletype
+
+    def _setscaletype(self, value):
+        if value in self.scaleoptions:
+            self._scaletype = value
+        else:
+            raise ValueError('Invalid value '+ value + ' expected ' + self.scaleoptions)
+    scaletype = property(_getscaletype, _setscaletype)
+
+    def _getscalerange(self):
+        return self._scalerange
+
+    def _setscalerange(self, value):
+        self._scalerange = value
+    scalerange = property(_getscalerange, _setscalerange)
+
+
     def __init__(self, Gv_name, Gv_name_src='default'):
                 #                                                         #
                 ###########################################################
@@ -568,6 +594,8 @@ class Gv(object):
             self._datawc_timeunits = "days since 2000"
             self._datawc_calendar = 135441
             self._colormap = None
+            self._scaletype = self.scaleoptions[5]
+            self._scalerange = [0.1, 1.0]
         else:
             if isinstance(Gv_name_src, Gv):
                 Gv_name_src = Gv_name_src.name
@@ -583,7 +611,9 @@ class Gv(object):
                         'datawc_x2', 'xaxisconvert', 'yaxisconvert',
                         'line', 'linecolor', 'linewidth',
                         'datawc_timeunits', 'datawc_calendar', 'colormap',
-                        'scale', 'alignment', 'type', 'reference']:
+                        'scale', 'alignment', 'type', 'reference', 'scaletype',
+                        'scalerange']:
+
                 setattr(self, att, getattr(src, att))
         # Ok now we need to stick in the elements
         vcs.elements["vector"][Gv_name] = self
@@ -660,6 +690,8 @@ class Gv(object):
         print "alignment = ", self.alignment
         print "type = ", self.type
         print "reference = ", self.reference
+        print "scaletype = ", self.scaletype
+        print "scalerange = ", self.scalerange
 
     ##########################################################################
     #                                                                           #
@@ -798,6 +830,9 @@ class Gv(object):
             fp.write("%s.linecolor = %s\n" % (unique_name, self.linecolor))
             fp.write("%s.linewidth = %s\n" % (unique_name, self.linewidth))
             fp.write("%s.scale = %s\n" % (unique_name, self.scale))
+            fp.write("%s.scaletype = %s\n" % (unique_name, self.scaletype))
+            fp.write("%s.scalerange = %s\n" % (unique_name, self.scalerange))
+            fp.write("%s.scaleoptions = %s\n" % (unique_name, self.scaleoptions))
             fp.write("%s.alignment = '%s'\n" % (unique_name, self.alignment))
             fp.write("%s.type = '%s'\n" % (unique_name, self.type))
             fp.write("%s.reference = %s\n\n" % (unique_name, self.reference))
@@ -814,5 +849,5 @@ class Gv(object):
 
 
 ###############################################################################
-#        END OF FILE							      #
+#        END OF FILE                                  #
 ###############################################################################
-- 
GitLab


From e1d77e43c389af4f649e3399a5b346eea5f5845d Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 25 May 2016 08:47:58 -0400
Subject: [PATCH 100/196] Added check for valid range

---
 Packages/vcs/vcs/vector.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/Packages/vcs/vcs/vector.py b/Packages/vcs/vcs/vector.py
index b48d74116..dd06f6b0b 100755
--- a/Packages/vcs/vcs/vector.py
+++ b/Packages/vcs/vcs/vector.py
@@ -550,6 +550,8 @@ class Gv(object):
         return self._scalerange
 
     def _setscalerange(self, value):
+        value = VCS_validation_functions.checkListOfNumbers(self, 'scalerange',
+                    value, minvalue=0.0, minelements=2, maxelements=2)
         self._scalerange = value
     scalerange = property(_getscalerange, _setscalerange)
 
@@ -594,7 +596,7 @@ class Gv(object):
             self._datawc_timeunits = "days since 2000"
             self._datawc_calendar = 135441
             self._colormap = None
-            self._scaletype = self.scaleoptions[5]
+            self._scaletype = self.scaleoptions[4]
             self._scalerange = [0.1, 1.0]
         else:
             if isinstance(Gv_name_src, Gv):
-- 
GitLab


From f00bea8901ba156b9ca388af9fdccadb55898f57 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 25 May 2016 11:18:48 -0400
Subject: [PATCH 101/196] Added test for new scale options

---
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py     |  6 +-
 testing/vcs/CMakeLists.txt                    | 10 ++++
 testing/vcs/test_vcs_vectors_scale_options.py | 59 +++++++++++++++++++
 3 files changed, 73 insertions(+), 2 deletions(-)
 create mode 100644 testing/vcs/test_vcs_vectors_scale_options.py

diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index dd3b7ba04..b0f1cfd1e 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -118,8 +118,10 @@ class VectorPipeline(Pipeline2D):
             minNorm = None
             maxNorm = None
 
+            noOfComponents = vectors.GetNumberOfComponents()
+
             for i in range(0, vectors.GetNumberOfTuples()):
-                norm = vtk.vtkMath.Norm(vectors.GetTuple(i))
+                norm = vtk.vtkMath.Norm(vectors.GetTuple(i), noOfComponents)
 
                 if (minNorm is None or norm < minNorm):
                     minNorm = norm
@@ -145,7 +147,7 @@ class VectorPipeline(Pipeline2D):
                 newRange = newRangeValues[1] - newRangeValues[0]
 
                 for i in range(0, vectors.GetNumberOfTuples()):
-                    norm = vtk.vtkMath.Norm(vectors.GetTuple(i))
+                    norm = vtk.vtkMath.Norm(vectors.GetTuple(i), noOfComponents)
                     newValue = (((norm - minNorm) * newRange) / oldRange) + newRangeValues[0]
                     scalarArray.SetValue(i, newValue)
                     polydata.GetPointData().SetScalars(scalarArray)
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index cadb80cc9..e65b3ca43 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -904,6 +904,16 @@ cdat_add_test(test_vcs_settings_color_name_rgba
       ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py
       "${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png"
       )
+    cdat_add_test(test_vcs_vectors_scale_options
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py
+      "${BASELINE_DIR}/test_vcs_vector_scale_options_off.png"
+      "${BASELINE_DIR}/test_vcs_vector_scale_options_constant.png"
+      "${BASELINE_DIR}/test_vcs_vector_scale_options_linear.png"
+      "${BASELINE_DIR}/test_vcs_vector_scale_options_normalize.png"
+      "${BASELINE_DIR}/test_vcs_vector_scale_options_constantNLinear.png"
+      "${BASELINE_DIR}/test_vcs_vector_scale_options_constantNNormalize.png"
+      )
   endif()
 endif()
 
diff --git a/testing/vcs/test_vcs_vectors_scale_options.py b/testing/vcs/test_vcs_vectors_scale_options.py
new file mode 100644
index 000000000..5dff25d3f
--- /dev/null
+++ b/testing/vcs/test_vcs_vectors_scale_options.py
@@ -0,0 +1,59 @@
+import sys, cdms2, vcs, testing.regression as regression
+
+data = cdms2.open(vcs.sample_data+"/clt.nc")
+v = data['v'][...,::10,::10]
+u = data['u'][...,::10,::10]
+
+canvas = regression.init()
+gv = vcs.createvector()
+
+gv.scaletype = 'off'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vector_scale_options_off.png'
+canvas.png(outFilename)
+ret = regression.check_result_image(outFilename, sys.argv[1])
+canvas.clear()
+
+v = data['v'][...,::4,::4]
+u = data['u'][...,::4,::4]
+gv.scaletype = 'constant'
+gv.scale = 0.1
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vector_scale_options_constant.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[2])
+canvas.clear()
+
+v = data['v']
+u = data['u']
+gv.scale = 1.0
+
+gv.scaletype = 'normalize'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vector_scale_options_normalize.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[2])
+canvas.clear()
+
+gv.scaletype = 'linear'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vector_scale_options_linear.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[3])
+canvas.clear()
+
+gv.scaletype = 'constantNNormalize'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vector_scale_options_constantNNormalize.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[4])
+canvas.clear()
+
+gv.scaletype = 'constantNLinear'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vector_scale_options_constantNLinear.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[5])
+canvas.clear()
+
+sys.ecanvasit(ret)
-- 
GitLab


From 9404c34157ededc830a4287d35e6082e55edf6bf Mon Sep 17 00:00:00 2001
From: Bryce Sampson <sampson.bryce@yahoo.com>
Date: Wed, 25 May 2016 13:58:38 -0700
Subject: [PATCH 102/196] Added missing graphics method types to
 creategraphicsmethod

---
 Packages/vcs/vcs/utils.py | 14 ++++++++++----
 1 file changed, 10 insertions(+), 4 deletions(-)

diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py
index 6b94f4352..e69466eab 100644
--- a/Packages/vcs/vcs/utils.py
+++ b/Packages/vcs/vcs/utils.py
@@ -1629,7 +1629,7 @@ def getgraphicsmethod(type, name):
     return copy_mthd
 
 
-def creategraphicsmethod(gtype, name):
+def creategraphicsmethod(gtype, gname='default', name=None):
     if gtype in ['isoline', 'Gi']:
         func = vcs.createisoline
     elif gtype in ['isofill', 'Gfi']:
@@ -1652,11 +1652,17 @@ def creategraphicsmethod(gtype, name):
         func = vcs.createvector
     elif gtype in ['taylordiagram', 'Gtd']:
         func = vcs.createtaylordiagram
-    elif isinstance(type, vcsaddons.core.VCSaddon):
-        func = type.creategm
+    elif gtype == '3d_scalar':
+        func = vcs.create3d_scalar
+    elif gtype == '3d_dual_scalar':
+        func = vcs.create3d_dual_scalar
+    elif gtype == '3d_vector':
+        func = vcs.create3d_vector
+    elif isinstance(gtype, vcsaddons.core.VCSaddon):
+        func = gtype.creategm
     else:
         return None
-    copy_mthd = func(source=name)
+    copy_mthd = func(name=name, source=gname)
     return copy_mthd
 
 
-- 
GitLab


From db669a6b5af83c129e3e1307b9cdd920ce9ab8a2 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 25 May 2016 20:55:14 -0400
Subject: [PATCH 103/196] Using vtkDataArray GetMaxNorm for performance

---
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py | 28 +++++++++++++----------
 Packages/vcs/vcs/vector.py                |  3 ++-
 2 files changed, 18 insertions(+), 13 deletions(-)

diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index b0f1cfd1e..896e32a8e 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -115,18 +115,7 @@ class VectorPipeline(Pipeline2D):
            self._gm.scaletype == 'constantNNormalize' or self._gm.scaletype == 'constantNLinear':
 
             # Find the min and max vector magnitudes
-            minNorm = None
-            maxNorm = None
-
-            noOfComponents = vectors.GetNumberOfComponents()
-
-            for i in range(0, vectors.GetNumberOfTuples()):
-                norm = vtk.vtkMath.Norm(vectors.GetTuple(i), noOfComponents)
-
-                if (minNorm is None or norm < minNorm):
-                    minNorm = norm
-                if (maxNorm is None or norm > maxNorm):
-                    maxNorm = norm
+            maxNorm = vectors.GetMaxNorm()
 
             if maxNorm == 0:
                 maxNorm = 1.0
@@ -135,6 +124,21 @@ class VectorPipeline(Pipeline2D):
                 scaleFactor /= maxNorm
 
             if self._gm.scaletype == 'linear' or self._gm.scaletype == 'constantNLinear':
+                minNorm = None
+                maxNorm = None
+
+                noOfComponents = vectors.GetNumberOfComponents()
+                for i in range(0, vectors.GetNumberOfTuples()):
+                    norm = vtk.vtkMath.Norm(vectors.GetTuple(i), noOfComponents)
+
+                    if (minNorm is None or norm < minNorm):
+                        minNorm = norm
+                    if (maxNorm is None or norm > maxNorm):
+                        maxNorm = norm
+
+                if maxNorm == 0:
+                    maxNorm = 1.0
+
                 scalarArray = vtk.vtkDoubleArray()
                 scalarArray.SetNumberOfComponents(1)
                 scalarArray.SetNumberOfValues(vectors.GetNumberOfTuples())
diff --git a/Packages/vcs/vcs/vector.py b/Packages/vcs/vcs/vector.py
index dd06f6b0b..5d65fb4ad 100755
--- a/Packages/vcs/vcs/vector.py
+++ b/Packages/vcs/vcs/vector.py
@@ -543,7 +543,8 @@ class Gv(object):
         if value in self.scaleoptions:
             self._scaletype = value
         else:
-            raise ValueError('Invalid value '+ value + ' expected ' + self.scaleoptions)
+            VCS_validation_functions.checkedRaise(self, value, ValueError,
+                'Invalid value '+ value + '. Valid options are: ' + ','.join(self.scaleoptions))
     scaletype = property(_getscaletype, _setscaletype)
 
     def _getscalerange(self):
-- 
GitLab


From 06bc68d0c4ec89b16463f8c977acd91dfddbf1ac Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 25 May 2016 20:57:42 -0400
Subject: [PATCH 104/196] Fixed typo in the test

---
 testing/vcs/test_vcs_vectors_scale_options.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/testing/vcs/test_vcs_vectors_scale_options.py b/testing/vcs/test_vcs_vectors_scale_options.py
index 5dff25d3f..a4574bfdd 100644
--- a/testing/vcs/test_vcs_vectors_scale_options.py
+++ b/testing/vcs/test_vcs_vectors_scale_options.py
@@ -56,4 +56,4 @@ canvas.png(outFilename)
 ret += regression.check_result_image(outFilename, sys.argv[5])
 canvas.clear()
 
-sys.ecanvasit(ret)
+sys.exit(ret)
-- 
GitLab


From bd610acc966443e5d227694f0a553843fa08e636 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 25 May 2016 22:08:59 -0400
Subject: [PATCH 105/196] Added method to check for valid option

---
 Packages/vcs/vcs/VCS_validation_functions.py | 12 ++++++++++++
 Packages/vcs/vcs/vector.py                   |  8 +++-----
 2 files changed, 15 insertions(+), 5 deletions(-)

diff --git a/Packages/vcs/vcs/VCS_validation_functions.py b/Packages/vcs/vcs/VCS_validation_functions.py
index 7595d900e..db848b968 100644
--- a/Packages/vcs/vcs/VCS_validation_functions.py
+++ b/Packages/vcs/vcs/VCS_validation_functions.py
@@ -243,6 +243,18 @@ def checkListOfNumbers(self, name, value, minvalue=None,
     return list(value)
 
 
+def checkValidOption(self, name, value, options):
+    checkName(self, name, value)
+    if value not in options:
+        VCS_validation_functions.checkedRaise(
+            self,
+            value,
+            ValueError,
+            'Invalid value '+ value + '. Valid options are: ' +
+            ','.join(self.scaleoptions))
+    return value
+
+
 def checkFont(self, name, value):
     if (value is None):
         pass
diff --git a/Packages/vcs/vcs/vector.py b/Packages/vcs/vcs/vector.py
index 5d65fb4ad..db681cc78 100755
--- a/Packages/vcs/vcs/vector.py
+++ b/Packages/vcs/vcs/vector.py
@@ -540,11 +540,9 @@ class Gv(object):
         return self._scaletype
 
     def _setscaletype(self, value):
-        if value in self.scaleoptions:
-            self._scaletype = value
-        else:
-            VCS_validation_functions.checkedRaise(self, value, ValueError,
-                'Invalid value '+ value + '. Valid options are: ' + ','.join(self.scaleoptions))
+        value = VCS_validation_functions.checkValidOption(self, 'scaletype',
+                    value, self.scaleoptions)
+        self._scaletype = value
     scaletype = property(_getscaletype, _setscaletype)
 
     def _getscalerange(self):
-- 
GitLab


From b69d09d59a2d07853c5f4f2fe6987610d141b07a Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 25 May 2016 22:20:47 -0400
Subject: [PATCH 106/196] Fixed using wrong baseline image

---
 testing/vcs/test_vcs_vectors_scale_options.py | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/testing/vcs/test_vcs_vectors_scale_options.py b/testing/vcs/test_vcs_vectors_scale_options.py
index a4574bfdd..ce48b63c1 100644
--- a/testing/vcs/test_vcs_vectors_scale_options.py
+++ b/testing/vcs/test_vcs_vectors_scale_options.py
@@ -28,13 +28,6 @@ v = data['v']
 u = data['u']
 gv.scale = 1.0
 
-gv.scaletype = 'normalize'
-canvas.plot(u, v, gv)
-outFilename = 'test_vcs_vector_scale_options_normalize.png'
-canvas.png(outFilename)
-ret += regression.check_result_image(outFilename, sys.argv[2])
-canvas.clear()
-
 gv.scaletype = 'linear'
 canvas.plot(u, v, gv)
 outFilename = 'test_vcs_vector_scale_options_linear.png'
@@ -42,9 +35,9 @@ canvas.png(outFilename)
 ret += regression.check_result_image(outFilename, sys.argv[3])
 canvas.clear()
 
-gv.scaletype = 'constantNNormalize'
+gv.scaletype = 'normalize'
 canvas.plot(u, v, gv)
-outFilename = 'test_vcs_vector_scale_options_constantNNormalize.png'
+outFilename = 'test_vcs_vector_scale_options_normalize.png'
 canvas.png(outFilename)
 ret += regression.check_result_image(outFilename, sys.argv[4])
 canvas.clear()
@@ -56,4 +49,11 @@ canvas.png(outFilename)
 ret += regression.check_result_image(outFilename, sys.argv[5])
 canvas.clear()
 
+gv.scaletype = 'constantNNormalize'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vector_scale_options_constantNNormalize.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[6])
+canvas.clear()
+
 sys.exit(ret)
-- 
GitLab


From eb982112ec45b2bd78986fb405646f9cf8b24b05 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Thu, 26 May 2016 00:18:59 -0400
Subject: [PATCH 107/196] Added option to export text as object or path

---
 Packages/vcs/vcs/Canvas.py   | 24 ++++++++++++------------
 Packages/vcs/vcs/VTKPlots.py | 23 +++++++++++++++--------
 2 files changed, 27 insertions(+), 20 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 49361248c..c8712580c 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -2313,7 +2313,7 @@ Options:::
     Other:
        [x|y]rev         = 0|1                         # if ==1, reverse the direction of the x
                                                              or y axis
-       continents	= 0,1,2,3,4,5,6,7,8,9,10,11   #	if >=1, plot continental outlines
+       continents   = 0,1,2,3,4,5,6,7,8,9,10,11   # if >=1, plot continental outlines
                                                              (default: plot if xaxis is
                                                              longitude, yaxis is latitude -or-
                                                              xname is 'longitude' and yname is
@@ -3939,7 +3939,7 @@ Options:::
 
     ##########################################################################
     #                                                                           #
-    # Destroy VCS Canvas Object (i.e., call the Dealloc C code).      		#
+    # Destroy VCS Canvas Object (i.e., call the Dealloc C code).            #
     #                                                                           #
     ##########################################################################
     def destroy(self):
@@ -4680,7 +4680,7 @@ Options:::
     # pdf wrapper for VCS.                                               #
     #                                                                           #
     ##########################################################################
-    def pdf(self, file, width=None, height=None, units='inches'):
+    def pdf(self, file, width=None, height=None, units='inches', textAsObject=True):
         """
  Function: postscript
 
@@ -4704,14 +4704,14 @@ Options:::
 
         if not file.split('.')[-1].lower() in ['pdf']:
             file += '.pdf'
-        return self.backend.pdf(file, W, H)
+        return self.backend.pdf(file, W, H, textAsObject)
     ##########################################################################
-    #                                                                           #
-    # SVG wrapper for VCS.                                               #
-    #                                                                           #
+    #                                                                        #
+    # SVG wrapper for VCS.                                                   #
+    #                                                                        #
     ##########################################################################
 
-    def svg(self, file, width=None, height=None, units='inches'):
+    def svg(self, file, width=None, height=None, units='inches', textAsObject=True):
         """
  Function: postscript
 
@@ -4735,7 +4735,7 @@ Options:::
 
         if not file.split('.')[-1].lower() in ['svg']:
             file += '.svg'
-        return self.backend.svg(file, W, H)
+        return self.backend.svg(file, W, H, textAsObject)
 
     def _compute_margins(
             self, W, H, top_margin, bottom_margin, right_margin, left_margin, dpi):
@@ -4911,7 +4911,7 @@ Options:::
         return W, H
 
     def postscript(self, file, mode='r', orientation=None, width=None, height=None,
-                   units='inches'):
+                   units='inches', textAsObject=True):
         """
  Function: postscript
 
@@ -4950,7 +4950,7 @@ Options:::
         if not file.split('.')[-1].lower() in ['ps', 'eps']:
             file += '.ps'
         if mode == 'r':
-            return self.backend.postscript(file, W, H, units="pixels")
+            return self.backend.postscript(file, W, H, units="pixels", textAsObject=textAsObject)
         else:
             n = random.randint(0, 10000000000000)
             psnm = '/tmp/' + '__VCS__tmp__' + str(n) + '.ps'
@@ -5209,7 +5209,7 @@ Options:::
 
     ##########################################################################
     #                                                                        #
-    # Set continents type wrapper for VCS.                           		 #
+    # Set continents type wrapper for VCS.                                   #
     #                                                                        #
     ##########################################################################
     def setcontinentstype(self, value):
diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 814719536..b6b49ec1b 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -1064,7 +1064,7 @@ class VTKVCSBackend(object):
         return plot
 
     def vectorGraphics(
-            self, output_type, file, width=None, height=None, units=None):
+            self, output_type, file, width=None, height=None, units=None, textAsObject=True):
         if self.renWin is None:
             raise Exception("Nothing on Canvas to dump to file")
 
@@ -1095,7 +1095,11 @@ class VTKVCSBackend(object):
         gl.SetInput(self.renWin)
         gl.SetCompress(0)  # Do not compress
         gl.SetFilePrefix(".".join(file.split(".")[:-1]))
-        gl.TextAsPathOn()
+
+        if textAsObject:
+            gl.TextAsPathOff()
+        else:
+            gl.TextAsPathOn()
         if output_type == "svg":
             gl.SetFileFormatToSVG()
         elif output_type == "ps":
@@ -1112,14 +1116,17 @@ class VTKVCSBackend(object):
         self.showGUI()
 
     def postscript(self, file, width=None, height=None,
-                   units=None):
-        return self.vectorGraphics("ps", file, width, height, units)
+                   units=None, textAsObject=True):
+        return self.vectorGraphics("ps", file, width, height,
+                                    units, textAsObject)
 
-    def pdf(self, file, width=None, height=None, units=None):
-        return self.vectorGraphics("pdf", file, width, height, units)
+    def pdf(self, file, width=None, height=None, units=None, textAsObject=True):
+        return self.vectorGraphics("pdf", file, width, height,
+                                    units, textAsObject)
 
-    def svg(self, file, width=None, height=None, units=None):
-        return self.vectorGraphics("svg", file, width, height, units)
+    def svg(self, file, width=None, height=None, units=None, textAsObject=True):
+        return self.vectorGraphics("svg", file, width,
+                                    height, units, textAsObject)
 
     def gif(self, filename='noname.gif', merge='r', orientation=None,
             geometry='1600x1200'):
-- 
GitLab


From 0b2065ad031cf1e0b9c4efc1006273a65647d1f6 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Thu, 26 May 2016 09:51:16 -0400
Subject: [PATCH 108/196] Fixed comment style

---
 Packages/vcs/vcs/Canvas.py | 481 +++++++++++++++++++------------------
 1 file changed, 241 insertions(+), 240 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index c8712580c..38ca2a013 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -18,7 +18,7 @@
 #               landscape (width exceeding height), portrait (height exceeding#
 #               width), or full-screen mode.                                  #
 #                                                                             #
-# Version: 2.4                                                          #
+# Version: 2.4                                                                #
 #                                                                             #
 ###############################################################################
 
@@ -322,9 +322,9 @@ class Canvas(object):
     a=vcs.Canvas()                    # This examples constructs a VCS Canvas
 """
     ##########################################################################
-    #                                                                           #
-    # Set attributes for VCS Canvas Class (i.e., set VCS Canvas Mode).          #
-    #                                                                           #
+    #                                                                        #
+    # Set attributes for VCS Canvas Class (i.e., set VCS Canvas Mode).       #
+    #                                                                        #
     ##########################################################################
     __slots__ = [
         '_mode',
@@ -835,9 +835,9 @@ class Canvas(object):
         return tv
 
     ##########################################################################
-    #                                                                           #
-    # Print out the object's doc string.                                        #
-    #                                                                           #
+    #                                                                        #
+    # Print out the object's doc string.                                     #
+    #                                                                        #
     ##########################################################################
     def objecthelp(self, *arg):
         """
@@ -855,13 +855,13 @@ class Canvas(object):
         for x in arg:
             print getattr(x, "__doc__", "")
 
-    ##########################################################################
-    #                                                                           #
-    # Initialize the VCS Canvas and set the Canvas mode to 0. Because the mode  #
-    # is set to 0, the user will have to manually update the VCS Canvas by      #
-    # using the "update" function.                                              #
-    #                                                                           #
-    ##########################################################################
+    ############################################################################
+    #                                                                          #
+    # Initialize the VCS Canvas and set the Canvas mode to 0. Because the mode #
+    # is set to 0, the user will have to manually update the VCS Canvas by     #
+    # using the "update" function.                                             #
+    #                                                                          #
+    ############################################################################
     def __init__(self, mode=1, pause_time=0,
                  call_from_gui=0, size=None, backend="vtk", geometry=None, bg=None):
         self._canvas_id = vcs.next_canvas_id
@@ -964,12 +964,12 @@ class Canvas(object):
 
 # Initial.attributes is being called in main.c, so it is not needed here!
 # Actually it is for taylordiagram graphic methods....
-##########################################################################
+###########################################################################################
 #  Okay, then this is redundant since it is done in main.c. When time perments, put the   #
 #  taylordiagram graphic methods attributes in main.c Because this is here we must check  #
 #  to make sure that the initial attributes file is called only once for normalization    #
 #  purposes....                                                                           #
-##########################################################################
+###########################################################################################
 
         self.canvas_template_editor = None
         self.ratio = '0'
@@ -1013,11 +1013,11 @@ class Canvas(object):
     def initLogoDrawing(self):
         self.drawLogo = self.enableLogo
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Update wrapper function for VCS.                                          #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
 
     def update(self, *args, **kargs):
         """
@@ -1046,11 +1046,11 @@ class Canvas(object):
 
         return self.backend.update(*args, **kargs)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Update wrapper function for VCS with a check to update the continents.    #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def _update_continents_check(self, *args):
 
         a = self.canvas.updatecanvas_continents(*args)
@@ -1060,11 +1060,11 @@ class Canvas(object):
 
         return a
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Script VCS primary or secondary elements wrapper functions for VCS.       #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def scriptobject(self, obj, script_filename=None, mode=None):
         """
  Function: scriptobject       # Script a single primary or secondary class object
@@ -1139,11 +1139,11 @@ class Canvas(object):
         else:
             print 'This is not a template, graphics method or secondary method object.'
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Remove VCS primary and secondary methods wrapper functions for VCS.       #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
 
     def removeobject(self, obj):
         __doc__ = vcs.removeobject.__doc__  # noqa
@@ -1193,11 +1193,11 @@ class Canvas(object):
     def check_name_source(self, name, source, typ):
         return vcs.check_name_source(name, source, typ)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Template functions for VCS.                                               #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createtemplate(self, name=None, source='default'):
         return vcs.createtemplate(name, source)
     createtemplate.__doc__ = vcs.manageElements.createtemplate.__doc__
@@ -1206,11 +1206,11 @@ class Canvas(object):
         return vcs.gettemplate(Pt_name_src)
     gettemplate.__doc__ = vcs.manageElements.gettemplate.__doc__
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Projection functions for VCS.                                             #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createprojection(self, name=None, source='default'):
         return vcs.createprojection(name, source)
     createprojection.__doc__ = vcs.manageElements.createprojection.__doc__
@@ -1219,11 +1219,11 @@ class Canvas(object):
         return vcs.getprojection(Proj_name_src)
     getprojection.__doc__ = vcs.manageElements.getprojection.__doc__
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Boxfill functions for VCS.                                                #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createboxfill(self, name=None, source='default'):
         return vcs.createboxfill(name, source)
     createboxfill.__doc__ = vcs.manageElements.createboxfill.__doc__
@@ -1282,11 +1282,11 @@ Options:::
     boxfill.__doc__ = boxfill.__doc__ % (
         plot_keywords_doc, graphics_method_core, axesconvert, plot_2D_input, plot_output)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Taylordiagram functions for VCS.                                          #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createtaylordiagram(self, name=None, source='default'):
         return vcs.createtaylordiagram(name, source)
     createtaylordiagram.__doc__ = vcs.manageElements.createtaylordiagram.__doc__
@@ -1317,11 +1317,11 @@ Options:::
         arglist = _determine_arg_list('taylordiagram', args)
         return self.__plot(arglist, parms)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Meshfill functions for VCS.                                               #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
 
     def createmeshfill(self, name=None, source='default'):
         return vcs.createmeshfill(name, source)
@@ -1368,11 +1368,11 @@ Options:::
         arglist = _determine_arg_list('meshfill', args)
         return self.__plot(arglist, parms)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
-    # DV3D functions for VCS.                                                #
+    # DV3D functions for VCS.                                                   #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
 
     def create3d_scalar(self, name=None, source='default'):
         return vcs.create3d_scalar(name, source)
@@ -1413,11 +1413,11 @@ Options:::
         arglist = _determine_arg_list('3d_dual_scalar', args)
         return self.__plot(arglist, parms)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Isofill functions for VCS.                                                #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createisofill(self, name=None, source='default'):
         return vcs.createisofill(name, source)
     createisofill.__doc__ = vcs.manageElements.createisofill.__doc__
@@ -1469,11 +1469,11 @@ Options:::
     isofill.__doc__ = isofill.__doc__ % (
         plot_keywords_doc, graphics_method_core, axesconvert, plot_2D_input, plot_output)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Isoline functions for VCS.                                                #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createisoline(self, name=None, source='default'):
         return vcs.createisoline(name, source)
     createisoline.__doc__ = vcs.manageElements.createisoline.__doc__
@@ -1533,11 +1533,11 @@ Options:::
         return vcs.get1d(name)
     create1d.__doc__ = vcs.manageElements.create1d.__doc__
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Xyvsy functions for VCS.                                                  #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createxyvsy(self, name=None, source='default'):
         return vcs.createxyvsy(name, source)
     createxyvsy.__doc__ = vcs.manageElements.createxyvsy.__doc__
@@ -1589,11 +1589,11 @@ Options:::
     xyvsy.__doc__ = xyvsy.__doc__ % (
         plot_keywords_doc, graphics_method_core, xaxisconvert, plot_1D_input, plot_output)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Yxvsx functions for VCS.                                                  #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createyxvsx(self, name=None, source='default'):
         return vcs.createyxvsx(name, source)
     createyxvsx.__doc__ = vcs.manageElements.createyxvsx.__doc__
@@ -1645,11 +1645,11 @@ Options:::
     yxvsx.__doc__ = yxvsx.__doc__ % (
         plot_keywords_doc, graphics_method_core, xaxisconvert, plot_1D_input, plot_output)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # XvsY functions for VCS.                                                   #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createxvsy(self, name=None, source='default'):
         return vcs.createxvsy(name, source)
     createxvsy.__doc__ = vcs.manageElements.createxvsy.__doc__
@@ -1702,11 +1702,11 @@ Options:::
                                    plot_2_1D_input,
                                    plot_output)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Vector functions for VCS.                                                 #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createvector(self, name=None, source='default'):
         return vcs.createvector(name, source)
     createvector.__doc__ = vcs.manageElements.createvector.__doc__
@@ -1737,11 +1737,11 @@ Options:::
         arglist = _determine_arg_list('vector', args)
         return self.__plot(arglist, parms)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Scatter functions for VCS.                                                #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createscatter(self, name=None, source='default'):
         return vcs.createscatter(name, source)
     createscatter.__doc__ = vcs.manageElements.createscatter.__doc__
@@ -1792,11 +1792,11 @@ Options:::
     scatter.__doc__ = scatter.__doc__ % (
         plot_keywords_doc, graphics_method_core, axesconvert, plot_2_1D_input, plot_output)
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Line  functions for VCS.                                                  #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createline(self, name=None, source='default', ltype=None,  # noqa
                    width=None, color=None, priority=None,
                    viewport=None, worldcoordinate=None,
@@ -1876,11 +1876,11 @@ Options:::
 
         return ln
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Marker  functions for VCS.                                                #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createmarker(self, name=None, source='default', mtype=None,  # noqa
                      size=None, color=None, priority=1,
                      viewport=None, worldcoordinate=None,
@@ -1959,11 +1959,11 @@ Options:::
 
         return mrk
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Fillarea  functions for VCS.                                              #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createfillarea(self, name=None, source='default', style=None,
                        index=None, color=None, priority=1,
                        viewport=None, worldcoordinate=None,
@@ -2044,11 +2044,11 @@ Options:::
 
         return fa
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Text Table  functions for VCS.                                            #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createtexttable(self, name=None, source='default', font=None,
                         spacing=None, expansion=None, color=None, priority=None,
                         viewport=None, worldcoordinate=None,
@@ -2066,11 +2066,11 @@ Options:::
                                 viewport, worldcoordinate, x, y)
     gettexttable.__doc__ = vcs.manageElements.gettexttable.__doc__
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Text Orientation  functions for VCS.                                      #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createtextorientation(self, name=None, source='default'):
         return vcs.createtextorientation(name, source)
     createtextorientation.__doc__ = vcs.manageElements.createtextorientation.__doc__
@@ -2079,11 +2079,11 @@ Options:::
         return vcs.gettextorientation(To_name_src)
     gettextorientation.__doc__ = vcs.manageElements.gettextorientation.__doc__
 
-    ##########################################################################
+    #############################################################################
     #                                                                           #
     # Text Combined  functions for VCS.                                         #
     #                                                                           #
-    ##########################################################################
+    #############################################################################
     def createtextcombined(self, Tt_name=None, Tt_source='default', To_name=None, To_source='default',  # noqa
                            font=None, spacing=None, expansion=None, color=None,
                            priority=None, viewport=None, worldcoordinate=None, x=None, y=None,
@@ -3802,33 +3802,33 @@ Options:::
         self.backend.setAnimationStepper(stepper)
 
     ##########################################################################
-    #                                                                           #
-    # VCS utility wrapper to return the number of displays that are "ON".       #
-    #                                                                           #
+    #                                                                        #
+    # VCS utility wrapper to return the number of displays that are "ON".    #
+    #                                                                        #
     ##########################################################################
     def return_display_ON_num(self, *args):
         return self.canvas.return_display_ON_num(*args)
 
     ##########################################################################
-    #                                                                           #
-    # VCS utility wrapper to return the current display names.                  #
-    #                                                                           #
+    #                                                                        #
+    # VCS utility wrapper to return the current display names.               #
+    #                                                                        #
     ##########################################################################
     def return_display_names(self, *args):
         return self.display_names
 
     ##########################################################################
-    #                                                                           #
-    # VCS utility wrapper to remove the display names.                          #
-    #                                                                           #
+    #                                                                        #
+    # VCS utility wrapper to remove the display names.                       #
+    #                                                                        #
     ##########################################################################
     def remove_display_name(self, *args):
         return self.canvas.remove_display_name(*args)
 
     ##########################################################################
-    #                                                                           #
-    # CGM  wrapper for VCS.                                                     #
-    #                                                                           #
+    #                                                                        #
+    # CGM  wrapper for VCS.                                                  #
+    #                                                                        #
     ##########################################################################
     def cgm(self, file, mode='w'):
         """
@@ -3861,9 +3861,9 @@ Options:::
         return self.backend.cgm(file)
 
     ##########################################################################
-    #                                                                           #
-    # Clear VCS Canvas wrapper for VCS.                                         #
-    #                                                                           #
+    #                                                                        #
+    # Clear VCS Canvas wrapper for VCS.                                      #
+    #                                                                        #
     ##########################################################################
     def clear(self, *args, **kargs):
         """
@@ -3912,9 +3912,9 @@ Options:::
         return
 
     ##########################################################################
-    #                                                                           #
-    # Close VCS Canvas wrapper for VCS.                                         #
-    #                                                                           #
+    #                                                                        #
+    # Close VCS Canvas wrapper for VCS.                                      #
+    #                                                                        #
     ##########################################################################
     def close(self, *args, **kargs):
         """
@@ -3938,9 +3938,9 @@ Options:::
         return a
 
     ##########################################################################
-    #                                                                           #
-    # Destroy VCS Canvas Object (i.e., call the Dealloc C code).            #
-    #                                                                           #
+    #                                                                        #
+    # Destroy VCS Canvas Object (i.e., call the Dealloc C code).             #
+    #                                                                        #
     ##########################################################################
     def destroy(self):
         """
@@ -3962,9 +3962,9 @@ Options:::
         gc.collect()
 
     ##########################################################################
-    #                                                                           #
-    # Graphics Method Change display.                                           #
-    #                                                                           #
+    #                                                                        #
+    # Graphics Method Change display.                                        #
+    #                                                                        #
     ##########################################################################
     def change_display_graphic_method(self, display, type, name):
         '''
@@ -3977,9 +3977,9 @@ Options:::
         return self.canvas.change_display_graphic_method(
             *(display, type, name))
     ##########################################################################
-    #                                                                           #
-    # Figures out which display is selected in graphic method editor mode       #
-    #                                                                           #
+    #                                                                        #
+    # Figures out which display is selected in graphic method editor mode    #
+    #                                                                        #
     ##########################################################################
 
     def get_selected_display(self):
@@ -3990,9 +3990,9 @@ Options:::
         return self.canvas.get_selected_display(*())
 
     ##########################################################################
-    #                                                                           #
-    # Send a request to turn on a picture template object in the VCS Canvas.    #
-    #                                                                           #
+    #                                                                        #
+    # Send a request to turn on a picture template object in the VCS Canvas. #
+    #                                                                        #
     ##########################################################################
     def _select_one(self, template_name, attr_name, X1, X2, Y1, Y2):
         # flush and block the X main loop
@@ -4000,19 +4000,19 @@ Options:::
         self.canvas._select_one(template_name, attr_name, X1, X2, Y1, Y2)
 
     ##########################################################################
-    #                                                                           #
-    # Send a request to turn off a picture template object in the VCS Canvas.   #
-    #                                                                           #
+    #                                                                        #
+    # Send a request to turn off a picture template object in the VCS Canvas.#
+    #                                                                        #
     ##########################################################################
     def _unselect_one(self, template_name, attr_name, X1, X2, Y1, Y2):
 
         self.canvas._unselect_one(template_name, attr_name, X1, X2, Y1, Y2)
 
     ##########################################################################
-    #                                                                           #
-    # Set the template editor event flag to select all template objects on the  #
-    # VCS Canvas.                                                               #
-    #                                                                           #
+    #                                                                        #
+    # Set the template editor event flag to select all template objects on   #
+    # the VCS Canvas.                                                        #
+    #                                                                        #
     ##########################################################################
     def _select_all(self):
         # flush and block the X main loop
@@ -4020,10 +4020,10 @@ Options:::
         self.canvas._select_all()
 
     ##########################################################################
-    #                                                                           #
-    # Set the template editor event flag to unselect all the template objects   #
-    # on the VCS Canvas.                                                        #
-    #                                                                           #
+    #                                                                        #
+    # Set the template editor event flag to unselect all the template        #
+    # objects on the VCS Canvas.                                             #
+    #                                                                        #
     ##########################################################################
     def _unselect_all(self):
         # flush and block the X main loop
@@ -4031,57 +4031,57 @@ Options:::
         self.canvas._unselect_all()
 
     ##########################################################################
-    #                                                                           #
-    # Set the template editor mode for the VCS Canvas screen.                   #
-    #                                                                           #
+    #                                                                        #
+    # Set the template editor mode for the VCS Canvas screen.                #
+    #                                                                        #
     ##########################################################################
     def _SCREEN_TEMPLATE_FLAG(self):
         self.canvas.SCREEN_TEMPLATE_FLAG()
 
     ##########################################################################
-    #                                                                           #
-    # Set the graphic method editor mode for the VCS Canvas screen.                   #
-    #                                                                           #
+    #                                                                        #
+    # Set the graphic method editor mode for the VCS Canvas screen.          #
+    #                                                                        #
     ##########################################################################
     def _SCREEN_GM_FLAG(self):
         self.canvas.SCREEN_GM_FLAG()
 
     ##########################################################################
-    #                                                                           #
-    # Set the data mode for the VCS Canvas screen.                              #
-    #                                                                           #
+    #                                                                        #
+    # Set the data mode for the VCS Canvas screen.                           #
+    #                                                                        #
     ##########################################################################
     def _SCREEN_DATA_FLAG(self):
         self.canvas.SCREEN_DATA_FLAG()
 
     ##########################################################################
-    #                                                                           #
-    # Set the screen check mode to DATA for the VCS Canvas.                     #
-    #                                                                           #
+    #                                                                        #
+    # Set the screen check mode to DATA for the VCS Canvas.                  #
+    #                                                                        #
     ##########################################################################
     def _SCREEN_CHECKMODE_DATA_FLAG(self):
         self.canvas.SCREEN_CHECKMODE_DATA_FLAG()
 
     ##########################################################################
-    #                                                                           #
-    # Return the Screen mode, either data mode or template editor mode.         #
-    #                                                                           #
+    #                                                                        #
+    # Return the Screen mode, either data mode or template editor mode.      #
+    #                                                                        #
     ##########################################################################
     def SCREEN_MODE(self, *args):
         return self.canvas.SCREEN_MODE(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Return the Screen mode, either data mode or template editor mode.         #
-    #                                                                           #
+    #                                                                        #
+    # Return the Screen mode, either data mode or template editor mode.      #
+    #                                                                        #
     ##########################################################################
     def plot_annotation(self, *args):
         self.canvas.plot_annotation(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Flush X event que wrapper for VCS.                                        #
-    #                                                                           #
+    #                                                                        #
+    # Flush X event queue wrapper for VCS.                                  #
+    #                                                                        #
     ##########################################################################
     def flush(self, *args):
         """
@@ -4099,9 +4099,9 @@ Options:::
         return self.backend.flush(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Geometry wrapper for VCS.                                                 #
-    #                                                                           #
+    #                                                                        #
+    # Geometry wrapper for VCS.                                              #
+    #                                                                        #
     ##########################################################################
     def geometry(self, *args):
         """
@@ -4129,9 +4129,9 @@ Options:::
         return a
 
     ##########################################################################
-    #                                                                           #
-    # VCS Canvas Information wrapper.                                           #
-    #                                                                           #
+    #                                                                        #
+    # VCS Canvas Information wrapper.                                        #
+    #                                                                        #
     ##########################################################################
     def canvasinfo(self, *args, **kargs):
         """
@@ -4149,9 +4149,9 @@ Options:::
         return self.backend.canvasinfo(*args, **kargs)
 
     ##########################################################################
-    #                                                                           #
-    # Get continents type wrapper for VCS.                                      #
-    #                                                                           #
+    #                                                                        #
+    # Get continents type wrapper for VCS.                                   #
+    #                                                                        #
     ##########################################################################
     def getcontinentstype(self, *args):
         """
@@ -4219,9 +4219,9 @@ Options:::
         return
 
     ##########################################################################
-    #                                                                           #
-    # Grid wrapper for VCS.                                                     #
-    #                                                                           #
+    #                                                                        #
+    # Grid wrapper for VCS.                                                  #
+    #                                                                        #
     ##########################################################################
     def grid(self, *args):
         """
@@ -4243,9 +4243,9 @@ Options:::
         return p
 
     ##########################################################################
-    #                                                                           #
-    # Landscape VCS Canvas orientation wrapper for VCS.                         #
-    #                                                                           #
+    #                                                                        #
+    # Landscape VCS Canvas orientation wrapper for VCS.                      #
+    #                                                                        #
     ##########################################################################
     def landscape(self, width=-99, height=-99, x=-99, y=-99, clear=0):
         """
@@ -4305,9 +4305,9 @@ Options:::
         return l
 
     ##########################################################################
-    #                                                                           #
-    # List Primary and Secondary elements wrapper for VCS.                      #
-    #                                                                           #
+    #                                                                        #
+    # List Primary and Secondary elements wrapper for VCS.                   #
+    #                                                                        #
     ##########################################################################
     def listelements(self, *args):
         """
@@ -4331,9 +4331,9 @@ Options:::
         return L
 
     ##########################################################################
-    #                                                                           #
-    # update VCS's Canvas orientation wrapper for VCS.                          #
-    #                                                                           #
+    #                                                                        #
+    # update VCS's Canvas orientation wrapper for VCS.                       #
+    #                                                                        #
     ##########################################################################
     def updateorientation(self, *args):
         """
@@ -4347,9 +4347,9 @@ Options:::
         return a
 
     ##########################################################################
-    #                                                                           #
-    # Open VCS Canvas wrapper for VCS.                                          #
-    #                                                                           #
+    #                                                                        #
+    # Open VCS Canvas wrapper for VCS.                                       #
+    #                                                                        #
     ##########################################################################
     def open(self, width=None, height=None, **kargs):
         """
@@ -4370,9 +4370,9 @@ Options:::
         return a
 
     ##########################################################################
-    #                                                                           #
-    # Return VCS Canvas ID.                                                     #
-    #                                                                           #
+    #                                                                        #
+    # Return VCS Canvas ID.                                                  #
+    #                                                                        #
     ##########################################################################
     def canvasid(self, *args):
         '''
@@ -4390,17 +4390,18 @@ Options:::
         return self._canvas_id
 
     ##########################################################################
-    #                                                                           #
-    # Connect the VCS Canvas to the GUI.                                        #
-    #                                                                           #
+    #                                                                        #
+    # Connect the VCS Canvas to the GUI.                                     #
+    #                                                                        #
     ##########################################################################
     def _connect_gui_and_canvas(self, *args):
         return self.canvas.connect_gui_and_canvas(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Page VCS Canvas orientation ('portrait' or 'landscape') wrapper for VCS.  #
-    #                                                                           #
+    #                                                                        #
+    # Page VCS Canvas orientation ('portrait' or 'landscape') wrapper for    #
+    # VCS.                                                                   #
+    #                                                                        #
     ##########################################################################
     def page(self, *args):
         """
@@ -4424,9 +4425,9 @@ Options:::
         return l
 
     ##########################################################################
-    #                                                                           #
-    # Portrait VCS Canvas orientation wrapper for VCS.                          #
-    #                                                                           #
+    #                                                                        #
+    # Portrait VCS Canvas orientation wrapper for VCS.                       #
+    #                                                                        #
     ##########################################################################
     def portrait(self, width=-99, height=-99, x=-99, y=-99, clear=0):
         """
@@ -4676,9 +4677,9 @@ Options:::
             file, W, H, units, draw_white_background, **args)
 
     ##########################################################################
-    #                                                                           #
-    # pdf wrapper for VCS.                                               #
-    #                                                                           #
+    #                                                                        #
+    # pdf wrapper for VCS.                                                   #
+    #                                                                        #
     ##########################################################################
     def pdf(self, file, width=None, height=None, units='inches', textAsObject=True):
         """
@@ -4967,9 +4968,9 @@ Options:::
                 shutil.move(psnm, file)
 
     ##########################################################################
-    #                                                                           #
-    # Showbg wrapper for VCS.                                                   #
-    #                                                                           #
+    #                                                                        #
+    # Showbg wrapper for VCS.                                                #
+    #                                                                        #
     ##########################################################################
     def showbg(self, *args):
         """
@@ -4992,9 +4993,9 @@ Options:::
         return a
 
     ##########################################################################
-    #                                                                           #
-    # Backing Store wrapper for VCS.                                            #
-    #                                                                           #
+    #                                                                        #
+    # Backing Store wrapper for VCS.                                         #
+    #                                                                        #
     ##########################################################################
     def backing_store(self, *args):
         """
@@ -5010,25 +5011,25 @@ Options:::
         return self.canvas.backing_store(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Update the animation slab. Used only for the VCS Canvas GUI.              #
-    #                                                                           #
+    #                                                                        #
+    # Update the animation slab. Used only for the VCS Canvas GUI.           #
+    #                                                                        #
     ##########################################################################
     def update_animation_data(self, *args):
         return self.canvas.update_animation_data(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Return the dimension information. Used only for the VCS Canvas GUI.       #
-    #                                                                           #
+    #                                                                        #
+    # Return the dimension information. Used only for the VCS Canvas GUI.    #
+    #                                                                        #
     ##########################################################################
     def return_dimension_info(self, *args):
         return self.canvas.return_dimension_info(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Raster wrapper for VCS.                                                   #
-    #                                                                           #
+    #                                                                        #
+    # Raster wrapper for VCS.                                                #
+    #                                                                        #
     ##########################################################################
     def raster(self, file, mode='a'):
         """
@@ -5058,9 +5059,9 @@ Options:::
         return self.canvas.raster(*(file, mode))
 
     ##########################################################################
-    #                                                                           #
-    # Reset grid wrapper for VCS.                                               #
-    #                                                                           #
+    #                                                                        #
+    # Reset grid wrapper for VCS.                                            #
+    #                                                                        #
     ##########################################################################
     def resetgrid(self, *args):
         """
@@ -5075,9 +5076,9 @@ Options:::
         return self.canvas.resetgrid(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Script wrapper for VCS.                                                   #
-    #                                                                           #
+    #                                                                        #
+    # Script wrapper for VCS.                                                #
+    #                                                                        #
     ##########################################################################
     def _scriptrun(self, *args):
         return vcs._scriptrun(*args)
@@ -5086,9 +5087,9 @@ Options:::
         vcs.scriptrun(aFile, *args, **kargs)
 
     ##########################################################################
-    #                                                                           #
-    # Set default graphics method and template wrapper for VCS.                 #
-    #                                                                           #
+    #                                                                        #
+    # Set default graphics method and template wrapper for VCS.              #
+    #                                                                        #
     ##########################################################################
     def set(self, *args):
         """
@@ -5108,9 +5109,9 @@ Options:::
         return self.canvas.set(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Set VCS color map wrapper for VCS.                                        #
-    #                                                                           #
+    #                                                                        #
+    # Set VCS color map wrapper for VCS.                                     #
+    #                                                                        #
     ##########################################################################
     def setcolormap(self, name):
         """
@@ -5140,9 +5141,9 @@ Options:::
         return
 
     ##########################################################################
-    #                                                                           #
-    # Set VCS color map cell wrapper for VCS.                                   #
-    #                                                                           #
+    #                                                                        #
+    # Set VCS color map cell wrapper for VCS.                                #
+    #                                                                        #
     ##########################################################################
     def setcolorcell(self, *args):
         """
@@ -5312,9 +5313,9 @@ Options:::
         return self.backend.gif(nargs)
 
     ##########################################################################
-    #                                                                           #
-    # Screen GhostScript (gs) wrapper for VCS.                                  #
-    #                                                                           #
+    #                                                                        #
+    # Screen GhostScript (gs) wrapper for VCS.                               #
+    #                                                                        #
     ##########################################################################
     def gs(self, filename='noname.gs', device='png256',
            orientation=None, resolution='792x612'):
@@ -5322,9 +5323,9 @@ Options:::
         warnings.warn("Export to GhostScript is no longer supported", DeprecationWarning)
 
     ##########################################################################
-    #                                                                           #
-    # Screen Encapsulated PostScript wrapper for VCS.                           #
-    #                                                                           #
+    #                                                                        #
+    # Screen Encapsulated PostScript wrapper for VCS.                        #
+    #                                                                        #
     ##########################################################################
     def eps(self, file, mode='r', orientation=None, width=None, height=None, units='inches',
             left_margin=None, right_margin=None, top_margin=None, bottom_margin=None):
@@ -5373,18 +5374,18 @@ Options:::
         os.remove(tmpfile)
 
     ##########################################################################
-    #                                                                           #
-    # Show VCS primary and secondary elements wrapper for VCS.                  #
-    #                                                                           #
+    #                                                                        #
+    # Show VCS primary and secondary elements wrapper for VCS.               #
+    #                                                                        #
     ##########################################################################
     def show(self, *args):
         return vcs.show(*args)
     show.__doc__ = vcs.__doc__
 
     ##########################################################################
-    #                                                                           #
-    # Look if a graphic method is in a file           .                         #
-    #                                                                           #
+    #                                                                        #
+    # Look if a graphic method is in a file.                                 #
+    #                                                                        #
     ##########################################################################
     def isinfile(self, GM, file=None):
         """ Checks if a graphic method is stored in a file
@@ -5404,9 +5405,9 @@ Options:::
                 return 1
         return 0
     ##########################################################################
-    #                                                                           #
-    # Save VCS initial.attribute file  wrapper for VCS.                         #
-    #                                                                           #
+    #                                                                        #
+    # Save VCS initial.attribute file wrapper for VCS.                      #
+    #                                                                        #
     ##########################################################################
 
     def saveinitialfile(self):
@@ -5443,9 +5444,9 @@ Options:::
         return vcs.saveinitialfile()
 
     ##########################################################################
-    #                                                                           #
-    # Raise VCS Canvas to the top of all its siblings.                          #
-    #                                                                           #
+    #                                                                        #
+    # Raise VCS Canvas to the top of all its siblings.                       #
+    #                                                                        #
     ##########################################################################
     def canvasraised(self, *args):
         """
@@ -5465,10 +5466,10 @@ Options:::
         return self.backend.canvasraised(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Returns 1 if a VCS Canvas is displayed on the screen. Returns a 0 if no   #
-    # VCS Canvas is displayed on the screen.                                    #
-    #                                                                           #
+    #                                                                        #
+    # Returns 1 if a VCS Canvas is displayed on the screen. Returns 0 if no  #
+    # VCS Canvas is displayed on the screen.                                 #
+    #                                                                        #
     ##########################################################################
     def iscanvasdisplayed(self, *args):
         """
@@ -5488,9 +5489,9 @@ Options:::
         return self.canvas.iscanvasdisplayed(*args)
 
     ##########################################################################
-    #                                                                           #
-    # Is VCS's orientation landscape?                                           #
-    #                                                                           #
+    #                                                                        #
+    # Is VCS's orientation landscape?                                        #
+    #                                                                        #
     ##########################################################################
     def islandscape(self):
         """
@@ -5515,9 +5516,9 @@ Options:::
             return 0
 
     ##########################################################################
-    #                                                                           #
-    # Is VCS's orientation portrait?                                            #
-    #                                                                           #
+    #                                                                        #
+    # Is VCS's orientation portrait?                                         #
+    #                                                                        #
     ##########################################################################
     def isportrait(self):
         """
@@ -5541,9 +5542,9 @@ Options:::
         else:
             return 0
     ##########################################################################
-    #                                                                           #
-    # Dislplay plot functions for VCS.                                          #
-    #                                                                           #
+    #                                                                        #
+    # Display plot functions for VCS.                                        #
+    #                                                                        #
     ##########################################################################
 
     def getplot(self, Dp_name_src='default', template=None):
@@ -5570,9 +5571,9 @@ Options:::
         return display
 
     ##########################################################################
-    #                                                                           #
-    # Colormap functions for VCS.                                               #
-    #                                                                           #
+    #                                                                        #
+    # Colormap functions for VCS.                                            #
+    #                                                                        #
     ##########################################################################
     def createcolormap(self, Cp_name=None, Cp_name_src='default'):
         return vcs.createcolormap(Cp_name, Cp_name_src)
@@ -5583,9 +5584,9 @@ Options:::
     getcolormap.__doc__ = vcs.manageElements.getcolormap.__doc__
 
     ##########################################################################
-    #                                                                           #
-    # Font functions.                       #
-    #                                                                           #
+    #                                                                        #
+    # Font functions.                                                        #
+    #                                                                        #
     ##########################################################################
     def addfont(self, path, name=""):
         """
@@ -5698,9 +5699,9 @@ Options:::
         return self.copyfontto(font, 1)
 
     ##########################################################################
-    #                                                                           #
-    # Orientation VCS Canvas orientation wrapper for VCS.                       #
-    #                                                                           #
+    #                                                                        #
+    # Orientation VCS Canvas orientation wrapper for VCS.                    #
+    #                                                                        #
     ##########################################################################
     def orientation(self, *args, **kargs):
         """
@@ -5716,9 +5717,9 @@ Options:::
         return self.backend.orientation(*args, **kargs)
 
     ##########################################################################
-    #                                                                           #
-    # Get VCS color map cell wrapper for VCS.                                   #
-    #                                                                           #
+    #                                                                        #
+    # Get VCS color map cell wrapper for VCS.                                #
+    #                                                                        #
     ##########################################################################
     def getcolorcell(self, *args):
         """
@@ -5750,9 +5751,9 @@ Options:::
         return vcs.getcolorcell(args[0], self)
 
     ##########################################################################
-    #                                                                           #
-    # Get VCS color map name wrapper for VCS.                                   #
-    #                                                                           #
+    #                                                                        #
+    # Get VCS color map name wrapper for VCS.                                #
+    #                                                                        #
     ##########################################################################
     def getcolormapname(self, *args):
         """
-- 
GitLab


From c30239c24c6ec90e5b9393c7d869151fe902d4a0 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 27 May 2016 13:12:11 -0400
Subject: [PATCH 109/196] Fixed flake8

---
 Packages/vcs/vcs/VCS_validation_functions.py |  4 ++--
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py    |  3 ---
 Packages/vcs/vcs/vector.py                   | 16 ++++++++++------
 3 files changed, 12 insertions(+), 11 deletions(-)

diff --git a/Packages/vcs/vcs/VCS_validation_functions.py b/Packages/vcs/vcs/VCS_validation_functions.py
index db848b968..33d554f27 100644
--- a/Packages/vcs/vcs/VCS_validation_functions.py
+++ b/Packages/vcs/vcs/VCS_validation_functions.py
@@ -246,11 +246,11 @@ def checkListOfNumbers(self, name, value, minvalue=None,
 def checkValidOption(self, name, value, options):
     checkName(self, name, value)
     if value not in options:
-        VCS_validation_functions.checkedRaise(
+        self.checkedRaise(
             self,
             value,
             ValueError,
-            'Invalid value '+ value + '. Valid options are: ' +
+            'Invalid value ' + value + '. Valid options are: ' +
             ','.join(self.scaleoptions))
     return value
 
diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index 896e32a8e..642884bc6 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -86,11 +86,8 @@ class VectorPipeline(Pipeline2D):
         arrow.SetOutputPointsPrecision(vtk.vtkAlgorithm.DOUBLE_PRECISION)
         arrow.FilledOff()
 
-
         polydata = self._vtkPolyDataFilter.GetOutput()
         vectors = polydata.GetPointData().GetVectors()
-        vectorsRangeX = vectors.GetRange(0)
-        vectorsRangeY = vectors.GetRange(1)
 
         if self._gm.scaletype == 'constant' or\
            self._gm.scaletype == 'constantNNormalize' or\
diff --git a/Packages/vcs/vcs/vector.py b/Packages/vcs/vcs/vector.py
index db681cc78..9976fae44 100755
--- a/Packages/vcs/vcs/vector.py
+++ b/Packages/vcs/vcs/vector.py
@@ -535,13 +535,14 @@ class Gv(object):
         self._alignment = value
     alignment = property(_getalignment, _setalignment)
 
-
     def _getscaletype(self):
         return self._scaletype
 
     def _setscaletype(self, value):
-        value = VCS_validation_functions.checkValidOption(self, 'scaletype',
-                    value, self.scaleoptions)
+        value = VCS_validation_functions.checkValidOption(self,
+                                                          'scaletype',
+                                                          value,
+                                                          self.scaleoptions)
         self._scaletype = value
     scaletype = property(_getscaletype, _setscaletype)
 
@@ -549,12 +550,15 @@ class Gv(object):
         return self._scalerange
 
     def _setscalerange(self, value):
-        value = VCS_validation_functions.checkListOfNumbers(self, 'scalerange',
-                    value, minvalue=0.0, minelements=2, maxelements=2)
+        value = VCS_validation_functions.checkListOfNumbers(self,
+                                                            'scalerange',
+                                                            value,
+                                                            minvalue=0.0,
+                                                            minelements=2,
+                                                            maxelements=2)
         self._scalerange = value
     scalerange = property(_getscalerange, _setscalerange)
 
-
     def __init__(self, Gv_name, Gv_name_src='default'):
                 #                                                         #
                 ###########################################################
-- 
GitLab


From 9a2d02d9c8b78479ca07411a3fd7baf43d82f96a Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 27 May 2016 13:32:48 -0400
Subject: [PATCH 110/196] Updated method name for consistency

---
 Packages/vcs/vcs/VCS_validation_functions.py | 2 +-
 Packages/vcs/vcs/vector.py                   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/Packages/vcs/vcs/VCS_validation_functions.py b/Packages/vcs/vcs/VCS_validation_functions.py
index 33d554f27..2968d7832 100644
--- a/Packages/vcs/vcs/VCS_validation_functions.py
+++ b/Packages/vcs/vcs/VCS_validation_functions.py
@@ -243,7 +243,7 @@ def checkListOfNumbers(self, name, value, minvalue=None,
     return list(value)
 
 
-def checkValidOption(self, name, value, options):
+def checkInStringList(self, name, value, options):
     checkName(self, name, value)
     if value not in options:
         self.checkedRaise(
diff --git a/Packages/vcs/vcs/vector.py b/Packages/vcs/vcs/vector.py
index 9976fae44..9ed8bfbef 100755
--- a/Packages/vcs/vcs/vector.py
+++ b/Packages/vcs/vcs/vector.py
@@ -539,7 +539,7 @@ class Gv(object):
         return self._scaletype
 
     def _setscaletype(self, value):
-        value = VCS_validation_functions.checkValidOption(self,
+        value = VCS_validation_functions.checkInStringList(self,
                                                           'scaletype',
                                                           value,
                                                           self.scaleoptions)
-- 
GitLab


From a7f5b860ff1408282431b70890044f956396c320 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 28 May 2016 03:52:42 -0400
Subject: [PATCH 111/196] Made background rendering default for testing

---
 Packages/testing/regression.py | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index b9cd2cdeb..25255fdc0 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -20,13 +20,15 @@ def init(*args, **kwargs):
     testingDir = os.path.join(os.path.dirname(__file__), "..")
     sys.path.append(testingDir)
 
-    vcsinst = vcs.init(*args, **kwargs)
-    vcsinst.setantialiasing(0)
-    vcsinst.drawlogooff()
-
     if ((('bg' in kwargs and kwargs['bg']) or ('bg' not in kwargs)) and
         ('geometry' not in kwargs)):
+        vcsinst = vcs.init(*args, **dict(kwargs, bg=1))
         vcsinst.setbgoutputdimensions(1200, 1091, units="pixels")
+    else:
+        vcsinst = vcs.init(*args, **dict(kwargs, bg=0))
+
+    vcsinst.setantialiasing(0)
+    vcsinst.drawlogooff()
     return vcsinst
 
 def run(vcsinst, fname, baseline=sys.argv[1], threshold=defaultThreshold):
-- 
GitLab


From b59d84e4807a5efa678338aaa7cf6cbd1454beca Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 28 May 2016 04:16:31 -0400
Subject: [PATCH 112/196] Using consistent naming scheme

---
 testing/vcs/CMakeLists.txt                    | 12 ++++++------
 testing/vcs/test_vcs_vectors_scale_options.py | 12 ++++++------
 2 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index d15355ce4..bee8b9a45 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -907,12 +907,12 @@ cdat_add_test(test_vcs_settings_color_name_rgba
     cdat_add_test(test_vcs_vectors_scale_options
       "${PYTHON_EXECUTABLE}"
       ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py
-      "${BASELINE_DIR}/test_vcs_vector_scale_options_off.png"
-      "${BASELINE_DIR}/test_vcs_vector_scale_options_constant.png"
-      "${BASELINE_DIR}/test_vcs_vector_scale_options_linear.png"
-      "${BASELINE_DIR}/test_vcs_vector_scale_options_normalize.png"
-      "${BASELINE_DIR}/test_vcs_vector_scale_options_constantNLinear.png"
-      "${BASELINE_DIR}/test_vcs_vector_scale_options_constantNNormalize.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png"
       )
   endif()
 endif()
diff --git a/testing/vcs/test_vcs_vectors_scale_options.py b/testing/vcs/test_vcs_vectors_scale_options.py
index ce48b63c1..32898d129 100644
--- a/testing/vcs/test_vcs_vectors_scale_options.py
+++ b/testing/vcs/test_vcs_vectors_scale_options.py
@@ -9,7 +9,7 @@ gv = vcs.createvector()
 
 gv.scaletype = 'off'
 canvas.plot(u, v, gv)
-outFilename = 'test_vcs_vector_scale_options_off.png'
+outFilename = 'test_vcs_vectors_scale_options_off.png'
 canvas.png(outFilename)
 ret = regression.check_result_image(outFilename, sys.argv[1])
 canvas.clear()
@@ -19,7 +19,7 @@ u = data['u'][...,::4,::4]
 gv.scaletype = 'constant'
 gv.scale = 0.1
 canvas.plot(u, v, gv)
-outFilename = 'test_vcs_vector_scale_options_constant.png'
+outFilename = 'test_vcs_vectors_scale_options_constant.png'
 canvas.png(outFilename)
 ret += regression.check_result_image(outFilename, sys.argv[2])
 canvas.clear()
@@ -30,28 +30,28 @@ gv.scale = 1.0
 
 gv.scaletype = 'linear'
 canvas.plot(u, v, gv)
-outFilename = 'test_vcs_vector_scale_options_linear.png'
+outFilename = 'test_vcs_vectors_scale_options_linear.png'
 canvas.png(outFilename)
 ret += regression.check_result_image(outFilename, sys.argv[3])
 canvas.clear()
 
 gv.scaletype = 'normalize'
 canvas.plot(u, v, gv)
-outFilename = 'test_vcs_vector_scale_options_normalize.png'
+outFilename = 'test_vcs_vectors_scale_options_normalize.png'
 canvas.png(outFilename)
 ret += regression.check_result_image(outFilename, sys.argv[4])
 canvas.clear()
 
 gv.scaletype = 'constantNLinear'
 canvas.plot(u, v, gv)
-outFilename = 'test_vcs_vector_scale_options_constantNLinear.png'
+outFilename = 'test_vcs_vectors_scale_options_constantNLinear.png'
 canvas.png(outFilename)
 ret += regression.check_result_image(outFilename, sys.argv[5])
 canvas.clear()
 
 gv.scaletype = 'constantNNormalize'
 canvas.plot(u, v, gv)
-outFilename = 'test_vcs_vector_scale_options_constantNNormalize.png'
+outFilename = 'test_vcs_vectors_scale_options_constantNNormalize.png'
 canvas.png(outFilename)
 ret += regression.check_result_image(outFilename, sys.argv[6])
 canvas.clear()
-- 
GitLab


From c8befa3b19aab92a53ad644a6113f816965b5d98 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 28 May 2016 04:26:50 -0400
Subject: [PATCH 113/196] Fixed another flake8 issue

---
 Packages/vcs/vcs/vector.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Packages/vcs/vcs/vector.py b/Packages/vcs/vcs/vector.py
index 9ed8bfbef..2c49bfd30 100755
--- a/Packages/vcs/vcs/vector.py
+++ b/Packages/vcs/vcs/vector.py
@@ -540,9 +540,9 @@ class Gv(object):
 
     def _setscaletype(self, value):
         value = VCS_validation_functions.checkInStringList(self,
-                                                          'scaletype',
-                                                          value,
-                                                          self.scaleoptions)
+                                                           'scaletype',
+                                                           value,
+                                                           self.scaleoptions)
         self._scaletype = value
     scaletype = property(_getscaletype, _setscaletype)
 
-- 
GitLab


From e3f7ede95f7fb84f144e6942c9e5dbe0eb1f5a9f Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 28 May 2016 04:33:32 -0400
Subject: [PATCH 114/196] Using more generic name for testing

---
 testing/vcs/test_vcs_dump_json.json | 24 ++++++++++++------------
 testing/vcs/test_vcs_dump_json.py   | 26 +++++++++++++-------------
 2 files changed, 25 insertions(+), 25 deletions(-)

diff --git a/testing/vcs/test_vcs_dump_json.json b/testing/vcs/test_vcs_dump_json.json
index b79b1319c..fdd28171b 100644
--- a/testing/vcs/test_vcs_dump_json.json
+++ b/testing/vcs/test_vcs_dump_json.json
@@ -1,6 +1,6 @@
 {
  "G1d": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "colormap": null, 
    "datawc_calendar": 135441, 
    "datawc_timeunits": "days since 2000", 
@@ -29,7 +29,7 @@
   }
  }, 
  "Gfb": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "boxfill_type": "linear", 
    "color_1": 16, 
    "color_2": 239, 
@@ -70,7 +70,7 @@
   }
  }, 
  "Gfi": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "colormap": null, 
    "datawc_calendar": 135441, 
    "datawc_timeunits": "days since 2000", 
@@ -110,7 +110,7 @@
   }
  }, 
  "Gfm": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "colormap": null, 
    "datawc_calendar": 135441, 
    "datawc_timeunits": "days since 2000", 
@@ -153,7 +153,7 @@
   }
  }, 
  "Gi": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "angle": [
     35.0
    ], 
@@ -211,7 +211,7 @@
   }
  }, 
  "P": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "box1": {
     "line": "default", 
     "priority": 1, 
@@ -579,7 +579,7 @@
   }
  }, 
  "Proj": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "parameters": [
     1e+20, 
     1e+20, 
@@ -601,7 +601,7 @@
   }
  }, 
  "Tf": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "color": [
     1
    ], 
@@ -632,7 +632,7 @@
   }
  }, 
  "Tl": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "color": [
     1
    ], 
@@ -662,7 +662,7 @@
   }
  }, 
  "Tm": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "color": [
     1
    ], 
@@ -691,7 +691,7 @@
   }
  }, 
  "To": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "angle": 0, 
    "halign": 0, 
    "height": 14, 
@@ -700,7 +700,7 @@
   }
  }, 
  "Tt": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "backgroundcolor": 0, 
    "backgroundopacity": 0, 
    "color": 1, 
diff --git a/testing/vcs/test_vcs_dump_json.py b/testing/vcs/test_vcs_dump_json.py
index aca6215b8..9247b2d38 100644
--- a/testing/vcs/test_vcs_dump_json.py
+++ b/testing/vcs/test_vcs_dump_json.py
@@ -1,33 +1,33 @@
 
 import filecmp
 import vcs,numpy,os,sys
-src=sys.argv[1]
+src = sys.argv[1]
 if os.path.exists("test_vcs_dump_json.json"):
     os.remove("test_vcs_dump_json.json")
 
-b = vcs.createboxfill("Charles.Doutriaux")
+b = vcs.createboxfill("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createisofill("Charles.Doutriaux")
+b = vcs.createisofill("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createisoline("Charles.Doutriaux")
+b = vcs.createisoline("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createmeshfill("Charles.Doutriaux")
+b = vcs.createmeshfill("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.create1d("Charles.Doutriaux")
+b = vcs.create1d("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createfillarea("Charles.Doutriaux")
+b = vcs.createfillarea("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createtext("Charles.Doutriaux")
+b = vcs.createtext("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createline("Charles.Doutriaux")
+b = vcs.createline("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createmarker("Charles.Doutriaux")
+b = vcs.createmarker("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createtemplate("Charles.Doutriaux")
+b = vcs.createtemplate("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createprojection("Charles.Doutriaux")
+b = vcs.createprojection("vcs_instance")
 b.script("test_vcs_dump_json","a")
 
-assert(filecmp.cmp("test_vcs_dump_json.json",src))
+assert(filecmp.cmp("test_vcs_dump_json.json", src))
 
 
-- 
GitLab


From 8c05ac458f0fbc5b325ff8373197e96555ac20a0 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 28 May 2016 04:49:13 -0400
Subject: [PATCH 115/196] Added vector method for testing

---
 testing/vcs/test_vcs_dump_json.json | 42 +++++++++++++++++++++++++++++
 testing/vcs/test_vcs_dump_json.py   |  2 ++
 2 files changed, 44 insertions(+)

diff --git a/testing/vcs/test_vcs_dump_json.json b/testing/vcs/test_vcs_dump_json.json
index fdd28171b..d40844987 100644
--- a/testing/vcs/test_vcs_dump_json.json
+++ b/testing/vcs/test_vcs_dump_json.json
@@ -210,6 +210,48 @@
    "yticlabels2": "*"
   }
  }, 
+ "Gv": {
+  "vcs_instance": {
+   "alignment": "center", 
+   "colormap": null, 
+   "datawc_calendar": 135441, 
+   "datawc_timeunits": "days since 2000", 
+   "datawc_x1": 1e+20, 
+   "datawc_x2": 1e+20, 
+   "datawc_y1": 1e+20, 
+   "datawc_y2": 1e+20, 
+   "line": null, 
+   "linecolor": null, 
+   "linewidth": null, 
+   "projection": "linear", 
+   "reference": 1e+20, 
+   "scale": 1.0, 
+   "scaleoptions": [
+    "off", 
+    "constant", 
+    "normalize", 
+    "linear", 
+    "constantNNormalize", 
+    "constantNLinear"
+   ], 
+   "scalerange": [
+    0.1, 
+    1.0
+   ], 
+   "scaletype": "constantNNormalize", 
+   "type": "arrows", 
+   "xaxisconvert": "linear", 
+   "xmtics1": "", 
+   "xmtics2": "", 
+   "xticlabels1": "*", 
+   "xticlabels2": "*", 
+   "yaxisconvert": "linear", 
+   "ymtics1": "", 
+   "ymtics2": "", 
+   "yticlabels1": "*", 
+   "yticlabels2": "*"
+  }
+ }, 
  "P": {
   "vcs_instance": {
    "box1": {
diff --git a/testing/vcs/test_vcs_dump_json.py b/testing/vcs/test_vcs_dump_json.py
index 9247b2d38..421606c4d 100644
--- a/testing/vcs/test_vcs_dump_json.py
+++ b/testing/vcs/test_vcs_dump_json.py
@@ -17,6 +17,8 @@ b = vcs.create1d("vcs_instance")
 b.script("test_vcs_dump_json","a")
 b = vcs.createfillarea("vcs_instance")
 b.script("test_vcs_dump_json","a")
+b = vcs.createvector("vcs_instance")
+b.script("test_vcs_dump_json","a")
 b = vcs.createtext("vcs_instance")
 b.script("test_vcs_dump_json","a")
 b = vcs.createline("vcs_instance")
-- 
GitLab


From 92d5053c014b8ef0f5ecf5ca136d1f8e5c039ce7 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 28 May 2016 05:05:33 -0400
Subject: [PATCH 116/196] Added new test for text as object feature

---
 testing/vcs/CMakeLists.txt          |  4 +++
 testing/vcs/test_vcs_text_object.py | 41 +++++++++++++++++++++++++++++
 2 files changed, 45 insertions(+)
 create mode 100644 testing/vcs/test_vcs_text_object.py

diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index 09910f4a3..4cace0204 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -385,6 +385,10 @@ cdat_add_test(test_vcs_geometry
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_geometry.py
   )
+cdat_add_test(test_vcs_text_object
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_text_object.py
+  )
 ##############################################################################
 #
 # These tests perform plotting and need sample data
diff --git a/testing/vcs/test_vcs_text_object.py b/testing/vcs/test_vcs_text_object.py
new file mode 100644
index 000000000..f36d44b77
--- /dev/null
+++ b/testing/vcs/test_vcs_text_object.py
@@ -0,0 +1,41 @@
+import cdms2, vcs, tempfile
+
+x = vcs.init(bg=1, geometry=(800, 600))
+txt = x.createtext()
+txt.x = [.0000005,.00000005,.5,.99999,.999999]
+txt.y = [0.05,.9,.5,.9,0.05]
+txt.string = ["SAMPLE TEXT A","SAMPLE TEXT B","SAMPLE TEXT C","SAMPLE TEXT D","SAMPLE TEXT E"]
+txt.halign = "center"
+txt.valign = "base"
+txt.height = 10
+x.plot(txt)
+
+tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \
+              prefix='tmpTextAsObjectFalse', delete=True)
+x.postscript(tmpfile.name, textAsObject=False)
+tmpfile.close()
+
+tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \
+              prefix='tmpTextAsObjectTrue', delete=True)
+x.postscript(tmpfile.name, textAsObject=True)
+tmpfile.close()
+
+tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \
+              prefix='tmpTextAsObjectFalse', delete=True)
+x.pdf(tmpfile.name, textAsObject=False)
+tmpfile.close()
+
+tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \
+              prefix='tmpTextAsObjectTrue', delete=True )
+x.pdf(tmpfile.name, textAsObject=True)
+tmpfile.close()
+
+tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \
+              prefix='tmpTextAsObjectFalse', delete=True)
+x.pdf(tmpfile.name, textAsObject=False)
+tmpfile.close()
+
+tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \
+              prefix='tmpTextAsObjectTrue', delete=True)
+x.pdf(tmpfile.name, textAsObject=True)
+tmpfile.close()
\ No newline at end of file
-- 
GitLab


From ab65bdb33de0bf0ee5434860791bb57729b65990 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 28 May 2016 12:44:35 -0400
Subject: [PATCH 117/196] Made text export as path the default

---
 Packages/vcs/vcs/Canvas.py          | 22 ++++++++++-------
 Packages/vcs/vcs/VTKPlots.py        | 38 ++++++++++++++++++++++-------
 testing/vcs/test_vcs_text_object.py | 24 +++++++++---------
 3 files changed, 54 insertions(+), 30 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 38ca2a013..8a91c4ab6 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -4681,7 +4681,8 @@ Options:::
     # pdf wrapper for VCS.                                                   #
     #                                                                        #
     ##########################################################################
-    def pdf(self, file, width=None, height=None, units='inches', textAsObject=True):
+    def pdf(self, file, width=None, height=None, units='inches',
+            textAsPaths=True):
         """
  Function: postscript
 
@@ -4705,14 +4706,15 @@ Options:::
 
         if not file.split('.')[-1].lower() in ['pdf']:
             file += '.pdf'
-        return self.backend.pdf(file, W, H, textAsObject)
+        return self.backend.pdf(file, W, H, textAsPaths)
     ##########################################################################
     #                                                                        #
     # SVG wrapper for VCS.                                                   #
     #                                                                        #
     ##########################################################################
 
-    def svg(self, file, width=None, height=None, units='inches', textAsObject=True):
+    def svg(self, file, width=None, height=None, units='inches',
+            textAsPaths=True):
         """
  Function: postscript
 
@@ -4736,7 +4738,7 @@ Options:::
 
         if not file.split('.')[-1].lower() in ['svg']:
             file += '.svg'
-        return self.backend.svg(file, W, H, textAsObject)
+        return self.backend.svg(file, W, H, textAsPaths)
 
     def _compute_margins(
             self, W, H, top_margin, bottom_margin, right_margin, left_margin, dpi):
@@ -4912,7 +4914,7 @@ Options:::
         return W, H
 
     def postscript(self, file, mode='r', orientation=None, width=None, height=None,
-                   units='inches', textAsObject=True):
+                   units='inches', textAsPaths=True):
         """
  Function: postscript
 
@@ -4951,7 +4953,7 @@ Options:::
         if not file.split('.')[-1].lower() in ['ps', 'eps']:
             file += '.ps'
         if mode == 'r':
-            return self.backend.postscript(file, W, H, units="pixels", textAsObject=textAsObject)
+            return self.backend.postscript(file, W, H, units="pixels", textAsPaths=textAsPaths)
         else:
             n = random.randint(0, 10000000000000)
             psnm = '/tmp/' + '__VCS__tmp__' + str(n) + '.ps'
@@ -5327,8 +5329,9 @@ Options:::
     # Screen Encapsulated PostScript wrapper for VCS.                        #
     #                                                                        #
     ##########################################################################
-    def eps(self, file, mode='r', orientation=None, width=None, height=None, units='inches',
-            left_margin=None, right_margin=None, top_margin=None, bottom_margin=None):
+    def eps(self, file, mode='r', orientation=None, width=None, height=None,
+            units='inches', left_margin=None, right_margin=None, top_margin=None,
+            bottom_margin=None, textAsPaths=True):
         """
         Function: Encapsulated PostScript
 
@@ -5369,7 +5372,8 @@ Options:::
             left_margin,
             right_margin,
             top_margin,
-            bottom_margin)
+            bottom_margin,
+            textAsPaths)
         os.popen("ps2epsi %s %s" % (tmpfile, file)).readlines()
         os.remove(tmpfile)
 
diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index b6b49ec1b..975b5cdfd 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -1063,8 +1063,28 @@ class VTKVCSBackend(object):
                 break
         return plot
 
-    def vectorGraphics(
-            self, output_type, file, width=None, height=None, units=None, textAsObject=True):
+    def vectorGraphics(self, output_type, file, width=None, height=None,
+                       units=None, textAsPaths=True):
+        """Export vector graphics to PDF, Postscript, SVG and EPS format.
+
+       Reasoning for textAsPaths as default:
+       The output formats supported by gl2ps (which VTK uses for postscript/pdf/svg/etc
+       vector exports) handle text objects inconsistently. For example, postscript mangles
+       newlines, pdf doesn't fully support rotation and alignment, stuff like that.
+       These are limitations in the actual format specifications themselves.
+
+       On top of that, embedding text objects then relies on the viewer to locate
+       a similar font and render the text, and odds are good that the fonts used
+       by the viewer will have different characteristics than the ones used in the
+       original rendering. So, for instance, you have some right-justified lines of
+       text, like the data at the top of the VCS plots. If the font used by the viewer
+       uses different widths for any of the glyphs composing the text, the text will be
+       unaligned along the right-hand side, since the text is always anchored on
+       its left side due to how these formats represent text objects. This just looks bad.
+       Exporting text as paths eliminates all of these problems with portability across
+       viewers and inconsistent text object handling between output formats.
+       """
+
         if self.renWin is None:
             raise Exception("Nothing on Canvas to dump to file")
 
@@ -1096,7 +1116,7 @@ class VTKVCSBackend(object):
         gl.SetCompress(0)  # Do not compress
         gl.SetFilePrefix(".".join(file.split(".")[:-1]))
 
-        if textAsObject:
+        if textAsPaths:
             gl.TextAsPathOff()
         else:
             gl.TextAsPathOn()
@@ -1116,17 +1136,17 @@ class VTKVCSBackend(object):
         self.showGUI()
 
     def postscript(self, file, width=None, height=None,
-                   units=None, textAsObject=True):
+                   units=None, textAsPaths=True):
         return self.vectorGraphics("ps", file, width, height,
-                                    units, textAsObject)
+                                    units, textAsPaths)
 
-    def pdf(self, file, width=None, height=None, units=None, textAsObject=True):
+    def pdf(self, file, width=None, height=None, units=None, textAsPaths=True):
         return self.vectorGraphics("pdf", file, width, height,
-                                    units, textAsObject)
+                                    units, textAsPaths)
 
-    def svg(self, file, width=None, height=None, units=None, textAsObject=True):
+    def svg(self, file, width=None, height=None, units=None, textAsPaths=True):
         return self.vectorGraphics("svg", file, width,
-                                    height, units, textAsObject)
+                                    height, units, textAsPaths)
 
     def gif(self, filename='noname.gif', merge='r', orientation=None,
             geometry='1600x1200'):
diff --git a/testing/vcs/test_vcs_text_object.py b/testing/vcs/test_vcs_text_object.py
index f36d44b77..b98d7ec24 100644
--- a/testing/vcs/test_vcs_text_object.py
+++ b/testing/vcs/test_vcs_text_object.py
@@ -11,31 +11,31 @@ txt.height = 10
 x.plot(txt)
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \
-              prefix='tmpTextAsObjectFalse', delete=True)
-x.postscript(tmpfile.name, textAsObject=False)
+              prefix='textAsPathsFalse', delete=True)
+x.postscript(tmpfile.name, textAsPaths=False)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \
-              prefix='tmpTextAsObjectTrue', delete=True)
-x.postscript(tmpfile.name, textAsObject=True)
+              prefix='textAsPathsTrue', delete=True)
+x.postscript(tmpfile.name, textAsPaths=True)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \
-              prefix='tmpTextAsObjectFalse', delete=True)
-x.pdf(tmpfile.name, textAsObject=False)
+              prefix='textAsPathsFalse', delete=True)
+x.pdf(tmpfile.name, textAsPaths=False)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \
-              prefix='tmpTextAsObjectTrue', delete=True )
-x.pdf(tmpfile.name, textAsObject=True)
+              prefix='textAsPathsTrue', delete=True )
+x.pdf(tmpfile.name, textAsPaths=True)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \
-              prefix='tmpTextAsObjectFalse', delete=True)
-x.pdf(tmpfile.name, textAsObject=False)
+              prefix='textAsPathsFalse', delete=True)
+x.pdf(tmpfile.name, textAsPaths=False)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \
-              prefix='tmpTextAsObjectTrue', delete=True)
-x.pdf(tmpfile.name, textAsObject=True)
+              prefix='textAsPathsTrue', delete=True)
+x.pdf(tmpfile.name, textAsPaths=True)
 tmpfile.close()
\ No newline at end of file
-- 
GitLab


From ee7da76d3a193ba0f5245d0fb0619fd67f97ac17 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 28 May 2016 12:56:56 -0400
Subject: [PATCH 118/196] Renamed text for clarity

---
 testing/vcs/CMakeLists.txt                                    | 4 ++--
 .../vcs/{test_vcs_text_object.py => test_vcs_export_text.py}  | 0
 2 files changed, 2 insertions(+), 2 deletions(-)
 rename testing/vcs/{test_vcs_text_object.py => test_vcs_export_text.py} (100%)

diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index 873a17d63..43f6b5fe6 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -385,9 +385,9 @@ cdat_add_test(test_vcs_geometry
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_geometry.py
   )
-cdat_add_test(test_vcs_text_object
+cdat_add_test(test_vcs_export_text
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_text_object.py
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_export_text.py
   )
 ##############################################################################
 #
diff --git a/testing/vcs/test_vcs_text_object.py b/testing/vcs/test_vcs_export_text.py
similarity index 100%
rename from testing/vcs/test_vcs_text_object.py
rename to testing/vcs/test_vcs_export_text.py
-- 
GitLab


From a90c075415a47deaa6f1ba0a3ab04ff27ff0f8ba Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 28 May 2016 13:00:11 -0400
Subject: [PATCH 119/196] Added test for eps format

---
 testing/vcs/test_vcs_export_text.py | 16 +++++++++++++---
 1 file changed, 13 insertions(+), 3 deletions(-)

diff --git a/testing/vcs/test_vcs_export_text.py b/testing/vcs/test_vcs_export_text.py
index b98d7ec24..3e477b7d1 100644
--- a/testing/vcs/test_vcs_export_text.py
+++ b/testing/vcs/test_vcs_export_text.py
@@ -23,7 +23,7 @@ tmpfile.close()
 tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \
               prefix='textAsPathsFalse', delete=True)
 x.pdf(tmpfile.name, textAsPaths=False)
-tmpfile.close()
+# tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \
               prefix='textAsPathsTrue', delete=True )
@@ -32,10 +32,20 @@ tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \
               prefix='textAsPathsFalse', delete=True)
-x.pdf(tmpfile.name, textAsPaths=False)
+x.svg(tmpfile.name, textAsPaths=False)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \
               prefix='textAsPathsTrue', delete=True)
-x.pdf(tmpfile.name, textAsPaths=True)
+x.svg(tmpfile.name, textAsPaths=True)
+tmpfile.close()
+
+tmpfile = tempfile.NamedTemporaryFile(suffix='.eps', \
+              prefix='textAsPathsFalse', delete=True)
+x.eps(tmpfile.name, textAsPaths=False)
+tmpfile.close()
+
+tmpfile = tempfile.NamedTemporaryFile(suffix='.eps', \
+              prefix='textAsPathsTrue', delete=True)
+x.eps(tmpfile.name, textAsPaths=True)
 tmpfile.close()
\ No newline at end of file
-- 
GitLab


From f53c74a1fa1d04dfa5a3ea74df8f1169a52ee42f Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 28 May 2016 13:05:43 -0400
Subject: [PATCH 120/196] Fixed eps method runtime error

---
 Packages/vcs/vcs/Canvas.py          |  9 +++------
 testing/vcs/test_vcs_export_text.py | 22 +++++++++++-----------
 2 files changed, 14 insertions(+), 17 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 8a91c4ab6..07514563b 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -4913,6 +4913,7 @@ Options:::
             H = tmp
         return W, H
 
+
     def postscript(self, file, mode='r', orientation=None, width=None, height=None,
                    units='inches', textAsPaths=True):
         """
@@ -5330,8 +5331,7 @@ Options:::
     #                                                                        #
     ##########################################################################
     def eps(self, file, mode='r', orientation=None, width=None, height=None,
-            units='inches', left_margin=None, right_margin=None, top_margin=None,
-            bottom_margin=None, textAsPaths=True):
+            units='inches', textAsPaths=True):
         """
         Function: Encapsulated PostScript
 
@@ -5369,11 +5369,8 @@ Options:::
             width,
             height,
             units,
-            left_margin,
-            right_margin,
-            top_margin,
-            bottom_margin,
             textAsPaths)
+
         os.popen("ps2epsi %s %s" % (tmpfile, file)).readlines()
         os.remove(tmpfile)
 
diff --git a/testing/vcs/test_vcs_export_text.py b/testing/vcs/test_vcs_export_text.py
index 3e477b7d1..d4507e3d8 100644
--- a/testing/vcs/test_vcs_export_text.py
+++ b/testing/vcs/test_vcs_export_text.py
@@ -2,8 +2,8 @@ import cdms2, vcs, tempfile
 
 x = vcs.init(bg=1, geometry=(800, 600))
 txt = x.createtext()
-txt.x = [.0000005,.00000005,.5,.99999,.999999]
-txt.y = [0.05,.9,.5,.9,0.05]
+txt.x = [0.2, 0.2, 0.5, 0.8, 0.8]
+txt.y = [0.2, 0.8, 0.5, 0.8, 0.2]
 txt.string = ["SAMPLE TEXT A","SAMPLE TEXT B","SAMPLE TEXT C","SAMPLE TEXT D","SAMPLE TEXT E"]
 txt.halign = "center"
 txt.valign = "base"
@@ -11,41 +11,41 @@ txt.height = 10
 x.plot(txt)
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \
-              prefix='textAsPathsFalse', delete=True)
+              prefix='textAsPathsFalse', delete=False)
 x.postscript(tmpfile.name, textAsPaths=False)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \
-              prefix='textAsPathsTrue', delete=True)
+              prefix='textAsPathsTrue', delete=False)
 x.postscript(tmpfile.name, textAsPaths=True)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \
-              prefix='textAsPathsFalse', delete=True)
+              prefix='textAsPathsFalse', delete=False)
 x.pdf(tmpfile.name, textAsPaths=False)
-# tmpfile.close()
+tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \
-              prefix='textAsPathsTrue', delete=True )
+              prefix='textAsPathsTrue', delete=False)
 x.pdf(tmpfile.name, textAsPaths=True)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \
-              prefix='textAsPathsFalse', delete=True)
+              prefix='textAsPathsFalse', delete=False)
 x.svg(tmpfile.name, textAsPaths=False)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \
-              prefix='textAsPathsTrue', delete=True)
+              prefix='textAsPathsTrue', delete=False)
 x.svg(tmpfile.name, textAsPaths=True)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.eps', \
-              prefix='textAsPathsFalse', delete=True)
+              prefix='textAsPathsFalse', delete=False)
 x.eps(tmpfile.name, textAsPaths=False)
 tmpfile.close()
 
 tmpfile = tempfile.NamedTemporaryFile(suffix='.eps', \
-              prefix='textAsPathsTrue', delete=True)
+              prefix='textAsPathsTrue', delete=False)
 x.eps(tmpfile.name, textAsPaths=True)
 tmpfile.close()
\ No newline at end of file
-- 
GitLab


From a90268e4337be88ebc69adfcd9aeb12cb9a65e0d Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Tue, 31 May 2016 08:24:36 -0700
Subject: [PATCH 121/196] Fixed memory leak, added mintic

---
 Packages/vcsaddons/Lib/polar.py | 222 +++++++++++++++++++++++++-------
 1 file changed, 173 insertions(+), 49 deletions(-)

diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py
index 0b0f6a9fc..622b0d3a0 100644
--- a/Packages/vcsaddons/Lib/polar.py
+++ b/Packages/vcsaddons/Lib/polar.py
@@ -2,6 +2,7 @@ import vcs
 import numpy
 import vcsaddons
 
+
 def circle_points(center, radius, points=75, ratio=1):
     """
     Generates the coordinates of a circle in x list and y list.
@@ -22,37 +23,6 @@ def circle_points(center, radius, points=75, ratio=1):
     return x, y
 
 
-def text_orientation_for_angle(theta, source="default"):
-    """
-    Generates a text orientation that will align text to look good depending on quadrant.
-    """
-    # Normalize to [0, 2*pi)
-    while 0 > theta:
-        theta += 2 * numpy.pi
-    while 2 * numpy.pi <= theta:
-        theta -= 2 * numpy.pi
-
-    if 0 < theta < numpy.pi:
-        valign = "bottom"
-    elif 0 == theta or numpy.pi == theta:
-        valign = "half"
-    else:
-        valign = "top"
-
-    if numpy.pi / 2 > theta or numpy.pi * 3 / 2 < theta:
-        halign = "left"
-    elif numpy.allclose(numpy.pi / 2, theta) or numpy.allclose(numpy.pi * 3 / 2, theta):
-        halign = "center"
-    else:
-        halign = "right"
-
-    # Build new text table
-    to = vcs.createtextorientation(source=source)
-    to.valign = valign
-    to.halign = halign
-    return to
-
-
 def convert_arrays(var, theta):
     """
     Normalizes valid input options to two lists of lists of values and a list of names.
@@ -172,12 +142,21 @@ class Gpo(vcsaddons.core.VCSaddon):
             self.markersizes = [3]
             self.markercolors = ["black"]
             self.markers = ["dot"]
+            self.markercolorsource = "group"
             self.clockwise = False
             self.theta_offset = 0
             self.magnitude_ticks = "*"
+            self.magnitude_mintics = None
             self.magnitude_tick_angle = 0
             self.theta_tick_count = 6
             self.group_names = []
+            self.draw_lines = False
+            self.connect_groups = False
+            self.linecolors = ["black"]
+            self.lines = ["solid"]
+            self.linewidths = [1]
+            self.markerpriority = 2
+            self.linepriority = 1
             # Nice default labels
             self.xticlabels1 = {
                 0: "0 (2pi)",
@@ -197,12 +176,59 @@ class Gpo(vcsaddons.core.VCSaddon):
             self.markersizes = gm.markersizes
             self.markercolors = gm.markercolors
             self.markers = gm.markers
+            self.markercolorsource = gm.markercolorsource
+            self.markerpriority = gm.markerpriority
             self.clockwise = gm.clockwise
+            self.draw_lines = gm.draw_lines
+            self.linecolors = gm.linecolors
+            self.linewidths = gm.linewidths
+            self.linepriority = gm.linepriority
+            self.lines = gm.lines
+            self.connect_groups = gm.connect_groups
             self.theta_offset = gm.theta_offset
             self.magnitude_ticks = gm.magnitude_ticks
+            self.magnitude_mintics = gm.magnitude_mintics
             self.magnitude_tick_angle = gm.magnitude_tick_angle
             self.theta_tick_count = gm.theta_tick_count
             self.group_names = gm.group_names
+        self.to_cleanup = []
+
+    def create_text(self, tt, to):
+        tc = vcs.createtext(Tt_source=tt, To_source=to)
+        self.to_cleanup.append(tc.Tt)
+        self.to_cleanup.append(tc.To)
+        return tc
+
+    def text_orientation_for_angle(self, theta, source="default"):
+        """
+        Generates a text orientation that will align text to look good depending on quadrant.
+        """
+        # Normalize to [0, 2*pi)
+        while 0 > theta:
+            theta += 2 * numpy.pi
+        while 2 * numpy.pi <= theta:
+            theta -= 2 * numpy.pi
+
+        if 0 < theta < numpy.pi:
+            valign = "bottom"
+        elif 0 == theta or numpy.pi == theta:
+            valign = "half"
+        else:
+            valign = "top"
+
+        if numpy.pi / 2 > theta or numpy.pi * 3 / 2 < theta:
+            halign = "left"
+        elif numpy.allclose(numpy.pi / 2, theta) or numpy.allclose(numpy.pi * 3 / 2, theta):
+            halign = "center"
+        else:
+            halign = "right"
+
+        # Build new text table
+        to = vcs.createtextorientation(source=source)
+        to.valign = valign
+        to.halign = halign
+        self.to_cleanup.append(to)
+        return to
 
     def magnitude_from_value(self, value, minmax):
         if numpy.allclose((self.datawc_y1, self.datawc_y2), 1e20):
@@ -244,6 +270,9 @@ class Gpo(vcsaddons.core.VCSaddon):
         if template is None:
             template = self.template
 
+        if self.markercolorsource.lower() not in ("group", "magnitude", "theta"):
+            raise ValueError("polar.markercolorsource must be one of: 'group', 'magnitude', 'theta'")
+
         magnitudes, thetas, names = convert_arrays(var, theta)
         if self.group_names:
             names = self.group_names
@@ -306,8 +335,8 @@ class Gpo(vcsaddons.core.VCSaddon):
             m_ticks.y = []
 
             if template.ylabel1.priority > 0:
-                to = text_orientation_for_angle(self.magnitude_tick_angle, source=template.ylabel1.textorientation)
-                m_labels = vcs.createtext(Tt_source=template.ylabel1.texttable, To_source=to)
+                to = self.text_orientation_for_angle(self.magnitude_tick_angle, source=template.ylabel1.textorientation)
+                m_labels = self.create_text(template.ylabel1.texttable, to)
                 m_labels.x = []
                 m_labels.y = []
                 m_labels.string = []
@@ -334,23 +363,40 @@ class Gpo(vcsaddons.core.VCSaddon):
                 canvas.plot(m_labels, **plot_kwargs)
                 del vcs.elements["textcombined"][m_labels.name]
 
+        if template.ymintic1.priority > 0 and self.magnitude_mintics is not None:
+            mag_mintics = vcs.createline(source=template.ymintic1.line)
+            mag_mintics.x = []
+            mag_mintics.y = []
+
+            mintics = self.magnitude_mintics
+            if isinstance(mintics, (str, unicode)):
+                mintics = vcs.elements["list"][mintics]
+
+            for mag in mintics:
+                mintic_radius = radius * self.magnitude_from_value(mag, (m_scale[0], m_scale[-1]))
+                x, y = circle_points(center, mintic_radius, ratio=window_aspect)
+                mag_mintics.x.append(x)
+                mag_mintics.y.append(y)
+            canvas.plot(mag_mintics, **plot_kwargs)
+            del vcs.elements["line"][mag_mintics.name]
+
+        if self.xticlabels1 == "*":
+            if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20):
+                tick_thetas = list(numpy.arange(0, numpy.pi * 2, numpy.pi / 4))
+                tick_labels = {t: str(t) for t in tick_thetas}
+            else:
+                d_theta = (self.datawc_x2 - self.datawc_x1) / float(self.theta_tick_count)
+                tick_thetas = numpy.arange(self.datawc_x1, self.datawc_x2 + .0001, d_theta)
+                tick_labels = vcs.mklabels(tick_thetas)
+        else:
+            tick_thetas = self.xticlabels1.keys()
+            tick_labels = self.xticlabels1
+
         if template.xtic1.priority > 0:
             t_ticks = vcs.createline(source=template.xtic1.line)
             t_ticks.x = []
             t_ticks.y = []
 
-            if self.xticlabels1 == "*":
-                if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20):
-                    tick_thetas = list(numpy.arange(0, numpy.pi * 2, numpy.pi / 4))
-                    tick_labels = {t: str(t) for t in tick_thetas}
-                else:
-                    d_theta = (self.datawc_x2 - self.datawc_x1) / float(self.theta_tick_count)
-                    tick_thetas = numpy.arange(self.datawc_x1, self.datawc_x2 + .0001, d_theta)
-                    tick_labels = vcs.mklabels(tick_thetas)
-            else:
-                tick_thetas = self.xticlabels1.keys()
-                tick_labels = self.xticlabels1
-
             if template.xlabel1.priority > 0:
                 t_labels = []
                 theta_labels = tick_labels
@@ -364,8 +410,7 @@ class Gpo(vcsaddons.core.VCSaddon):
                 y0 = center[1] + (ymul * radius * numpy.sin(angle))
                 y1 = center[1]
                 if t_labels is not None:
-                    label = vcs.createtext(Tt_source=template.xlabel1.texttable,
-                                           To_source=text_orientation_for_angle(angle, source=template.xlabel1.textorientation))
+                    label = self.create_text(template.xlabel1.texttable, self.text_orientation_for_angle(angle, source=template.xlabel1.textorientation))
                     label.string = [theta_labels[t]]
                     label.x = [x0]
                     label.y = [y0]
@@ -384,17 +429,65 @@ class Gpo(vcsaddons.core.VCSaddon):
         values.size = self.markersizes
         values.color = self.markercolors
         values.colormap = self.colormap
+        values.priority = self.markerpriority
         values.x = []
         values.y = []
 
         if template.legend.priority > 0:
             # Only labels that are set will show up in the legend
             label_count = len(names) - len([i for i in names if i is None])
-            labels = vcs.createtext(Tt_source=template.legend.texttable, To_source=template.legend.textorientation)
+            labels = self.create_text(template.legend.texttable, template.legend.textorientation)
             labels.x = []
             labels.y = []
             labels.string = []
 
+        if self.draw_lines:
+            line = vcs.createline()
+            line.x = []
+            line.y = []
+            line.type = self.lines
+            line.color = self.linecolors if self.linecolors is not None else self.markercolors
+            line.width = self.linewidths
+            line.priority = self.linepriority
+
+            # This is up here because when it's part of the main loop, we can lose "order" of points when we flatten them.
+            for mag, theta in zip(magnitudes, thetas):
+                x = []
+                y = []
+
+                for m, t in zip(mag, theta):
+                    t = self.theta_from_value(t)
+                    r = self.magnitude_from_value(m, (m_scale[0], m_scale[-1])) * radius
+                    x.append(xmul * numpy.cos(t) * r + center[0])
+                    y.append(ymul * numpy.sin(t) * r + center[1])
+
+                if self.connect_groups:
+                    line.x.extend(x)
+                    line.y.extend(y)
+                else:
+                    line.x.append(x)
+                    line.y.append(y)
+
+        if self.markercolorsource.lower() in ('magnitude', "theta"):
+            # Regroup the values using the appropriate metric
+
+            mag_flat = numpy.array(magnitudes).flatten()
+            theta_flat = numpy.array(thetas).flatten()
+
+            if self.markercolorsource.lower() == "magnitude":
+                scale = m_scale
+                vals = mag_flat
+            else:
+                scale = theta_ticks
+                vals = theta_flat
+
+            indices = [numpy.where(numpy.logical_and(vals >= scale[i], vals <= scale[i + 1])) for i in range(len(scale) - 1)]
+            magnitudes = [mag_flat[inds] for inds in indices]
+            thetas = [theta_flat[inds] for inds in indices]
+            names = vcs.mklabels(scale, output="list")
+            names = [names[i] + " - " + names[i + 1] for i in range(len(names) - 1)]
+            label_count = len(names)
+
         for mag, theta, name in zip(magnitudes, thetas, names):
             x = []
             y = []
@@ -410,13 +503,44 @@ class Gpo(vcsaddons.core.VCSaddon):
                 y.append(ly)
                 labels.x.append(lx + .01)
                 labels.y.append(ly)
-                labels.string.append(name)
+                labels.string.append(str(name))
             values.x.append(x)
             values.y.append(y)
 
         if template.legend.priority > 0:
             canvas.plot(labels, **plot_kwargs)
             del vcs.elements["textcombined"][labels.name]
+        if self.draw_lines:
+            canvas.plot(line, **plot_kwargs)
+            del vcs.elements["line"][line.name]
+
+        for el in self.to_cleanup:
+            if vcs.istexttable(el):
+                if el.name in vcs.elements["texttable"]:
+                    del vcs.elements["texttable"][el.name]
+            else:
+                if el.name in vcs.elements["textorientation"]:
+                    del vcs.elements["textorientation"][el.name]
+        self.to_cleanup = []
+
+        # Prune unneeded levels from values
+        to_prune = []
+        for ind, (x, y) in enumerate(zip(values.x, values.y)):
+            if x and y:
+                continue
+            else:
+                to_prune.append(ind)
+
+        for prune_ind in to_prune[::-1]:
+            del values.x[prune_ind]
+            del values.y[prune_ind]
+            if len(values.color) > prune_ind and len(values.color) > 1:
+                del values.color[prune_ind]
+            if len(values.size) > prune_ind and len(values.size) > 1:
+                del values.size[prune_ind]
+            if len(values.type) > prune_ind and len(values.type) > 1:
+                del values.type[prune_ind]
+
         canvas.plot(values, bg=bg, donotstoredisplay=True)
         del vcs.elements["marker"][values.name]
         return canvas
-- 
GitLab


From 9196c2c16a5c73c11ebfc3c8951b9098919a9f42 Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Mon, 23 May 2016 13:59:03 -0400
Subject: [PATCH 122/196] BUG #1985: orthographic projection plot is empty

This is because proj4 sets points that are not visible to infinity.
We set those points to 0 and hide them.
---
 Packages/vcs/vcs/Canvas.py                    |   8 +-
 Packages/vcs/vcs/VTKPlots.py                  |   4 +-
 Packages/vcs/vcs/projection.py                |  12 +-
 Packages/vcs/vcs/vcs2vtk.py                   | 124 +++++++++++++-----
 Packages/vcs/vcs/vcsvtk/boxfillpipeline.py    |   3 +-
 Packages/vcs/vcs/vcsvtk/isofillpipeline.py    |   3 +-
 Packages/vcs/vcs/vcsvtk/isolinepipeline.py    |   3 +-
 Packages/vcs/vcs/vcsvtk/meshfillpipeline.py   |   3 +-
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py     |   3 +-
 testing/vcs/CMakeLists.txt                    |  25 +++-
 testing/vcs/test_vcs_boxfill_orthographic.py  |  21 +++
 ...olar.py => test_vcs_boxfill_projection.py} |  10 +-
 12 files changed, 160 insertions(+), 59 deletions(-)
 create mode 100644 testing/vcs/test_vcs_boxfill_orthographic.py
 rename testing/vcs/{test_vcs_boxfill_polar.py => test_vcs_boxfill_projection.py} (64%)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 49361248c..e95609e3e 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -71,7 +71,7 @@ canvas_closed = 0
 import vcsaddons  # noqa
 import vcs.manageElements  # noqa
 import configurator  # noqa
-from projection import round_projections  # noqa
+from projection import no_deformation_projections  # noqa
 
 # Python < 3 DeprecationWarning ignored by default
 warnings.simplefilter('default')
@@ -3497,7 +3497,7 @@ Options:::
                 if hasattr(gm, "priority") and gm.priority == 0:
                     return
             p = self.getprojection(gm.projection)
-            if p.type in round_projections and (
+            if p.type in no_deformation_projections and (
                     doratio == "0" or doratio[:4] == "auto"):
                 doratio = "1t"
             for keyarg in keyargs.keys():
@@ -3554,7 +3554,7 @@ Options:::
                 t.data.y2 = p.viewport[3]
 
                 proj = self.getprojection(p.projection)
-                if proj.type in round_projections and (
+                if proj.type in no_deformation_projections and (
                         doratio == "0" or doratio[:4] == "auto"):
                     doratio = "1t"
 
@@ -3610,7 +3610,7 @@ Options:::
                         tp = "textcombined"
                     gm = vcs.elements[tp][arglist[4]]
                 p = self.getprojection(gm.projection)
-                if p.type in round_projections:
+                if p.type in no_deformation_projections:
                     doratio = "1t"
                 if p.type == 'linear':
                     if gm.g_name == 'Gfm':
diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 814719536..9d3d85c74 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -597,6 +597,7 @@ class VTKVCSBackend(object):
 
         vtk_backend_grid = kargs.get("vtk_backend_grid", None)
         vtk_backend_geo = kargs.get("vtk_backend_geo", None)
+        bounds = vtk_backend_grid.GetBounds() if vtk_backend_grid else None
 
         pipeline = vcsvtk.createPipeline(gm, self)
         if pipeline is not None:
@@ -626,7 +627,7 @@ class VTKVCSBackend(object):
                     ren,
                     to=to,
                     tt=tt,
-                    cmap=self.canvas.colormap)
+                    cmap=self.canvas.colormap, geoBounds=bounds, geo=vtk_backend_geo)
                 self.setLayer(ren, tt.priority)
                 self.text_renderers[tt_key] = ren
         elif gtype == "line":
@@ -635,7 +636,6 @@ class VTKVCSBackend(object):
                                           cmap=self.canvas.colormap)
                 returned["vtk_backend_line_actors"] = actors
                 create_renderer = True
-                bounds = vtk_backend_grid.GetBounds() if vtk_backend_grid else None
                 for act, geo in actors:
                     ren = self.fitToViewport(
                         act,
diff --git a/Packages/vcs/vcs/projection.py b/Packages/vcs/vcs/projection.py
index a8476a989..6b19c5e62 100644
--- a/Packages/vcs/vcs/projection.py
+++ b/Packages/vcs/vcs/projection.py
@@ -16,13 +16,15 @@ import VCS_validation_functions
 import vcs
 import copy
 
-# projection that seems to be doing a circle
-# We will probably to add some more in it as we find more that fit this
-round_projections = ['polar (non gctp)', 'stereographic',
-                     'orthographic', "ortho", ]
+# used to decide if we show longitude labels for round projections or
+# latitude labels for elliptical projections
+round_projections = ['polar (non gctp)', 'stereographic']
+elliptical_projections = ["robinson", "mollweide", 'orthographic', "ortho"]
+# projections in this list are not deformed based on the window size
+no_deformation_projections = ['polar (non gctp)', 'stereographic',
+                              'orthographic', "ortho", ]
 
 no_over_proj4_parameter_projections = round_projections+["aeqd", "lambert conformal c"]
-elliptical_projections = ["robinson", "mollweide"]
 
 
 def process_src(nm, code):
diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py
index 81142492c..27fe8f16b 100644
--- a/Packages/vcs/vcs/vcs2vtk.py
+++ b/Packages/vcs/vcs/vcs2vtk.py
@@ -4,12 +4,14 @@ import vtk
 import numpy
 import json
 import os
+import math
 import meshfill
 from vtk.util import numpy_support as VN
 import cdms2
 import warnings
 from projection import round_projections, no_over_proj4_parameter_projections
 from vcsvtk import fillareautils
+import sys
 import numbers
 
 f = open(os.path.join(vcs.prefix, "share", "vcs", "wmo_symbols.json"))
@@ -220,6 +222,34 @@ def getBoundsList(axis, hasCellData, dualGrid):
         return None
 
 
+def setInfToValid(geoPoints, ghost):
+    '''
+    Set infinity points to a point that already exists in the list.
+    If a ghost array is passed, we also hide infinity points.
+    We return true if any points are infinity
+    '''
+    anyInfinity = False
+    validPoint = [0, 0, 0]
+    for i in range(geoPoints.GetNumberOfPoints()):
+        point = geoPoints.GetPoint(i)
+        if (not math.isinf(point[0]) and not math.isinf(point[1])):
+            validPoint[0] = point[0]
+            validPoint[1] = point[1]
+            break
+    for i in range(geoPoints.GetNumberOfPoints()):
+        point = geoPoints.GetPoint(i)
+        if (math.isinf(point[0]) or math.isinf(point[1])):
+            anyInfinity = True
+            newPoint = list(point)
+            if (math.isinf(point[0])):
+                newPoint[0] = validPoint[0]
+            if (math.isinf(point[1])):
+                newPoint[1] = validPoint[1]
+            geoPoints.SetPoint(i, newPoint)
+            ghost.SetValue(i, vtk.vtkDataSetAttributes.HIDDENPOINT)
+    return anyInfinity
+
+
 def genGrid(data1, data2, gm, deep=True, grid=None, geo=None, genVectors=False,
             dualGrid=False):
     continents = False
@@ -444,6 +474,25 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None, genVectors=False,
                                                data1.getAxis(-2))
         geo, geopts = project(pts, projection, getWrappedBounds(
             wc, [xm, xM, ym, yM], wrap))
+        # proj4 returns inf for points that are not visible. Set those to a valid point
+        # and hide them.
+        ghost = vg.AllocatePointGhostArray()
+        if (setInfToValid(geopts, ghost)):
+            # if there are hidden points, we recompute the bounds
+            xm = ym = sys.float_info.max
+            xM = yM = - sys.float_info.max
+            for i in range(pts.GetNumberOfPoints()):
+                if (ghost.GetValue(i) & vtk.vtkDataSetAttributes.HIDDENPOINT == 0):
+                    # point not hidden
+                    p = pts.GetPoint(i)
+                    if (p[0] < xm):
+                        xm = p[0]
+                    if (p[0] > xM):
+                        xM = p[0]
+                    if (p[1] < ym):
+                        ym = p[1]
+                    if (p[1] > yM):
+                        yM = p[1]
         # Sets the vertics into the grid
         vg.SetPoints(geopts)
     else:
@@ -557,24 +606,42 @@ def apply_proj_parameters(pd, projection, x1, x2, y1, y2):
         else:
             pd.SetOptionalParameter("over", "false")
             setProjectionParameters(pd, projection)
-        if (hasattr(projection, 'centralmeridian') and
-                numpy.allclose(projection.centralmeridian, 1e+20)):
-            pd.SetCentralMeridian(float(x1 + x2) / 2.0)
-        if (hasattr(projection, 'centerlongitude') and
-                numpy.allclose(projection.centerlongitude, 1e+20)):
-            pd.SetOptionalParameter("lon_0", str(float(x1 + x2) / 2.0))
-        if (hasattr(projection, 'originlatitude') and
-                numpy.allclose(projection.originlatitude, 1e+20)):
-            pd.SetOptionalParameter("lat_0", str(float(y1 + y2) / 2.0))
-        if (hasattr(projection, 'centerlatitude') and
-                numpy.allclose(projection.centerlatitude, 1e+20)):
-            pd.SetOptionalParameter("lat_0", str(float(y1 + y2) / 2.0))
-        if (hasattr(projection, 'standardparallel1') and
-                numpy.allclose(projection.standardparallel1, 1.e20)):
-            pd.SetOptionalParameter('lat_1', str(min(y1, y2)))
-        if (hasattr(projection, 'standardparallel2') and
-                numpy.allclose(projection.standardparallel2, 1.e20)):
-            pd.SetOptionalParameter('lat_2', str(max(y1, y2)))
+        if (hasattr(projection, 'centralmeridian')):
+            if (numpy.allclose(projection.centralmeridian, 1e+20)):
+                centralmeridian = float(x1 + x2) / 2.0
+            else:
+                centralmeridian = projection.centralmeridian
+            pd.SetCentralMeridian(centralmeridian)
+        if (hasattr(projection, 'centerlongitude')):
+            if (numpy.allclose(projection.centerlongitude, 1e+20)):
+                centerlongitude = float(x1 + x2) / 2.0
+            else:
+                centerlongitude = projection.centerlongitude
+            pd.SetOptionalParameter("lon_0", str(centerlongitude))
+        if (hasattr(projection, 'originlatitude')):
+            if (numpy.allclose(projection.originlatitude, 1e+20)):
+                originlatitude = float(y1 + y2) / 2.0
+            else:
+                originlatitude = projection.originlatitude
+            pd.SetOptionalParameter("lat_0", str(originlatitude))
+        if (hasattr(projection, 'centerlatitude')):
+            if (numpy.allclose(projection.centerlatitude, 1e+20)):
+                centerlatitude = float(y1 + y2) / 2.0
+            else:
+                centerlatitude = projection.centerlatitude
+            pd.SetOptionalParameter("lat_0", str(centerlatitude))
+        if (hasattr(projection, 'standardparallel1')):
+            if (numpy.allclose(projection.standardparallel1, 1.e20)):
+                standardparallel1 = min(y1, y2)
+            else:
+                standardparallel1 = projection.standardparallel1
+            pd.SetOptionalParameter('lat_1', str(standardparallel1))
+        if (hasattr(projection, 'standardparallel2')):
+            if (numpy.allclose(projection.standardparallel2, 1.e20)):
+                standardparallel2 = max(y1, y2)
+            else:
+                standardparallel2 = projection.standardparallel2
+            pd.SetOptionalParameter('lat_2', str(standardparallel2))
 
 
 def projectArray(w, projection, wc, geo=None):
@@ -1072,7 +1139,7 @@ def prepTextProperty(p, winSize, to="default", tt="default", cmap=None,
 
 
 def genTextActor(renderer, string=None, x=None, y=None,
-                 to='default', tt='default', cmap=None):
+                 to='default', tt='default', cmap=None, geoBounds=None, geo=None):
     if isinstance(to, str):
         to = vcs.elements["textorientation"][to]
     if isinstance(tt, str):
@@ -1096,21 +1163,8 @@ def genTextActor(renderer, string=None, x=None, y=None,
     sz = renderer.GetRenderWindow().GetSize()
     actors = []
     pts = vtk.vtkPoints()
-    geo = None
     if vcs.elements["projection"][tt.projection].type != "linear":
-            # Need to figure out new WC
-        Npts = 20
-        for i in range(Npts + 1):
-            X = tt.worldcoordinate[
-                0] + float(i) / Npts * (tt.worldcoordinate[1] -
-                                        tt.worldcoordinate[0])
-            for j in range(Npts + 1):
-                Y = tt.worldcoordinate[
-                    2] + float(j) / Npts * (tt.worldcoordinate[3] -
-                                            tt.worldcoordinate[2])
-                pts.InsertNextPoint(X, Y, 0.)
-        geo, pts = project(pts, tt.projection, tt.worldcoordinate, geo=None)
-        wc = pts.GetBounds()[:4]
+        wc = geoBounds[:4]
         # renderer.SetViewport(tt.viewport[0],tt.viewport[2],tt.viewport[1],tt.viewport[3])
         renderer.SetWorldPoint(wc)
 
@@ -1120,8 +1174,8 @@ def genTextActor(renderer, string=None, x=None, y=None,
         prepTextProperty(p, sz, to, tt, cmap)
         pts = vtk.vtkPoints()
         pts.InsertNextPoint(x[i], y[i], 0.)
-        if geo is not None:
-            geo, pts = project(pts, tt.projection, tt.worldcoordinate, geo=geo)
+        if vcs.elements["projection"][tt.projection].type != "linear":
+            _, pts = project(pts, tt.projection, tt.worldcoordinate, geo=geo)
             X, Y, tz = pts.GetPoint(0)
             X, Y = world2Renderer(renderer, X, Y, tt.viewport, wc)
         else:
diff --git a/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py b/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
index f2a3ea602..005241b4a 100644
--- a/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
@@ -152,7 +152,8 @@ class BoxfillPipeline(Pipeline2D):
             z = None
         kwargs = {"vtk_backend_grid": self._vtkDataSet,
                   "dataset_bounds": self._vtkDataSetBounds,
-                  "plotting_dataset_bounds": plotting_dataset_bounds}
+                  "plotting_dataset_bounds": plotting_dataset_bounds,
+                  "vtk_backend_geo": self._vtkGeoTransform}
         if ("ratio_autot_viewport" in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
         self._resultDict.update(self._context().renderTemplate(
diff --git a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
index 55098f9e5..887c6158c 100644
--- a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
@@ -176,7 +176,8 @@ class IsofillPipeline(Pipeline2D):
             z = None
         kwargs = {"vtk_backend_grid": self._vtkDataSet,
                   "dataset_bounds": self._vtkDataSetBounds,
-                  "plotting_dataset_bounds": plotting_dataset_bounds}
+                  "plotting_dataset_bounds": plotting_dataset_bounds,
+                  "vtk_backend_geo": self._vtkGeoTransform}
         if ("ratio_autot_viewport" in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
         self._resultDict.update(self._context().renderTemplate(
diff --git a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
index 3406824f0..4cc1519a0 100644
--- a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
@@ -311,7 +311,8 @@ class IsolinePipeline(Pipeline2D):
             z = None
         kwargs = {"vtk_backend_grid": self._vtkDataSet,
                   "dataset_bounds": self._vtkDataSetBounds,
-                  "plotting_dataset_bounds": plotting_dataset_bounds}
+                  "plotting_dataset_bounds": plotting_dataset_bounds,
+                  "vtk_backend_geo": self._vtkGeoTransform}
         if ("ratio_autot_viewport" in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
         self._resultDict.update(self._context().renderTemplate(
diff --git a/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py b/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
index 64a95c4e3..49320aff9 100644
--- a/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
@@ -210,7 +210,8 @@ class MeshfillPipeline(Pipeline2D):
         self._resultDict["vtk_backend_actors"] = actors
         kwargs = {"vtk_backend_grid": self._vtkDataSet,
                   "dataset_bounds": self._vtkDataSetBounds,
-                  "plotting_dataset_bounds": plotting_dataset_bounds}
+                  "plotting_dataset_bounds": plotting_dataset_bounds,
+                  "vtk_backend_geo": self._vtkGeoTransform}
         if ("ratio_autot_viewport" in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
         self._template.plot(self._context().canvas, self._data1, self._gm,
diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index 642884bc6..c471a6fa9 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -216,7 +216,8 @@ class VectorPipeline(Pipeline2D):
             create_renderer=True)
         kwargs = {'vtk_backend_grid': self._vtkDataSet,
                   'dataset_bounds': self._vtkDataSetBounds,
-                  'plotting_dataset_bounds': plotting_dataset_bounds}
+                  'plotting_dataset_bounds': plotting_dataset_bounds,
+                  'vtk_backend_geo': self._vtkGeoTransform}
         if ('ratio_autot_viewport' in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
         self._resultDict.update(self._context().renderTemplate(
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index bee8b9a45..b1e6247e7 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -11,11 +11,26 @@ cdat_add_test(test_vcs_bad_png_path
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_bad_png_path.py
 )
-cdat_add_test(test_vcs_boxfill_polar
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_polar.py
-  "${BASELINE_DIR}/test_vcs_boxfill_polar.png"
-  )
+
+foreach(projection polar mollweide lambert orthographic mercator polyconic robinson)
+  cdat_add_test(test_vcs_boxfill_${projection}
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_projection.py
+    "${BASELINE_DIR}/test_vcs_boxfill_${projection}.png"
+    ${projection}
+    )
+endforeach()
+
+foreach(lat_0 45 90)
+  cdat_add_test(test_vcs_boxfill_orthographic_${lat_0}
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_orthographic.py
+    "${BASELINE_DIR}/test_vcs_boxfill_orthographic_${lat_0}.png"
+    ${lat_0}
+    )
+endforeach()
+
+
 cdat_add_test(test_vcs_create_get
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_create_get.py
diff --git a/testing/vcs/test_vcs_boxfill_orthographic.py b/testing/vcs/test_vcs_boxfill_orthographic.py
new file mode 100644
index 000000000..b0ebbb3a0
--- /dev/null
+++ b/testing/vcs/test_vcs_boxfill_orthographic.py
@@ -0,0 +1,21 @@
+import os, sys, cdms2, vcs, testing.regression as regression
+
+baselineName = sys.argv[1]
+centerlatitude = float(sys.argv[2])
+
+
+f = cdms2.open(vcs.sample_data + "/clt.nc")
+a = f("clt")
+
+x = regression.init()
+p = x.getprojection('orthographic')
+p.centerlatitude = centerlatitude
+b = x.createboxfill()
+b.projection = p
+x.plot(a(latitude=(90,-90)), b, bg=1)
+
+fileName = os.path.basename(baselineName)
+fileName = os.path.splitext(fileName)[0]
+fileName += '.png'
+
+regression.run(x, fileName)
diff --git a/testing/vcs/test_vcs_boxfill_polar.py b/testing/vcs/test_vcs_boxfill_projection.py
similarity index 64%
rename from testing/vcs/test_vcs_boxfill_polar.py
rename to testing/vcs/test_vcs_boxfill_projection.py
index 869d09802..6f319efd4 100644
--- a/testing/vcs/test_vcs_boxfill_polar.py
+++ b/testing/vcs/test_vcs_boxfill_projection.py
@@ -1,16 +1,20 @@
 import os, sys, cdms2, vcs, testing.regression as regression
 
+baselineName = sys.argv[1]
+projection = sys.argv[2]
+
 
 f = cdms2.open(vcs.sample_data + "/clt.nc")
 a = f("clt")
 
 x = regression.init()
-p = x.getprojection("polar")
+p = x.getprojection(projection)
 b = x.createboxfill()
 b.projection = p
 x.plot(a(latitude=(90,-90)), b, bg=1)
 
-fileName = os.path.basename(__file__)
+fileName = os.path.basename(baselineName)
 fileName = os.path.splitext(fileName)[0]
 fileName += '.png'
-regression.run(x, fileName)
\ No newline at end of file
+
+regression.run(x, fileName)
-- 
GitLab


From e489972640e0d824181550c208c82f2fcac1fffa Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Tue, 24 May 2016 17:44:23 -0400
Subject: [PATCH 123/196] Remove code that results in non-deterministic
 behavior for test_vcs_boxfill_mercator

We deal with points set to infinity by setting them to 0 and hiding them.
See the following commit: BUG: orthographic projection plot is empty
---
 Packages/vcs/vcs/vcs2vtk.py | 21 ---------------------
 1 file changed, 21 deletions(-)

diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py
index 27fe8f16b..686b1a677 100644
--- a/Packages/vcs/vcs/vcs2vtk.py
+++ b/Packages/vcs/vcs/vcs2vtk.py
@@ -165,23 +165,6 @@ def putMaskOnVTKGrid(data, grid, actorColor=None, cellData=True, deep=True):
     return mapper
 
 
-def handleProjectionEdgeCases(projection, data):
-    # For mercator projection, latitude values of -90 or 90
-    # transformation result in infinity values. We chose -85, 85
-    # as that's the typical limit used by the community.
-    ptype = projDict.get(projection._type, projection.type)
-    if (ptype.lower() == "merc"):
-        lat = data.getLatitude()
-        if isinstance(lat, cdms2.axis.TransientAxis):
-            lat = lat[:]
-            # Reverse the latitudes incase the starting latitude is greater
-            # than the ending one
-            if lat[-1] < lat[0]:
-                lat = lat[::-1]
-        data = data(latitude=(max(-85, lat.min()), min(85, lat.max())))
-    return data
-
-
 def getBoundsList(axis, hasCellData, dualGrid):
     '''
     Returns the bounds list for 'axis'. If axis has n elements the
@@ -260,10 +243,6 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None, genVectors=False,
     xm, xM, ym, yM = None, None, None, None
     projection = vcs.elements["projection"][gm.projection]
 
-    data1 = handleProjectionEdgeCases(projection, data1)
-    if data2 is not None:
-        data2 = handleProjectionEdgeCases(projection, data2)
-
     try:  # First try to see if we can get a mesh out of this
         g = data1.getGrid()
         # Ok need unstructured grid
-- 
GitLab


From b95883e9a640c83ac1a52b156238a72fa493fc8a Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Tue, 31 May 2016 12:03:05 -0700
Subject: [PATCH 124/196] Renamed to match scheme

---
 testing/vcsaddons/CMakeLists.txt              | 38 +++++++++----------
 ...s.py => test_vcs_addons_convert_arrays.py} |  0
 ... => test_vcs_addons_histogram_defaults.py} |  0
 ...y => test_vcs_addons_histogram_inherit.py} |  0
 ...test_polar.py => test_vcs_addons_polar.py} |  0
 ...ual.py => test_vcs_addons_polar_annual.py} |  0
 ...es.py => test_vcs_addons_polar_degrees.py} |  0
 ...al.py => test_vcs_addons_polar_diurnal.py} |  0
 ...it.py => test_vcs_addons_polar_inherit.py} |  0
 ...l.py => test_vcs_addons_polar_seasonal.py} |  0
 ...y => test_vcs_addons_polar_semidiurnal.py} |  0
 11 files changed, 19 insertions(+), 19 deletions(-)
 rename testing/vcsaddons/{vcs_addons_test_convert_arrays.py => test_vcs_addons_convert_arrays.py} (100%)
 rename testing/vcsaddons/{vcs_addons_test_histogram_defaults.py => test_vcs_addons_histogram_defaults.py} (100%)
 rename testing/vcsaddons/{vcs_addons_test_histogram_inherit.py => test_vcs_addons_histogram_inherit.py} (100%)
 rename testing/vcsaddons/{vcs_addons_test_polar.py => test_vcs_addons_polar.py} (100%)
 rename testing/vcsaddons/{vcs_addons_test_polar_annual.py => test_vcs_addons_polar_annual.py} (100%)
 rename testing/vcsaddons/{vcs_addons_test_polar_degrees.py => test_vcs_addons_polar_degrees.py} (100%)
 rename testing/vcsaddons/{vcs_addons_test_polar_diurnal.py => test_vcs_addons_polar_diurnal.py} (100%)
 rename testing/vcsaddons/{vcs_addons_test_polar_inherit.py => test_vcs_addons_polar_inherit.py} (100%)
 rename testing/vcsaddons/{vcs_addons_test_polar_seasonal.py => test_vcs_addons_polar_seasonal.py} (100%)
 rename testing/vcsaddons/{vcs_addons_test_polar_semidiurnal.py => test_vcs_addons_polar_semidiurnal.py} (100%)

diff --git a/testing/vcsaddons/CMakeLists.txt b/testing/vcsaddons/CMakeLists.txt
index a0fd4e90b..d6b382faf 100644
--- a/testing/vcsaddons/CMakeLists.txt
+++ b/testing/vcsaddons/CMakeLists.txt
@@ -37,52 +37,52 @@ cdat_add_test(vcs_addons_test_EzTemplate_12_plots_spacing
 )
 cdat_add_test(vcs_addons_test_histogram_defaults
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_histogram_defaults.py
-  ${BASELINE_DIR}/vcs_addons_test_histogram_defaults.png
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
+  ${BASELINE_DIR}/test_vcs_addons_histogram_defaults.png
 )
 cdat_add_test(vcs_addons_test_histogram_inherit
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_histogram_inherit.py
-  ${BASELINE_DIR}/vcs_addons_test_histogram_inherit.png
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
+  ${BASELINE_DIR}/test_vcs_addons_histogram_inherit.png
 )
 cdat_add_test(vcs_addons_test_polar
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar.py
-  ${BASELINE_DIR}/vcs_addons_test_polar.png
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar.py
+  ${BASELINE_DIR}/test_vcs_addons_polar.png
 )
 cdat_add_test(vcs_addons_test_polar_inherit
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_inherit.py
-  ${BASELINE_DIR}/vcs_addons_test_polar_inherit.png
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_inherit.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_inherit.png
 )
 cdat_add_test(vcs_addons_test_convert_arrays
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_convert_arrays.py
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_convert_arrays.py
 )
 cdat_add_test(vcs_addons_test_polar_degrees
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_degrees.py
-  ${BASELINE_DIR}/vcs_addons_test_polar_degrees.png
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_degrees.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_degrees.png
 )
 cdat_add_test(vcs_addons_test_polar_annual
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_annual.py
-  ${BASELINE_DIR}/vcs_addons_test_polar_annual.png
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_annual.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_annual.png
 )
 cdat_add_test(vcs_addons_test_polar_diurnal
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_diurnal.py
-  ${BASELINE_DIR}/vcs_addons_test_polar_diurnal.png
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_diurnal.png
 )
 cdat_add_test(vcs_addons_test_polar_seasonal
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_seasonal.py
-  ${BASELINE_DIR}/vcs_addons_test_polar_seasonal.png
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_seasonal.png
 )
 cdat_add_test(vcs_addons_test_polar_semidiurnal
   "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py
-  ${BASELINE_DIR}/vcs_addons_test_polar_semidiurnal.png
+  ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
+  ${BASELINE_DIR}/test_vcs_addons_polar_semidiurnal.png
 )
 
 if (CDAT_DOWNLOAD_SAMPLE_DATA)
diff --git a/testing/vcsaddons/vcs_addons_test_convert_arrays.py b/testing/vcsaddons/test_vcs_addons_convert_arrays.py
similarity index 100%
rename from testing/vcsaddons/vcs_addons_test_convert_arrays.py
rename to testing/vcsaddons/test_vcs_addons_convert_arrays.py
diff --git a/testing/vcsaddons/vcs_addons_test_histogram_defaults.py b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
similarity index 100%
rename from testing/vcsaddons/vcs_addons_test_histogram_defaults.py
rename to testing/vcsaddons/test_vcs_addons_histogram_defaults.py
diff --git a/testing/vcsaddons/vcs_addons_test_histogram_inherit.py b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
similarity index 100%
rename from testing/vcsaddons/vcs_addons_test_histogram_inherit.py
rename to testing/vcsaddons/test_vcs_addons_histogram_inherit.py
diff --git a/testing/vcsaddons/vcs_addons_test_polar.py b/testing/vcsaddons/test_vcs_addons_polar.py
similarity index 100%
rename from testing/vcsaddons/vcs_addons_test_polar.py
rename to testing/vcsaddons/test_vcs_addons_polar.py
diff --git a/testing/vcsaddons/vcs_addons_test_polar_annual.py b/testing/vcsaddons/test_vcs_addons_polar_annual.py
similarity index 100%
rename from testing/vcsaddons/vcs_addons_test_polar_annual.py
rename to testing/vcsaddons/test_vcs_addons_polar_annual.py
diff --git a/testing/vcsaddons/vcs_addons_test_polar_degrees.py b/testing/vcsaddons/test_vcs_addons_polar_degrees.py
similarity index 100%
rename from testing/vcsaddons/vcs_addons_test_polar_degrees.py
rename to testing/vcsaddons/test_vcs_addons_polar_degrees.py
diff --git a/testing/vcsaddons/vcs_addons_test_polar_diurnal.py b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
similarity index 100%
rename from testing/vcsaddons/vcs_addons_test_polar_diurnal.py
rename to testing/vcsaddons/test_vcs_addons_polar_diurnal.py
diff --git a/testing/vcsaddons/vcs_addons_test_polar_inherit.py b/testing/vcsaddons/test_vcs_addons_polar_inherit.py
similarity index 100%
rename from testing/vcsaddons/vcs_addons_test_polar_inherit.py
rename to testing/vcsaddons/test_vcs_addons_polar_inherit.py
diff --git a/testing/vcsaddons/vcs_addons_test_polar_seasonal.py b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
similarity index 100%
rename from testing/vcsaddons/vcs_addons_test_polar_seasonal.py
rename to testing/vcsaddons/test_vcs_addons_polar_seasonal.py
diff --git a/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
similarity index 100%
rename from testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py
rename to testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
-- 
GitLab


From 6f7c191de90aa38506a78a28f4447dcbe19d7a88 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Tue, 31 May 2016 12:03:38 -0700
Subject: [PATCH 125/196] Converted to using new testing.regression module

---
 .../test_vcs_addons_histogram_defaults.py        | 13 ++++---------
 .../test_vcs_addons_histogram_inherit.py         | 14 +++++---------
 testing/vcsaddons/test_vcs_addons_polar.py       | 13 ++++---------
 .../vcsaddons/test_vcs_addons_polar_annual.py    | 16 ++++++----------
 .../vcsaddons/test_vcs_addons_polar_degrees.py   | 15 +++++----------
 .../vcsaddons/test_vcs_addons_polar_diurnal.py   | 13 ++++---------
 .../vcsaddons/test_vcs_addons_polar_inherit.py   | 13 ++++---------
 .../vcsaddons/test_vcs_addons_polar_seasonal.py  | 13 ++++---------
 .../test_vcs_addons_polar_semidiurnal.py         | 13 ++++---------
 9 files changed, 40 insertions(+), 83 deletions(-)

diff --git a/testing/vcsaddons/test_vcs_addons_histogram_defaults.py b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
index b2b19e499..24af6757b 100644
--- a/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
+++ b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
@@ -1,22 +1,17 @@
 import sys,os
 src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import vcs
 import vcsaddons, numpy
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
 
 numpy.random.seed(seed=12345)
 vals = numpy.random.random_sample(2000) * 100
 histo = vcsaddons.histograms.Ghg()
 histo.plot(vals, bg=True, x=x)
 
-fnm = "vcs_addons_test_histogram_defaults.png"
+fnm = "test_vcs_addons_histogram_defaults.png"
 x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src)
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_histogram_inherit.py b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
index 8ce19e0c2..c761c4e05 100644
--- a/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
+++ b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
@@ -1,15 +1,11 @@
 import sys,os
 src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import vcs, cdms2
 import vcsaddons, numpy
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
+
 cdmsfile = cdms2.open(vcs.sample_data + "/clt.nc")
 clt = cdmsfile("clt")
 
@@ -56,7 +52,7 @@ histo3.datawc_x2 = 100
 histo3.bins = None
 histo3.plot(clt, template="default", bg=True)
 
-fnm = "vcs_addons_test_histogram_inherit.png"
+fnm = "test_vcs_addons_histogram_inherit.png"
 x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src)
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar.py b/testing/vcsaddons/test_vcs_addons_polar.py
index 8a848e7a1..5512d9d52 100644
--- a/testing/vcsaddons/test_vcs_addons_polar.py
+++ b/testing/vcsaddons/test_vcs_addons_polar.py
@@ -1,15 +1,10 @@
 import sys,os
 src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import vcs
 import vcsaddons, numpy
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
 
 polar = vcsaddons.polar.Gpo()
 polar.markers = ["dot", "circle"]
@@ -22,7 +17,7 @@ magnitude = list(numpy.sin(theta))
 
 polar.plot(magnitude, theta, bg=True, x=x)
 
-fnm = "vcs_addons_test_polar.png"
+fnm = "test_vcs_addons_polar.png"
 x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src)
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_annual.py b/testing/vcsaddons/test_vcs_addons_polar_annual.py
index 420b724cd..5cea2bfc1 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_annual.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_annual.py
@@ -1,15 +1,11 @@
 import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import vcs
 import vcsaddons, numpy
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+src = sys.argv[1]
+
+x = regression.init()
 
 polar = vcsaddons.getpolar("annual_cycle")
 polar.markers = ["dot"]
@@ -34,7 +30,7 @@ theta = range(1, len(clt) + 1)
 magnitude = avg_departures
 polar.plot(magnitude, theta, bg=True, x=x)
 
-fnm = "vcs_addons_test_polar_annual.png"
+fnm = "test_vcs_addons_polar_annual.png"
 x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src)
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_degrees.py b/testing/vcsaddons/test_vcs_addons_polar_degrees.py
index 46d34168a..3727dad14 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_degrees.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_degrees.py
@@ -1,15 +1,10 @@
-import sys,os
+import sys
 src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import vcs
 import vcsaddons, numpy
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
 
 polar = vcsaddons.getpolar("degrees")
 polar.markers = ["dot", "circle"]
@@ -23,7 +18,7 @@ polar.datawc_y1 = 0
 polar.datawc_y2 = max(magnitude)
 polar.plot(magnitude, theta, bg=True, x=x)
 
-fnm = "vcs_addons_test_polar_degrees.png"
+fnm = "test_vcs_addons_polar_degrees.png"
 x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src)
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_diurnal.py b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
index ac0664171..927180e38 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
@@ -1,16 +1,11 @@
 import sys,os
 src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
 import vcs
 import vcsaddons, numpy
 import cdms2, cdutil, cdtime
+import testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
 
 f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc"))
 temp = f('t')
@@ -38,7 +33,7 @@ polar.magnitude_tick_angle = numpy.pi / 8
 
 polar.plot(magnitudes, thetas, bg=True, x=x)
 
-fnm = "vcs_addons_test_polar_diurnal.png"
+fnm = "test_vcs_addons_polar_diurnal.png"
 x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src)
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_inherit.py b/testing/vcsaddons/test_vcs_addons_polar_inherit.py
index 4eb946359..4fc56138d 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_inherit.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_inherit.py
@@ -1,15 +1,10 @@
 import sys,os
 src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import vcs
 import vcsaddons, numpy
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
 
 gm = vcsaddons.polar.Gpo()
 gm.markers = ["dot", "circle"]
@@ -44,7 +39,7 @@ magnitude = [magnitude[:len(magnitude)/ 2], magnitude[len(magnitude) / 2:]]
 
 polar.plot(magnitude, theta, bg=True, x=x)
 
-fnm = "vcs_addons_test_polar_inherit.png"
+fnm = "test_vcs_addons_polar_inherit.png"
 x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src)
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_seasonal.py b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
index 42612ddae..6a6eafd9b 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
@@ -1,16 +1,11 @@
 import sys,os
 src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import vcs
 import vcsaddons, numpy, MV2
 import cdms2, cdutil, cdtime
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
 
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 # Trim first few months and last month so we have even number of seasons
@@ -52,7 +47,7 @@ polar.magnitude_tick_angle = numpy.pi / 4
 
 polar.plot(magnitudes, thetas, bg=True, x=x)
 
-fnm = "vcs_addons_test_polar_seasonal.png"
+fnm = "test_vcs_addons_polar_seasonal.png"
 x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src)
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
index 900d570b4..3061e8a0d 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
@@ -1,16 +1,11 @@
 import sys,os
 src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import vcs
 import vcsaddons, numpy
 import cdms2, cdutil, cdtime
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
 
 f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc"))
 temp = f('t')
@@ -38,7 +33,7 @@ polar.magnitude_tick_angle = numpy.pi / 8
 
 polar.plot(magnitudes, thetas, bg=True, x=x)
 
-fnm = "vcs_addons_test_polar_semidiurnal.png"
+fnm = "test_vcs_addons_polar_semidiurnal.png"
 x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src)
 sys.exit(ret)
-- 
GitLab


From 34352f73e172b596e2f0e243561e61bb5c63868a Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Tue, 31 May 2016 15:29:10 -0400
Subject: [PATCH 126/196] Fix regression for test_vcs_png_window_resize

The regression is from a7f5b860ff1408282431b70890044f956396c320
---
 testing/vcs/test_vcs_png_window_resize.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/testing/vcs/test_vcs_png_window_resize.py b/testing/vcs/test_vcs_png_window_resize.py
index a6346ca28..2adc55d15 100644
--- a/testing/vcs/test_vcs_png_window_resize.py
+++ b/testing/vcs/test_vcs_png_window_resize.py
@@ -1,9 +1,9 @@
 import vcs, sys, os, testing.regression as regression
 
-x = regression.init()
+x = regression.init(bg=0)
 x.setantialiasing(0)
 x.drawlogooff()
 x.open(814,628)
 x.plot([1,2,3,4,5,6,7])
 fnm = __file__[:-3]+".png"
-regression.run(x, fnm)
\ No newline at end of file
+regression.run(x, fnm)
-- 
GitLab


From b8882669bc2758923350695eae0905aba9b27d4f Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Tue, 31 May 2016 16:20:53 -0400
Subject: [PATCH 127/196] BUG: Use the geometry argument, if available, for
 background tests

---
 Packages/testing/regression.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index 25255fdc0..6b1b2bf9b 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -20,10 +20,13 @@ def init(*args, **kwargs):
     testingDir = os.path.join(os.path.dirname(__file__), "..")
     sys.path.append(testingDir)
 
-    if ((('bg' in kwargs and kwargs['bg']) or ('bg' not in kwargs)) and
-        ('geometry' not in kwargs)):
+    if ((('bg' in kwargs and kwargs['bg']) or ('bg' not in kwargs))):
         vcsinst = vcs.init(*args, **dict(kwargs, bg=1))
-        vcsinst.setbgoutputdimensions(1200, 1091, units="pixels")
+        if ('geometry' not in kwargs):
+            vcsinst.setbgoutputdimensions(1200, 1091, units="pixels")
+        else:
+            xy = kwargs['geometry']
+            vcsinst.setbgoutputdimensions(xy[0], xy[1], units="pixels")
     else:
         vcsinst = vcs.init(*args, **dict(kwargs, bg=0))
 
-- 
GitLab


From e5ee0a05e23004734bfde10337da204479945ab2 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Wed, 1 Jun 2016 12:00:22 -0700
Subject: [PATCH 128/196] Fixes issues with tests for DV3D, makes DV3D obey VCS
 create/source semantics

---
 Packages/DV3D/ConfigurationFunctions.py |  3 +--
 Packages/vcs/vcs/dv3d.py                | 35 +++++++++++++++++++------
 testing/dv3d/TestManager.py             |  5 ++--
 3 files changed, 30 insertions(+), 13 deletions(-)

diff --git a/Packages/DV3D/ConfigurationFunctions.py b/Packages/DV3D/ConfigurationFunctions.py
index c3bff88c3..98e1947b7 100644
--- a/Packages/DV3D/ConfigurationFunctions.py
+++ b/Packages/DV3D/ConfigurationFunctions.py
@@ -245,7 +245,7 @@ class ConfigManager:
         if ( self.parent <> None ):
             for parm_address in self.parent.parameters.keys():
                 basename = get_parameter_name( parm_address )
-                self.parameters[basename] = self.getParameter( basename  )
+                self.parameters[basename] = ConfigParameter(basename, parent=self.parent.getParameter(basename))
         self.initialized = False
 
     def clear( self, cell ):
@@ -264,7 +264,6 @@ class ConfigManager:
             if self.parent is None:
                 cparm = ConfigParameter( param_name, **args )
             else:
-#                print "Getting config param from parent: ", param_name
                 cparm_parent = self.parent.getParameter( param_name, cell=self.cell_coordinates )
                 cparm = ConfigParameter( param_name, parent=cparm_parent, **args )
             self.addParam( param_name, cparm )
diff --git a/Packages/vcs/vcs/dv3d.py b/Packages/vcs/vcs/dv3d.py
index 19a35a808..2afae29f2 100644
--- a/Packages/vcs/vcs/dv3d.py
+++ b/Packages/vcs/vcs/dv3d.py
@@ -126,19 +126,38 @@ class Gfdv3d(object):
         self.projection = 'default'
         self.provenanceHandler = None
 
+        vcs.elements[self.g_name][Gfdv3d_name] = self
+
+        self._axes = "xyz"
+
+        # Use parent config values if possible
+        if isinstance(Gfdv3d_name_src, (unicode, str)):
+            # Make sure we aren't inheriting from ourself
+            if Gfdv3d_name_src != Gfdv3d_name:
+                parent_cfg = vcs.elements[self.g_name][Gfdv3d_name_src].cfgManager
+                self._axes = vcs.elements[self.g_name][Gfdv3d_name_src]._axes
+            else:
+                parent_cfg = None
+        else:
+            # Make sure we aren't inheriting from ourself
+            if Gfdv3d_name_src.name != self.name:
+                parent_cfg = Gfdv3d_name_src.cfgManager
+                self._axes = Gfdv3d_name_src._axes
+            else:
+                parent_cfg = None
+
+        self.cfgManager = ConfigManager(cm=parent_cfg)
+
         if Gfdv3d_name == "Hovmoller3D":
             self._axes = "xyt"
-        else:
-            self._axes = "xyz"
 
-        self.cfgManager = ConfigManager()
         self.ncores = multiprocessing.cpu_count()
+
         self.addParameters()
 
-        vcs.elements[self.g_name][Gfdv3d_name] = self
         self.plot_attributes['name'] = self.g_name
         self.plot_attributes['template'] = Gfdv3d_name
-#        print "Adding VCS element: %s %s " % ( self.g_name, Gfdv3d_name )
+
 
     def setProvenanceHandler(self, provenanceHandler):
         self.provenanceHandler = provenanceHandler
@@ -215,14 +234,14 @@ class Gf3Dvector(Gfdv3d):
 
     def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'):
         self.g_name = '3d_vector'
-        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src='default')
+        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src=Gfdv3d_name_src)
 
 
 class Gf3Dscalar(Gfdv3d):
 
     def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'):
         self.g_name = '3d_scalar'
-        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src='default')
+        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src=Gfdv3d_name_src)
         self.VectorDisplay = Gfdv3d_name
 
 
@@ -230,7 +249,7 @@ class Gf3DDualScalar(Gfdv3d):
 
     def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'):
         self.g_name = '3d_dual_scalar'
-        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src='default')
+        Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src=Gfdv3d_name_src)
 
 if __name__ == '__main__':
     dv3d = vcs.get3d_scalar()
diff --git a/testing/dv3d/TestManager.py b/testing/dv3d/TestManager.py
index 94e7e365e..5b0aa208e 100644
--- a/testing/dv3d/TestManager.py
+++ b/testing/dv3d/TestManager.py
@@ -106,9 +106,8 @@ class vcsTest:
 
         plot_kwargs = { 'cdmsfile': self.file.id, 'window_size': (900,600) }
         self.canvas.setantialiasing(False)
-        self.canvas.plot( *plot_args, **plot_kwargs )
-        self.plot = self.canvas.backend.plotApps[ self.gm ]
-#        self.applyActions()
+        display = self.canvas.plot( *plot_args, **plot_kwargs )
+        self.plot = self.canvas.backend.plotApps[ vcs.elements[display.g_type][display.g_name] ]
 
     def applyActions(self):
         for action in self.actions:
-- 
GitLab


From f29a6d136b98f4e84bd56eb63af3e6ed26910ba9 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 1 Jun 2016 15:35:53 -0700
Subject: [PATCH 129/196] first pass, bare bones but works for me

---
 CMake/cdat_modules/cdat_deps.cmake            |  17 +-
 CMake/cdat_modules/cdat_external.cmake        |  12 +-
 CMake/cdat_modules/cdat_pkg.cmake             |   4 +-
 .../install_cdat_from_conda.bash              |  16 +
 CMake/cdat_modules_extra/runtest              |   4 +
 CMake/cdat_modules_extra/runtest.in           |   4 -
 CMakeLists.txt                                | 775 +-----------------
 testing/CMakeLists.txt                        |  47 +-
 8 files changed, 33 insertions(+), 846 deletions(-)
 create mode 100755 CMake/cdat_modules_extra/install_cdat_from_conda.bash
 create mode 100755 CMake/cdat_modules_extra/runtest
 delete mode 100755 CMake/cdat_modules_extra/runtest.in

diff --git a/CMake/cdat_modules/cdat_deps.cmake b/CMake/cdat_modules/cdat_deps.cmake
index 70d44f426..dcb9b307a 100644
--- a/CMake/cdat_modules/cdat_deps.cmake
+++ b/CMake/cdat_modules/cdat_deps.cmake
@@ -1,16 +1 @@
-set(CDAT_deps ${wget_pkg} ${python_pkg} ${numpy_pkg}
-              ${libcdms_pkg}
-              ${libcf_pkg} ${netcdf_pkg} ${myproxyclient_pkg} ${udunits2_pkg})
-if (CDAT_BUILD_GRAPHICS)
-  if (CDAT_BUILD_PARAVIEW)
-    list(APPEND CDAT_deps ${paraview_pkg})
-  else()
-    list(APPEND CDAT_deps ${vtk_pkg})
-  endif()
-  list(APPEND CDAT_deps ${ffmpeg_pkg})
-endif()
-
-if (CDAT_BUILD_ESMF)
-    list(APPEND CDAT_deps ${esmf_pkg})
-endif()
-
+set(CDAT_deps)
diff --git a/CMake/cdat_modules/cdat_external.cmake b/CMake/cdat_modules/cdat_external.cmake
index 7b1b53f9b..3a273bcb0 100644
--- a/CMake/cdat_modules/cdat_external.cmake
+++ b/CMake/cdat_modules/cdat_external.cmake
@@ -1,19 +1,11 @@
 set(CDAT_source "${cdat_SOURCE_DIR}")
 
-set(RUNTIME_FLAGS ${cdat_EXTERNALS}/lib)
-set(LDFLAGS -L${cdat_EXTERNALS}/lib)
-
-if (CDAT_BUILD_LIBDRS)
- set(cdat_xtra_flags "${cdat_xtra_flags} --enable-drs")
-endif()
-
-set(cdat_build_dir ${CMAKE_CURRENT_BINARY_DIR}/cdat-build)
-
 set(WORKING_DIR "${cdat_CMAKE_BINARY_DIR}")
 configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_python_install_step.cmake.in
   ${cdat_CMAKE_BINARY_DIR}/cdat_python_install_step.cmake
   @ONLY)
 
+message("[CDAT BUILD SCRIPT:${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash")
 ExternalProject_Add(CDAT
   DOWNLOAD_DIR ""
   SOURCE_DIR ${cdat_SOURCE_DIR}
@@ -22,7 +14,7 @@ ExternalProject_Add(CDAT
   PATCH_COMMAND ""
   CONFIGURE_COMMAND ""
   BUILD_COMMAND ""
-  INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" ${CMAKE_COMMAND} -DPYTHON_INSTALL_ARGS=${cdat_xtra_flags} -P ${cdat_CMAKE_BINARY_DIR}/cdat_python_install_step.cmake
+  INSTALL_COMMAND ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash
   DEPENDS ${CDAT_deps}
   ${ep_log_options}
 )
diff --git a/CMake/cdat_modules/cdat_pkg.cmake b/CMake/cdat_modules/cdat_pkg.cmake
index 05a66faf5..3997c0d96 100644
--- a/CMake/cdat_modules/cdat_pkg.cmake
+++ b/CMake/cdat_modules/cdat_pkg.cmake
@@ -1,5 +1,5 @@
 set(cdat_VERSION_MAJOR 2)
-set(cdat_VERSION_MINOR 2)
+set(cdat_VERSION_MINOR 6)
 set(cdat_VERSION_PATCH 0)
 set(cdat_VERSION ${cdat_VERSION_MAJOR}.${cdat_VERSION_MINOR}.${cdat_VERSION_PATCH})
 
@@ -30,6 +30,6 @@ configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/version.in
   ${cdat_BINARY_DIR}/version
   @ONLY
 )
-
+message("[INFO] ADDING CDAT")
 add_cdat_package(CDAT "" "" ON)
 
diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash b/CMake/cdat_modules_extra/install_cdat_from_conda.bash
new file mode 100755
index 000000000..9d393b5da
--- /dev/null
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+SRCS=`dirname $0`
+conda create -n TEST_UVCDAT -c uvcdat uvcdat
+source activate TEST_UVCDAT
+cd ${SRCS}/../..
+echo "PATH:"`pwd`
+for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
+    cd Packages/${pkg}
+    rm -rf build
+    python setup.py install
+    cd ../..
+done
+
+
+
+
diff --git a/CMake/cdat_modules_extra/runtest b/CMake/cdat_modules_extra/runtest
new file mode 100755
index 000000000..8ca0cbf3b
--- /dev/null
+++ b/CMake/cdat_modules_extra/runtest
@@ -0,0 +1,4 @@
+#!/bin/bash
+source activate TEST_UVCDAT
+echo `which python`
+python $@
diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in
deleted file mode 100755
index 972a674ad..000000000
--- a/CMake/cdat_modules_extra/runtest.in
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-# source is not portable whereas . is
-. "@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh"
-$@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 32b16d453..982d6d3e7 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,12 +1,4 @@
-#=============================================================================
 cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)
-CMAKE_POLICY(SET CMP0012 NEW)
-
-if ("${CMAKE_VERSION}" VERSION_LESS "2.8.12")
-  message(WARNING "Your CMake version is ${CMAKE_VERSION} which is depreciated for UV-CDAT. The recommended minimum CMake version is 2.8.12. Using older versions can result in build errors particularly with Xcode 5")
-endif()
-
-# Project name and initial checks
 #=============================================================================
 project(cdat)
 
@@ -19,15 +11,6 @@ set(cdat_external_patch_dir ${cdat_SOURCE_DIR}/exsrc)
 
 
 
-if("${CMAKE_INSTALL_PREFIX}" STREQUAL "/usr/local")
-  get_filename_component(cdat_ROOT_DIR ${cdat_BINARY_DIR} PATH)
-  set(CMAKE_INSTALL_PREFIX ${cdat_BINARY_DIR}/install CACHE STRING "" FORCE)
-endif()
-
-set(cdat_EXTERNALS ${CMAKE_INSTALL_PREFIX}/Externals)
-set(ENV{PATH} "${cdat_EXTERNALS}/bin:$ENV{PATH}")
-message("[INFO] We reset your path to: " $ENV{PATH})
-
 set(CMAKE_MODULE_PATH
   ${cdat_CMAKE_SOURCE_DIR}
   ${cdat_CMAKE_SOURCE_DIR}/cmake_modules
@@ -49,64 +32,12 @@ else()
   set(ENV{UVCDAT_ANONYMOUS_LOG} "no")
 endif()
 
-# Disable in source build of any kind.
-#=============================================================================
-include(CheckBuildOutOfSource)
-check_build_out_of_source("${cdat_SOURCE_DIR}" "${cdat_BINARY_DIR}"
-                          BUILDINSOURCE)
-if(BUILDINSOURCE)
-   set(msg "[ERROR] CDAT requires an out of source Build.")
-   set(msg "${msg}\nRun 'git clean -dfx' to restore source dir.")
-   message(FATAL_ERROR "${msg}")
-endif()
-
 # Include useful cmake scripts
 #=============================================================================
 include(cmake_utils)
 include(check_fortran)
 include(CTest)
 
-# Enable/Disable coverage
-#=============================================================================
-option(CDAT_MEASURE_COVERAGE "Measure test coverage while running tests" OFF)
-
-if(CDAT_MEASURE_COVERAGE)
-  message("Coverage measurement enabled; tests will run slower.")
-  set(COVERAGE_PKGS "cdms2,vcs,cdutil,genutil,DV3D,vcsaddons,vcs.vtk_ui,vcs.editors,vcs.vcsvtk,regrid2")
-  configure_file(${cdat_CMAKE_SOURCE_DIR}/coverage_report.py.in
-    ${CMAKE_INSTALL_PREFIX}/bin/coverage_report
-    @ONLY
-  )
-endif()
-
-# Set up the test data. If UVCDAT_USE_SYSTEM_TESTDATA is ON and UVCDAT_TESTDATA
-# is not set then we won't use it for testing. Otherwise we'll test either
-# with the system test data or download it ourselves.
-#=============================================================================
-if (BUILD_TESTING)
-  set(UVCDAT_USE_SYSTEM_TESTDATA ON CACHE BOOL "Use UV-CDAT's test data from the system")
-  if(UVCDAT_USE_SYSTEM_TESTDATA)
-    set(UVCDAT_TESTDATA "" CACHE PATH "Location of UV-CDAT test data")
-    set(UVCDAT_TESTDATA_LOCATION ${UVCDAT_TESTDATA})
-  else()
-    set(UVCDAT_TestData_GZ boonth-1-22-2013.p94m.tar.gz)
-    set(UVCDAT_TestData_MD5 cf47adb0b6164997fb122ccbc3bd6f92)
-    file(DOWNLOAD ${LLNL_URL}/${UVCDAT_TestData_GZ} ${CMAKE_BINARY_DIR}/${UVCDAT_TestData_GZ}
-      STATUS testdatastatus SHOW_PROGRESS EXPECTED_MD5 ${UVCDAT_TestData_MD5})
-    list(GET testdatastatus 0 actualtestdatastatus)
-    if(actualtestdatastatus)
-      message("[WARNING] Unable to automatically download test data ${testdatastatus}")
-    else()
-      set(UVCDAT_TESTDATA_DIR ${CMAKE_BINARY_DIR}/UVCDAT_TestData)
-      file(MAKE_DIRECTORY ${UVCDAT_TESTDATA_DIR})
-      execute_process(
-        COMMAND ${CMAKE_COMMAND} -E tar xzf ${CMAKE_BINARY_DIR}/${UVCDAT_TestData_GZ}
-        WORKING_DIRECTORY ${UVCDAT_TESTDATA_DIR})
-      set(UVCDAT_TESTDATA_LOCATION ${UVCDAT_TESTDATA_DIR})
-    endif()
-  endif()
-endif()
-
 # Change architecture *before* any enable_language() or project()
 # calls so that it's set properly to detect 64-bit-ness...
 #-----------------------------------------------------------------------------
@@ -143,50 +74,6 @@ if(NOT GIT_PROTOCOL)
   set_property(CACHE GIT_PROTOCOL PROPERTY STRINGS "git://" "http://" "https://")
 endif()
 
-if(GIT_PROTOCOL MATCHES "http://")
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/eof2.git.insteadof git://github.com/ajdawson/eof2.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-  )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/eofs.git.insteadof git://github.com/ajdawson/eofs.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
- )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global --unset url.git://uv-cdat.llnl.gov/windfield.git.insteadof http://uv-cdat.llnl.gov/git/windfield.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-  )
-  execute_process(
-      COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/UV-CDAT/scimake.git.insteadof git://github.com/UV-CDAT/scimake.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-    )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/windspharm.git.insteadof git://github.com/ajdawson/windspharm.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
- )
-else()
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/eof2.git.insteadof git://github.com/ajdawson/eof2.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-  )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/eofs.git.insteadof git://github.com/ajdawson/eofs.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
- )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global url.git://uv-cdat.llnl.gov/windfield.git.insteadof http://uv-cdat.llnl.gov/git/windfield.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-  )
-  execute_process(
-      COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/UV-CDAT/scimake.git.insteadof git://github.com/UV-CDAT/scimake.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
-    )
-  execute_process(
-    COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/windspharm.git.insteadof git://github.com/ajdawson/windspharm.git
-    WORKING_DIRECTORY ${cdat_SOURCE_DIR}
- )
-endif()
-
 # Checkout the baseline repository.
 #=============================================================================
 if(BUILD_TESTING)
@@ -231,7 +118,7 @@ set(PARTS_BUILT_INFO "${cdat_BINARY_DIR}/build_info.txt" CACHE STRING "File wher
 # files in order to move them (somehow) to the OFFLINE machine where build will happen
 # OFF the machine has no internet access all files are suppposed to be here, pre-downloaded
 
-option(OFFLINE_BUILD "Is there internet access, are we preping for it?" OFF)
+# option(OFFLINE_BUILD "Is there internet access, are we preping for it?" OFF)
 option(CDAT_BUILD_PARALLEL "Build parallel components of CDAT" OFF)
 
 # OSMesa/VTK aren't playing nicely on macs. Disabling for now.
@@ -242,85 +129,6 @@ cmake_dependent_option(CDAT_BUILD_OFFSCREEN "Use OSMesa for offscreen rendering.
 # Option to enable vtkweb for cdatweb
 option(CDAT_BUILD_WEB "Build in Web support (VTKWeb, etc.)" OFF)
 
-# Option to enable CMOR
-option(CDAT_BUILD_CMOR "Build CMOR" ON)
-
-# Option to choose between easy_install and pip (crunchy ssl/man in the middle prevents us to use pip here...
-set(EGG_INSTALLER "PIP" CACHE STRING "Which package installer to use")
-set_property(CACHE EGG_INSTALLER PROPERTY STRINGS "PIP" "EASY_INSTALL")
-set(PIP_CERTIFICATE "" CACHE STRING "Certificate to use for PIP (LLNL issue really)")
-
-# Options for various types of builds
-option(CDAT_USE_SYSTEM_PYTHON "Use system Python" OFF)
-
-# Default state
-set(CDAT_BUILD_LEAN OFF)
-set(CDAT_BUILD_ALL OFF)
-
-# Some more options
-option(CDAT_BUILD_GUI "Builds GUI-based dependencies (Vistrails, ParaView, VisIt, R, etc.) " ON)
-option(CDAT_BUILD_GRAPHICS "Build graphics-based dependencies (vcs, pyqt, Vistrails, ParaView, VisIt, R, etc.) " ON)
-option(CDAT_BUILD_ESGF "Alias for CDAT_BUILD_LEAN" OFF)
-option(CDAT_BUILD_UVCMETRICSPKG "Builds uvcmetrics package " ON)
-option(CDAT_BUILD_PARAVIEW "Build ParaView rather than just VTK" OFF)
-option(CDAT_DOWNLOAD_UVCMETRICS_TESTDATA "Download test data uvcmetrics package " ON)
-
-# If ESGF option is on then our build mode is LEAN.
-if (CDAT_BUILD_ESGF)
-  if( (DEFINED CDAT_BUILD_MODE) AND (NOT "${CDAT_BUILD_MODE}" STREQUAL "LEAN") )
-    message(WARNING "[INFO] CDAT_BUILD_ESGF enabled, forcing CDAT_BUILD_MODE to LEAN")
-  endif()
-  set(CDAT_BUILD_MODE "LEAN" CACHE STRING "Build mode for CDAT <ALL, LEAN, DEFAULT>" FORCE)
-  set(CDAT_DOWNLOAD_SAMPLE_DATA OFF)
-endif()
-set(CDAT_BUILD_MODE "DEFAULT" CACHE STRING "Build mode for CDAT <ALL, LEAN, DEFAULT>")
-set_property(CACHE CDAT_BUILD_MODE PROPERTY STRINGS "DEFAULT" "ALL" "LEAN")
-message([INFO] BUILD MODE: ${CDAT_BUILD_MODE})
-
-# Set the state of LEAN all based on the MODE
-if (CDAT_BUILD_MODE STREQUAL "LEAN")
-  set(CDAT_BUILD_LEAN ON)
-  set(CDAT_BUILD_ALL OFF)
-elseif (CDAT_BUILD_MODE STREQUAL "ALL")
-  set(CDAT_BUILD_LEAN OFF)
-  set(CDAT_BUILD_ALL ON)
-elseif (CDAT_BUILD_MODE STREQUAL "DEFAULT")
-  set(CDAT_BUILD_LEAN OFF)
-  set(CDAT_BUILD_ALL OFF)
-else()
-  message(FATAL_ERROR "[ERROR] Unknown CDAT_BUILD_MODE \"${CDAT_BUILD_MODE}\" VALID MODES ARE \"DEFAULT\" \"ALL\" \"LEAN\"")
-endif()
-
-# First of all if LEAN then turn OFF GRAPHICS and PARALLEL
-if (CDAT_BUILD_LEAN)
-  set_property(CACHE CDAT_BUILD_GRAPHICS PROPERTY VALUE OFF)
-  set_property(CACHE CDAT_BUILD_PARALLEL PROPERTY VALUE OFF)
-  set_property(CACHE CDAT_BUILD_UVCMETRICSPKG PROPERTY VALUE OFF)
-  set(CDMS_ONLY --enable-cdms-only)
-else()
-  set(CDMS_ONLY "")
-endif()
-
-# If ALL is enabled then turn ON GUI, GRAPHICS, and PARALLEL
-if (CDAT_BUILD_ALL)
-  set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE ON)
-  set_property(CACHE CDAT_BUILD_GRAPHICS PROPERTY VALUE ON)
-  set_property(CACHE CDAT_BUILD_PARALLEL PROPERTY VALUE ON)
-  set_property(CACHE CDAT_BUILD_ESGF PROPERTY VALUE OFF)
-  set_property(CACHE CDAT_BUILD_UVCMETRICSPKG PROPERTY VALUE ON)
-  set(CDMS_ONLY "")
-endif()
-
-# If no graphics then no gui as well
-if (NOT CDAT_BUILD_GRAPHICS)
-  set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE OFF)
-endif()
-
-# Don't build GUI if this is an offscreen-only build:
-if(CDAT_BUILD_OFFSCREEN AND CDAT_BUILD_GUI)
-  message("[INFO] Turning off CDAT_BUILD_GUI; incompatible with CDAT_BUILD_OFFSCREEN.")
-  set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE OFF)
-endif()
 
 set(ep_prefix ${cdat_BINARY_DIR}/build/prefix)
 set_property(DIRECTORY PROPERTY ep_log_dir ${cdat_BINARY_DIR}/logs)
@@ -418,228 +226,13 @@ endif()
 # when left to create them.
 #=============================================================================
 set(CDAT_PACKAGE_CACHE_DIR
-#  ${CMAKE_CURRENT_BINARY_DIR}/../cdat_dependencies"
   "${CMAKE_CURRENT_BINARY_DIR}"
   CACHE PATH
   "Directory where source tar balls of external dependencies are kept"
 )
 
 include(ExternalProject)
-
-file(MAKE_DIRECTORY ${cdat_EXTERNALS})
-file(MAKE_DIRECTORY ${cdat_EXTERNALS}/lib)
-file(MAKE_DIRECTORY ${cdat_EXTERNALS}/bin)
-file(MAKE_DIRECTORY ${cdat_EXTERNALS}/include)
-file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/logs)
-file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/build)
-file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/sources)
-
-# Configure cdat command files
-#=============================================================================
-set(cdat_configure_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake)
-set(cdat_make_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake)
-set(cdat_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake)
-
-# Include essential packages
-#=============================================================================
-set(external_packages)
-set(found_system_include_dirs)
-set(found_system_libraries)
-
-include(python_pkg)
-if (APPLE)
-  set(SB_EXTERNALS_DIR "${CMAKE_INSTALL_PREFIX}/Externals")
-  set(SB_DIR "${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}")
-else()
-  set(SB_DIR "${CMAKE_INSTALL_PREFIX}")
-  # Helper variables to locate programs and libraries
-  set(SB_EXTERNALS_DIR "${CMAKE_INSTALL_PREFIX}/Externals")
-endif()
-
-set(SB_LIB_DIR "${SB_DIR}/lib")
-set(SB_BIN_DIR "${SB_DIR}/bin")
-
-include(basemap_pkg)
 include(cdat_pkg)
-include(clapack_pkg)
-#include(curl_pkg)
-include(configobj_pkg)
-include(cycler_pkg)
-include(cython_pkg)
-include(data_pkg)
-include(esmf_pkg)
-include(x264_pkg)
-include(ffmpeg_pkg)
-include(pyflakes_pkg)
-include(pep8_pkg)
-include(mccabe_pkg)
-include(flake8_pkg)
-include(g2clib_pkg)
-include(proj4_pkg)
-include(ocgis_pkg)
-include(cligj_pkg)
-include(click_pkg)
-include(fiona_pkg)
-include(pynetcdf4_pkg)
-include(gdal_pkg)
-include(geos_pkg)
-include(gsw_pkg)
-include(gui_support_pkg)
-include(h5py_pkg)
-include(hdf5_pkg)
-include(zmq_pkg)
-include(pyzmq_pkg)
-include(tornado_pkg)
-include(ipython_pkg)
-include(jasper_pkg)
-include(lapack_pkg)
-include(lepl_pkg)
-include(libcf_pkg)
-include(lats_pkg)
-include(libdrs_pkg)
-include(libdrsfortran_pkg)
-include(ezget_pkg)
-include(cd77_pkg)
-include(matplotlib_pkg)
-include(six_pkg)
-include(openssl_pkg)
-include(cryptography_pkg)
-include(enum34_pkg)
-include(idna_pkg)
-include(pyasn1_pkg)
-include(ipaddress_pkg)
-include(cffi_pkg)
-include(ffi_pkg)
-include(dateutils_pkg)
-include(pyparsing_pkg)
-include(pycparser_pkg)
-include(md5_pkg)
-include(mpi4py_pkg)
-include(pyopenssl_pkg)
-include(setuptools_pkg)
-include(myproxyclient_pkg)
-include(netcdf_pkg)
-include(numexpr_pkg)
-include(numpy_pkg)
-include(mpi_pkg)
-include(osmesa_pkg)
-include(seawater_pkg)
-include(vacumm_pkg)
-if (CDAT_BUILD_PARAVIEW)
-  include(paraview_pkg)
-else()
-  include(vtk_pkg)
-endif()
-include(pkgconfig_pkg)
-include(libcdms_pkg)
-include(sampledata_pkg)
-include(pyspharm_pkg)
-include(pytables_pkg)
-include(readline_pkg)
-include(r_pkg)
-include(rpy2_pkg)
-include(singledispatch_pkg)
-include(scikits_pkg)
-include(scipy_pkg)
-## Part of setuptools no need to extra build it
-## include(distribute_pkg)
-if (NOT CDAT_USE_SYSTEM_PYTHON)
-  include(pip_pkg)
-endif()
-include(shapely_pkg)
-include(pygments_pkg)
-include(markupsafe_pkg)
-include(jinja2_pkg)
-include(docutils_pkg)
-include(sphinx_pkg)
-include(freetype_pkg)
-include(coverage_pkg)
-## C. Doutriaux: We need to replace the following with a findPackage at some point
-if (APPLE)
-else()
-  include(jpeg_pkg)
-  include(pixman_pkg)
-  include(fontconfig_pkg)
-  include(curses_pkg)
-  #include(tiff_pkg)
-  include(netcdfplus_pkg)
-endif()
-#include(geotiff_pkg)
-include(cmor_pkg)
-include(udunits2_pkg)
-include(uuid_pkg)
-# IF we build the UVCDAT Metrics package
-if (CDAT_BUILD_UVCMETRICSPKG)
-  if (CDAT_DOWNLOAD_UVCMETRICS_TESTDATA)
-    set(UVCMETRICS_TEST_DATA_DIRECTORY ${CMAKE_INSTALL_PREFIX}/share/uvcmetrics/test_data CACHE PATH "DIR FOR UVCMETRICS TEST DATA" )
-  endif()
-  include(uvcmetrics_pkg)
-endif()
-include(vistrails_pkg)
-#include(yasm_pkg)
-include(pylibxml2_pkg)
-include(cdatlogger_pkg)
-include(pyclimate_pkg)
-include(scientificpython_pkg)
-include(windspharm_pkg)
-include(eof2_pkg)
-include(eofs_pkg)
-include(windfield_pkg)
-if (CDAT_BUILD_ESGF)
-    include(lxml_pkg)
-endif()
-
-if (CDAT_BUILD_GUI)
-  include(qt4_pkg)
-  if (NOT CDAT_USE_SYSTEM_PYTHON)
-    include(sip_pkg)
-    include(pyqt_pkg)
-  endif()
-  include(spyder_pkg)
-endif()
-
-# Configure custom configure/build/install step files
-#=============================================================================
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_common_environment.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake
-    @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_configure_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-    @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_cmake_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/cdat_cmake_make_step.cmake
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cleanenv_configure_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/cleanenv_configure_step.cmake
-    @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdatmpi_configure_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/cdatmpi_configure_step.cmake
-    @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/fix_install_name.py.in
-    ${cdat_CMAKE_BINARY_DIR}/fix_install_name.py
-    @ONLY
-)
-
 # Now sort and include external packages
 #=============================================================================
 include(TopologicalSort)
@@ -670,368 +263,4 @@ foreach(package ${external_packages})
     include("${lc_package}_external")
   endif()
 endforeach()
-file(WRITE ${PARTS_BUILT_INFO} ${packages_info})
-
-# Construct Include and Link variables
-#=============================================================================
-if(found_system_include_dirs)
-  list(REMOVE_DUPLICATES found_system_include_dirs)
-  list(REMOVE_ITEM found_system_include_dirs ${CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES} ${CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES})
-  set(cdat_external_include_directories)
-  foreach(include_dir ${found_system_include_dirs})
-    set(cdat_external_include_directories "-I${include_dir} ${cdat_external_include_directories}")
-  endforeach()
-endif()
-message("[INFO] CDAT external include directories: ${cdat_external_include_directories}")
-
-message("[INFO] System libraries: ${found_system_libraries}")
-if(found_system_libraries)
-  list(REMOVE_DUPLICATES found_system_libraries)
-  list(REMOVE_ITEM found_system_libraries ${CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES})
-  set(cdat_external_link_directories)
-  foreach(library_dir ${found_system_libraries})
-    set(cdat_external_link_directories "-L${library_dir} ${cdat_external_link_directories}")
-  endforeach()
-endif()
-message("[INFO] CDAT external link directories: ${cdat_external_link_directories}")
-
-# Configure remaining files
-#=============================================================================
-
-# set candidate paths for setup_runtime scripts
-# will be added to environment variables in reverse order
-set(SETUP_LIBRARY_PATHS
-  "Externals/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR} "
-  "Externals/lib/R/lib "
-  "Externals/lib "
-  "Externals/proj4/lib "
-  "Externals/lib64 "
-  "lib "
-)
-string(REPLACE ";" " " SETUP_LIBRARY_PATHS ${SETUP_LIBRARY_PATHS})
-set(SETUP_EXECUTABLE_PATHS
-  "Externals/paraview.app/Contents/bin "
-  "Library/Frameworks/Python.framework/Versions/${PYVER}/bin "
-  "Externals/bin "
-  "bin "
-)
-string(REPLACE ";" " " SETUP_EXECUTABLE_PATHS ${SETUP_EXECUTABLE_PATHS})
-set(SETUP_PYTHON_PATHS
-  "Externals/paraview.app/Contents/Python "
-  "Externals/lib/python${PYVER}/site-packages "
-  "Externals/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR}/site-packages "
-  "lib/python${PYVER}/site-packages "
-)
-string(REPLACE ";" " " SETUP_PYTHON_PATHS ${SETUP_PYTHON_PATHS})
-include(GetGitRevisionDescription)
-git_describe(UVCDAT_PROMPT_STRING)
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install.py.in
-    ${cdat_SOURCE_DIR}/installation/install.py
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/checked_get.sh.in
-  ${cdat_BINARY_DIR}/checked_get.sh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/predownload.py.in
-  ${cdat_BINARY_DIR}/predownload.py
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.sh.in
-  ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.sh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/reset_runtime.sh.in
-  ${CMAKE_INSTALL_PREFIX}/bin/reset_runtime.sh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/preofflinebuild.sh.in
-  ${cdat_BINARY_DIR}/preofflinebuild.sh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.csh.in
-  ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.csh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/reset_runtime.csh.in
-  ${CMAKE_INSTALL_PREFIX}/bin/reset_runtime.csh
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcdat.in
-  ${CMAKE_INSTALL_PREFIX}/bin/uvcdat
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat.in
-  ${CMAKE_INSTALL_PREFIX}/bin/cdat
-  @ONLY
-)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runtest.in
-  ${CMAKE_INSTALL_PREFIX}/bin/runtest
-  @ONLY
-)
-
-
-if (BUILD_TESTING)
-  configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runpytest.in
-    ${CMAKE_INSTALL_PREFIX}/bin/runpytest
-    @ONLY
-  )
-  add_subdirectory(testing)
-endif()
-
-# Where to install the wrapper scripts
-set(WRAPPER_INSTALL_LOCATION ${CMAKE_INSTALL_PREFIX}/wrappers
-    CACHE PATH
-    "Install wrapper scripts 'cdat', 'uvcdat' and 'loadcdat' in that directory")
-
-add_custom_command(
-        OUTPUT ${WRAPPER_INSTALL_LOCATION}/loadcdat
-        COMMAND ${CMAKE_COMMAND} -E copy
-        ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.sh
-        ${WRAPPER_INSTALL_LOCATION}/loadcdat)
-add_custom_command(
-        OUTPUT ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh
-        COMMAND ${CMAKE_COMMAND} -E copy
-        ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.csh
-        ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh)
-add_custom_command(
-        OUTPUT ${WRAPPER_INSTALL_LOCATION}/uvcdat
-        COMMAND ${CMAKE_COMMAND} -E copy
-        ${CMAKE_INSTALL_PREFIX}/bin/uvcdat
-        ${WRAPPER_INSTALL_LOCATION}/uvcdat)
-add_custom_command(
-        OUTPUT ${WRAPPER_INSTALL_LOCATION}/cdat
-        COMMAND ${CMAKE_COMMAND} -E copy
-        ${CMAKE_INSTALL_PREFIX}/bin/cdat
-        ${WRAPPER_INSTALL_LOCATION}/cdat)
-
-add_custom_target(wrappers ALL DEPENDS
-                  ${WRAPPER_INSTALL_LOCATION}/loadcdat
-                  ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh
-                  ${WRAPPER_INSTALL_LOCATION}/uvcdat
-                  ${WRAPPER_INSTALL_LOCATION}/cdat)
-
-# Package UV-CDAT with CPACK
-include(InstallRequiredSystemLibraries)
-
-set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "UVCDAT")
-set(CPACK_PACKAGE_VENDOR "UVCDAT")
-set(CPACK_PACKAGE_NAME "UVCDAT")
-set(CPACK_PACKAGE_VERSION_MAJOR "2")
-set(CPACK_PACKAGE_VERSION_MINOR "3")
-set(CPACK_PACKAGE_VERSION_PATCH "0")
-set(CPACK_PACKAGE_VERSION ${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH})
-set(CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/docs/README.txt")
-set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/docs/Legal.txt")
-set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Aashish Chaudhary") #required
-set(CPACK_PACKAGING_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX})
-set(CPACK_RPM_PACKAGE_PROVIDES /usr/local/uvcdat/bin/python /usr/local/uvcdat/bin/python2.7)
-set(CPACK_DESTINATION_BIN_PREFIX "bin")
-
-if (APPLE)
-  set(SB_EXTERNALS_DIR "Externals")
-  set(SB_LIB_DIR "Library/Frameworks/Python.framework/Versions/2.7/lib")
-  set(CPACK_GENERATOR DragNDrop)
-  set(CPACK_DESTINATION_PREFIX "\${CMAKE_INSTALL_PREFIX}/UVCDAT.app/Contents")
-  set(CPACK_DESTINATION_BIN_PREFIX "${CPACK_DESTINATION_PREFIX}/MacOS")
-endif()
-
-include(CPack)
-
-
-install(CODE "
-  set(SB_EXTERNALS_DIR ${SB_EXTERNALS_DIR})
-  set(SB_LIB_DIR ${SB_LIB_DIR})
-  set(PYVER ${PYVER})
-  set(PARAVIEW_MAJOR ${PARAVIEW_MAJOR})
-  set(PARAVIEW_MINOR ${PARAVIEW_MINOR})
-  set(VISIT_VERSION ${VISIT_VERSION})
-  set(CDAT_BUILD_PARAVIEW ${CDAT_BUILD_PARAVIEW})
-  set(SETUP_EXECUTABLE_PATHS \"${SETUP_EXECUTABLE_PATHS}\")
-  set(SETUP_PYTHON_PATHS \"${SETUP_PYTHON_PATHS}\")
-  set(SETUP_LIBRARY_PATHS \"${SETUP_LIBRARY_PATHS}\")
-
-  file(GLOB_RECURSE programs \"${CMAKE_INSTALL_PREFIX}/bin/*\")
-  file(GLOB programs_images \"${CMAKE_INSTALL_PREFIX}/bin/images/*\")
-  file(GLOB programs_tutorials \"${CMAKE_INSTALL_PREFIX}/bin/tutorials/*\")
-
-  if (NOT \"\${programs_images}\" STREQUAL \"\" OR NOT \"\${programs_tutorials}\" STREQUAL \"\")
-    list(REMOVE_ITEM programs \${programs_images} \${programs_tutorials})
-  endif()
-
-  set (resolved_programs \"\")
-  foreach (program \${programs})
-    get_filename_component(res_program \"\${program}\" REALPATH)
-    set (regex_match \"\")
-    # Do not install uuid as its dependencies are not resolved when using
-    # RPMBuild
-    file (STRINGS \"\${res_program}\" regex_match REGEX \"uuid\")
-    if (\"\${regex_match}\" STREQUAL \"\")
-      file (STRINGS \"\${res_program}\" regex_match REGEX \"#!${CMAKE_INSTALL_PREFIX}\")
-      if (\"\${regex_match}\" STREQUAL \"\")
-        list (APPEND resolved_programs \"\${res_program}\")
-      endif ()
-    endif ()
-  endforeach()
-  
-
-  file(INSTALL FILES \${resolved_programs} DESTINATION
-    \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\"
-       PERMISSIONS USE_SOURCE_PERMISSIONS
-  )
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/images\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/images\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/bin/images DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/tutorials\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/tutorials\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/bin/tutorials DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/Externals\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/Externals\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/Externals DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-      PERMISSIONS USE_SOURCE_PERMISSIONS
-      REGEX \"uuid\" EXCLUDE
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/include\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/include\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/include DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/lib\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/lib\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/lib DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  # Patch cgi.py to look for installed python
-  if (EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\")
-    file (READ \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\" CGI_FILE)
-    SET (SEARCH_REGEX \"\\\#! /usr/local/bin/python\")
-    SET (REPLACEMENT_TEXT \"#! /usr/bin/env python\")
-    STRING (REGEX REPLACE \"\${SEARCH_REGEX}\" \"\${REPLACEMENT_TEXT}\"
-      MODIFIED_FILE \"\${CGI_FILE}\")
-    file (WRITE \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\"
-      \"\${MODIFIED_FILE}\")
-  endif ()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/share\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/share\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/share DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/wrappers\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/wrappers\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/wrappers DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/man\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/man\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/man DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/vistrails\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/vistrails\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/vistrails DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-
-  if(EXISTS \"${CMAKE_INSTALL_PREFIX}/Library\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/Library\")
-    file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/Library DESTINATION
-      \"\${CMAKE_INSTALL_PREFIX}\"
-         PERMISSIONS USE_SOURCE_PERMISSIONS
-    )
-  endif()
-  
-  file(INSTALL FILES ${cdat_BINARY_DIR}/build_info.txt DESTINATION ${CMAKE_INSTALL_PREFIX}/info)
-
-  # Unset QT_LIB_DIR as we need to use the one in user's environment
-  # We need to keep in ming that we might need to build Qt on some systems
-  # (e.g. RH6) in which case this might break something
-  set(QT_LIB_DIR)
-
-  # Configure the environment setup script to point to the installation
-  # Creating a temporary file that will be installed.
-  configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.sh.in
-    \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/setup_runtime.sh\"
-    @ONLY
-  )
-
-  # Finally, create a symlink for python to point to point to installed python
-  if (EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python2.7\"
-      AND
-      NOT EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python\")
-    execute_process(COMMAND \${CMAKE_COMMAND} -E create_symlink
-      \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python2.7\"
-      \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python\"
-      )
-  endif ()
-
-  if (APPLE)
-    configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcdat.mac.in
-      ${CPACK_DESTINATION_BIN_PREFIX}/uvcdat
-      @ONLY
-    )
-    execute_process(COMMAND \${CMAKE_COMMAND} -E copy_directory ${cdat_SOURCE_DIR}/resources
-      ${CPACK_DESTINATION_PREFIX}/Resources
-      OUTPUT_VARIABLE out
-      RESULT_VARIABLE res
-      ERROR_VARIABLE err
-    )
-    if(NOT \${res} EQUAL 0)
-      message(\"Output: \${out}; Result: \${res}; Error: \${err}\")
-    endif()
-
-    execute_process(COMMAND \${CMAKE_COMMAND} -E copy ${cdat_CMAKE_SOURCE_DIR}/uvcdat.plist
-      ${CPACK_DESTINATION_PREFIX}/Info.plist
-      OUTPUT_VARIABLE out
-      RESULT_VARIABLE res
-      ERROR_VARIABLE err
-    )
-    if(NOT \${res} EQUAL 0)
-      message(\"Output: \${out}; Result: \${res}; Error: \${err}\")
-    endif()
-
-    execute_process(COMMAND ${PYTHON_EXECUTABLE} ${cdat_CMAKE_BINARY_DIR}/fix_install_name.py
-      ${CPACK_DESTINATION_PREFIX}
-      OUTPUT_VARIABLE out
-      RESULT_VARIABLE res
-      ERROR_VARIABLE err
-    )
-    if(NOT \${res} EQUAL 0)
-      message(\"Output: \${out}; Result: \${res}; Error: \${err}\")
-    endif()
-  endif()"
-
-  COMPONENT superbuild
-)
-
+add_subdirectory(testing)
diff --git a/testing/CMakeLists.txt b/testing/CMakeLists.txt
index 229488172..94dcc6dde 100644
--- a/testing/CMakeLists.txt
+++ b/testing/CMakeLists.txt
@@ -12,8 +12,7 @@ macro (cdat_add_test name)
     endif()
   endif()
 
-  add_test(${name} "${CMAKE_INSTALL_PREFIX}/bin/runtest"
-           ${ARGS})
+  add_test(${name} ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runtest  ${ARGS})
 
   if(DEFINED ENV{UVCDAT_ANONYMOUS_LOG})
     set_tests_properties (${name}
@@ -25,48 +24,14 @@ macro (cdat_add_test name)
     )
   endif()
 endmacro()
-if (CDAT_BUILD_GRAPHICS)
-  add_subdirectory(regrid)
-  add_subdirectory(vcs)
-  add_subdirectory(vcsaddons)
-  add_subdirectory(dv3d)
-endif()
+add_subdirectory(regrid)
+add_subdirectory(vcs)
+add_subdirectory(vcsaddons)
+add_subdirectory(dv3d)
 add_subdirectory(cdutil)
 add_subdirectory(Thermo)
 add_subdirectory(unidata)
 add_subdirectory(cdms2)
 add_subdirectory(xmgrace)
-if (CDAT_BUILD_OCGIS)
-  add_subdirectory(ocgis)
-endif()
-if (CDAT_BUILD_UVCMETRICSPKG)
-  add_subdirectory(metrics)
-endif()
 
-# Disabling ParaView tests
-#if (CDAT_BUILD_PARAVIEW)
-#  add_subdirectory(paraview)
-#endif()
-
-# Test RPY2
-if (CDAT_BUILD_RPY2)
-  add_subdirectory(rpy2)
-endif()
-
-# Test Matplotlib
-if (CDAT_BUILD_MATPLOTLIB)
-  add_subdirectory(matplotlib)
-endif()
-
-# PCMDI Tools
-if (CDAT_BUILD_PCMDI)
-    add_subdirectory(pcmdi)
-endif()
-
-# CMake module tests:
-# Test that out-of-source build detection is working:
-add_test(cmake_checkBuildOutOfSource
-  "${CMAKE_COMMAND}"
-    -DTEST_check_build_out_of_source=ON
-    -P "${cdat_SOURCE_DIR}/CMake/cmake_modules/CheckBuildOutOfSource.cmake"
-)
+add_subdirectory(pcmdi)
-- 
GitLab


From d1c8a97738a721e04452ff538d8b4b8cbeaaa799 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 1 Jun 2016 16:52:04 -0700
Subject: [PATCH 130/196] closer but got some reloc issues

---
 CMake/cdat_modules/cdat_external.cmake              |  7 +------
 CMake/cdat_modules/cdat_pkg.cmake                   |  1 -
 ...m_conda.bash => install_cdat_from_conda.bash.in} | 10 +++-------
 CMake/cdat_modules_extra/runtest                    |  4 ----
 CMake/cdat_modules_extra/runtest.in                 |  4 ++++
 CMakeLists.txt                                      | 13 +++++++++++++
 testing/CMakeLists.txt                              |  2 +-
 7 files changed, 22 insertions(+), 19 deletions(-)
 rename CMake/cdat_modules_extra/{install_cdat_from_conda.bash => install_cdat_from_conda.bash.in} (53%)
 delete mode 100755 CMake/cdat_modules_extra/runtest
 create mode 100755 CMake/cdat_modules_extra/runtest.in

diff --git a/CMake/cdat_modules/cdat_external.cmake b/CMake/cdat_modules/cdat_external.cmake
index 3a273bcb0..2f79aa507 100644
--- a/CMake/cdat_modules/cdat_external.cmake
+++ b/CMake/cdat_modules/cdat_external.cmake
@@ -1,11 +1,6 @@
 set(CDAT_source "${cdat_SOURCE_DIR}")
-
 set(WORKING_DIR "${cdat_CMAKE_BINARY_DIR}")
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_python_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/cdat_python_install_step.cmake
-  @ONLY)
 
-message("[CDAT BUILD SCRIPT:${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash")
 ExternalProject_Add(CDAT
   DOWNLOAD_DIR ""
   SOURCE_DIR ${cdat_SOURCE_DIR}
@@ -14,7 +9,7 @@ ExternalProject_Add(CDAT
   PATCH_COMMAND ""
   CONFIGURE_COMMAND ""
   BUILD_COMMAND ""
-  INSTALL_COMMAND ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash
+  INSTALL_COMMAND ${cdat_CMAKE_BINARY_DIR}/install_cdat_from_conda.bash
   DEPENDS ${CDAT_deps}
   ${ep_log_options}
 )
diff --git a/CMake/cdat_modules/cdat_pkg.cmake b/CMake/cdat_modules/cdat_pkg.cmake
index 3997c0d96..92aa4ed99 100644
--- a/CMake/cdat_modules/cdat_pkg.cmake
+++ b/CMake/cdat_modules/cdat_pkg.cmake
@@ -30,6 +30,5 @@ configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/version.in
   ${cdat_BINARY_DIR}/version
   @ONLY
 )
-message("[INFO] ADDIBNG CDAT")
 add_cdat_package(CDAT "" "" ON)
 
diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
similarity index 53%
rename from CMake/cdat_modules_extra/install_cdat_from_conda.bash
rename to CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index 9d393b5da..d8e7ec958 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,14 +1,10 @@
 #!/usr/bin/env bash
-SRCS=`dirname $0`
-conda create -n TEST_UVCDAT -c uvcdat uvcdat
-source activate TEST_UVCDAT
-cd ${SRCS}/../..
-echo "PATH:"`pwd`
+conda create -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ uvcdat
+source activate @CONDA_ENVIRONMENT_NAME@
 for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
-    cd Packages/${pkg}
+    cd @cdat_SOURCE_DIR@/Packages/${pkg}
     rm -rf build
     python setup.py install
-    cd ../..
 done
 
 
diff --git a/CMake/cdat_modules_extra/runtest b/CMake/cdat_modules_extra/runtest
deleted file mode 100755
index 8ca0cbf3b..000000000
--- a/CMake/cdat_modules_extra/runtest
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-source activate TEST _UVCDAT
-echo `which python`
-python $@
diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in
new file mode 100755
index 000000000..ee8f59a2c
--- /dev/null
+++ b/CMake/cdat_modules_extra/runtest.in
@@ -0,0 +1,4 @@
+#!/bin/bash
+source activate @CONDA_ENVIRONMENT_NAME@
+echo "Python:" `which python`
+python $@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 982d6d3e7..a9e2fb3f6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -233,6 +233,9 @@ set(CDAT_PACKAGE_CACHE_DIR
 
 include(ExternalProject)
 include(cdat_pkg)
+# CONDA Options
+set(CONDA_ENVIRONMENT_NAME ${cdat_VERSION} CACHE STRING "Name of conda environment we want to build CDAT in")
+set(CONDA_CHANNEL_UVCDAT uvcdat CACHE STRING "channels to use (if more than one use '-c' between channels e.g. uvcdat/label/nightly -c uvcdat)")
 # Now sort and include external packages
 #=============================================================================
 include(TopologicalSort)
@@ -263,4 +266,14 @@ foreach(package ${external_packages})
     include("${lc_package}_external")
   endif()
 endforeach()
+
+configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runtest.in
+    ${cdat_CMAKE_BINARY_DIR}/runtest
+    @ONLY
+    )
+configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash.in
+    ${cdat_CMAKE_BINARY_DIR}/install_cdat_from_conda.bash
+    @ONLY
+    )
+
 add_subdirectory(testing)
diff --git a/testing/CMakeLists.txt b/testing/CMakeLists.txt
index 94dcc6dde..0e7286fbe 100644
--- a/testing/CMakeLists.txt
+++ b/testing/CMakeLists.txt
@@ -12,7 +12,7 @@ macro (cdat_add_test name)
     endif()
   endif()
 
-  add_test(${name} ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runtest  ${ARGS})
+  add_test(${name} ${cdat_CMAKE_BINARY_DIR}/runtest  ${ARGS})
 
   if(DEFINED ENV{UVCDAT_ANONYMOUS_LOG})
     set_tests_properties (${name}
-- 
GitLab


From a8aa5a8db11f51ce776c0e4a060947fac1ea7168 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 1 Jun 2016 17:07:17 -0700
Subject: [PATCH 131/196] still not there

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index d8e7ec958..e7dd07f48 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,5 +1,7 @@
 #!/usr/bin/env bash
-conda create -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ uvcdat
+
+conda create -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info
+
 source activate @CONDA_ENVIRONMENT_NAME@
 for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
     cd @cdat_SOURCE_DIR@/Packages/${pkg}
@@ -7,6 +9,3 @@ for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace ge
     python setup.py install
 done
 
-
-
-
-- 
GitLab


From 5babb4b6d4483a8e61e75b625899345cf20ef642 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Thu, 2 Jun 2016 10:11:28 -0700
Subject: [PATCH 132/196] added -y so it doesn't wait for user answer

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index e7dd07f48..8b1382431 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-conda create -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info
+conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info
 
 source activate @CONDA_ENVIRONMENT_NAME@
 for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
-- 
GitLab


From e7f1ea72b92fc09f37ea9e8067944a854f865a69 Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Thu, 2 Jun 2016 09:36:43 -0400
Subject: [PATCH 133/196] BUG 1944: Extend the isoline attribute list with the
 last value from the existing attribute list

If there are no attributes in the list, we choose
default values: linewidth=1, linestyle='solid', linecolor='black'
---
 Packages/vcs/vcs/isoline.py                   |  2 ++
 Packages/vcs/vcs/vcsvtk/isolinepipeline.py    | 32 ++++++++-----------
 testing/vcs/CMakeLists.txt                    |  5 +++
 .../vcs/test_vcs_isoline_extend_attributes.py | 14 ++++++++
 testing/vcs/test_vcs_isoline_numpy.py         |  7 ++--
 5 files changed, 37 insertions(+), 23 deletions(-)
 create mode 100644 testing/vcs/test_vcs_isoline_extend_attributes.py

diff --git a/Packages/vcs/vcs/isoline.py b/Packages/vcs/vcs/isoline.py
index c1912c57a..5961dfbad 100755
--- a/Packages/vcs/vcs/isoline.py
+++ b/Packages/vcs/vcs/isoline.py
@@ -310,6 +310,8 @@ class Gi(object):
         iso.linewidths=([1,2,3,4,5,6,7,8])	# Will set the isoline to a specific
                                                 #     width size
         iso.linewidths=None			# Turns off the line width size
+    If the number of line styles, colors or widths is less than the number of levels
+    we extend the attribute list using the last attribute value in the attribute list.
 
     There are three ways to specify the text or font number:
         iso.text=(1,2,3,4,5,6,7,8,9)     	# Font numbers are between 1 and 9
diff --git a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
index 4cc1519a0..1204d973c 100644
--- a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
@@ -14,6 +14,14 @@ class IsolinePipeline(Pipeline2D):
         super(IsolinePipeline, self).__init__(gm, context_)
         self._needsCellData = False
 
+    def extendAttribute(self, attributes, default):
+        if len(attributes) < len(self._contourLevels):
+            if (len(attributes) == 0):
+                attributeValue = default
+            else:
+                attributeValue = attributes[-1]
+            attributes += [attributeValue] * (len(self._contourLevels) - len(attributes))
+
     def _updateContourLevelsAndColors(self):
         """Overrides baseclass implementation."""
         # Contour values:
@@ -31,9 +39,8 @@ class IsolinePipeline(Pipeline2D):
             else:
                 if numpy.allclose(self._contourLevels[0], 1.e20):
                     self._contourLevels[0] = -1.e20
-
-        # Contour colors:
         self._contourColors = self._gm.linecolors
+        self.extendAttribute(self._contourColors, default='black')
 
     def _plotInternal(self):
         """Overrides baseclass implementation."""
@@ -43,15 +50,10 @@ class IsolinePipeline(Pipeline2D):
         tmpLineStyles = []
 
         linewidth = self._gm.linewidths
-        linestyle = self._gm.line
-
-        if len(linewidth) < len(self._contourLevels):
-            # fill up the line width values
-            linewidth += [1.0] * (len(self._contourLevels) - len(linewidth))
+        self.extendAttribute(linewidth, default=1.0)
 
-        if len(linestyle) < len(self._contourLevels):
-            # fill up the line style values
-            linestyle += ['solid'] * (len(self._contourLevels) - len(linestyle))
+        linestyle = self._gm.line
+        self.extendAttribute(linestyle, default='solid')
 
         plotting_dataset_bounds = self.getPlottingBounds()
         x1, x2, y1, y2 = plotting_dataset_bounds
@@ -69,20 +71,14 @@ class IsolinePipeline(Pipeline2D):
                 if W == linewidth[i] and S == linestyle[i]:
                     # Ok same style and width, lets keep going
                     L.append(l)
-                    if i >= len(self._contourColors):
-                        C.append(self._contourColors[-1])
-                    else:
-                        C.append(self._contourColors[i])
+                    C.append(self._contourColors[i])
                 else:
                     tmpLevels.append(L)
                     tmpColors.append(C)
                     tmpLineWidths.append(W)
                     tmpLineStyles.append(S)
                     L = [l]
-                    if i >= len(self._contourColors):
-                        C = [self._contourColors[-1]]
-                    else:
-                        C = [self._contourColors[i]]
+                    C = [self._contourColors[i]]
                     W = linewidth[i]
                     S = linestyle[i]
 
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index b1e6247e7..d32e9cb90 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -692,6 +692,11 @@ cdat_add_test(test_vcs_settings_color_name_rgba
    ENDFOREACH(ptype)
   ENDFOREACH(gm)
 
+  cdat_add_test(test_vcs_isoline_extend_attributes
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_extend_attributes.py
+    ${BASELINE_DIR}/test_vcs_isoline_extend_attributes.png
+    )
   cdat_add_test(test_vcs_isoline_numpy
     "${PYTHON_EXECUTABLE}"
     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_numpy.py
diff --git a/testing/vcs/test_vcs_isoline_extend_attributes.py b/testing/vcs/test_vcs_isoline_extend_attributes.py
new file mode 100644
index 000000000..77b212c3e
--- /dev/null
+++ b/testing/vcs/test_vcs_isoline_extend_attributes.py
@@ -0,0 +1,14 @@
+import cdms2
+import vcs
+import testing.regression as regression
+
+x = regression.init()
+isoline = vcs.createisoline()
+f = cdms2.open(vcs.sample_data + '/clt.nc')
+s = f("clt")
+isoline.line = ["dash-dot"]
+isoline.linecolors = [250]
+isoline.linewidths = [5]
+x.plot(s, isoline)
+fnm = "test_vcs_isoline_extend_attributes.png"
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_isoline_numpy.py b/testing/vcs/test_vcs_isoline_numpy.py
index 147f2f499..4534529ba 100644
--- a/testing/vcs/test_vcs_isoline_numpy.py
+++ b/testing/vcs/test_vcs_isoline_numpy.py
@@ -1,13 +1,10 @@
 import os, sys, cdms2, vcs, testing.regression as regression
 
 x = regression.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.drawlogooff()
 fnm = os.path.join(vcs.sample_data,'clt.nc')
 f = cdms2.open(fnm)
 s = f("clt")
 gm = x.createisofill()
-x.plot(s.filled(),gm,bg=1)
+x.plot(s.filled(),gm)
 fnm = "test_vcs_isoline_numpy.png"
-regression.run(x, fnm)
\ No newline at end of file
+regression.run(x, fnm)
-- 
GitLab


From 888462c5bb9f64129f14ec8ee703e31c1284f060 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Thu, 2 Jun 2016 21:43:32 -0700
Subject: [PATCH 134/196] add DYLD env library for mac in runtest

Developers will also need it in their PATH in order to use it outside of runtest.
---
 CMake/cdat_modules_extra/runtest.in | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in
index ee8f59a2c..8d37c2033 100755
--- a/CMake/cdat_modules_extra/runtest.in
+++ b/CMake/cdat_modules_extra/runtest.in
@@ -1,4 +1,5 @@
 #!/bin/bash
 source activate @CONDA_ENVIRONMENT_NAME@
+export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"`
 echo "Python:" `which python`
 python $@
-- 
GitLab


From d7581f3a24cbcaee0adffe369443b604d74b763c Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Fri, 3 Jun 2016 11:30:04 -0400
Subject: [PATCH 135/196] BUG: datawc does not work on a time axis.

This happened because datawc is converted to cdtime.reltime type.
---
 Packages/vcs/vcs/utils.py                   | 36 ++++++++++++++-------
 testing/vcs/CMakeLists.txt                  |  5 +++
 testing/vcs/test_vcs_boxfill_datawc_time.py | 23 +++++++++++++
 3 files changed, 52 insertions(+), 12 deletions(-)
 create mode 100644 testing/vcs/test_vcs_boxfill_datawc_time.py

diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py
index e69466eab..db4db640e 100644
--- a/Packages/vcs/vcs/utils.py
+++ b/Packages/vcs/vcs/utils.py
@@ -1666,13 +1666,25 @@ def creategraphicsmethod(gtype, gname='default', name=None):
     return copy_mthd
 
 
+# Returns the float value for datawc_...
+# datawc_ can be a float or a cdtime.reltime
+# TODO: Investigate why datawc is converted to a cdtime.reltime
+def getDataWcValue(v):
+    if (type(v) is type(cdtime.reltime(0, 'months since 1900'))):
+        return v.value
+    else:
+        return v
+
+
 def getworldcoordinates(gm, X, Y):
     """Given a graphics method and two axes
     figures out correct world coordinates"""
     # compute the spanning in x and y, and adjust for the viewport
     wc = [0, 1, 0, 1]
     try:
-        if gm.datawc_x1 > 9.E19:
+        datawc = [getDataWcValue(gm.datawc_x1), getDataWcValue(gm.datawc_x2),
+                  getDataWcValue(gm.datawc_y1), getDataWcValue(gm.datawc_y2)]
+        if numpy.isclose(datawc[0], 1.e20):
             try:
                 i = 0
                 try:
@@ -1684,8 +1696,8 @@ def getworldcoordinates(gm, X, Y):
             except:
                 wc[0] = X[:].min()
         else:
-            wc[0] = gm.datawc_x1
-        if gm.datawc_x2 > 9.E19:
+            wc[0] = datawc[0]
+        if numpy.isclose(datawc[1], 1.e20):
             try:
                 i = -1
                 try:
@@ -1697,18 +1709,18 @@ def getworldcoordinates(gm, X, Y):
             except:
                 wc[1] = X[:].max()
         else:
-            wc[1] = gm.datawc_x2
+            wc[1] = datawc[1]
     except:
         return wc
     if (((not isinstance(X, cdms2.axis.TransientAxis) and
           isinstance(Y, cdms2.axis.TransientAxis)) or
          not vcs.utils.monotonic(X[:])) and
-        numpy.allclose([gm.datawc_x1, gm.datawc_x2], 1.e20))\
+        numpy.allclose([datawc[0], datawc[1]], 1.e20))\
             or (hasattr(gm, "projection") and
                 vcs.elements["projection"][gm.projection].type != "linear"):
         wc[0] = X[:].min()
         wc[1] = X[:].max()
-    if gm.datawc_y1 > 9.E19:
+    if numpy.isclose(datawc[2], 1.e20):
         try:
             i = 0
             try:
@@ -1720,8 +1732,8 @@ def getworldcoordinates(gm, X, Y):
         except:
             wc[2] = Y[:].min()
     else:
-        wc[2] = gm.datawc_y1
-    if gm.datawc_y2 > 9.E19:
+        wc[2] = datawc[2]
+    if numpy.isclose(datawc[3], 1.e20):
         try:
             i = -1
             try:
@@ -1733,16 +1745,16 @@ def getworldcoordinates(gm, X, Y):
         except:
             wc[3] = Y[:].max()
     else:
-        wc[3] = gm.datawc_y2
+        wc[3] = datawc[3]
     if (((not isinstance(Y, cdms2.axis.TransientAxis) and
           isinstance(X, cdms2.axis.TransientAxis)) or not vcs.utils.monotonic(Y[:])) and
-        numpy.allclose([gm.datawc_y1, gm.datawc_y2], 1.e20)) \
+        numpy.allclose([datawc[2], datawc[3]], 1.e20)) \
             or (hasattr(gm, "projection") and
                 vcs.elements["projection"][
                 gm.projection].type.lower().split()[0]
                 not in ["linear", "polar"] and
-                numpy.allclose([gm.datawc_y1, gm.datawc_y2], 1.e20) and
-                numpy.allclose([gm.datawc_x1, gm.datawc_x2], 1.e20)):
+                numpy.allclose([datawc[2], datawc[3]], 1.e20) and
+                numpy.allclose([datawc[0], datawc[1]], 1.e20)):
         wc[2] = Y[:].min()
         wc[3] = Y[:].max()
     if wc[3] == wc[2]:
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index d32e9cb90..dc8cfc15f 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -442,6 +442,11 @@ if (CDAT_DOWNLOAD_SAMPLE_DATA)
     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py
     "${BASELINE_DIR}/test_vcs_boxfill_custom.png"
     )
+  cdat_add_test(test_vcs_boxfill_datawc_time
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_datawc_time.py
+    "${BASELINE_DIR}/test_vcs_boxfill_datawc_time.png"
+    )
   cdat_add_test(test_vcs_boxfill_custom_non_default_levels
     "${PYTHON_EXECUTABLE}"
     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
diff --git a/testing/vcs/test_vcs_boxfill_datawc_time.py b/testing/vcs/test_vcs_boxfill_datawc_time.py
new file mode 100644
index 000000000..3b459b7e7
--- /dev/null
+++ b/testing/vcs/test_vcs_boxfill_datawc_time.py
@@ -0,0 +1,23 @@
+import cdms2, os, sys, vcs, cdtime, testing.regression as regression
+
+# Test that we can restrict the plot using datawc along a time axis
+dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
+clt = dataFile("clt")
+clt = clt(latitude=(-90.0, 90.0), longitude=(0.), squeeze=1,
+          time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
+
+# Initialize canvas:
+canvas = regression.init()
+
+# Create and plot quick boxfill with default settings:
+boxfill=canvas.createboxfill()
+
+# Change the type
+boxfill.boxfill_type = 'custom'
+boxfill.datawc_y1 = 12
+
+canvas.plot(clt, boxfill, bg=1)
+
+# Load the image testing module:
+# Create the test image and compare:
+regression.run(canvas, "test_vcs_boxfill_datawc_time.png")
-- 
GitLab


From 67300c9a4f3470daf8b95e61bb89cddd04634e4c Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Mon, 6 Jun 2016 16:23:39 -0700
Subject: [PATCH 136/196] closer, now build and some ctest pass

---
 .../install_cdat_from_conda.bash.in           | 10 +++--
 CMake/cdat_modules_extra/runtest.in           |  2 +
 .../files.txt => vcs/Share/sample_files.txt}  |  0
 Packages/vcs/setup.py                         |  1 +
 Packages/vcs/vcs/utils.py                     | 40 +++++++++++++++++++
 5 files changed, 50 insertions(+), 3 deletions(-)
 rename Packages/{dat/files.txt => vcs/Share/sample_files.txt} (100%)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index 8b1382431..81a54299b 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,11 +1,15 @@
 #!/usr/bin/env bash
 
-conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info
+conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info flake8 requests
 
 source activate @CONDA_ENVIRONMENT_NAME@
-for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
+for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
     cd @cdat_SOURCE_DIR@/Packages/${pkg}
     rm -rf build
-    python setup.py install
+    if [ ${pkg} == "vcs" ]; then
+        python setup.py install --old-and-unmanageable
+    else
+        python setup.py install
+    fi
 done
 
diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in
index 8d37c2033..4cd4b5fd7 100755
--- a/CMake/cdat_modules_extra/runtest.in
+++ b/CMake/cdat_modules_extra/runtest.in
@@ -2,4 +2,6 @@
 source activate @CONDA_ENVIRONMENT_NAME@
 export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"`
 echo "Python:" `which python`
+# make sure data is downloaded
+python -c "import vcs;vcs.download_sample_data_files()"
 python $@
diff --git a/Packages/dat/files.txt b/Packages/vcs/Share/sample_files.txt
similarity index 100%
rename from Packages/dat/files.txt
rename to Packages/vcs/Share/sample_files.txt
diff --git a/Packages/vcs/setup.py b/Packages/vcs/setup.py
index e3f9dd022..10f64da9b 100755
--- a/Packages/vcs/setup.py
+++ b/Packages/vcs/setup.py
@@ -40,6 +40,7 @@ setup(name="vcs",
                                  'Share/text_icon.png',
                                  'Share/fill_icon.png',
                                  'Share/line_icon.png',
+                                 'Share/sample_files.txt',
                                  'Fonts/Adelon_Regular.ttf',
                                  'Fonts/Arabic.ttf',
                                  'Fonts/Athens_Greek.ttf',
diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py
index e69466eab..791ee41d2 100644
--- a/Packages/vcs/vcs/utils.py
+++ b/Packages/vcs/vcs/utils.py
@@ -1794,3 +1794,43 @@ def png_read_metadata(path):
     for i in range(0, numberOfTextChunks):
         m[reader.GetTextKey(i)] = reader.GetTextValue(i)
     return m
+
+def download_sample_data_files(path=None):
+    import requests
+    import sys
+    import hashlib
+    if path is None:
+        path = vcs.sample_data
+    samples = open(os.path.join(vcs.prefix,"share","vcs","sample_files.txt")).readlines()
+    for sample in samples:
+        good_md5,name = sample.split()
+        local_filename = os.path.join(path,name)
+        try:
+            os.makedirs(os.path.dirname(local_filename))
+        except Exception,err:
+            pass
+        attempts = 0
+        while attempts < 3:
+            md5 = hashlib.md5()
+            if os.path.exists(local_filename):
+                f=open(local_filename)
+                md5.update(f.read())
+                if md5.hexdigest()==good_md5:
+                    attempts = 5
+                    continue
+            print "Downloading:",name,"in",local_filename
+            r = requests.get("http://uvcdat.llnl.gov/cdat/sample_data/"+name,stream = True)
+            with open(local_filename, 'wb') as f:
+                for chunk in r.iter_content(chunk_size=1024): 
+                    if chunk: # filter local_filename keep-alive new chunks
+                        f.write(chunk)
+                        md5.update(chunk)
+            f.close()
+            if md5.hexdigest() == good_md5:
+                attempts = 5
+            else:
+                attempts+=1
+
+
+
+
-- 
GitLab


From 2247a41830f4971314644dddde2f3ffa3bc9c3a7 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 07:50:22 -0700
Subject: [PATCH 137/196] working on flake8

---
 CMake/cdat_modules_extra/runtest.in | 2 +-
 testing/Thermo/CMakeLists.txt       | 4 ++--
 testing/vcs/CMakeLists.txt          | 4 ++--
 testing/xmgrace/CMakeLists.txt      | 4 ++--
 4 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in
index 4cd4b5fd7..1a470bbc8 100755
--- a/CMake/cdat_modules_extra/runtest.in
+++ b/CMake/cdat_modules_extra/runtest.in
@@ -4,4 +4,4 @@ export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(s
 echo "Python:" `which python`
 # make sure data is downloaded
 python -c "import vcs;vcs.download_sample_data_files()"
-python $@
+$*
diff --git a/testing/Thermo/CMakeLists.txt b/testing/Thermo/CMakeLists.txt
index bae57cea1..c855dc953 100644
--- a/testing/Thermo/CMakeLists.txt
+++ b/testing/Thermo/CMakeLists.txt
@@ -1,5 +1,5 @@
-add_test(flake8_Thermo
-  "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/Thermo/Lib/"
+cdat_add_test(flake8_Thermo
+  flake8 "${cdat_SOURCE_DIR}/Packages/Thermo/Lib/"
   --show-source # Show context for detected errors
   --statistics  # Show summary of errors at end of output
   --max-line-length=120 # Reasonable line length
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index b1e6247e7..98a4f5fc2 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -1,7 +1,7 @@
 set(BASELINE_DIR "${UVCDAT_GIT_TESTDATA_DIR}/baselines/vcs")
 
-add_test(flake8_vcs
-  "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/vcs/vcs/"
+cdat_add_test(flake8_vcs
+  flake8 "${cdat_SOURCE_DIR}/Packages/vcs/vcs/"
   --show-source # Show context for detected errors
   --statistics  # Show summary of errors at end of output
   --max-line-length=120 # Reasonable line length
diff --git a/testing/xmgrace/CMakeLists.txt b/testing/xmgrace/CMakeLists.txt
index e1de5fd91..470aa056d 100644
--- a/testing/xmgrace/CMakeLists.txt
+++ b/testing/xmgrace/CMakeLists.txt
@@ -1,5 +1,5 @@
-add_test(flake8_xmgrace
-  "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/xmgrace/Lib/"
+cdat_add_test(flake8_xmgrace
+  flake8 "${cdat_SOURCE_DIR}/Packages/xmgrace/Lib/"
   --show-source # Show context for detected errors
   --statistics  # Show summary of errors at end of output
   --max-line-length=128 # Max line 128 not 80
-- 
GitLab


From cad058d3496ef5980193c9af7fe0301b9a780838 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 08:04:53 -0700
Subject: [PATCH 138/196] flake8 passes again

---
 Packages/Thermo/Lib/thermo.py               |  2 +-
 Packages/vcs/vcs/utils.py                   | 28 +++++++++------------
 Packages/xmgrace/Lib/ValidationFunctions.py | 10 +++-----
 testing/vcs/CMakeLists.txt                  |  1 +
 4 files changed, 18 insertions(+), 23 deletions(-)

diff --git a/Packages/Thermo/Lib/thermo.py b/Packages/Thermo/Lib/thermo.py
index c2d5ccc58..9f8cc6a93 100644
--- a/Packages/Thermo/Lib/thermo.py
+++ b/Packages/Thermo/Lib/thermo.py
@@ -4,9 +4,9 @@ import cdms2
 import genutil
 import unidata
 import vcs
+import numpy
 from vcs import VCS_validation_functions
 thermo_objects = []
-import numpy
 
 
 def Es(T, method=None):
diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py
index 791ee41d2..1f0f8edad 100644
--- a/Packages/vcs/vcs/utils.py
+++ b/Packages/vcs/vcs/utils.py
@@ -1795,42 +1795,38 @@ def png_read_metadata(path):
         m[reader.GetTextKey(i)] = reader.GetTextValue(i)
     return m
 
+
 def download_sample_data_files(path=None):
     import requests
-    import sys
     import hashlib
     if path is None:
         path = vcs.sample_data
-    samples = open(os.path.join(vcs.prefix,"share","vcs","sample_files.txt")).readlines()
+    samples = open(os.path.join(vcs.prefix, "share", "vcs", "sample_files.txt")).readlines()
     for sample in samples:
-        good_md5,name = sample.split()
-        local_filename = os.path.join(path,name)
+        good_md5, name = sample.split()
+        local_filename = os.path.join(path, name)
         try:
             os.makedirs(os.path.dirname(local_filename))
-        except Exception,err:
+        except:
             pass
         attempts = 0
         while attempts < 3:
             md5 = hashlib.md5()
             if os.path.exists(local_filename):
-                f=open(local_filename)
+                f = open(local_filename)
                 md5.update(f.read())
-                if md5.hexdigest()==good_md5:
+                if md5.hexdigest() == good_md5:
                     attempts = 5
                     continue
-            print "Downloading:",name,"in",local_filename
-            r = requests.get("http://uvcdat.llnl.gov/cdat/sample_data/"+name,stream = True)
+            print "Downloading:", name, "in", local_filename
+            r = requests.get("http://uvcdat.llnl.gov/cdat/sample_data/" + name, stream=True)
             with open(local_filename, 'wb') as f:
-                for chunk in r.iter_content(chunk_size=1024): 
-                    if chunk: # filter local_filename keep-alive new chunks
+                for chunk in r.iter_content(chunk_size=1024):
+                    if chunk:  # filter local_filename keep-alive new chunks
                         f.write(chunk)
                         md5.update(chunk)
             f.close()
             if md5.hexdigest() == good_md5:
                 attempts = 5
             else:
-                attempts+=1
-
-
-
-
+                attempts += 1
diff --git a/Packages/xmgrace/Lib/ValidationFunctions.py b/Packages/xmgrace/Lib/ValidationFunctions.py
index b9325ba02..b2a68514c 100644
--- a/Packages/xmgrace/Lib/ValidationFunctions.py
+++ b/Packages/xmgrace/Lib/ValidationFunctions.py
@@ -74,8 +74,8 @@ def isNumber(value):
 def checkPositiveInt(self, name, value):
     if not isNumber(value):
         raise ValueError(name + ' must be an integer')
-    elif (not (isinstance(value, int) or isinstance(value, long))
-            and (not int(value) == value)):
+    elif (not (isinstance(value, int) or isinstance(value, long)) and
+            (not int(value) == value)):
         raise ValueError(name + ' must be an integer')
     elif value < 0:
         raise ValueError(name + ' must be positve')
@@ -172,8 +172,7 @@ def checkSide(self, name, value):
 def checkLoc(self, name, value):
     """ check the loc (auto) or a location """
     if not (
-        (isinstance(value, str) and value.lower() == 'auto')
-        or
+        (isinstance(value, str) and value.lower() == 'auto') or
         isListorTuple(value)
     ):
         raise ValueError(name + 'must be a "auto" or a tuple/list')
@@ -296,8 +295,7 @@ def checkFormat(self, name, value):
 def checkAuto(self, name, value):
     """ check for 'auto' or a value """
     if not (
-        (isinstance(value, str) and value.lower() == 'auto')
-        or
+        (isinstance(value, str) and value.lower() == 'auto') or
         isNumber(value)
     ):
         raise ValueError(name + 'must be a "auto" or a number')
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index 98a4f5fc2..83042128b 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -5,6 +5,7 @@ cdat_add_test(flake8_vcs
   --show-source # Show context for detected errors
   --statistics  # Show summary of errors at end of output
   --max-line-length=120 # Reasonable line length
+  --ignore=F999,E121,E123,E126,E226,E24,E704 # recent version show zillions of errors if object come from an import * line
 )
 
 cdat_add_test(test_vcs_bad_png_path
-- 
GitLab


From 3aa5426d6d7b4caf365aecca4ba6dd34223ca90e Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 08:21:30 -0700
Subject: [PATCH 139/196] added code to automatically update the baselines

---
 CMake/cdat_modules_extra/runtest.in |    1 +
 Packages/testing/regression.py      |    8 +-
 testing/vcs/CMakeLists.txt          | 1007 +++++++++++++--------------
 3 files changed, 511 insertions(+), 505 deletions(-)

diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in
index 1a470bbc8..194632e5d 100755
--- a/CMake/cdat_modules_extra/runtest.in
+++ b/CMake/cdat_modules_extra/runtest.in
@@ -4,4 +4,5 @@ export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(s
 echo "Python:" `which python`
 # make sure data is downloaded
 python -c "import vcs;vcs.download_sample_data_files()"
+echo "Running:"$*
 $*
diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index 6b1b2bf9b..72047380e 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -83,7 +83,7 @@ def find_alternates(fname):
     return results
 
 def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
-                       baseline=True, cleanup=True):
+                       baseline=True, cleanup=True, update_baselines = False):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
@@ -133,6 +133,12 @@ def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThresh
 
     print "All baselines failed! Lowest error (%f) exceeds threshold (%f)."%(bestDiff, threshold)
 
+    if update_baselines:
+        print "Update baselines is ON so we are assuming you know what you're doing"
+        print "Replacing baseline %s with new baseline from %s" % (bestFilename, fname)
+        import shutil
+        shutil.copy2(fname, bestFilename)
+
     sp = fname.split(".")
     diffFilename = ".".join(sp[:-1])+"_diff."+sp[-1]
     print "Saving image diff at '%s'."%diffFilename
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index 83042128b..ba48af6b3 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -1,4 +1,5 @@
 set(BASELINE_DIR "${UVCDAT_GIT_TESTDATA_DIR}/baselines/vcs")
+set(PYTHON_EXECUTABLE python)
 
 cdat_add_test(flake8_vcs
   flake8 "${cdat_SOURCE_DIR}/Packages/vcs/vcs/"
@@ -406,531 +407,529 @@ cdat_add_test(test_vcs_geometry
 # These tests perform plotting and need sample data
 #
 ##############################################################################
-if (CDAT_DOWNLOAD_SAMPLE_DATA)
-  FOREACH(gm boxfill isofill meshfill isoline vector)
-    FOREACH(src vcs canvas gm)
-      cdat_add_test(test_vcs_colormaps_source_${gm}_${src}
-        "${PYTHON_EXECUTABLE}"
-        ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_colormaps_source.py
-        -b ${BASELINE_DIR}/test_vcs_colormaps_source_${gm}_${src}.png
-        -g ${gm}
-        -s ${src}
-        )
-    ENDFOREACH()
-  ENDFOREACH()
-
-  # NOTE Fix baseline name
-  cdat_add_test(test_vcs_meshfill_regular_grid
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_regular_grid.py
-    "${BASELINE_DIR}/test_vcs_meshfill_regular_grid.png"
-  )
-  # NOTE Fix baseline name
-  cdat_add_test(test_vcs_plot_unstructured_via_boxfill
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
-    "${BASELINE_DIR}/test_vcs_plot_unstructured_via_boxfill.png"
-  )
-  # NOTE Fix baseline name
-  cdat_add_test(test_vcs_box_custom_as_def_vistrails_exts
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
-    "${BASELINE_DIR}/test_vcs_box_custom_as_def_vistrails_exts.png"
-    )
-  # NOTE Fix baseline name
-  cdat_add_test(test_vcs_boxfill_custom
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py
-    "${BASELINE_DIR}/test_vcs_boxfill_custom.png"
-    )
-  cdat_add_test(test_vcs_boxfill_custom_non_default_levels
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
-    "${BASELINE_DIR}/test_vcs_boxfill_custom_non_default_levels.png"
-    )
-  cdat_add_test(test_vcs_boxfill_custom_ext1
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1.py
-    "${BASELINE_DIR}/test_vcs_boxfill_custom_ext1.png"
-    )
-  cdat_add_test(test_vcs_boxfill_custom_ext2
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext2.py
-    "${BASELINE_DIR}/test_vcs_boxfill_custom_ext2.png"
-    )
-  cdat_add_test(test_vcs_boxfill_custom_ext1_ext2
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
-    "${BASELINE_DIR}/test_vcs_boxfill_custom_ext1_ext2.png"
-    )
-  cdat_add_test(test_vcs_boxfill_number_color_less_than_number_levels
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_less_than_number_levels.py
-    )
-  cdat_add_test(test_vcs_boxfill_number_color_more_than_number_levels
+FOREACH(gm boxfill isofill meshfill isoline vector)
+FOREACH(src vcs canvas gm)
+  cdat_add_test(test_vcs_colormaps_source_${gm}_${src}
     "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_more_than_number_levels.py
-    )
-  cdat_add_test(test_vcs_user_passed_date
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date.py
-    "${BASELINE_DIR}/test_vcs_user_passed_date.png"
-    )
-  cdat_add_test(test_vcs_user_passed_date_as_string
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date_as_string.py
-    "${BASELINE_DIR}/test_vcs_user_passed_date_as_string.png"
-    )
-  cdat_add_test(test_vcs_auto_time_labels
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_auto_time_labels.py
-    "${BASELINE_DIR}/test_vcs_auto_time_labels.png"
-    )
-  cdat_add_test(test_vcs_isofill_data_read_north_to_south
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
-    "${BASELINE_DIR}/test_vcs_isofill_data_read_north_to_south.png"
-    )
-  # Rename baseline
-  cdat_add_test(test_vcs_lon_axes_freak_out
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lon_axes_freak_out.py
-    "${BASELINE_DIR}/test_vcs_lon_axes_freak_out.png"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_colormaps_source.py
+    -b ${BASELINE_DIR}/test_vcs_colormaps_source_${gm}_${src}.png
+    -g ${gm}
+    -s ${src}
     )
+ENDFOREACH()
+ENDFOREACH()
+
+# NOTE Fix baseline name
+cdat_add_test(test_vcs_meshfill_regular_grid
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_regular_grid.py
+"${BASELINE_DIR}/test_vcs_meshfill_regular_grid.png"
+)
+# NOTE Fix baseline name
+cdat_add_test(test_vcs_plot_unstructured_via_boxfill
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
+"${BASELINE_DIR}/test_vcs_plot_unstructured_via_boxfill.png"
+)
+# NOTE Fix baseline name
+cdat_add_test(test_vcs_box_custom_as_def_vistrails_exts
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
+"${BASELINE_DIR}/test_vcs_box_custom_as_def_vistrails_exts.png"
+)
+# NOTE Fix baseline name
+cdat_add_test(test_vcs_boxfill_custom
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py
+"${BASELINE_DIR}/test_vcs_boxfill_custom.png"
+)
+cdat_add_test(test_vcs_boxfill_custom_non_default_levels
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
+"${BASELINE_DIR}/test_vcs_boxfill_custom_non_default_levels.png"
+)
+cdat_add_test(test_vcs_boxfill_custom_ext1
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1.py
+"${BASELINE_DIR}/test_vcs_boxfill_custom_ext1.png"
+)
+cdat_add_test(test_vcs_boxfill_custom_ext2
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext2.py
+"${BASELINE_DIR}/test_vcs_boxfill_custom_ext2.png"
+)
+cdat_add_test(test_vcs_boxfill_custom_ext1_ext2
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
+"${BASELINE_DIR}/test_vcs_boxfill_custom_ext1_ext2.png"
+)
+cdat_add_test(test_vcs_boxfill_number_color_less_than_number_levels
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_less_than_number_levels.py
+)
+cdat_add_test(test_vcs_boxfill_number_color_more_than_number_levels
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_more_than_number_levels.py
+)
+cdat_add_test(test_vcs_user_passed_date
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date.py
+"${BASELINE_DIR}/test_vcs_user_passed_date.png"
+)
+cdat_add_test(test_vcs_user_passed_date_as_string
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date_as_string.py
+"${BASELINE_DIR}/test_vcs_user_passed_date_as_string.png"
+)
+cdat_add_test(test_vcs_auto_time_labels
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_auto_time_labels.py
+"${BASELINE_DIR}/test_vcs_auto_time_labels.png"
+)
+cdat_add_test(test_vcs_isofill_data_read_north_to_south
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
+"${BASELINE_DIR}/test_vcs_isofill_data_read_north_to_south.png"
+)
+# Rename baseline
+cdat_add_test(test_vcs_lon_axes_freak_out
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lon_axes_freak_out.py
+"${BASELINE_DIR}/test_vcs_lon_axes_freak_out.png"
+)
 cdat_add_test(test_vcs_set_colors_name_rgba_1d
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
-    "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_1d.png"
-    )
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
+"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_1d.png"
+)
 cdat_add_test(test_vcs_set_colors_name_rgba_isoline
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
-    "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isoline.png"
-    )
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
+"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isoline.png"
+)
 cdat_add_test(test_vcs_settings_color_name_rgba_meshfill
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
-    "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_meshfill.png"
-    )
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
+"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_meshfill.png"
+)
 cdat_add_test(test_vcs_settings_color_name_rgba_boxfill
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
-    "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_boxfill.png"
-    )
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
+"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_boxfill.png"
+)
 cdat_add_test(test_vcs_settings_color_name_rgba
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba.py
-    "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isofill.png"
-    )
-  cdat_add_test(test_vcs_isofill_mask_cell_shift
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_mask_cell_shift.py
-    "${BASELINE_DIR}/test_vcs_isofill_mask_cell_shift.png"
-    )
-  cdat_add_test(test_vcs_bad_time_units
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_bad_time_units.py
-    )
-  cdat_add_test(test_vcs_plot_file_varible
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_file_var.py
-    )
-  FOREACH(gm boxfill isofill meshfill)
-    FOREACH(ori horizontal vertical)
-      FOREACH(ext1 y n)
-        FOREACH(ext2 y n)
-          cdat_add_test(test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}
-            "${PYTHON_EXECUTABLE}"
-            ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_legend.py
-            --gm_type=${gm}
-            --orientation=${ori}
-            --ext1=${ext1}
-            --ext2=${ext2}
-            "--source=${BASELINE_DIR}/test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}.png"
-            )
-        ENDFOREACH(ext2)
-      ENDFOREACH(ext1)
-    ENDFOREACH(ori)
-  ENDFOREACH(gm)
-  FOREACH(gm boxfill isofill isoline vector meshfill yxvsx xvsy xyvsy 1d scatter)
-    cdat_add_test(test_vcs_basic_${gm}_transparent
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --transparent
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_transparent.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_zero
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --zero
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_zero.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_masked
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --mask
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked.png"
-      )
-  ENDFOREACH(gm)
-  cdat_add_test(test_vcs_show
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_show.py
-    )
-  FOREACH(gm boxfill isofill isoline meshfill )
-   FOREACH(ptype 0 -3 aeqd)
-    cdat_add_test(test_vcs_basic_${gm}_masked_${ptype}_proj
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --mask
-      --projection=${ptype}
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked_${ptype}_proj.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --projection=${ptype}
-      --lat1=-90
-      --lat2=0
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --projection=${ptype}
-      --lat1=-90
-      --lat2=0
-      --lon1=-180
-      --lon2=180
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --projection=${ptype}
-      --lat1=-90
-      --lat2=0
-      --lon1=0
-      --lon2=360
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --projection=${ptype}
-      --lat1=90
-      --lat2=0
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_gm
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --projection=${ptype}
-      --lat1=-90
-      --lat2=0
-      --range_via_gm
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_via_gm.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_gm
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --projection=${ptype}
-      --lat1=-90
-      --lat2=0
-      --lon1=-180
-      --lon2=180
-      --range_via_gm
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_via_gm.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_gm
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --projection=${ptype}
-      --lat1=-90
-      --lat2=0
-      --lon1=0
-      --lon2=360
-      --range_via_gm
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_via_gm.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --projection=${ptype}
-      --lat1=90
-      --lat2=0
-      --range_via_gm
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH_via_gm.png"
-      )
-    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm_flip
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-      --gm_type=${gm}
-      --projection=${ptype}
-      --lat1=90
-      --lat2=0
-      --range_via_gm
-      --gm_flips_lat_range
-      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_gmflip_NH_via_gm.png"
-      )
-   ENDFOREACH(ptype)
-  ENDFOREACH(gm)
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba.py
+"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isofill.png"
+)
+cdat_add_test(test_vcs_isofill_mask_cell_shift
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_mask_cell_shift.py
+"${BASELINE_DIR}/test_vcs_isofill_mask_cell_shift.png"
+)
+cdat_add_test(test_vcs_bad_time_units
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_bad_time_units.py
+)
+cdat_add_test(test_vcs_plot_file_varible
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_file_var.py
+)
+FOREACH(gm boxfill isofill meshfill)
+FOREACH(ori horizontal vertical)
+  FOREACH(ext1 y n)
+    FOREACH(ext2 y n)
+      cdat_add_test(test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}
+        "${PYTHON_EXECUTABLE}"
+        ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_legend.py
+        --gm_type=${gm}
+        --orientation=${ori}
+        --ext1=${ext1}
+        --ext2=${ext2}
+        "--source=${BASELINE_DIR}/test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}.png"
+        )
+    ENDFOREACH(ext2)
+  ENDFOREACH(ext1)
+ENDFOREACH(ori)
+ENDFOREACH(gm)
+FOREACH(gm boxfill isofill isoline vector meshfill yxvsx xvsy xyvsy 1d scatter)
+cdat_add_test(test_vcs_basic_${gm}_transparent
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --transparent
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_transparent.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_zero
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --zero
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_zero.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_masked
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --mask
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked.png"
+  )
+ENDFOREACH(gm)
+cdat_add_test(test_vcs_show
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_show.py
+)
+FOREACH(gm boxfill isofill isoline meshfill )
+FOREACH(ptype 0 -3 aeqd)
+cdat_add_test(test_vcs_basic_${gm}_masked_${ptype}_proj
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --mask
+  --projection=${ptype}
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked_${ptype}_proj.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --projection=${ptype}
+  --lat1=-90
+  --lat2=0
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --projection=${ptype}
+  --lat1=-90
+  --lat2=0
+  --lon1=-180
+  --lon2=180
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --projection=${ptype}
+  --lat1=-90
+  --lat2=0
+  --lon1=0
+  --lon2=360
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --projection=${ptype}
+  --lat1=90
+  --lat2=0
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_gm
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --projection=${ptype}
+  --lat1=-90
+  --lat2=0
+  --range_via_gm
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_via_gm.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_gm
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --projection=${ptype}
+  --lat1=-90
+  --lat2=0
+  --lon1=-180
+  --lon2=180
+  --range_via_gm
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_via_gm.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_gm
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --projection=${ptype}
+  --lat1=-90
+  --lat2=0
+  --lon1=0
+  --lon2=360
+  --range_via_gm
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_via_gm.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --projection=${ptype}
+  --lat1=90
+  --lat2=0
+  --range_via_gm
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH_via_gm.png"
+  )
+cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm_flip
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+  --gm_type=${gm}
+  --projection=${ptype}
+  --lat1=90
+  --lat2=0
+  --range_via_gm
+  --gm_flips_lat_range
+  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_gmflip_NH_via_gm.png"
+  )
+ENDFOREACH(ptype)
+ENDFOREACH(gm)
 
-  cdat_add_test(test_vcs_isoline_numpy
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_numpy.py
-    ${BASELINE_DIR}/test_vcs_isoline_numpy.png
-    )
-  # Rename baseline
-  cdat_add_test(test_vcs_meshfill_draw_mesh
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_draw_mesh.py
-    ${BASELINE_DIR}/test_vcs_meshfill_draw_mesh.png
-    )
-  # @dlonie is looking into why RH6 fails for these
-  # Rename baselines
-  if (NOT EXISTS /etc/redhat-release)
-    cdat_add_test(test_vcs_isoline_labels_multi_label_input_types
-      "${PYTHON_EXECUTABLE}"
-      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py"
-      "${BASELINE_DIR}/test_vcs_isoline_labels_multi_label_input_types.png"
-      )
-    cdat_add_test(test_vcs_isoline_labels
-      "${PYTHON_EXECUTABLE}"
-      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels.py"
-      "${BASELINE_DIR}/test_vcs_isoline_labels.png"
-      )
-    cdat_add_test(test_vcs_isoline_labelskipdistance
-      "${PYTHON_EXECUTABLE}"
-      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labelskipdistance.py"
-      "${BASELINE_DIR}/test_vcs_isoline_labelskipdistance.png"
-      )
-    cdat_add_test(test_vcs_isofill_isoline_labels
-      "${PYTHON_EXECUTABLE}"
-      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_isoline_labels.py"
-      "${BASELINE_DIR}/test_vcs_isofill_isoline_labels.png"
-      )
-    # Rename baseline
-    cdat_add_test(test_vcs_isoline_width_stipple
-      "${PYTHON_EXECUTABLE}"
-      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_width_stipple.py"
-      "${BASELINE_DIR}/test_vcs_isoline_width_stipple.png"
-      )
-    cdat_add_test(test_vcs_isoline_labels_background
-      "${PYTHON_EXECUTABLE}"
-      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_background.py"
-      "${BASELINE_DIR}/test_vcs_isoline_labels_background.png"
-      )
-  endif()
-  cdat_add_test(test_vcs_oned_level_axis
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_oned_level_axis.py
-    "${BASELINE_DIR}/test_vcs_oned_level_axis.png"
-    )
-  cdat_add_test(test_vcs_first_png_blank
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_first_png_blank.py
-    "${BASELINE_DIR}/first_png_blank.png"
-    )
-  #    cdat_add_test(test_vcs_aspect_ratio
-  #  "${PYTHON_EXECUTABLE}"
-  #  ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py
-  #  ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py
-  #  )
-  cdat_add_test(test_vcs_polar_set_opt_param_polar
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_polar_set_opt_param_polar.py
-    "${BASELINE_DIR}/test_vcs_polar_set_opt_param_polar.png"
-    )
-  cdat_add_test(test_vcs_boxfill_lev1_lev2
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2.py
-    "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2.png"
-    )
-  cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1
+cdat_add_test(test_vcs_isoline_numpy
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_numpy.py
+${BASELINE_DIR}/test_vcs_isoline_numpy.png
+)
+# Rename baseline
+cdat_add_test(test_vcs_meshfill_draw_mesh
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_draw_mesh.py
+${BASELINE_DIR}/test_vcs_meshfill_draw_mesh.png
+)
+# @dlonie is looking into why RH6 fails for these
+# Rename baselines
+if (NOT EXISTS /etc/redhat-release)
+cdat_add_test(test_vcs_isoline_labels_multi_label_input_types
+  "${PYTHON_EXECUTABLE}"
+  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py"
+  "${BASELINE_DIR}/test_vcs_isoline_labels_multi_label_input_types.png"
+  )
+cdat_add_test(test_vcs_isoline_labels
+  "${PYTHON_EXECUTABLE}"
+  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels.py"
+  "${BASELINE_DIR}/test_vcs_isoline_labels.png"
+  )
+cdat_add_test(test_vcs_isoline_labelskipdistance
+  "${PYTHON_EXECUTABLE}"
+  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labelskipdistance.py"
+  "${BASELINE_DIR}/test_vcs_isoline_labelskipdistance.png"
+  )
+cdat_add_test(test_vcs_isofill_isoline_labels
+  "${PYTHON_EXECUTABLE}"
+  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_isoline_labels.py"
+  "${BASELINE_DIR}/test_vcs_isofill_isoline_labels.png"
+  )
+# Rename baseline
+cdat_add_test(test_vcs_isoline_width_stipple
+  "${PYTHON_EXECUTABLE}"
+  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_width_stipple.py"
+  "${BASELINE_DIR}/test_vcs_isoline_width_stipple.png"
+  )
+cdat_add_test(test_vcs_isoline_labels_background
+  "${PYTHON_EXECUTABLE}"
+  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_background.py"
+  "${BASELINE_DIR}/test_vcs_isoline_labels_background.png"
+  )
+endif()
+cdat_add_test(test_vcs_oned_level_axis
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_oned_level_axis.py
+"${BASELINE_DIR}/test_vcs_oned_level_axis.png"
+)
+cdat_add_test(test_vcs_first_png_blank
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_first_png_blank.py
+"${BASELINE_DIR}/first_png_blank.png"
+)
+#    cdat_add_test(test_vcs_aspect_ratio
+#  "${PYTHON_EXECUTABLE}"
+#  ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py
+#  ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py
+#  )
+cdat_add_test(test_vcs_polar_set_opt_param_polar
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_polar_set_opt_param_polar.py
+"${BASELINE_DIR}/test_vcs_polar_set_opt_param_polar.png"
+)
+cdat_add_test(test_vcs_boxfill_lev1_lev2
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2.py
+"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2.png"
+)
+cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
+"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1.png"
+)
+cdat_add_test(test_vcs_boxfill_lev1_lev2_ext2
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
+"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext2.png"
+)
+cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1_ext2
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
+"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1_ext2.png"
+)
+
+cdat_add_test(test_vcs_hatches_patterns
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_hatches_patterns.py
+"${BASELINE_DIR}/test_vcs_hatches_patterns.png"
+)
+FOREACH(gm isofill boxfill meshfill)
+FOREACH(style solid pattern hatch)
+  cdat_add_test(test_vcs_${gm}_${style}_fill_non-contig
     "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
-    "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1.png"
+    "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
+    --gm_type=${gm}
+    --fill_style=${style}
+    --non-contiguous
+    "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180_non-contig.png"
+    "--threshold=45"
     )
-  cdat_add_test(test_vcs_boxfill_lev1_lev2_ext2
+  cdat_add_test(test_vcs_${gm}_${style}_fill
     "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
-    "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext2.png"
+    "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
+    --gm_type=${gm}
+    --fill_style=${style}
+    "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180.png"
+    "--threshold=45"
     )
-  cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1_ext2
+  cdat_add_test(test_vcs_${gm}_${style}_fill_0_360
     "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
-    "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1_ext2.png"
+    "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
+    --gm_type=${gm}
+    --fill_style=${style}
+    --lon1=0
+    --lon2=360
+    "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_0_360.png"
+    "--threshold=45"
     )
+ENDFOREACH(style)
+ENDFOREACH(gm)
 
-  cdat_add_test(test_vcs_hatches_patterns
+FOREACH(gm isofill meshfill boxfill)
+FOREACH(proj robinson)
+  cdat_add_test(test_vcs_animate_projected_${gm}_${proj}
     "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_hatches_patterns.py
-    "${BASELINE_DIR}/test_vcs_hatches_patterns.png"
+    "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_animate_projected_plots.py"
+    --gm_type=${gm}
+    --projection_type=${proj}
+    --source=${BASELINE_DIR}/test_vcs_animate_projected_${gm}_${proj}.png
+    --threshold=40
     )
-  FOREACH(gm isofill boxfill meshfill)
-    FOREACH(style solid pattern hatch)
-      cdat_add_test(test_vcs_${gm}_${style}_fill_non-contig
-        "${PYTHON_EXECUTABLE}"
-        "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
-        --gm_type=${gm}
-        --fill_style=${style}
-        --non-contiguous
-        "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180_non-contig.png"
-        "--threshold=45"
-        )
-      cdat_add_test(test_vcs_${gm}_${style}_fill
-        "${PYTHON_EXECUTABLE}"
-        "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
-        --gm_type=${gm}
-        --fill_style=${style}
-        "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180.png"
-        "--threshold=45"
-        )
-      cdat_add_test(test_vcs_${gm}_${style}_fill_0_360
-        "${PYTHON_EXECUTABLE}"
-        "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
-        --gm_type=${gm}
-        --fill_style=${style}
-        --lon1=0
-        --lon2=360
-        "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_0_360.png"
-        "--threshold=45"
-        )
-    ENDFOREACH(style)
-  ENDFOREACH(gm)
+ENDFOREACH(proj)
+ENDFOREACH(gm)
 
-  FOREACH(gm isofill meshfill boxfill)
-    FOREACH(proj robinson)
-      cdat_add_test(test_vcs_animate_projected_${gm}_${proj}
-        "${PYTHON_EXECUTABLE}"
-        "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_animate_projected_plots.py"
-        --gm_type=${gm}
-        --projection_type=${proj}
-        --source=${BASELINE_DIR}/test_vcs_animate_projected_${gm}_${proj}.png
-        --threshold=40
-        )
-    ENDFOREACH(proj)
-  ENDFOREACH(gm)
-
-  FOREACH(flip None X XY Y)
-    cdat_add_test(test_vcs_flip${flip}
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_flip${flip}.py
-      "${BASELINE_DIR}/test_vcs_flip${flip}.png"
-      )
-  ENDFOREACH(flip)
+FOREACH(flip None X XY Y)
+cdat_add_test(test_vcs_flip${flip}
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_flip${flip}.py
+  "${BASELINE_DIR}/test_vcs_flip${flip}.png"
+  )
+ENDFOREACH(flip)
 
-  cdat_add_test(test_vcs_lambert
-     "${PYTHON_EXECUTABLE}"
-     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lambert.py
-     "${BASELINE_DIR}/test_vcs_lambert.png"
-    )
-  # Rename baseline
-  cdat_add_test(test_vcs_boxfill_lev1_lev2_ta_missing
-     "${PYTHON_EXECUTABLE}"
-     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
-     "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ta_missing.png"
-    )
+cdat_add_test(test_vcs_lambert
+ "${PYTHON_EXECUTABLE}"
+ ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lambert.py
+ "${BASELINE_DIR}/test_vcs_lambert.png"
+)
+# Rename baseline
+cdat_add_test(test_vcs_boxfill_lev1_lev2_ta_missing
+ "${PYTHON_EXECUTABLE}"
+ ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
+ "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ta_missing.png"
+)
 
-  cdat_add_test(test_vcs_close
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_close.py
-    "${BASELINE_DIR}/test_vcs_close.png"
-    )
+cdat_add_test(test_vcs_close
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_close.py
+"${BASELINE_DIR}/test_vcs_close.png"
+)
 
-  cdat_add_test(test_vcs_basic_isofill_bigvalues
-    "${PYTHON_EXECUTABLE}"
-    "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py"
-    --gm_type=isofill
-    --bigvalues
-    "--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png"
-    )
-  cdat_add_test(test_vcs_issue_960_labels
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py
-    ${BASELINE_DIR}/test_vcs_issue_960_labels_1.png
-    ${BASELINE_DIR}/test_vcs_issue_960_labels_2.png
-    )
-  cdat_add_test(test_vcs_animate_meshfill
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_meshfill.py
-    ${BASELINE_DIR}
-    )
-  cdat_add_test(test_vcs_animate_isofill
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isofill.py
-    ${BASELINE_DIR}
-    )
-  cdat_add_test(test_vcs_animate_boxfill
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_boxfill.py
-    ${BASELINE_DIR}
-    )
-  cdat_add_test(test_vcs_animate_isoline
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline.py
-    ${BASELINE_DIR}
-    )
-  cdat_add_test(test_vcs_animate_isoline_colored
-    "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_colored.py
-    ${BASELINE_DIR}
-    )
-  if ( (NOT EXISTS /etc/redhat-release) AND (NOT CDAT_BUILD_OFFSCREEN))
-    cdat_add_test(test_vcs_animate_isoline_text_labels
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels.py
-      ${BASELINE_DIR}
-      )
-    cdat_add_test(test_vcs_animate_isoline_text_labels_colored
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
-      ${BASELINE_DIR}
-      )
-    cdat_add_test(test_vcs_patterns
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_patterns.py
-      "${BASELINE_DIR}/test_vcs_patterns.png"
-      )
-    cdat_add_test(test_vcs_vectors_robinson
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson.py
-      "${BASELINE_DIR}/test_vcs_vectors_robinson.png"
-      )
-    cdat_add_test(test_vcs_vectors_robinson_wrap
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py
-      "${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png"
-      )
-    cdat_add_test(test_vcs_vectors_scale_options
-      "${PYTHON_EXECUTABLE}"
-      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py
-      "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png"
-      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png"
-      "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png"
-      "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png"
-      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png"
-      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png"
-      )
-  endif()
+cdat_add_test(test_vcs_basic_isofill_bigvalues
+"${PYTHON_EXECUTABLE}"
+"${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py"
+--gm_type=isofill
+--bigvalues
+"--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png"
+)
+cdat_add_test(test_vcs_issue_960_labels
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py
+${BASELINE_DIR}/test_vcs_issue_960_labels_1.png
+${BASELINE_DIR}/test_vcs_issue_960_labels_2.png
+)
+cdat_add_test(test_vcs_animate_meshfill
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_meshfill.py
+${BASELINE_DIR}
+)
+cdat_add_test(test_vcs_animate_isofill
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isofill.py
+${BASELINE_DIR}
+)
+cdat_add_test(test_vcs_animate_boxfill
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_boxfill.py
+${BASELINE_DIR}
+)
+cdat_add_test(test_vcs_animate_isoline
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline.py
+${BASELINE_DIR}
+)
+cdat_add_test(test_vcs_animate_isoline_colored
+"${PYTHON_EXECUTABLE}"
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_colored.py
+${BASELINE_DIR}
+)
+if ( (NOT EXISTS /etc/redhat-release) AND (NOT CDAT_BUILD_OFFSCREEN))
+cdat_add_test(test_vcs_animate_isoline_text_labels
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels.py
+  ${BASELINE_DIR}
+  )
+cdat_add_test(test_vcs_animate_isoline_text_labels_colored
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
+  ${BASELINE_DIR}
+  )
+cdat_add_test(test_vcs_patterns
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_patterns.py
+  "${BASELINE_DIR}/test_vcs_patterns.png"
+  )
+cdat_add_test(test_vcs_vectors_robinson
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson.py
+  "${BASELINE_DIR}/test_vcs_vectors_robinson.png"
+  )
+cdat_add_test(test_vcs_vectors_robinson_wrap
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py
+  "${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png"
+  )
+cdat_add_test(test_vcs_vectors_scale_options
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py
+  "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png"
+  "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png"
+  "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png"
+  "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png"
+  "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png"
+  "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png"
+  )
 endif()
 
 cdat_add_test(test_vcs_endconfigure
-- 
GitLab


From d7cc903cfca55568620a7006196ca3ec8ba9dfd7 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 09:13:19 -0700
Subject: [PATCH 140/196] do not remove test for SAMPLE_DATA; removing it makes
 it impossible to merge master back in

---
 testing/vcs/CMakeLists.txt | 1007 ++++++++++++++++++------------------
 1 file changed, 505 insertions(+), 502 deletions(-)

diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index ba48af6b3..12c191412 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -1,5 +1,6 @@
 set(BASELINE_DIR "${UVCDAT_GIT_TESTDATA_DIR}/baselines/vcs")
 set(PYTHON_EXECUTABLE python)
+set(CDAT_DOWNLOAD_SAMPLE_DATA ON)
 
 cdat_add_test(flake8_vcs
   flake8 "${cdat_SOURCE_DIR}/Packages/vcs/vcs/"
@@ -407,529 +408,531 @@ cdat_add_test(test_vcs_geometry
 # These tests perform plotting and need sample data
 #
 ##############################################################################
-FOREACH(gm boxfill isofill meshfill isoline vector)
-FOREACH(src vcs canvas gm)
-  cdat_add_test(test_vcs_colormaps_source_${gm}_${src}
+if (CDAT_DOWNLOAD_SAMPLE_DATA)
+  FOREACH(gm boxfill isofill meshfill isoline vector)
+    FOREACH(src vcs canvas gm)
+      cdat_add_test(test_vcs_colormaps_source_${gm}_${src}
+        "${PYTHON_EXECUTABLE}"
+        ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_colormaps_source.py
+        -b ${BASELINE_DIR}/test_vcs_colormaps_source_${gm}_${src}.png
+        -g ${gm}
+        -s ${src}
+        )
+    ENDFOREACH()
+  ENDFOREACH()
+
+  # NOTE Fix baseline name
+  cdat_add_test(test_vcs_meshfill_regular_grid
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_regular_grid.py
+    "${BASELINE_DIR}/test_vcs_meshfill_regular_grid.png"
+  )
+  # NOTE Fix baseline name
+  cdat_add_test(test_vcs_plot_unstructured_via_boxfill
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
+    "${BASELINE_DIR}/test_vcs_plot_unstructured_via_boxfill.png"
+  )
+  # NOTE Fix baseline name
+  cdat_add_test(test_vcs_box_custom_as_def_vistrails_exts
     "${PYTHON_EXECUTABLE}"
-    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_colormaps_source.py
-    -b ${BASELINE_DIR}/test_vcs_colormaps_source_${gm}_${src}.png
-    -g ${gm}
-    -s ${src}
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
+    "${BASELINE_DIR}/test_vcs_box_custom_as_def_vistrails_exts.png"
+    )
+  # NOTE Fix baseline name
+  cdat_add_test(test_vcs_boxfill_custom
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py
+    "${BASELINE_DIR}/test_vcs_boxfill_custom.png"
+    )
+  cdat_add_test(test_vcs_boxfill_custom_non_default_levels
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
+    "${BASELINE_DIR}/test_vcs_boxfill_custom_non_default_levels.png"
+    )
+  cdat_add_test(test_vcs_boxfill_custom_ext1
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1.py
+    "${BASELINE_DIR}/test_vcs_boxfill_custom_ext1.png"
+    )
+  cdat_add_test(test_vcs_boxfill_custom_ext2
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext2.py
+    "${BASELINE_DIR}/test_vcs_boxfill_custom_ext2.png"
+    )
+  cdat_add_test(test_vcs_boxfill_custom_ext1_ext2
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
+    "${BASELINE_DIR}/test_vcs_boxfill_custom_ext1_ext2.png"
+    )
+  cdat_add_test(test_vcs_boxfill_number_color_less_than_number_levels
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_less_than_number_levels.py
+    )
+  cdat_add_test(test_vcs_boxfill_number_color_more_than_number_levels
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_more_than_number_levels.py
+    )
+  cdat_add_test(test_vcs_user_passed_date
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date.py
+    "${BASELINE_DIR}/test_vcs_user_passed_date.png"
+    )
+  cdat_add_test(test_vcs_user_passed_date_as_string
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date_as_string.py
+    "${BASELINE_DIR}/test_vcs_user_passed_date_as_string.png"
+    )
+  cdat_add_test(test_vcs_auto_time_labels
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_auto_time_labels.py
+    "${BASELINE_DIR}/test_vcs_auto_time_labels.png"
+    )
+  cdat_add_test(test_vcs_isofill_data_read_north_to_south
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
+    "${BASELINE_DIR}/test_vcs_isofill_data_read_north_to_south.png"
+    )
+  # Rename baseline
+  cdat_add_test(test_vcs_lon_axes_freak_out
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lon_axes_freak_out.py
+    "${BASELINE_DIR}/test_vcs_lon_axes_freak_out.png"
     )
-ENDFOREACH()
-ENDFOREACH()
-
-# NOTE Fix baseline name
-cdat_add_test(test_vcs_meshfill_regular_grid
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_regular_grid.py
-"${BASELINE_DIR}/test_vcs_meshfill_regular_grid.png"
-)
-# NOTE Fix baseline name
-cdat_add_test(test_vcs_plot_unstructured_via_boxfill
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
-"${BASELINE_DIR}/test_vcs_plot_unstructured_via_boxfill.png"
-)
-# NOTE Fix baseline name
-cdat_add_test(test_vcs_box_custom_as_def_vistrails_exts
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
-"${BASELINE_DIR}/test_vcs_box_custom_as_def_vistrails_exts.png"
-)
-# NOTE Fix baseline name
-cdat_add_test(test_vcs_boxfill_custom
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py
-"${BASELINE_DIR}/test_vcs_boxfill_custom.png"
-)
-cdat_add_test(test_vcs_boxfill_custom_non_default_levels
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
-"${BASELINE_DIR}/test_vcs_boxfill_custom_non_default_levels.png"
-)
-cdat_add_test(test_vcs_boxfill_custom_ext1
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1.py
-"${BASELINE_DIR}/test_vcs_boxfill_custom_ext1.png"
-)
-cdat_add_test(test_vcs_boxfill_custom_ext2
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext2.py
-"${BASELINE_DIR}/test_vcs_boxfill_custom_ext2.png"
-)
-cdat_add_test(test_vcs_boxfill_custom_ext1_ext2
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
-"${BASELINE_DIR}/test_vcs_boxfill_custom_ext1_ext2.png"
-)
-cdat_add_test(test_vcs_boxfill_number_color_less_than_number_levels
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_less_than_number_levels.py
-)
-cdat_add_test(test_vcs_boxfill_number_color_more_than_number_levels
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_more_than_number_levels.py
-)
-cdat_add_test(test_vcs_user_passed_date
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date.py
-"${BASELINE_DIR}/test_vcs_user_passed_date.png"
-)
-cdat_add_test(test_vcs_user_passed_date_as_string
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date_as_string.py
-"${BASELINE_DIR}/test_vcs_user_passed_date_as_string.png"
-)
-cdat_add_test(test_vcs_auto_time_labels
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_auto_time_labels.py
-"${BASELINE_DIR}/test_vcs_auto_time_labels.png"
-)
-cdat_add_test(test_vcs_isofill_data_read_north_to_south
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
-"${BASELINE_DIR}/test_vcs_isofill_data_read_north_to_south.png"
-)
-# Rename baseline
-cdat_add_test(test_vcs_lon_axes_freak_out
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lon_axes_freak_out.py
-"${BASELINE_DIR}/test_vcs_lon_axes_freak_out.png"
-)
 cdat_add_test(test_vcs_set_colors_name_rgba_1d
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
-"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_1d.png"
-)
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
+    "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_1d.png"
+    )
 cdat_add_test(test_vcs_set_colors_name_rgba_isoline
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
-"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isoline.png"
-)
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
+    "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isoline.png"
+    )
 cdat_add_test(test_vcs_settings_color_name_rgba_meshfill
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
-"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_meshfill.png"
-)
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
+    "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_meshfill.png"
+    )
 cdat_add_test(test_vcs_settings_color_name_rgba_boxfill
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
-"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_boxfill.png"
-)
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
+    "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_boxfill.png"
+    )
 cdat_add_test(test_vcs_settings_color_name_rgba
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba.py
-"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isofill.png"
-)
-cdat_add_test(test_vcs_isofill_mask_cell_shift
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_mask_cell_shift.py
-"${BASELINE_DIR}/test_vcs_isofill_mask_cell_shift.png"
-)
-cdat_add_test(test_vcs_bad_time_units
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_bad_time_units.py
-)
-cdat_add_test(test_vcs_plot_file_varible
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_file_var.py
-)
-FOREACH(gm boxfill isofill meshfill)
-FOREACH(ori horizontal vertical)
-  FOREACH(ext1 y n)
-    FOREACH(ext2 y n)
-      cdat_add_test(test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}
-        "${PYTHON_EXECUTABLE}"
-        ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_legend.py
-        --gm_type=${gm}
-        --orientation=${ori}
-        --ext1=${ext1}
-        --ext2=${ext2}
-        "--source=${BASELINE_DIR}/test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}.png"
-        )
-    ENDFOREACH(ext2)
-  ENDFOREACH(ext1)
-ENDFOREACH(ori)
-ENDFOREACH(gm)
-FOREACH(gm boxfill isofill isoline vector meshfill yxvsx xvsy xyvsy 1d scatter)
-cdat_add_test(test_vcs_basic_${gm}_transparent
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --transparent
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_transparent.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_zero
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --zero
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_zero.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_masked
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --mask
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked.png"
-  )
-ENDFOREACH(gm)
-cdat_add_test(test_vcs_show
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_show.py
-)
-FOREACH(gm boxfill isofill isoline meshfill )
-FOREACH(ptype 0 -3 aeqd)
-cdat_add_test(test_vcs_basic_${gm}_masked_${ptype}_proj
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --mask
-  --projection=${ptype}
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked_${ptype}_proj.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --projection=${ptype}
-  --lat1=-90
-  --lat2=0
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --projection=${ptype}
-  --lat1=-90
-  --lat2=0
-  --lon1=-180
-  --lon2=180
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --projection=${ptype}
-  --lat1=-90
-  --lat2=0
-  --lon1=0
-  --lon2=360
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --projection=${ptype}
-  --lat1=90
-  --lat2=0
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_gm
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --projection=${ptype}
-  --lat1=-90
-  --lat2=0
-  --range_via_gm
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_via_gm.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_gm
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --projection=${ptype}
-  --lat1=-90
-  --lat2=0
-  --lon1=-180
-  --lon2=180
-  --range_via_gm
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_via_gm.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_gm
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --projection=${ptype}
-  --lat1=-90
-  --lat2=0
-  --lon1=0
-  --lon2=360
-  --range_via_gm
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_via_gm.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --projection=${ptype}
-  --lat1=90
-  --lat2=0
-  --range_via_gm
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH_via_gm.png"
-  )
-cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm_flip
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
-  --gm_type=${gm}
-  --projection=${ptype}
-  --lat1=90
-  --lat2=0
-  --range_via_gm
-  --gm_flips_lat_range
-  "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_gmflip_NH_via_gm.png"
-  )
-ENDFOREACH(ptype)
-ENDFOREACH(gm)
-
-cdat_add_test(test_vcs_isoline_numpy
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_numpy.py
-${BASELINE_DIR}/test_vcs_isoline_numpy.png
-)
-# Rename baseline
-cdat_add_test(test_vcs_meshfill_draw_mesh
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_draw_mesh.py
-${BASELINE_DIR}/test_vcs_meshfill_draw_mesh.png
-)
-# @dlonie is looking into why RH6 fails for these
-# Rename baselines
-if (NOT EXISTS /etc/redhat-release)
-cdat_add_test(test_vcs_isoline_labels_multi_label_input_types
-  "${PYTHON_EXECUTABLE}"
-  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py"
-  "${BASELINE_DIR}/test_vcs_isoline_labels_multi_label_input_types.png"
-  )
-cdat_add_test(test_vcs_isoline_labels
-  "${PYTHON_EXECUTABLE}"
-  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels.py"
-  "${BASELINE_DIR}/test_vcs_isoline_labels.png"
-  )
-cdat_add_test(test_vcs_isoline_labelskipdistance
-  "${PYTHON_EXECUTABLE}"
-  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labelskipdistance.py"
-  "${BASELINE_DIR}/test_vcs_isoline_labelskipdistance.png"
-  )
-cdat_add_test(test_vcs_isofill_isoline_labels
-  "${PYTHON_EXECUTABLE}"
-  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_isoline_labels.py"
-  "${BASELINE_DIR}/test_vcs_isofill_isoline_labels.png"
-  )
-# Rename baseline
-cdat_add_test(test_vcs_isoline_width_stipple
-  "${PYTHON_EXECUTABLE}"
-  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_width_stipple.py"
-  "${BASELINE_DIR}/test_vcs_isoline_width_stipple.png"
-  )
-cdat_add_test(test_vcs_isoline_labels_background
-  "${PYTHON_EXECUTABLE}"
-  "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_background.py"
-  "${BASELINE_DIR}/test_vcs_isoline_labels_background.png"
-  )
-endif()
-cdat_add_test(test_vcs_oned_level_axis
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_oned_level_axis.py
-"${BASELINE_DIR}/test_vcs_oned_level_axis.png"
-)
-cdat_add_test(test_vcs_first_png_blank
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_first_png_blank.py
-"${BASELINE_DIR}/first_png_blank.png"
-)
-#    cdat_add_test(test_vcs_aspect_ratio
-#  "${PYTHON_EXECUTABLE}"
-#  ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py
-#  ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py
-#  )
-cdat_add_test(test_vcs_polar_set_opt_param_polar
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_polar_set_opt_param_polar.py
-"${BASELINE_DIR}/test_vcs_polar_set_opt_param_polar.png"
-)
-cdat_add_test(test_vcs_boxfill_lev1_lev2
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2.py
-"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2.png"
-)
-cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
-"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1.png"
-)
-cdat_add_test(test_vcs_boxfill_lev1_lev2_ext2
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
-"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext2.png"
-)
-cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1_ext2
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
-"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1_ext2.png"
-)
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba.py
+    "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isofill.png"
+    )
+  cdat_add_test(test_vcs_isofill_mask_cell_shift
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_mask_cell_shift.py
+    "${BASELINE_DIR}/test_vcs_isofill_mask_cell_shift.png"
+    )
+  cdat_add_test(test_vcs_bad_time_units
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_bad_time_units.py
+    )
+  cdat_add_test(test_vcs_plot_file_varible
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_file_var.py
+    )
+  FOREACH(gm boxfill isofill meshfill)
+    FOREACH(ori horizontal vertical)
+      FOREACH(ext1 y n)
+        FOREACH(ext2 y n)
+          cdat_add_test(test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}
+            "${PYTHON_EXECUTABLE}"
+            ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_legend.py
+            --gm_type=${gm}
+            --orientation=${ori}
+            --ext1=${ext1}
+            --ext2=${ext2}
+            "--source=${BASELINE_DIR}/test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}.png"
+            )
+        ENDFOREACH(ext2)
+      ENDFOREACH(ext1)
+    ENDFOREACH(ori)
+  ENDFOREACH(gm)
+  FOREACH(gm boxfill isofill isoline vector meshfill yxvsx xvsy xyvsy 1d scatter)
+    cdat_add_test(test_vcs_basic_${gm}_transparent
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --transparent
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_transparent.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_zero
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --zero
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_zero.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_masked
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --mask
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked.png"
+      )
+  ENDFOREACH(gm)
+  cdat_add_test(test_vcs_show
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_show.py
+    )
+  FOREACH(gm boxfill isofill isoline meshfill )
+   FOREACH(ptype 0 -3 aeqd)
+    cdat_add_test(test_vcs_basic_${gm}_masked_${ptype}_proj
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --mask
+      --projection=${ptype}
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked_${ptype}_proj.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --projection=${ptype}
+      --lat1=-90
+      --lat2=0
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --projection=${ptype}
+      --lat1=-90
+      --lat2=0
+      --lon1=-180
+      --lon2=180
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --projection=${ptype}
+      --lat1=-90
+      --lat2=0
+      --lon1=0
+      --lon2=360
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --projection=${ptype}
+      --lat1=90
+      --lat2=0
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_gm
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --projection=${ptype}
+      --lat1=-90
+      --lat2=0
+      --range_via_gm
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_via_gm.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_gm
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --projection=${ptype}
+      --lat1=-90
+      --lat2=0
+      --lon1=-180
+      --lon2=180
+      --range_via_gm
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_via_gm.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_gm
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --projection=${ptype}
+      --lat1=-90
+      --lat2=0
+      --lon1=0
+      --lon2=360
+      --range_via_gm
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_via_gm.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --projection=${ptype}
+      --lat1=90
+      --lat2=0
+      --range_via_gm
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH_via_gm.png"
+      )
+    cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm_flip
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py
+      --gm_type=${gm}
+      --projection=${ptype}
+      --lat1=90
+      --lat2=0
+      --range_via_gm
+      --gm_flips_lat_range
+      "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_gmflip_NH_via_gm.png"
+      )
+   ENDFOREACH(ptype)
+  ENDFOREACH(gm)
 
-cdat_add_test(test_vcs_hatches_patterns
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_hatches_patterns.py
-"${BASELINE_DIR}/test_vcs_hatches_patterns.png"
-)
-FOREACH(gm isofill boxfill meshfill)
-FOREACH(style solid pattern hatch)
-  cdat_add_test(test_vcs_${gm}_${style}_fill_non-contig
+  cdat_add_test(test_vcs_isoline_numpy
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_numpy.py
+    ${BASELINE_DIR}/test_vcs_isoline_numpy.png
+    )
+  # Rename baseline
+  cdat_add_test(test_vcs_meshfill_draw_mesh
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_draw_mesh.py
+    ${BASELINE_DIR}/test_vcs_meshfill_draw_mesh.png
+    )
+  # @dlonie is looking into why RH6 fails for these
+  # Rename baselines
+  if (NOT EXISTS /etc/redhat-release)
+    cdat_add_test(test_vcs_isoline_labels_multi_label_input_types
+      "${PYTHON_EXECUTABLE}"
+      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py"
+      "${BASELINE_DIR}/test_vcs_isoline_labels_multi_label_input_types.png"
+      )
+    cdat_add_test(test_vcs_isoline_labels
+      "${PYTHON_EXECUTABLE}"
+      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels.py"
+      "${BASELINE_DIR}/test_vcs_isoline_labels.png"
+      )
+    cdat_add_test(test_vcs_isoline_labelskipdistance
+      "${PYTHON_EXECUTABLE}"
+      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labelskipdistance.py"
+      "${BASELINE_DIR}/test_vcs_isoline_labelskipdistance.png"
+      )
+    cdat_add_test(test_vcs_isofill_isoline_labels
+      "${PYTHON_EXECUTABLE}"
+      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_isoline_labels.py"
+      "${BASELINE_DIR}/test_vcs_isofill_isoline_labels.png"
+      )
+    # Rename baseline
+    cdat_add_test(test_vcs_isoline_width_stipple
+      "${PYTHON_EXECUTABLE}"
+      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_width_stipple.py"
+      "${BASELINE_DIR}/test_vcs_isoline_width_stipple.png"
+      )
+    cdat_add_test(test_vcs_isoline_labels_background
+      "${PYTHON_EXECUTABLE}"
+      "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_background.py"
+      "${BASELINE_DIR}/test_vcs_isoline_labels_background.png"
+      )
+  endif()
+  cdat_add_test(test_vcs_oned_level_axis
     "${PYTHON_EXECUTABLE}"
-    "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
-    --gm_type=${gm}
-    --fill_style=${style}
-    --non-contiguous
-    "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180_non-contig.png"
-    "--threshold=45"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_oned_level_axis.py
+    "${BASELINE_DIR}/test_vcs_oned_level_axis.png"
     )
-  cdat_add_test(test_vcs_${gm}_${style}_fill
+  cdat_add_test(test_vcs_first_png_blank
     "${PYTHON_EXECUTABLE}"
-    "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
-    --gm_type=${gm}
-    --fill_style=${style}
-    "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180.png"
-    "--threshold=45"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_first_png_blank.py
+    "${BASELINE_DIR}/first_png_blank.png"
     )
-  cdat_add_test(test_vcs_${gm}_${style}_fill_0_360
+  #    cdat_add_test(test_vcs_aspect_ratio
+  #  "${PYTHON_EXECUTABLE}"
+  #  ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py
+  #  ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py
+  #  )
+  cdat_add_test(test_vcs_polar_set_opt_param_polar
     "${PYTHON_EXECUTABLE}"
-    "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
-    --gm_type=${gm}
-    --fill_style=${style}
-    --lon1=0
-    --lon2=360
-    "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_0_360.png"
-    "--threshold=45"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_polar_set_opt_param_polar.py
+    "${BASELINE_DIR}/test_vcs_polar_set_opt_param_polar.png"
+    )
+  cdat_add_test(test_vcs_boxfill_lev1_lev2
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2.py
+    "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2.png"
+    )
+  cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
+    "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1.png"
+    )
+  cdat_add_test(test_vcs_boxfill_lev1_lev2_ext2
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
+    "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext2.png"
+    )
+  cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1_ext2
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
+    "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1_ext2.png"
     )
-ENDFOREACH(style)
-ENDFOREACH(gm)
 
-FOREACH(gm isofill meshfill boxfill)
-FOREACH(proj robinson)
-  cdat_add_test(test_vcs_animate_projected_${gm}_${proj}
+  cdat_add_test(test_vcs_hatches_patterns
     "${PYTHON_EXECUTABLE}"
-    "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_animate_projected_plots.py"
-    --gm_type=${gm}
-    --projection_type=${proj}
-    --source=${BASELINE_DIR}/test_vcs_animate_projected_${gm}_${proj}.png
-    --threshold=40
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_hatches_patterns.py
+    "${BASELINE_DIR}/test_vcs_hatches_patterns.png"
     )
-ENDFOREACH(proj)
-ENDFOREACH(gm)
+  FOREACH(gm isofill boxfill meshfill)
+    FOREACH(style solid pattern hatch)
+      cdat_add_test(test_vcs_${gm}_${style}_fill_non-contig
+        "${PYTHON_EXECUTABLE}"
+        "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
+        --gm_type=${gm}
+        --fill_style=${style}
+        --non-contiguous
+        "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180_non-contig.png"
+        "--threshold=45"
+        )
+      cdat_add_test(test_vcs_${gm}_${style}_fill
+        "${PYTHON_EXECUTABLE}"
+        "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
+        --gm_type=${gm}
+        --fill_style=${style}
+        "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180.png"
+        "--threshold=45"
+        )
+      cdat_add_test(test_vcs_${gm}_${style}_fill_0_360
+        "${PYTHON_EXECUTABLE}"
+        "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py"
+        --gm_type=${gm}
+        --fill_style=${style}
+        --lon1=0
+        --lon2=360
+        "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_0_360.png"
+        "--threshold=45"
+        )
+    ENDFOREACH(style)
+  ENDFOREACH(gm)
 
-FOREACH(flip None X XY Y)
-cdat_add_test(test_vcs_flip${flip}
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_flip${flip}.py
-  "${BASELINE_DIR}/test_vcs_flip${flip}.png"
-  )
-ENDFOREACH(flip)
+  FOREACH(gm isofill meshfill boxfill)
+    FOREACH(proj robinson)
+      cdat_add_test(test_vcs_animate_projected_${gm}_${proj}
+        "${PYTHON_EXECUTABLE}"
+        "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_animate_projected_plots.py"
+        --gm_type=${gm}
+        --projection_type=${proj}
+        --source=${BASELINE_DIR}/test_vcs_animate_projected_${gm}_${proj}.png
+        --threshold=40
+        )
+    ENDFOREACH(proj)
+  ENDFOREACH(gm)
 
-cdat_add_test(test_vcs_lambert
- "${PYTHON_EXECUTABLE}"
- ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lambert.py
- "${BASELINE_DIR}/test_vcs_lambert.png"
-)
-# Rename baseline
-cdat_add_test(test_vcs_boxfill_lev1_lev2_ta_missing
- "${PYTHON_EXECUTABLE}"
- ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
- "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ta_missing.png"
-)
+  FOREACH(flip None X XY Y)
+    cdat_add_test(test_vcs_flip${flip}
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_flip${flip}.py
+      "${BASELINE_DIR}/test_vcs_flip${flip}.png"
+      )
+  ENDFOREACH(flip)
 
-cdat_add_test(test_vcs_close
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_close.py
-"${BASELINE_DIR}/test_vcs_close.png"
-)
+  cdat_add_test(test_vcs_lambert
+     "${PYTHON_EXECUTABLE}"
+     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lambert.py
+     "${BASELINE_DIR}/test_vcs_lambert.png"
+    )
+  # Rename baseline
+  cdat_add_test(test_vcs_boxfill_lev1_lev2_ta_missing
+     "${PYTHON_EXECUTABLE}"
+     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
+     "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ta_missing.png"
+    )
 
-cdat_add_test(test_vcs_basic_isofill_bigvalues
-"${PYTHON_EXECUTABLE}"
-"${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py"
---gm_type=isofill
---bigvalues
-"--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png"
-)
-cdat_add_test(test_vcs_issue_960_labels
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py
-${BASELINE_DIR}/test_vcs_issue_960_labels_1.png
-${BASELINE_DIR}/test_vcs_issue_960_labels_2.png
-)
-cdat_add_test(test_vcs_animate_meshfill
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_meshfill.py
-${BASELINE_DIR}
-)
-cdat_add_test(test_vcs_animate_isofill
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isofill.py
-${BASELINE_DIR}
-)
-cdat_add_test(test_vcs_animate_boxfill
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_boxfill.py
-${BASELINE_DIR}
-)
-cdat_add_test(test_vcs_animate_isoline
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline.py
-${BASELINE_DIR}
-)
-cdat_add_test(test_vcs_animate_isoline_colored
-"${PYTHON_EXECUTABLE}"
-${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_colored.py
-${BASELINE_DIR}
-)
-if ( (NOT EXISTS /etc/redhat-release) AND (NOT CDAT_BUILD_OFFSCREEN))
-cdat_add_test(test_vcs_animate_isoline_text_labels
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels.py
-  ${BASELINE_DIR}
-  )
-cdat_add_test(test_vcs_animate_isoline_text_labels_colored
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
-  ${BASELINE_DIR}
-  )
-cdat_add_test(test_vcs_patterns
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_patterns.py
-  "${BASELINE_DIR}/test_vcs_patterns.png"
-  )
-cdat_add_test(test_vcs_vectors_robinson
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson.py
-  "${BASELINE_DIR}/test_vcs_vectors_robinson.png"
-  )
-cdat_add_test(test_vcs_vectors_robinson_wrap
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py
-  "${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png"
-  )
-cdat_add_test(test_vcs_vectors_scale_options
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py
-  "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png"
-  "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png"
-  "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png"
-  "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png"
-  "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png"
-  "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png"
-  )
+  cdat_add_test(test_vcs_close
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_close.py
+    "${BASELINE_DIR}/test_vcs_close.png"
+    )
+
+  cdat_add_test(test_vcs_basic_isofill_bigvalues
+    "${PYTHON_EXECUTABLE}"
+    "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py"
+    --gm_type=isofill
+    --bigvalues
+    "--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png"
+    )
+  cdat_add_test(test_vcs_issue_960_labels
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py
+    ${BASELINE_DIR}/test_vcs_issue_960_labels_1.png
+    ${BASELINE_DIR}/test_vcs_issue_960_labels_2.png
+    )
+  cdat_add_test(test_vcs_animate_meshfill
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_meshfill.py
+    ${BASELINE_DIR}
+    )
+  cdat_add_test(test_vcs_animate_isofill
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isofill.py
+    ${BASELINE_DIR}
+    )
+  cdat_add_test(test_vcs_animate_boxfill
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_boxfill.py
+    ${BASELINE_DIR}
+    )
+  cdat_add_test(test_vcs_animate_isoline
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline.py
+    ${BASELINE_DIR}
+    )
+  cdat_add_test(test_vcs_animate_isoline_colored
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_colored.py
+    ${BASELINE_DIR}
+    )
+  if ( (NOT EXISTS /etc/redhat-release) AND (NOT CDAT_BUILD_OFFSCREEN))
+    cdat_add_test(test_vcs_animate_isoline_text_labels
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels.py
+      ${BASELINE_DIR}
+      )
+    cdat_add_test(test_vcs_animate_isoline_text_labels_colored
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
+      ${BASELINE_DIR}
+      )
+    cdat_add_test(test_vcs_patterns
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_patterns.py
+      "${BASELINE_DIR}/test_vcs_patterns.png"
+      )
+    cdat_add_test(test_vcs_vectors_robinson
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson.py
+      "${BASELINE_DIR}/test_vcs_vectors_robinson.png"
+      )
+    cdat_add_test(test_vcs_vectors_robinson_wrap
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py
+      "${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png"
+      )
+    cdat_add_test(test_vcs_vectors_scale_options
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png"
+      )
+  endif()
 endif()
 
 cdat_add_test(test_vcs_endconfigure
-- 
GitLab


From aeb94b7a6ac509d34e0fa36462c4164c9671d762 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 12:53:32 -0700
Subject: [PATCH 141/196] ok got the build system to download/check sample data
 only once

---
 CMake/cdat_modules_extra/runtest.in           |  2 --
 Packages/vcs/scripts/vcs_download_sample_data |  4 ++++
 Packages/vcs/setup.py                         |  1 +
 Packages/vcs/vcs/template.py                  |  5 ++++-
 Packages/vcs/vcs/utils.py                     |  2 +-
 testing/CMakeLists.txt                        | 15 ++++++++++++---
 testing/vcs/CMakeLists.txt                    |  2 --
 7 files changed, 22 insertions(+), 9 deletions(-)
 create mode 100755 Packages/vcs/scripts/vcs_download_sample_data

diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in
index 194632e5d..19769f740 100755
--- a/CMake/cdat_modules_extra/runtest.in
+++ b/CMake/cdat_modules_extra/runtest.in
@@ -2,7 +2,5 @@
 source activate @CONDA_ENVIRONMENT_NAME@
 export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"`
 echo "Python:" `which python`
-# make sure data is downloaded
-python -c "import vcs;vcs.download_sample_data_files()"
 echo "Running:"$*
 $*
diff --git a/Packages/vcs/scripts/vcs_download_sample_data b/Packages/vcs/scripts/vcs_download_sample_data
new file mode 100755
index 000000000..de3829e37
--- /dev/null
+++ b/Packages/vcs/scripts/vcs_download_sample_data
@@ -0,0 +1,4 @@
+#!/usr/bin/env python
+import vcs
+vcs.download_sample_data_files()
+
diff --git a/Packages/vcs/setup.py b/Packages/vcs/setup.py
index 10f64da9b..06f0ef5b9 100755
--- a/Packages/vcs/setup.py
+++ b/Packages/vcs/setup.py
@@ -27,6 +27,7 @@ setup(name="vcs",
       packages=find_packages(),
       package_dir={'vcs': 'vcs',
                    },
+      scripts= ["scripts/vcs_download_sample_data"],
       data_files=[('share/vcs', ('Share/wmo_symbols.json',
                                  'Share/data_continent_coarse',
                                  'Share/data_continent_political',
diff --git a/Packages/vcs/vcs/template.py b/Packages/vcs/vcs/template.py
index fd2ee2f0c..adabacda0 100644
--- a/Packages/vcs/vcs/template.py
+++ b/Packages/vcs/vcs/template.py
@@ -1486,7 +1486,10 @@ class P(object):
                                                       axis=" ".join(["(%s)" %
                                                                      S for S in slab.getAxisIds()])))
                         except:
-                            meanstring = 'Mean %.4g' % slab.mean()
+                            try:
+                                meanstring = 'Mean %.4g' % slab.mean()
+                            except:
+                                meanstring = 'Mean %.4g' % numpy.mean(slab.filled())
                     tt.string = meanstring
                 else:
                     tt.string = str(getattr(slab, s))
diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py
index 4fc59ed89..d3a02dcda 100644
--- a/Packages/vcs/vcs/utils.py
+++ b/Packages/vcs/vcs/utils.py
@@ -1670,7 +1670,7 @@ def creategraphicsmethod(gtype, gname='default', name=None):
 # datawc_ can be a float or a cdtime.reltime
 # TODO: Investigate why datawc is converted to a cdtime.reltime
 def getDataWcValue(v):
-    if (type(v) is type(cdtime.reltime(0, 'months since 1900'))):
+    if (type(v) is type(cdtime.reltime(0, 'months since 1900'))):  # noqa
         return v.value
     else:
         return v
diff --git a/testing/CMakeLists.txt b/testing/CMakeLists.txt
index 0e7286fbe..909790f68 100644
--- a/testing/CMakeLists.txt
+++ b/testing/CMakeLists.txt
@@ -1,5 +1,5 @@
-# Disabling GUI tests as they don't work
-#add_subdirectory(uvcdat)
+set(PYTHON_EXECUTABLE python)
+set(CDAT_DOWNLOAD_SAMPLE_DATA ON)
 
 # Helper macro that sets the environment correctly
 macro (cdat_add_test name)
@@ -13,6 +13,9 @@ macro (cdat_add_test name)
   endif()
 
   add_test(${name} ${cdat_CMAKE_BINARY_DIR}/runtest  ${ARGS})
+  if ( NOT (${name} STREQUAL download_sample_data ))
+      set_tests_properties(${name} PROPERTIES DEPENDS download_sample_data)
+  endif()
 
   if(DEFINED ENV{UVCDAT_ANONYMOUS_LOG})
     set_tests_properties (${name}
@@ -24,6 +27,13 @@ macro (cdat_add_test name)
     )
   endif()
 endmacro()
+
+#separate_arguments(DOWNLOAD_ARGS)
+# make sure data is downloaded
+cdat_add_test(download_sample_data
+    vcs_download_sample_data
+    )
+
 add_subdirectory(regrid)
 add_subdirectory(vcs)
 add_subdirectory(vcsaddons)
@@ -33,5 +43,4 @@ add_subdirectory(Thermo)
 add_subdirectory(unidata)
 add_subdirectory(cdms2)
 add_subdirectory(xmgrace)
-
 add_subdirectory(pcmdi)
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index c80cbc44f..dba30b168 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -1,6 +1,4 @@
 set(BASELINE_DIR "${UVCDAT_GIT_TESTDATA_DIR}/baselines/vcs")
-set(PYTHON_EXECUTABLE python)
-set(CDAT_DOWNLOAD_SAMPLE_DATA ON)
 
 cdat_add_test(flake8_vcs
   flake8 "${cdat_SOURCE_DIR}/Packages/vcs/vcs/"
-- 
GitLab


From 3b995dbf4d6d75fd5f2e3f9f9fc0d9afb4426dbc Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 12:57:40 -0700
Subject: [PATCH 142/196] travis update

---
 .travis.yml | 25 +++++++++++++++----------
 1 file changed, 15 insertions(+), 10 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index dbf2b1399..e085b7f64 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,20 +2,25 @@ os:
   - linux
   - osx
 
-language: c++
+language: python
+    - "2.7"
 
 before_install:
-    - if [ "$TRAVIS_OS_NAME" = "linux" ]; then sudo apt-get update -qq; sudo apt-get install -y gfortran; fi
-    - if [ "$TRAVIS_OS_NAME" = "linux" ]; then pushd $HOME && mkdir cmake3.1 && cd cmake3.1 && (curl -L "http://cmake.org/files/v3.1/cmake-3.1.0-Linux-x86_64.tar.gz" | gunzip -c | tar x) && cd cmake-*/bin && export PATH="${PWD}:${PATH}"; popd; fi
-    - if [ "$TRAVIS_OS_NAME" = "osx" ]; then brew update ; brew outdated cmake || brew upgrade cmake ; fi
-    - cmake --version
+  - if [ "$TRAVIS_OS_NAME" = "linux" ]; then sudo apt-get update -qq; sudo apt-get install -y gfortran xvfb; fi
+  - if [ "$TRAVIS_OS_NAME" = "linux" ]; then wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; fi
+  - if [ "$TRAVIS_OS_NAME" = "osx" ]; then brew update ; fi 
+  - if [ "$TRAVIS_OS_NAME" = "osx" ]; then wget https://repo.continuum.io/miniconda/Miniconda-latest-MacOSX-x86_64.sh -O miniconda.sh; fi
+  - if [ "$TRAVIS_OS_NAME" = "osx" ]; then wget https://github.com/UV-CDAT/uvcdat/releases/download/v2.4.1/gfortran-4.9.2-Mac.tar.gz -O ~/gfortran-4.9.2-Mac.tar.gz ; pushd / ; sudo tar xzvf ~/gfortran-4.9.2-Mac.tar.gz ; pushd ; fi
+  - export PATH="$HOME/miniconda/bin:$PATH"
+  - bash miniconda.sh -b -p $HOME/miniconda
+  - conda config --set always_yes yes --set changeps1 no
+  - conda update -y -q conda
+  - conda install openssl=1.0.2d
 
 script:
-    - git submodule init
-    - git submodule update
     - cd ..
     - mkdir _build
     - cd _build
-    - cmake -DGIT_PROTOCOL=git:// -DCDAT_BUILD_MODE=LEAN -DCDAT_BUILD_GRAPHICS=ON -DCDAT_BUILD_SCIPY=OFF ../uvcdat
-    - ctest -VV -S ../uvcdat/CMake/travis_build.cmake
-    - ctest -VV -S ../uvcdat/CMake/travis_submit.cmake
+    - cmake -DGIT_PROTOCOL=git:// ../uvcdat
+    - make
+    - ctest -j8 -D Experimental
-- 
GitLab


From aecf027540fac030aead25a2bc16997d8e50a8e6 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 13:49:05 -0700
Subject: [PATCH 143/196] need to push to go to Linux, mac is dying

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index 81a54299b..90de4ad7b 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info flake8 requests
+conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info flake8 requests numpy==1.9.2
 
 source activate @CONDA_ENVIRONMENT_NAME@
 for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
-- 
GitLab


From 29b67f0e2399ad14b06c0f1d3b8f12c43e726c8d Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 14:59:35 -0700
Subject: [PATCH 144/196] testing on mac

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +-
 CMake/cdat_modules_extra/runtest.in                      | 3 +++
 Packages/testing/regression.py                           | 2 +-
 3 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index 90de4ad7b..6655f8059 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info flake8 requests numpy==1.9.2
+conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 --show-channel-urls
 
 source activate @CONDA_ENVIRONMENT_NAME@
 for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in
index 19769f740..f981c796d 100755
--- a/CMake/cdat_modules_extra/runtest.in
+++ b/CMake/cdat_modules_extra/runtest.in
@@ -1,6 +1,9 @@
 #!/bin/bash
+echo "ACTIVATING ENV:"@CONDA_ENVIRONMENT_NAME@
 source activate @CONDA_ENVIRONMENT_NAME@
 export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"`
 echo "Python:" `which python`
 echo "Running:"$*
+python -c "import vtk;print 'VTK_VERSION:',vtk.VTK_VERSION"
+python -c "import numpy;print 'NUMPY_VERSION:',numpy.version.version"
 $*
diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index 72047380e..aa8efa96b 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -83,7 +83,7 @@ def find_alternates(fname):
     return results
 
 def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
-                       baseline=True, cleanup=True, update_baselines = False):
+                       baseline=True, cleanup=True, update_baselines = True):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
-- 
GitLab


From e2741805ecafda1ab0033a1513b9bc6b2ddd3761 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 15:46:17 -0700
Subject: [PATCH 145/196] somehow this numpy is acting up

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 4 ++--
 CMake/cdat_modules_extra/runtest.in                      | 2 +-
 Packages/testing/regression.py                           | 2 +-
 Packages/vcs/vcs/VTKPlots.py                             | 5 ++++-
 4 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index 6655f8059..78e72d78e 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,9 +1,9 @@
 #!/usr/bin/env bash
 
-conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 --show-channel-urls
+conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls
 
 source activate @CONDA_ENVIRONMENT_NAME@
-for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
+for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons EzTemplate cdutil unidata xmgrace genutil Thermo WK distarray; do
     cd @cdat_SOURCE_DIR@/Packages/${pkg}
     rm -rf build
     if [ ${pkg} == "vcs" ]; then
diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in
index f981c796d..4946cf488 100755
--- a/CMake/cdat_modules_extra/runtest.in
+++ b/CMake/cdat_modules_extra/runtest.in
@@ -3,7 +3,7 @@ echo "ACTIVATING ENV:"@CONDA_ENVIRONMENT_NAME@
 source activate @CONDA_ENVIRONMENT_NAME@
 export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"`
 echo "Python:" `which python`
-echo "Running:"$*
+echo "Running: "$*
 python -c "import vtk;print 'VTK_VERSION:',vtk.VTK_VERSION"
 python -c "import numpy;print 'NUMPY_VERSION:',numpy.version.version"
 $*
diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index aa8efa96b..72047380e 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -83,7 +83,7 @@ def find_alternates(fname):
     return results
 
 def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
-                       baseline=True, cleanup=True, update_baselines = True):
+                       baseline=True, cleanup=True, update_baselines = False):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 9d3d85c74..2d3efbdab 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -1499,7 +1499,10 @@ class VTKVCSBackend(object):
                                 float(cdutil.averager(array1, axis=" ".join(["(%s)" %
                                                                              S for S in array1.getAxisIds()])))
                         except:
-                            meanstring = 'Mean %.4g' % array1.mean()
+                            try:
+                                meanstring = 'Mean %.4g' % array1.mean()
+                            except:
+                                meanstring = 'Mean %.4g' % numpy.mean(array1.filled())
                     t.SetInput(meanstring)
                 elif att == "crdate" and tstr is not None:
                     t.SetInput(tstr.split()[0].replace("-", "/"))
-- 
GitLab


From 761fc9b1b6db5282a10d4eaf57ecf1ad844fd7ac Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 16:05:07 -0700
Subject: [PATCH 146/196] major clean up

---
 Changes.txt                                   |    3 -
 TODO.txt                                      |    5 -
 exsrc/Makefile.am.pixman                      |   54 -
 exsrc/Numeric.sh                              |   16 -
 exsrc/Pmw.sh                                  |    6 -
 exsrc/R.sh                                    |    5 -
 exsrc/README.txt                              |   23 -
 exsrc/VTK_BUILD_ANSWERS.core                  | 1320 ----------
 exsrc/blas.sh                                 |   25 -
 exsrc/cairo.sh                                |   14 -
 exsrc/clean_script                            |    2 -
 exsrc/cmake.sh                                |    7 -
 exsrc/curl.sh                                 |   12 -
 exsrc/detect_fortran.py                       |   36 -
 exsrc/ffmpeg.sh                               |   14 -
 exsrc/find_site.py                            |    9 -
 exsrc/fontconfig.sh                           |   15 -
 exsrc/freetype.sh                             |   12 -
 exsrc/gdal.sh                                 |    5 -
 exsrc/ghostscript.sh                          |    5 -
 exsrc/gifmerge.sh                             |    5 -
 exsrc/gifsicle.sh                             |    5 -
 exsrc/gplot.sh                                |    6 -
 exsrc/hdf.sh                                  |   12 -
 exsrc/install_script.obsolete                 | 1154 ---------
 exsrc/ioapi.sh                                |   22 -
 exsrc/ipython.sh                              |    5 -
 exsrc/ipython1.sh                             |    6 -
 exsrc/jpeg.sh                                 |   12 -
 exsrc/lapack.sh                               |   25 -
 exsrc/lapack95.sh                             |   25 -
 exsrc/libcf.sh                                |   20 -
 exsrc/libdap.sh                               |   14 -
 exsrc/libnc-dap.sh                            |   14 -
 exsrc/libpixman.sh                            |   12 -
 exsrc/libpng.sh                               |   12 -
 exsrc/libxml.sh                               |   12 -
 exsrc/netcdf.sh                               |   50 -
 exsrc/netcdf_fortran.sh                       |   42 -
 exsrc/netpbm.input.conf                       |   19 -
 exsrc/netpbm.input.conf.Cygwin                |   18 -
 exsrc/netpbm.input.conf.Darwin                |   19 -
 exsrc/netpbm.input.conf.sun                   |   20 -
 exsrc/netpbm.input.inst                       |    9 -
 exsrc/netpbm.sh                               |   35 -
 exsrc/numpy.sh                                |   30 -
 exsrc/pbmplus.sh                              |    9 -
 exsrc/pixman.def                              |   62 -
 exsrc/pkgconfig.sh                            |   12 -
 exsrc/proj.sh                                 |    5 -
 exsrc/prolog.sh                               |   76 -
 exsrc/pyfort.sh                               |    4 -
 exsrc/setuptools.sh                           |    6 -
 exsrc/src/CMakeCache.txt.linux.in             | 1965 ---------------
 exsrc/src/CMakeCache.txt.mac.Framework.in     | 2066 ---------------
 exsrc/src/CMakeCache.txt.mac.in               | 1965 ---------------
 exsrc/src/cmake/multi.c                       | 1978 ---------------
 exsrc/src/h5diff_correct_ansi.c               | 2222 ----------------
 exsrc/src/o.txt                               |    3 -
 exsrc/src/pbmplus/Makefile.in                 |  134 -
 exsrc/src/pbmplus/libpbm1.c                   |  674 -----
 exsrc/src/pbmplus/pbmplus.h                   |  192 --
 exsrc/src/pbmplus/pnm/Makefile.in             |  188 --
 exsrc/src/png/pngconf.h                       |  632 -----
 exsrc/src/readline/shobj-conf                 |  579 -----
 exsrc/src/yes.txt                             |    2 -
 exsrc/twisted.sh                              |    6 -
 exsrc/vtk.sh                                  |   17 -
 exsrc/xgks.sh                                 |   21 -
 images/2leftarrow.gif                         |  Bin 1180 -> 0 bytes
 images/2rightarrow.gif                        |  Bin 1180 -> 0 bytes
 images/ASD.scr                                | 1268 ----------
 images/HARD_COPY                              |   76 -
 images/PCM_isofill.scr                        |  976 -------
 images/UV-CDAT_logo.png                       |  Bin 31325 -> 0 bytes
 images/UV-CDAT_logo_sites.png                 |  Bin 116663 -> 0 bytes
 images/add.gif                                |  Bin 986 -> 0 bytes
 images/animate_load.gif                       |  Bin 1893 -> 0 bytes
 images/animate_save.gif                       |  Bin 1966 -> 0 bytes
 images/base10.gif                             |  Bin 978 -> 0 bytes
 images/bookmark_folder.gif                    |  Bin 1302 -> 0 bytes
 images/cdatdemo.gif                           |  Bin 413 -> 0 bytes
 images/cdatnews                               |    2 -
 images/cos.gif                                |  Bin 964 -> 0 bytes
 images/cosh.gif                               |  Bin 978 -> 0 bytes
 images/cycle.gif                              |  Bin 1119 -> 0 bytes
 images/devel_20.gif                           |  Bin 825 -> 0 bytes
 images/devel_menu.gif                         |  Bin 1493 -> 0 bytes
 images/divide.gif                             |  Bin 987 -> 0 bytes
 images/edit.gif                               |  Bin 1500 -> 0 bytes
 images/edit_20.gif                            |  Bin 1258 -> 0 bytes
 images/edit_menu.gif                          |  Bin 1781 -> 0 bytes
 images/editdelete.gif                         |  Bin 1313 -> 0 bytes
 images/editdelete_20.gif                      |  Bin 825 -> 0 bytes
 images/editdelete_menu.gif                    |  Bin 1568 -> 0 bytes
 images/equal.gif                              |  Bin 194 -> 0 bytes
 images/exp.gif                                |  Bin 991 -> 0 bytes
 images/fabs.gif                               |  Bin 988 -> 0 bytes
 images/function.gif                           |  Bin 957 -> 0 bytes
 images/getmask.gif                            |  Bin 1096 -> 0 bytes
 images/gohome.gif                             |  Bin 1206 -> 0 bytes
 images/greater.gif                            |  Bin 194 -> 0 bytes
 images/grower.gif                             |  Bin 1250 -> 0 bytes
 images/hand1.gif                              |  Bin 1176 -> 0 bytes
 images/hand2.gif                              |  Bin 1179 -> 0 bytes
 images/info.gif                               |  Bin 1580 -> 0 bytes
 images/info_20.gif                            |  Bin 1258 -> 0 bytes
 images/info_menu.gif                          |  Bin 2152 -> 0 bytes
 images/inpin_red.gif                          |  Bin 1050 -> 0 bytes
 images/inverse.gif                            |  Bin 957 -> 0 bytes
 images/less.gif                               |  Bin 203 -> 0 bytes
 images/list_20.gif                            |  Bin 743 -> 0 bytes
 images/lock.gif                               |  Bin 1124 -> 0 bytes
 images/log.gif                                |  Bin 1640 -> 0 bytes
 images/log_20.gif                             |  Bin 1258 -> 0 bytes
 images/log_menu.gif                           |  Bin 2277 -> 0 bytes
 images/mask.gif                               |  Bin 1302 -> 0 bytes
 images/mlog.gif                               |  Bin 1002 -> 0 bytes
 images/mlog10.gif                             |  Bin 1011 -> 0 bytes
 images/multiply.gif                           |  Bin 995 -> 0 bytes
 images/not.gif                                |  Bin 1418 -> 0 bytes
 images/off.gif                                |  Bin 1457 -> 0 bytes
 images/on.gif                                 |  Bin 1966 -> 0 bytes
 images/open.gif                               |  Bin 1216 -> 0 bytes
 images/opendap.gif                            |  Bin 1292 -> 0 bytes
 images/outpin_red.gif                         |  Bin 1016 -> 0 bytes
 images/pan_down.gif                           |  Bin 587 -> 0 bytes
 images/pan_left.gif                           |  Bin 592 -> 0 bytes
 images/pan_right.gif                          |  Bin 596 -> 0 bytes
 images/pan_up.gif                             |  Bin 583 -> 0 bytes
 images/player_end2.gif                        |  Bin 1145 -> 0 bytes
 images/player_pause.gif                       |  Bin 639 -> 0 bytes
 images/player_play.gif                        |  Bin 1052 -> 0 bytes
 images/player_rev.gif                         |  Bin 1051 -> 0 bytes
 images/player_start.gif                       |  Bin 1142 -> 0 bytes
 images/player_stop.gif                        |  Bin 650 -> 0 bytes
 images/power.gif                              |  Bin 996 -> 0 bytes
 images/pydebug                                |   14 -
 images/pythonenv                              |   14 -
 images/recycle.gif                            |  Bin 1624 -> 0 bytes
 images/recycle_20.gif                         |  Bin 1258 -> 0 bytes
 images/recycle_file.gif                       |  Bin 1301 -> 0 bytes
 images/recycle_menu.gif                       |  Bin 2026 -> 0 bytes
 images/regrid.gif                             |  Bin 1098 -> 0 bytes
 images/remove.gif                             |  Bin 1457 -> 0 bytes
 images/save.gif                               |  Bin 1259 -> 0 bytes
 images/save_20.gif                            |  Bin 585 -> 0 bytes
 images/save_file.gif                          |  Bin 1335 -> 0 bytes
 images/save_menu.gif                          |  Bin 1299 -> 0 bytes
 images/sin.gif                                |  Bin 960 -> 0 bytes
 images/sinh.gif                               |  Bin 977 -> 0 bytes
 images/spk2scr.py                             |  117 -
 images/splash.gif                             |  Bin 25144 -> 0 bytes
 images/sqrt.gif                               |  Bin 1014 -> 0 bytes
 images/std.gif                                |  Bin 151 -> 0 bytes
 images/subtract.gif                           |  Bin 978 -> 0 bytes
 images/tan.gif                                |  Bin 959 -> 0 bytes
 images/tanh.gif                               |  Bin 972 -> 0 bytes
 images/templator                              |   10 -
 images/tg2_20.gif                             |  Bin 825 -> 0 bytes
 images/tg_20.gif                              |  Bin 743 -> 0 bytes
 images/tg_menu.gif                            |  Bin 1774 -> 0 bytes
 images/tiltedpin_red.gif                      |  Bin 1067 -> 0 bytes
 images/toggle_menu.gif                        |  Bin 1096 -> 0 bytes
 images/trashcan_empty.gif                     |  Bin 1500 -> 0 bytes
 images/trashcan_empty_20.gif                  |  Bin 444 -> 0 bytes
 images/trashcan_full.gif                      |  Bin 993 -> 0 bytes
 images/trashcan_full_20.gif                   |  Bin 825 -> 0 bytes
 images/trashcan_menu.gif                      |  Bin 1757 -> 0 bytes
 images/unlock.gif                             |  Bin 1048 -> 0 bytes
 images/vcs2py.py                              |  367 ---
 images/vcs_icon.xbm                           |  566 -----
 images/viewmag+.gif                           |  Bin 1069 -> 0 bytes
 images/viewmag-.gif                           |  Bin 1064 -> 0 bytes
 images/viewmag-.png                           |  Bin 1056 -> 0 bytes
 installation/DAP.py                           |   29 -
 installation/HDF.py                           |   26 -
 installation/cdmsonly.py                      |   16 -
 installation/contrib.py                       |   73 -
 installation/control.py                       |   72 -
 installation/debug.py                         |   12 -
 installation/irix.py                          |    2 -
 installation/pcmdi.py                         |   22 -
 installation/pp.py                            |    3 -
 installation/psql.py                          |    3 -
 installation/standard.py                      |   81 -
 pysrc/README.txt                              |   36 -
 pysrc/clean_script                            |    2 -
 pysrc/install_script.obsolete                 |  117 -
 pysrc/prolog.sh                               |   85 -
 pysrc/python.sh                               |   76 -
 pysrc/readline.sh                             |   23 -
 pysrc/src/setup-2.7.1.py                      | 2067 ---------------
 pysrc/src/setup-2.7.2.py                      | 2090 ---------------
 pysrc/src/setup-2.7.3.py                      | 2094 ---------------
 pysrc/src/setup-2.7.4.py                      | 2186 ----------------
 pysrc/src/setup-2.7.7.py                      | 2244 -----------------
 pysrc/src/setup.py                            | 2244 -----------------
 pysrc/src/site-2.7.7.py                       |  602 -----
 pysrc/tcl.sh                                  |   25 -
 pysrc/tk.sh                                   |   24 -
 pysrc/zlib.sh                                 |   25 -
 resources/uvcdat.icns                         |  Bin 40131 -> 0 bytes
 resources/uvcdat.jpg                          |  Bin 94795 -> 0 bytes
 scripts/clean_script                          |   14 -
 scripts/get_git_version.sh                    |    7 -
 scripts/git_hooks/commit-msg                  |    3 -
 scripts/git_hooks/pre-commit                  |   13 -
 scripts/git_hooks/pre-push                    |   14 -
 scripts/git_hooks/prepare-commit-msg          |    3 -
 scripts/git_setup/.gitattributes              |    9 -
 scripts/git_setup/LICENSE                     |  202 --
 scripts/git_setup/NOTICE                      |    5 -
 scripts/git_setup/README                      |   80 -
 scripts/git_setup/config                      |    2 -
 scripts/git_setup/config.sample               |   22 -
 scripts/git_setup/git-gerrit-push             |   73 -
 scripts/git_setup/setup-gerrit                |  147 --
 scripts/git_setup/setup-hooks                 |   63 -
 scripts/git_setup/setup-ssh                   |  111 -
 scripts/git_setup/setup-stage                 |   82 -
 scripts/git_setup/setup-user                  |   39 -
 scripts/git_setup/setup_aliases.sh            |    8 -
 scripts/git_setup/tips                        |   55 -
 scripts/last_update_time.py                   |   12 -
 scripts/nightly.sh                            |   35 -
 scripts/setup_for_development.sh              |   19 -
 scripts/tarballit.sh                          |    7 -
 tests/cdat/test_cdat.py                       |  500 ----
 tests/cdat/test_exsrc_ok.py                   |  107 -
 tests/test_script                             |   31 -
 uvcdatspt/scripts/MHTScreenshots.py           |  170 --
 uvcdatspt/scripts/MHTTemporalStatistics.py    |   26 -
 uvcdatspt/scripts/MOCScreenshots.py           |  535 ----
 uvcdatspt/scripts/MOCTemporalStatistics.py    |   26 -
 .../scripts/MWehnerTemporalStatistics.py      |   47 -
 uvcdatspt/scripts/POPGenerateImages.py        |  310 ---
 uvcdatspt/scripts/benchmark.py                |  626 -----
 uvcdatspt/scripts/ocean.py                    |  187 --
 239 files changed, 37288 deletions(-)
 delete mode 100644 Changes.txt
 delete mode 100644 TODO.txt
 delete mode 100644 exsrc/Makefile.am.pixman
 delete mode 100755 exsrc/Numeric.sh
 delete mode 100755 exsrc/Pmw.sh
 delete mode 100755 exsrc/R.sh
 delete mode 100644 exsrc/README.txt
 delete mode 100644 exsrc/VTK_BUILD_ANSWERS.core
 delete mode 100755 exsrc/blas.sh
 delete mode 100755 exsrc/cairo.sh
 delete mode 100755 exsrc/clean_script
 delete mode 100755 exsrc/cmake.sh
 delete mode 100755 exsrc/curl.sh
 delete mode 100644 exsrc/detect_fortran.py
 delete mode 100755 exsrc/ffmpeg.sh
 delete mode 100644 exsrc/find_site.py
 delete mode 100755 exsrc/fontconfig.sh
 delete mode 100755 exsrc/freetype.sh
 delete mode 100755 exsrc/gdal.sh
 delete mode 100755 exsrc/ghostscript.sh
 delete mode 100755 exsrc/gifmerge.sh
 delete mode 100755 exsrc/gifsicle.sh
 delete mode 100755 exsrc/gplot.sh
 delete mode 100755 exsrc/hdf.sh
 delete mode 100755 exsrc/install_script.obsolete
 delete mode 100755 exsrc/ioapi.sh
 delete mode 100755 exsrc/ipython.sh
 delete mode 100755 exsrc/ipython1.sh
 delete mode 100755 exsrc/jpeg.sh
 delete mode 100755 exsrc/lapack.sh
 delete mode 100755 exsrc/lapack95.sh
 delete mode 100755 exsrc/libcf.sh
 delete mode 100755 exsrc/libdap.sh
 delete mode 100755 exsrc/libnc-dap.sh
 delete mode 100755 exsrc/libpixman.sh
 delete mode 100755 exsrc/libpng.sh
 delete mode 100755 exsrc/libxml.sh
 delete mode 100755 exsrc/netcdf.sh
 delete mode 100755 exsrc/netcdf_fortran.sh
 delete mode 100644 exsrc/netpbm.input.conf
 delete mode 100644 exsrc/netpbm.input.conf.Cygwin
 delete mode 100644 exsrc/netpbm.input.conf.Darwin
 delete mode 100644 exsrc/netpbm.input.conf.sun
 delete mode 100644 exsrc/netpbm.input.inst
 delete mode 100755 exsrc/netpbm.sh
 delete mode 100755 exsrc/numpy.sh
 delete mode 100755 exsrc/pbmplus.sh
 delete mode 100644 exsrc/pixman.def
 delete mode 100755 exsrc/pkgconfig.sh
 delete mode 100755 exsrc/proj.sh
 delete mode 100755 exsrc/prolog.sh
 delete mode 100755 exsrc/pyfort.sh
 delete mode 100755 exsrc/setuptools.sh
 delete mode 100644 exsrc/src/CMakeCache.txt.linux.in
 delete mode 100644 exsrc/src/CMakeCache.txt.mac.Framework.in
 delete mode 100644 exsrc/src/CMakeCache.txt.mac.in
 delete mode 100644 exsrc/src/cmake/multi.c
 delete mode 100644 exsrc/src/h5diff_correct_ansi.c
 delete mode 100644 exsrc/src/o.txt
 delete mode 100644 exsrc/src/pbmplus/Makefile.in
 delete mode 100644 exsrc/src/pbmplus/libpbm1.c
 delete mode 100644 exsrc/src/pbmplus/pbmplus.h
 delete mode 100644 exsrc/src/pbmplus/pnm/Makefile.in
 delete mode 100644 exsrc/src/png/pngconf.h
 delete mode 100644 exsrc/src/readline/shobj-conf
 delete mode 100644 exsrc/src/yes.txt
 delete mode 100755 exsrc/twisted.sh
 delete mode 100755 exsrc/vtk.sh
 delete mode 100755 exsrc/xgks.sh
 delete mode 100644 images/2leftarrow.gif
 delete mode 100644 images/2rightarrow.gif
 delete mode 100755 images/ASD.scr
 delete mode 100755 images/HARD_COPY
 delete mode 100644 images/PCM_isofill.scr
 delete mode 100644 images/UV-CDAT_logo.png
 delete mode 100644 images/UV-CDAT_logo_sites.png
 delete mode 100644 images/add.gif
 delete mode 100644 images/animate_load.gif
 delete mode 100644 images/animate_save.gif
 delete mode 100644 images/base10.gif
 delete mode 100644 images/bookmark_folder.gif
 delete mode 100644 images/cdatdemo.gif
 delete mode 100755 images/cdatnews
 delete mode 100644 images/cos.gif
 delete mode 100644 images/cosh.gif
 delete mode 100644 images/cycle.gif
 delete mode 100644 images/devel_20.gif
 delete mode 100644 images/devel_menu.gif
 delete mode 100644 images/divide.gif
 delete mode 100644 images/edit.gif
 delete mode 100644 images/edit_20.gif
 delete mode 100644 images/edit_menu.gif
 delete mode 100644 images/editdelete.gif
 delete mode 100644 images/editdelete_20.gif
 delete mode 100644 images/editdelete_menu.gif
 delete mode 100644 images/equal.gif
 delete mode 100644 images/exp.gif
 delete mode 100644 images/fabs.gif
 delete mode 100644 images/function.gif
 delete mode 100644 images/getmask.gif
 delete mode 100644 images/gohome.gif
 delete mode 100644 images/greater.gif
 delete mode 100644 images/grower.gif
 delete mode 100644 images/hand1.gif
 delete mode 100644 images/hand2.gif
 delete mode 100644 images/info.gif
 delete mode 100644 images/info_20.gif
 delete mode 100644 images/info_menu.gif
 delete mode 100644 images/inpin_red.gif
 delete mode 100644 images/inverse.gif
 delete mode 100644 images/less.gif
 delete mode 100644 images/list_20.gif
 delete mode 100644 images/lock.gif
 delete mode 100644 images/log.gif
 delete mode 100644 images/log_20.gif
 delete mode 100644 images/log_menu.gif
 delete mode 100644 images/mask.gif
 delete mode 100644 images/mlog.gif
 delete mode 100644 images/mlog10.gif
 delete mode 100644 images/multiply.gif
 delete mode 100644 images/not.gif
 delete mode 100644 images/off.gif
 delete mode 100644 images/on.gif
 delete mode 100644 images/open.gif
 delete mode 100644 images/opendap.gif
 delete mode 100644 images/outpin_red.gif
 delete mode 100644 images/pan_down.gif
 delete mode 100644 images/pan_left.gif
 delete mode 100644 images/pan_right.gif
 delete mode 100644 images/pan_up.gif
 delete mode 100644 images/player_end2.gif
 delete mode 100644 images/player_pause.gif
 delete mode 100644 images/player_play.gif
 delete mode 100644 images/player_rev.gif
 delete mode 100644 images/player_start.gif
 delete mode 100644 images/player_stop.gif
 delete mode 100644 images/power.gif
 delete mode 100755 images/pydebug
 delete mode 100755 images/pythonenv
 delete mode 100644 images/recycle.gif
 delete mode 100644 images/recycle_20.gif
 delete mode 100644 images/recycle_file.gif
 delete mode 100644 images/recycle_menu.gif
 delete mode 100644 images/regrid.gif
 delete mode 100644 images/remove.gif
 delete mode 100644 images/save.gif
 delete mode 100644 images/save_20.gif
 delete mode 100644 images/save_file.gif
 delete mode 100644 images/save_menu.gif
 delete mode 100644 images/sin.gif
 delete mode 100644 images/sinh.gif
 delete mode 100755 images/spk2scr.py
 delete mode 100755 images/splash.gif
 delete mode 100644 images/sqrt.gif
 delete mode 100644 images/std.gif
 delete mode 100644 images/subtract.gif
 delete mode 100644 images/tan.gif
 delete mode 100644 images/tanh.gif
 delete mode 100755 images/templator
 delete mode 100644 images/tg2_20.gif
 delete mode 100644 images/tg_20.gif
 delete mode 100644 images/tg_menu.gif
 delete mode 100644 images/tiltedpin_red.gif
 delete mode 100644 images/toggle_menu.gif
 delete mode 100644 images/trashcan_empty.gif
 delete mode 100644 images/trashcan_empty_20.gif
 delete mode 100644 images/trashcan_full.gif
 delete mode 100644 images/trashcan_full_20.gif
 delete mode 100644 images/trashcan_menu.gif
 delete mode 100644 images/unlock.gif
 delete mode 100755 images/vcs2py.py
 delete mode 100644 images/vcs_icon.xbm
 delete mode 100644 images/viewmag+.gif
 delete mode 100644 images/viewmag-.gif
 delete mode 100644 images/viewmag-.png
 delete mode 100644 installation/DAP.py
 delete mode 100644 installation/HDF.py
 delete mode 100644 installation/cdmsonly.py
 delete mode 100644 installation/contrib.py
 delete mode 100644 installation/control.py
 delete mode 100644 installation/debug.py
 delete mode 100644 installation/irix.py
 delete mode 100644 installation/pcmdi.py
 delete mode 100644 installation/pp.py
 delete mode 100644 installation/psql.py
 delete mode 100644 installation/standard.py
 delete mode 100644 pysrc/README.txt
 delete mode 100755 pysrc/clean_script
 delete mode 100755 pysrc/install_script.obsolete
 delete mode 100755 pysrc/prolog.sh
 delete mode 100755 pysrc/python.sh
 delete mode 100755 pysrc/readline.sh
 delete mode 100644 pysrc/src/setup-2.7.1.py
 delete mode 100644 pysrc/src/setup-2.7.2.py
 delete mode 100644 pysrc/src/setup-2.7.3.py
 delete mode 100644 pysrc/src/setup-2.7.4.py
 delete mode 100644 pysrc/src/setup-2.7.7.py
 delete mode 100644 pysrc/src/setup.py
 delete mode 100644 pysrc/src/site-2.7.7.py
 delete mode 100755 pysrc/tcl.sh
 delete mode 100755 pysrc/tk.sh
 delete mode 100755 pysrc/zlib.sh
 delete mode 100644 resources/uvcdat.icns
 delete mode 100644 resources/uvcdat.jpg
 delete mode 100755 scripts/clean_script
 delete mode 100755 scripts/get_git_version.sh
 delete mode 100755 scripts/git_hooks/commit-msg
 delete mode 100755 scripts/git_hooks/pre-commit
 delete mode 100755 scripts/git_hooks/pre-push
 delete mode 100755 scripts/git_hooks/prepare-commit-msg
 delete mode 100644 scripts/git_setup/.gitattributes
 delete mode 100644 scripts/git_setup/LICENSE
 delete mode 100644 scripts/git_setup/NOTICE
 delete mode 100644 scripts/git_setup/README
 delete mode 100644 scripts/git_setup/config
 delete mode 100644 scripts/git_setup/config.sample
 delete mode 100755 scripts/git_setup/git-gerrit-push
 delete mode 100755 scripts/git_setup/setup-gerrit
 delete mode 100755 scripts/git_setup/setup-hooks
 delete mode 100755 scripts/git_setup/setup-ssh
 delete mode 100755 scripts/git_setup/setup-stage
 delete mode 100755 scripts/git_setup/setup-user
 delete mode 100755 scripts/git_setup/setup_aliases.sh
 delete mode 100755 scripts/git_setup/tips
 delete mode 100644 scripts/last_update_time.py
 delete mode 100755 scripts/nightly.sh
 delete mode 100755 scripts/setup_for_development.sh
 delete mode 100755 scripts/tarballit.sh
 delete mode 100644 tests/cdat/test_cdat.py
 delete mode 100644 tests/cdat/test_exsrc_ok.py
 delete mode 100755 tests/test_script
 delete mode 100644 uvcdatspt/scripts/MHTScreenshots.py
 delete mode 100644 uvcdatspt/scripts/MHTTemporalStatistics.py
 delete mode 100644 uvcdatspt/scripts/MOCScreenshots.py
 delete mode 100644 uvcdatspt/scripts/MOCTemporalStatistics.py
 delete mode 100644 uvcdatspt/scripts/MWehnerTemporalStatistics.py
 delete mode 100644 uvcdatspt/scripts/POPGenerateImages.py
 delete mode 100644 uvcdatspt/scripts/benchmark.py
 delete mode 100644 uvcdatspt/scripts/ocean.py

diff --git a/Changes.txt b/Changes.txt
deleted file mode 100644
index bc7cd069e..000000000
--- a/Changes.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-[updated_packages_versions]: Added distribute, added option to choose between ip and easy_install, added option to use cert for pip
-[updated_packages_versions]: Upgraded Packages to latest version
-1.3.1
diff --git a/TODO.txt b/TODO.txt
deleted file mode 100644
index fb03af1f8..000000000
--- a/TODO.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-- Fix ESMF build
-- Verify individual packages
-- Verify if we can build using system
-- Consistent install and build directories
-- Install headers and lib under their own package name
diff --git a/exsrc/Makefile.am.pixman b/exsrc/Makefile.am.pixman
deleted file mode 100644
index e57c21c46..000000000
--- a/exsrc/Makefile.am.pixman
+++ /dev/null
@@ -1,54 +0,0 @@
-lib_LTLIBRARIES = libpixman-1.la
-libpixman_1_la_LDFLAGS = -version-info $(LT_VERSION_INFO) -no-undefined -export-symbols pixman.def
-libpixman_1_la_LIBADD = @DEP_LIBS@ -lm
-libpixman_1_la_SOURCES =		\
-	pixman.h			\
-	pixman-access.c			\
-	pixman-access-accessors.c	\
-	pixman-region.c			\
-	pixman-private.h		\
-	pixman-image.c			\
-	pixman-combine.c		\
-	pixman-compose.c		\
-	pixman-compose-accessors.c	\
-	pixman-pict.c			\
-	pixman-source.c			\
-	pixman-transformed.c		\
-	pixman-transformed-accessors.c	\
-	pixman-utils.c			\
-	pixman-edge.c			\
-	pixman-edge-accessors.c		\
-	pixman-edge-imp.h		\
-	pixman-trap.c			\
-	pixman-compute-region.c		\
-	pixman-timer.c
-
-libpixmanincludedir = $(includedir)/pixman-1/
-libpixmaninclude_HEADERS = pixman.h pixman-version.h
-noinst_LTLIBRARIES = 
-
-EXTRA_DIST = Makefile.win32
-
-# mmx code
-if USE_MMX
-noinst_LTLIBRARIES += libpixman-mmx.la
-libpixman_mmx_la_SOURCES = \
-	pixman-mmx.c \
-	pixman-mmx.h
-libpixman_mmx_la_CFLAGS = $(DEP_CFLAGS) $(MMX_CFLAGS)
-libpixman_mmx_la_LIBADD = $(DEP_LIBS)
-libpixman_1_la_LIBADD += libpixman-mmx.la
-endif
-
-
-# sse2 code
-if USE_SSE2
-noinst_LTLIBRARIES += libpixman-sse.la
-libpixman_sse_la_SOURCES = \
-	pixman-sse.c \
-	pixman-sse.h
-libpixman_sse_la_CFLAGS = $(DEP_CFLAGS) $(SSE_CFLAGS)
-libpixman_sse_la_LIBADD = $(DEP_LIBS)
-libpixman_1_la_LIBADD += libpixman-sse.la
-endif
-
diff --git a/exsrc/Numeric.sh b/exsrc/Numeric.sh
deleted file mode 100755
index d82ca417b..000000000
--- a/exsrc/Numeric.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/sh
-PACKAGE="Numeric"
-. ./prolog.sh 
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-   echo "Numeric won't build on 64bit system, use numpy instead"
-   exit
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-   echo "Numeric won't build on 64bit system, use numpy instead"
-   exit
-fi
-
-# Numeric, MA, PropertiedClasses, etc.
-(cd Numeric-*; ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/Pmw.sh b/exsrc/Pmw.sh
deleted file mode 100755
index 70629fa8e..000000000
--- a/exsrc/Pmw.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-PACKAGE="Pmw"
-. ./prolog.sh 
-# Twisted.
-(cd Pmw-* ; cd src;  ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/R.sh b/exsrc/R.sh
deleted file mode 100755
index 4e2a38f55..000000000
--- a/exsrc/R.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="R"
-. ./prolog.sh
-(cd R*; ./configure --enable-R-shlib --prefix=${prefix}/Externals/R ; make ; make install ; make install ; cd ${prefix}/Externals/R/lib ; ln -s Externals/R/bin/libR.so )
-
diff --git a/exsrc/README.txt b/exsrc/README.txt
deleted file mode 100644
index 72e35f7dc..000000000
--- a/exsrc/README.txt
+++ /dev/null
@@ -1,23 +0,0 @@
-This directory cannot be built until Python is built.
-
-This directory contains sources for some parts of the CDAT
-system that we didn't write or which change on very slow timescales. 
-
-./install_script /whereyouwanttoputit 
-
-The subdirectory src contains the tarred/zipped files that are used to make
-the product. A subdirectory build will be created that contains the output.
-Some of these products can be tested by changing to their directory under 
-build and typing "make test".
-
-This process will unpack the tar files from the src directory if there is no 
-build subdirectory. Otherwise it doesn't. If you put in a new source file
-into src you need to clean before building.
-
-Log files are created in the build subdirectory.
-
-Each of the pieces may be built individually using the corresponding .sh 
-files in this directory. Some warning errors are usual from 
-many of the packages and vary from architecture to architecture.
-
-
diff --git a/exsrc/VTK_BUILD_ANSWERS.core b/exsrc/VTK_BUILD_ANSWERS.core
deleted file mode 100644
index d20aa1e61..000000000
--- a/exsrc/VTK_BUILD_ANSWERS.core
+++ /dev/null
@@ -1,1320 +0,0 @@
-# This is the CMakeCache file.
-# For build in directory: CDAT_PREFIX/VTK
-# You can edit this file to change values found and used by cmake.
-# If you do not want to change any of the values, simply exit the editor.
-# If you do want to change a value, simply edit, save, and exit the editor.
-# The syntax for the file is as follows:
-# KEY:TYPE=VALUE
-# KEY is the name of a variable in the cache.
-# TYPE is a hint to GUI's for the type of VALUE, DO NOT EDIT TYPE!.
-# VALUE is the current value for the KEY.
-
-########################
-# EXTERNAL cache entries
-########################
-
-//Build the documentation (Doxygen).
-BUILD_DOCUMENTATION:BOOL=OFF
-
-//Build VTK examples.
-BUILD_EXAMPLES:BOOL=OFF
-
-//Build VTK with shared libraries.
-BUILD_SHARED_LIBS:BOOL=ON
-
-//Build the testing tree.
-BUILD_TESTING:BOOL=OFF
-
-//Path to a program.
-CMAKE_AR:FILEPATH=/usr/bin/ar
-
-//For backwards compatibility, what version of CMake commands and
-// syntax should this version of CMake allow.
-CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.0
-
-//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or
-// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel.
-//
-CMAKE_BUILD_TYPE:STRING=
-
-//C++ compiler
-CMAKE_CXX_COMPILER:STRING=c++
-
-//Flags used by the compiler during all build types.
-CMAKE_CXX_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_CXX_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_CXX_FLAGS_RELEASE:STRING=-O3
-
-//Flags used by the compiler during Release with Debug Info builds.
-//
-CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//C compiler
-CMAKE_C_COMPILER:STRING=gcc
-
-//Flags for C compiler.
-CMAKE_C_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_C_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_C_FLAGS_RELEASE:STRING=-O3
-
-//Flags used by the compiler during Release with Debug Info builds.
-//
-CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//Flags used by the linker.
-CMAKE_EXE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-//
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Use HP pthreads.
-CMAKE_HP_PTHREADS:BOOL=OFF
-
-//Install path prefix, prepended onto install directories.
-CMAKE_INSTALL_PREFIX:PATH=CDAT_PREFIX
-
-//Path to a program.
-CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/gmake
-
-//Flags used by the linker during the creation of modules.
-CMAKE_MODULE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-//
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib
-
-//Flags used by the linker during the creation of dll's.
-CMAKE_SHARED_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-//
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Thread library used.
-CMAKE_THREAD_LIBS:STRING=-lpthread
-
-//Use the pthreads library.
-CMAKE_USE_PTHREADS:BOOL=ON
-
-//If true, cmake will use relative paths in makefiles and projects.
-//
-CMAKE_USE_RELATIVE_PATHS:BOOL=OFF
-
-//Use sproc libs.
-CMAKE_USE_SPROC:BOOL=OFF
-
-//Use the win32 thread library.
-CMAKE_USE_WIN32_THREADS:BOOL=OFF
-
-//If this value is on, makefiles will be generated without the
-// .SILENT directive, and all commands will be echoed to the console
-// during the make.  This is useful for debugging only. With Visual
-// Studio IDE projects all commands are done without /nologo.
-CMAKE_VERBOSE_MAKEFILE:BOOL=OFF
-
-//X11 extra flags.
-CMAKE_X_CFLAGS:STRING=-I/usr/X11R6/include
-
-//Libraries and options used in X11 programs.
-CMAKE_X_LIBS:STRING=-lSM;-lICE;-lSM;-lICE;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so
-
-//Path to program used to compress files for transfer to the dart
-// server
-COMPRESSIONCOMMAND:FILEPATH=/usr/bin/gzip
-
-//Path to the coverage program that Dart client uses for performing
-// coverage inspection
-COVERAGE_COMMAND:FILEPATH=/usr/bin/gcov
-
-//Path to a program.
-CVSCOMMAND:FILEPATH=/usr/bin/cvs
-
-//Options passed to the cvs update command.
-CVS_UPDATE_OPTIONS:STRING=-d -A -P
-
-//Limit of reported errors, -1 reports all.
-DART_BUILD_ERROR_REPORT_LIMIT:BOOL=OFF
-
-//Limit of reported warnings, -1 reports all.
-DART_BUILD_WARNING_REPORT_LIMIT:BOOL=OFF
-
-//If you have Dart installed, where is it located?
-DART_ROOT:PATH=DART_ROOT-NOTFOUND
-
-//Time alloted for a test before Dart will kill the test.
-DART_TESTING_TIMEOUT:STRING=1500
-
-//Show the actual output of the build, or if off show a . for each
-// 1024 bytes.
-DART_VERBOSE_BUILD:BOOL=OFF
-
-//Should Dart server send email when build errors are found in
-// Continuous builds?
-DELIVER_CONTINUOUS_EMAIL:BOOL=OFF
-
-//Value Computed by CMake
-DICOMParser_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/DICOMParser
-
-//Value Computed by CMake
-DICOMParser_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/DICOMParser
-
-//Path to gunzip executable
-GUNZIPCOMMAND:FILEPATH=/usr/bin/gunzip
-
-//Path to java command, used by the Dart server to create html.
-//
-JAVACOMMAND:FILEPATH=/usr/bin/java
-
-//Command used to build entire project from the command line.
-MAKECOMMAND:STRING=/usr/bin/gmake -i
-
-//Path to Rational purify command, used for memory error detection.
-//
-MEMORYCHECK_COMMAND:FILEPATH=MEMORYCHECK_COMMAND-NOTFOUND
-
-//File that contains suppressions for the memmory checker
-MEMORYCHECK_SUPPRESSIONS_FILE:FILEPATH=
-
-//What is the path where the file GL/gl.h can be found
-OPENGL_INCLUDE_DIR:PATH=/usr/share/doc/NVIDIA_GLX-1.0/include
-
-//Where can one of the MesaGL or GL libraries be found
-OPENGL_gl_LIBRARY:FILEPATH=/usr/lib/libGL.so
-
-//Where can one of the MesaGLU or GLU libraries be found
-OPENGL_glu_LIBRARY:FILEPATH=/usr/lib/libGLU.so
-
-//What is the path where the file GL/xmesa.h can be found
-OPENGL_xmesa_INCLUDE_DIR:PATH=OPENGL_xmesa_INCLUDE_DIR-NOTFOUND
-
-//Path to a program.
-PYTHON_EXECUTABLE:FILEPATH=CDAT_PREFIX/bin/python
-
-//What is the path where the file Python.h can be found
-PYTHON_INCLUDE_PATH:PATH=CDAT_PREFIX/include/pythonPY_VERSION
-
-//Where can one of the python23, python2.3, python2.3.dll, python22,
-// python2.2, python2.2.dll, python21, python2.1, python2.1.dll,
-// python20, python2.0, python2.0.dll, python16, python1.6, python1.6.dll,
-// python15, python1.5 or python1.5.dll libraries be found
-PYTHON_LIBRARY:FILEPATH=CDAT_PREFIX/lib/pythonPY_VERSION/config/libpythonPY_VERSION.a
-
-//Utility library needed for vtkpython
-PYTHON_UTIL_LIBRARY:FILEPATH=/usr/lib/libutil.so
-
-//Path to scp command, used by some Dart clients for submitting
-// results to a Dart server (when not using ftp for submissions)
-//
-SCPCOMMAND:FILEPATH=/usr/bin/scp
-
-//Name of the computer/site where compile is being run
-SITE:STRING=
-
-//What is the path where the file tcl.h can be found
-TCL_INCLUDE_PATH:PATH=CDAT_PREFIX/include
-
-//Where can one of the tcl, tcl84, tcl8.4, tcl83, tcl8.3, tcl82,
-// tcl8.2, tcl80 or tcl8.0 libraries be found
-TCL_LIBRARY:FILEPATH=CDAT_PREFIX/lib/libtclTCLTK_VERSION.a
-
-//Path to a program.
-TCL_TCLSH:FILEPATH=CDAT_PREFIX/bin/tclshTCLTK_VERSION
-
-//What is the path where the file tk.h can be found
-TK_INCLUDE_PATH:PATH=CDAT_PREFIX/include
-
-//Where can one of the tk, tk84, tk8.4, tk83, tk8.3, tk82, tk8.2,
-// tk80 or tk8.0 libraries be found
-TK_LIBRARY:FILEPATH=CDAT_PREFIX/lib/libtkTCLTK_VERSION.a
-
-//Value Computed by CMake
-VTKEXPAT_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKEXPAT_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKFREETYPE_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFREETYPE_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFTGL_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/ftgl
-
-//Value Computed by CMake
-VTKFTGL_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/ftgl
-
-//Value Computed by CMake
-VTKJPEG_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKJPEG_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKNETCDF_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKNETCDF_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKPNG_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKPNG_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKTIFF_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKTIFF_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKZLIB_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTKZLIB_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTK_BINARY_DIR:STATIC=CDAT_PREFIX/VTK
-
-//What is the path where the file VTKData.readme can be found
-VTK_DATA_ROOT:PATH=CDAT_BUILD_DIR/VTK/VTKData
-
-//Build leak checking support into VTK.
-VTK_DEBUG_LEAKS:BOOL=OFF
-
-//Location of the OpenGL extensions header file (glext.h).
-VTK_GLEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/glext.h
-
-//Location of the GLX extensions header file (glxext.h).
-VTK_GLXEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/glxext.h
-
-//Remove all legacy code completely.
-VTK_LEGACY_REMOVE:BOOL=OFF
-
-//Silence all legacy code messages.
-VTK_LEGACY_SILENT:BOOL=OFF
-
-//The opengl library being used supports off screen Mesa calls.
-//
-VTK_OPENGL_HAS_OSMESA:BOOL=OFF
-
-//Value Computed by CMake
-VTK_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK
-
-//Build with static Tcl/Tk support. TCL_LIBRARY and TK_LIBRARY
-// must point to the corresponding Tcl/Tk static libraries (example,
-// tcl84sx.lib, tk84sx.lib).
-VTK_TCL_TK_STATIC:BOOL=ON
-
-//Build VTK with 64 bit ids
-VTK_USE_64BIT_IDS:BOOL=OFF
-
-//Use the ANSI standard iostream library.
-VTK_USE_ANSI_STDLIB:BOOL=ON
-
-//Turn this option off and tests will not popup windows
-VTK_USE_DISPLAY:BOOL=ON
-
-//Build VTK with gl2ps support.
-VTK_USE_GL2PS:BOOL=ON
-
-//Build VTK with GUI Support
-VTK_USE_GUISUPPORT:BOOL=OFF
-
-//Use mangled Mesa with OpenGL.
-VTK_USE_MANGLED_MESA:BOOL=OFF
-
-//Build the vtkParallel kit.
-VTK_USE_PARALLEL:BOOL=OFF
-
-//Build the vtkRendering kit.  Needed for displaying data or using
-// widgets.
-VTK_USE_RENDERING:BOOL=ON
-
-//Build shared libraries with rpath.  This makes it easy to run
-// executables from the build tree when using shared libraries,
-// but removes install support.
-VTK_USE_RPATH:BOOL=ON
-
-//Use the system's expat library.
-VTK_USE_SYSTEM_EXPAT:BOOL=OFF
-
-//Use the system's freetype library.
-VTK_USE_SYSTEM_FREETYPE:BOOL=OFF
-
-//Use the system's jpeg library.
-VTK_USE_SYSTEM_JPEG:BOOL=OFF
-
-//Use the system's png library.
-VTK_USE_SYSTEM_PNG:BOOL=OFF
-
-//Use the system's tiff library.
-VTK_USE_SYSTEM_TIFF:BOOL=OFF
-
-//Use the system's zlib library.
-VTK_USE_SYSTEM_ZLIB:BOOL=OFF
-
-//Location of the WGL extensions header file (wglext.h).
-VTK_WGLEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/wglext.h
-
-//Where can the hints file be found
-VTK_WRAP_HINTS:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Wrapping/hints
-
-//Wrap VTK classes into the Java language.
-VTK_WRAP_JAVA:BOOL=OFF
-
-//Wrap VTK classes into the Python language.
-VTK_WRAP_PYTHON:BOOL=ON
-
-//Path to an internal program.
-VTK_WRAP_PYTHON_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapPython
-
-//Path to an internal program.
-VTK_WRAP_PYTHON_INIT_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapPythonInit
-
-//Wrap VTK classes into the TCL language.
-VTK_WRAP_TCL:BOOL=ON
-
-//Path to an internal program.
-VTK_WRAP_TCL_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapTcl
-
-//Path to an internal program.
-VTK_WRAP_TCL_INIT_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapTclInit
-
-//What is the path where the file X11/X.h can be found
-X11_X11_INCLUDE_PATH:PATH=/usr/X11R6/include
-
-//Where can the X11 library be found
-X11_X11_LIB:FILEPATH=/usr/X11R6/lib/libX11.so
-
-//Where can the Xext library be found
-X11_Xext_LIB:FILEPATH=/usr/X11R6/lib/libXext.so
-
-//What is the path where the file X11/Xlib.h can be found
-X11_Xlib_INCLUDE_PATH:PATH=/usr/X11R6/include
-
-//What is the path where the file X11/Xutil.h can be found
-X11_Xutil_INCLUDE_PATH:PATH=/usr/X11R6/include
-
-//Dependencies for the target
-vtkCommonPython_LIB_DEPENDS:STATIC=vtkCommon;
-
-//Dependencies for the target
-vtkCommonTCL_LIB_DEPENDS:STATIC=vtkCommon;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m;
-
-//Dependencies for the target
-vtkCommon_LIB_DEPENDS:STATIC=-lpthread;-ldl;-lm;
-
-//Dependencies for target
-vtkDICOMParser_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtkExodus2_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkexodus2
-
-//Value Computed by CMake
-vtkExodus2_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkexodus2
-
-//Dependencies for the target
-vtkFilteringPython_LIB_DEPENDS:STATIC=vtkFiltering;vtkCommonPython;
-
-//Dependencies for the target
-vtkFilteringTCL_LIB_DEPENDS:STATIC=vtkFiltering;vtkCommonTCL;
-
-//Dependencies for the target
-vtkFiltering_LIB_DEPENDS:STATIC=vtkCommon;
-
-//Dependencies for the target
-vtkGenericFilteringPython_LIB_DEPENDS:STATIC=vtkGenericFiltering;vtkFilteringPython;vtkGraphicsPython;
-
-//Dependencies for the target
-vtkGenericFilteringTCL_LIB_DEPENDS:STATIC=vtkGenericFiltering;vtkFilteringTCL;vtkGraphicsTCL;
-
-//Dependencies for the target
-vtkGenericFiltering_LIB_DEPENDS:STATIC=vtkFiltering;vtkGraphics;
-
-//Dependencies for the target
-vtkGraphicsPython_LIB_DEPENDS:STATIC=vtkGraphics;vtkFilteringPython;
-
-//Dependencies for the target
-vtkGraphicsTCL_LIB_DEPENDS:STATIC=vtkGraphics;vtkFilteringTCL;
-
-//Dependencies for the target
-vtkGraphics_LIB_DEPENDS:STATIC=vtkFiltering;
-
-//Dependencies for the target
-vtkHybridPython_LIB_DEPENDS:STATIC=vtkHybrid;vtkRenderingPython;vtkIOPython;
-
-//Dependencies for the target
-vtkHybridTCL_LIB_DEPENDS:STATIC=vtkHybrid;vtkRenderingTCL;vtkIOTCL;
-
-//Dependencies for the target
-vtkHybrid_LIB_DEPENDS:STATIC=vtkRendering;vtkIO;vtkexoIIc;
-
-//Dependencies for the target
-vtkIOPython_LIB_DEPENDS:STATIC=vtkIO;vtkFilteringPython;
-
-//Dependencies for the target
-vtkIOTCL_LIB_DEPENDS:STATIC=vtkIO;vtkFilteringTCL;
-
-//Dependencies for the target
-vtkIO_LIB_DEPENDS:STATIC=vtkFiltering;vtkDICOMParser;vtkpng;vtkzlib;vtkjpeg;vtktiff;vtkexpat;
-
-//Dependencies for the target
-vtkImagingPython_LIB_DEPENDS:STATIC=vtkImaging;vtkFilteringPython;
-
-//Dependencies for the target
-vtkImagingTCL_LIB_DEPENDS:STATIC=vtkImaging;vtkFilteringTCL;
-
-//Dependencies for the target
-vtkImaging_LIB_DEPENDS:STATIC=vtkFiltering;
-
-//Dependencies for target
-vtkNetCDF_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkRenderingPythonTkWidgets_LIB_DEPENDS:STATIC=vtkRendering;CDAT_PREFIX/lib/libtkTCLTK_VERSION.a;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m;
-
-//Dependencies for the target
-vtkRenderingPython_LIB_DEPENDS:STATIC=vtkRendering;vtkGraphicsPython;vtkImagingPython;
-
-//Dependencies for the target
-vtkRenderingTCL_LIB_DEPENDS:STATIC=vtkRendering;vtkGraphicsTCL;vtkImagingTCL;CDAT_PREFIX/lib/libtkTCLTK_VERSION.a;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m;
-
-//Dependencies for the target
-vtkRendering_LIB_DEPENDS:STATIC=vtkGraphics;vtkImaging;vtkIO;vtkftgl;vtkfreetype;vtkzlib;/usr/lib/libGL.so;-lXt;-lSM;-lICE;-lSM;-lICE;-lSM;-lICE;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;
-
-//Dependencies for the target
-vtkVolumeRenderingPython_LIB_DEPENDS:STATIC=vtkVolumeRendering;vtkRenderingPython;vtkIOPython;
-
-//Dependencies for the target
-vtkVolumeRenderingTCL_LIB_DEPENDS:STATIC=vtkVolumeRendering;vtkRenderingTCL;vtkIOTCL;
-
-//Dependencies for the target
-vtkVolumeRendering_LIB_DEPENDS:STATIC=vtkRendering;vtkIO;
-
-//Dependencies for the target
-vtkWidgetsPython_LIB_DEPENDS:STATIC=vtkWidgets;vtkRenderingPython;vtkHybridPython;
-
-//Dependencies for the target
-vtkWidgetsTCL_LIB_DEPENDS:STATIC=vtkWidgets;vtkRenderingTCL;vtkHybridTCL;
-
-//Dependencies for the target
-vtkWidgets_LIB_DEPENDS:STATIC=vtkRendering;vtkHybrid;
-
-//Dependencies for the target
-vtkexoIIc_LIB_DEPENDS:STATIC=vtkNetCDF;
-
-//Dependencies for target
-vtkexpat_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkfreetype_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkftgl_LIB_DEPENDS:STATIC=/usr/lib/libGL.so;vtkfreetype;
-
-//Dependencies for target
-vtkjpeg_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkpng_LIB_DEPENDS:STATIC=vtkzlib;
-
-//Value Computed by CMake
-vtksys_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/kwsys
-
-//Dependencies for target
-vtksys_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/kwsys
-
-//Dependencies for the target
-vtktiff_LIB_DEPENDS:STATIC=vtkzlib;vtkjpeg;
-
-//Dependencies for target
-vtkzlib_LIB_DEPENDS:STATIC=
-
-
-########################
-# INTERNAL cache entries
-########################
-
-//Executable to project name.
-CDAT_PREFIX/VTK/bin/vtkWrapPython:INTERNAL=vtkWrapPython
-//Executable to project name.
-CDAT_PREFIX/VTK/bin/vtkWrapPythonInit:INTERNAL=vtkWrapPythonInit
-//Executable to project name.
-CDAT_PREFIX/VTK/bin/vtkWrapTcl:INTERNAL=vtkWrapTcl
-//Executable to project name.
-CDAT_PREFIX/VTK/bin/vtkWrapTclInit:INTERNAL=vtkWrapTclInit
-//Advanced flag for variable: BUILD_DOCUMENTATION
-BUILD_DOCUMENTATION-ADVANCED:INTERNAL=1
-//Advanced flag for variable: BUILD_TESTING
-BUILD_TESTING-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_ANSI_FOR_SCOPE:INTERNAL=TRUE
-//Have include iostream
-CMAKE_ANSI_STREAM_HEADERS:INTERNAL=1
-//Advanced flag for variable: CMAKE_AR
-CMAKE_AR-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_BUILD_TOOL
-CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1
-//What is the target build tool cmake is generating for.
-CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/gmake
-//This is the directory where this CMakeCahe.txt was created
-CMAKE_CACHEFILE_DIR:INTERNAL=CDAT_PREFIX/VTK
-//Major version of cmake used to create the current loaded cache
-//
-CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2
-//Minor version of cmake used to create the current loaded cache
-//
-CMAKE_CACHE_MINOR_VERSION:INTERNAL=0
-//Major version of cmake used to create the current loaded cache
-//
-CMAKE_CACHE_RELEASE_VERSION:INTERNAL=patch 6
-//Path to CMake executable.
-CMAKE_COMMAND:INTERNAL=CDAT_PREFIX/bin/cmake
-//Advanced flag for variable: CMAKE_CTEST_COMMAND
-CMAKE_CTEST_COMMAND-ADVANCED:INTERNAL=1
-//Path to ctest program executable.
-CMAKE_CTEST_COMMAND:INTERNAL=CDAT_PREFIX/bin/ctest
-//Advanced flag for variable: CMAKE_CXX_COMPILER
-CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1
-//full path to the compiler cmake found
-CMAKE_CXX_COMPILER_FULLPATH:INTERNAL=/usr/bin/c++
-//Result of TRY_COMPILE
-CMAKE_CXX_COMPILER_WORKS:INTERNAL=TRUE
-//Advanced flag for variable: CMAKE_CXX_FLAGS
-CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_CXX_FLAGS_DEBUG
-CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_CXX_FLAGS_MINSIZEREL
-CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_CXX_FLAGS_RELEASE
-CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO
-CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_C_COMPILER
-CMAKE_C_COMPILER-ADVANCED:INTERNAL=1
-//full path to the compiler cmake found
-CMAKE_C_COMPILER_FULLPATH:INTERNAL=/usr/bin/gcc
-//Result of TRY_COMPILE
-CMAKE_C_COMPILER_WORKS:INTERNAL=TRUE
-//Advanced flag for variable: CMAKE_C_FLAGS
-CMAKE_C_FLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_C_FLAGS_DEBUG
-CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_C_FLAGS_MINSIZEREL
-CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_C_FLAGS_RELEASE
-CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_C_FLAGS_RELWITHDEBINFO
-CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Path to cache edit program executable.
-CMAKE_EDIT_COMMAND:INTERNAL=CDAT_PREFIX/bin/ccmake
-//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS
-CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG
-CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL
-//
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE
-CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO
-//
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Name of generator.
-CMAKE_GENERATOR:INTERNAL=Unix Makefiles
-//Have include sstream
-CMAKE_HAS_ANSI_STRING_STREAM:INTERNAL=1
-//Is X11 around.
-CMAKE_HAS_X:INTERNAL=1
-//Have function connect
-CMAKE_HAVE_CONNECT:INTERNAL=1
-//Have function gethostbyname
-CMAKE_HAVE_GETHOSTBYNAME:INTERNAL=1
-//Have include limits.h
-CMAKE_HAVE_LIMITS_H:INTERNAL=1
-//Have library pthreads
-CMAKE_HAVE_PTHREADS_CREATE:INTERNAL=
-//Have library pthread
-CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1
-//Have include pthread.h
-CMAKE_HAVE_PTHREAD_H:INTERNAL=1
-//Have function remove
-CMAKE_HAVE_REMOVE:INTERNAL=1
-//Have function shmat
-CMAKE_HAVE_SHMAT:INTERNAL=1
-//Have include sys/prctl.h
-CMAKE_HAVE_SYS_PRCTL_H:INTERNAL=1
-//Have include unistd.h
-CMAKE_HAVE_UNISTD_H:INTERNAL=1
-//Start directory with the top level CMakeLists.txt file for this
-// project
-CMAKE_HOME_DIRECTORY:INTERNAL=CDAT_BUILD_DIR/VTK/VTK
-//Advanced flag for variable: CMAKE_HP_PTHREADS
-CMAKE_HP_PTHREADS-ADVANCED:INTERNAL=1
-//Have library ICE
-CMAKE_LIB_ICE_HAS_ICECONNECTIONNUMBER:INTERNAL=1
-//Advanced flag for variable: CMAKE_MAKE_PROGRAM
-CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS
-CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG
-CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL
-//
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE
-//
-CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO
-//
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Does the compiler support ansi for scope.
-CMAKE_NO_ANSI_FOR_SCOPE:INTERNAL=0
-//Advanced flag for variable: CMAKE_NO_ANSI_STREAM_HEADERS
-CMAKE_NO_ANSI_STREAM_HEADERS-ADVANCED:INTERNAL=1
-//Does the compiler support headers like iostream.
-CMAKE_NO_ANSI_STREAM_HEADERS:INTERNAL=0
-//Does the compiler support std::.
-CMAKE_NO_STD_NAMESPACE:INTERNAL=0
-//Advanced flag for variable: CMAKE_RANLIB
-CMAKE_RANLIB-ADVANCED:INTERNAL=1
-//Path to CMake installation.
-CMAKE_ROOT:INTERNAL=CDAT_PREFIX/share/CMake
-//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS
-CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG
-CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL
-//
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE
-//
-CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO
-//
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-CMAKE_SIZEOF_CHAR:INTERNAL=1
-//Result of TRY_RUN
-CMAKE_SIZEOF_DOUBLE:INTERNAL=8
-//Result of TRY_RUN
-CMAKE_SIZEOF_FLOAT:INTERNAL=4
-//Result of TRY_RUN
-CMAKE_SIZEOF_INT:INTERNAL=4
-//Result of TRY_RUN
-CMAKE_SIZEOF_LONG:INTERNAL=4
-//Result of TRY_RUN
-CMAKE_SIZEOF_SHORT:INTERNAL=2
-//Result of TRY_RUN
-CMAKE_SIZEOF_VOID_P:INTERNAL=4
-//Advanced flag for variable: CMAKE_SKIP_RPATH
-CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1
-//Whether to build with rpath.
-CMAKE_SKIP_RPATH:INTERNAL=0
-//Result of TRY_COMPILE
-CMAKE_STD_NAMESPACE:INTERNAL=TRUE
-//Advanced flag for variable: CMAKE_THREAD_LIBS
-CMAKE_THREAD_LIBS-ADVANCED:INTERNAL=1
-//uname command
-CMAKE_UNAME:INTERNAL=/bin/uname
-//Advanced flag for variable: CMAKE_USE_PTHREADS
-CMAKE_USE_PTHREADS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_USE_RELATIVE_PATHS
-CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_USE_SPROC
-CMAKE_USE_SPROC-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_USE_WIN32_THREADS
-CMAKE_USE_WIN32_THREADS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_VERBOSE_MAKEFILE
-CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-CMAKE_WORDS_BIGENDIAN:INTERNAL=0
-//Advanced flag for variable: CMAKE_X_CFLAGS
-CMAKE_X_CFLAGS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CMAKE_X_LIBS
-CMAKE_X_LIBS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: COMPRESSIONCOMMAND
-COMPRESSIONCOMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: COVERAGE_COMMAND
-COVERAGE_COMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CVSCOMMAND
-CVSCOMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: CVS_UPDATE_OPTIONS
-CVS_UPDATE_OPTIONS-ADVANCED:INTERNAL=1
-//Path to an executable
-CommonCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx
-//Advanced flag for variable: DART_BUILD_ERROR_REPORT_LIMIT
-DART_BUILD_ERROR_REPORT_LIMIT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: DART_BUILD_WARNING_REPORT_LIMIT
-DART_BUILD_WARNING_REPORT_LIMIT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: DART_ROOT
-DART_ROOT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: DART_TESTING_TIMEOUT
-DART_TESTING_TIMEOUT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: DART_VERBOSE_BUILD
-DART_VERBOSE_BUILD-ADVANCED:INTERNAL=1
-//Advanced flag for variable: DELIVER_CONTINUOUS_EMAIL
-DELIVER_CONTINUOUS_EMAIL-ADVANCED:INTERNAL=1
-//Single output directory for building all executables.
-EXECUTABLE_OUTPUT_PATH:INTERNAL=CDAT_PREFIX/VTK/bin
-//Path to an executable
-FilteringCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering/Testing/Cxx
-//Advanced flag for variable: GUNZIPCOMMAND
-GUNZIPCOMMAND-ADVANCED:INTERNAL=1
-//Path to an executable
-GenericFilteringCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering/Testing/Cxx
-//Path to an executable
-GraphicsCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics/Testing/Cxx
-//Have symbol alloca
-HAVE_ALLOCA:INTERNAL=1
-//Have include HAVE_ALLOCA_H
-HAVE_ALLOCA_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_VOID_P:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_WORDS_BIGENDIAN:INTERNAL=TRUE
-//Have include fcntl.h
-HAVE_FCNTL_H:INTERNAL=1
-//NetCDF test 
-HAVE_FTRUNCATE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_OFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_PTRDIFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SSIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_UNSIGNED_CHAR:INTERNAL=TRUE
-//Have include HAVE_STDDEF_H
-HAVE_STDDEF_H:INTERNAL=1
-//Have include HAVE_STDINT_H
-HAVE_STDINT_H:INTERNAL=1
-//Have include HAVE_STDIO_H
-HAVE_STDIO_H:INTERNAL=1
-//Have include HAVE_STDLIB_H
-HAVE_STDLIB_H:INTERNAL=1
-//Have symbol strerror
-HAVE_STRERROR:INTERNAL=1
-//Have include HAVE_STRING_H
-HAVE_STRING_H:INTERNAL=1
-//NetCDF test 
-HAVE_ST_BLKSIZE:INTERNAL=1
-//Have include HAVE_SYS_STAT_H
-HAVE_SYS_STAT_H:INTERNAL=1
-//Have include HAVE_SYS_TYPES_H
-HAVE_SYS_TYPES_H:INTERNAL=1
-//Have include unistd.h
-HAVE_UNISTD_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF___INT64:INTERNAL=FALSE
-//Result of TRY_COMPILE
-HAVE_WORDS_BIGENDIAN:INTERNAL=TRUE
-//Path to an executable
-IOCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO/Testing/Cxx
-//Path to an executable
-ImagingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging/Testing/Cxx
-//Advanced flag for variable: JAVACOMMAND
-JAVACOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_ARGUMENT_DEPENDENT_LOOKUP_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_CSTDDEF_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_FULL_SPECIALIZATION_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_MEMBER_TEMPLATES_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_NULL_TEMPLATE_ARGS_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_IOS_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_ANSI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_SSTREAM_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STAT_HAS_ST_MTIM_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_MAX_SIZE_ARGUMENT_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_OBJECTS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_REBIND_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_TEMPLATE_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ITERATOR_TRAITS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_STRING_HAVE_NEQ_CHAR_COMPILED:INTERNAL=TRUE
-//Single output directory for building all libraries.
-LIBRARY_OUTPUT_PATH:INTERNAL=CDAT_PREFIX/VTK/bin
-//Advanced flag for variable: MAKECOMMAND
-MAKECOMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: MEMORYCHECK_COMMAND
-MEMORYCHECK_COMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: MEMORYCHECK_SUPPRESSIONS_FILE
-MEMORYCHECK_SUPPRESSIONS_FILE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//Advanced flag for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-ADVANCED:INTERNAL=1
-//Advanced flag for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-ADVANCED:INTERNAL=1
-//Advanced flag for variable: OPENGL_xmesa_INCLUDE_DIR
-OPENGL_xmesa_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//Advanced flag for variable: PYTHON_EXECUTABLE
-PYTHON_EXECUTABLE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: PYTHON_UTIL_LIBRARY
-PYTHON_UTIL_LIBRARY-ADVANCED:INTERNAL=1
-//Path to an executable
-RenderingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering/Testing/Cxx
-//Advanced flag for variable: SCPCOMMAND
-SCPCOMMAND-ADVANCED:INTERNAL=1
-//Advanced flag for variable: SITE
-SITE-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-SIZEOF_DOUBLE:INTERNAL=8
-//Result of TRY_RUN
-SIZEOF_FLOAT:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_INT:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_LONG:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_OFF_T:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_PTRDIFF_T:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_SHORT:INTERNAL=2
-//Result of TRY_RUN
-SIZEOF_SIZE_T:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_SSIZE_T:INTERNAL=4
-//Result of TRY_RUN
-SIZEOF_UNSIGNED_CHAR:INTERNAL=1
-//Have include STDC_HEADERS
-STDC_HEADERS:INTERNAL=1
-//This value is not used by VTK.
-TCL_LIBRARY_DEBUG:INTERNAL=TCL_LIBRARY_DEBUG-NOTFOUND
-//Advanced flag for variable: TCL_STUB_LIBRARY
-TCL_STUB_LIBRARY-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TCL_STUB_LIBRARY:INTERNAL=CDAT_PREFIX/lib/libtclstubTCLTK_VERSION.a
-//Advanced flag for variable: TCL_STUB_LIBRARY_DEBUG
-TCL_STUB_LIBRARY_DEBUG-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TCL_STUB_LIBRARY_DEBUG:INTERNAL=TCL_STUB_LIBRARY_DEBUG-NOTFOUND
-//Advanced flag for variable: TCL_TCLSH
-TCL_TCLSH-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TK_LIBRARY_DEBUG:INTERNAL=TK_LIBRARY_DEBUG-NOTFOUND
-//Advanced flag for variable: TK_STUB_LIBRARY
-TK_STUB_LIBRARY-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TK_STUB_LIBRARY:INTERNAL=CDAT_PREFIX/lib/libtkstubTCLTK_VERSION.a
-//Advanced flag for variable: TK_STUB_LIBRARY_DEBUG
-TK_STUB_LIBRARY_DEBUG-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TK_STUB_LIBRARY_DEBUG:INTERNAL=TK_STUB_LIBRARY_DEBUG-NOTFOUND
-//This value is not used by VTK.
-TK_WISH:INTERNAL=/usr/bin/wish
-//Path to an executable
-TestCxxFeatures_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx
-//Path to an executable
-TestInstantiator_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx
-//Path to an executable
-VTKBenchMark_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering/Testing/Cxx
-//Result of TRY_COMPILE
-VTK_ANSI_STREAM_EOF_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-VTK_ANSI_STREAM_EOF_RESULT:INTERNAL=0
-//Result of TRY_COMPILE
-VTK_CMAKE_EXTENSIONS_COMPILED:INTERNAL=TRUE
-//Support for C++ type bool
-VTK_COMPILER_HAS_BOOL:INTERNAL=1
-//Support for full template specialization syntax
-VTK_COMPILER_HAS_FULL_SPECIALIZATION:INTERNAL=1
-//Advanced flag for variable: VTK_DEBUG_LEAKS
-VTK_DEBUG_LEAKS-ADVANCED:INTERNAL=1
-//Disables the automatic initialization of Tk widgets when loading
-// the rendering library.
-VTK_DISABLE_TK_INIT:INTERNAL=OFF
-//Support for C++ explict templates
-VTK_EXPLICIT_TEMPLATES:INTERNAL=1
-//Advanced flag for variable: VTK_GLEXT_FILE
-VTK_GLEXT_FILE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_GLXEXT_FILE
-VTK_GLXEXT_FILE-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS:INTERNAL=FALSE
-//Already set VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS
-VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS_TESTED:INTERNAL=1
-//Have include iosfwd
-VTK_HAVE_ANSI_STREAMS:INTERNAL=1
-//Have include iostream.h
-VTK_HAVE_OLD_STREAMS:INTERNAL=1
-//Have include strstream.h
-VTK_HAVE_OLD_STRSTREAM_H:INTERNAL=1
-//Have include strstrea.h
-VTK_HAVE_OLD_STRSTREA_H:INTERNAL=
-//Whether istream supports long long
-VTK_ISTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//Advanced flag for variable: VTK_LEGACY_REMOVE
-VTK_LEGACY_REMOVE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_LEGACY_SILENT
-VTK_LEGACY_SILENT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_OPENGL_HAS_OSMESA
-VTK_OPENGL_HAS_OSMESA-ADVANCED:INTERNAL=1
-//Whether ostream supports long long
-VTK_OSTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//OpenGL extensions parser.
-VTK_PARSEOGLEXT_EXE:INTERNAL=CDAT_PREFIX/VTK/bin/vtkParseOGLExt
-//Result of TRY_RUN
-VTK_SIZEOF_LONG_LONG:INTERNAL=8
-//Path to the Tcl support library files.
-VTK_TCL_SUPPORT_LIBRARY_PATH:INTERNAL=CDAT_PREFIX/include/../lib/tclTCLTK_VERSION
-//Very few users should worry about this option. If VTK is built
-// against a static Tcl/Tk lib (see VTK_TCL_TK_STATIC) or a shared
-// Tcl/Tk bundled inside a project with no library support files
-// (ex: ParaViewComplete), this variable should be set to ON and
-// both VTK_TCL_SUPPORT_LIBRARY_PATH and VTK_TK_SUPPORT_LIBRARY_PATH
-// should point to the directories that hold those files (typically,
-// lib/tcl8.4 and lib/tk8.4 for a typical Tcl/Tk installation,
-// or tcl8.4.5/library and tk8.4.5/library for a Tcl/Tk source
-// repository). Once this variable is set to ON, support files
-// will automatically be copied to the build directory and the
-// executables will try to use that location to initialize Tcl/Tk.
-//
-VTK_TCL_TK_COPY_SUPPORT_LIBRARY:INTERNAL=ON
-//Advanced flag for variable: VTK_TCL_TK_STATIC
-VTK_TCL_TK_STATIC-ADVANCED:INTERNAL=1
-//Path to the Tk support library files.
-VTK_TK_SUPPORT_LIBRARY_PATH:INTERNAL=CDAT_PREFIX/include/../lib/tkTCLTK_VERSION
-//Whether char is signed.
-VTK_TYPE_CHAR_IS_SIGNED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_TYPE_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//Advanced flag for variable: VTK_USE_64BIT_IDS
-VTK_USE_64BIT_IDS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_ANSI_STDLIB
-VTK_USE_ANSI_STDLIB-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_DISPLAY
-VTK_USE_DISPLAY-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-ADVANCED:INTERNAL=1
-//Have function glXGetProcAddressARB
-VTK_USE_GLX_GET_PROC_ADDRESS_ARB:INTERNAL=1
-//Advanced flag for variable: VTK_USE_GUISUPPORT
-VTK_USE_GUISUPPORT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_MANGLED_MESA
-VTK_USE_MANGLED_MESA-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_EXPAT
-VTK_USE_SYSTEM_EXPAT-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_FREETYPE
-VTK_USE_SYSTEM_FREETYPE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_JPEG
-VTK_USE_SYSTEM_JPEG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_PNG
-VTK_USE_SYSTEM_PNG-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_TIFF
-VTK_USE_SYSTEM_TIFF-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_USE_SYSTEM_ZLIB
-VTK_USE_SYSTEM_ZLIB-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WGLEXT_FILE
-VTK_WGLEXT_FILE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WRAP_HINTS
-VTK_WRAP_HINTS-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WRAP_PYTHON_EXE
-VTK_WRAP_PYTHON_EXE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WRAP_PYTHON_INIT_EXE
-VTK_WRAP_PYTHON_INIT_EXE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WRAP_TCL_EXE
-VTK_WRAP_TCL_EXE-ADVANCED:INTERNAL=1
-//Advanced flag for variable: VTK_WRAP_TCL_INIT_EXE
-VTK_WRAP_TCL_INIT_EXE-ADVANCED:INTERNAL=1
-//Path to an executable
-VolumeRenderingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering/Testing/Cxx
-//Result of TRY_RUN
-WORDS_BIGENDIAN:INTERNAL=0
-//Path to an executable
-WidgetsCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets/Testing/Cxx
-//Have library /usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so
-//
-X11_LIB_X11_SOLO:INTERNAL=1
-//Advanced flag for variable: X11_X11_INCLUDE_PATH
-X11_X11_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Advanced flag for variable: X11_X11_LIB
-X11_X11_LIB-ADVANCED:INTERNAL=1
-//Advanced flag for variable: X11_Xext_LIB
-X11_Xext_LIB-ADVANCED:INTERNAL=1
-//Advanced flag for variable: X11_Xlib_INCLUDE_PATH
-X11_Xlib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Advanced flag for variable: X11_Xutil_INCLUDE_PATH
-X11_Xutil_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Path to an executable
-mkg3states_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtktiff
-//Path to a library
-vtkCommonPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common
-//Whether a library is static, shared or module.
-vtkCommonPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkCommonTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common
-//Whether a library is static, shared or module.
-vtkCommonTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkCommon_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common
-//Whether a library is static, shared or module.
-vtkCommon_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkDICOMParser_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/DICOMParser
-//Whether a library is static, shared or module.
-vtkDICOMParser_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkFilteringPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering
-//Whether a library is static, shared or module.
-vtkFilteringPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkFilteringTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering
-//Whether a library is static, shared or module.
-vtkFilteringTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkFiltering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering
-//Whether a library is static, shared or module.
-vtkFiltering_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkGenericFilteringPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering
-//Whether a library is static, shared or module.
-vtkGenericFilteringPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkGenericFilteringTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering
-//Whether a library is static, shared or module.
-vtkGenericFilteringTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkGenericFiltering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering
-//Whether a library is static, shared or module.
-vtkGenericFiltering_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkGraphicsPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics
-//Whether a library is static, shared or module.
-vtkGraphicsPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkGraphicsTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics
-//Whether a library is static, shared or module.
-vtkGraphicsTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkGraphics_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics
-//Whether a library is static, shared or module.
-vtkGraphics_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkHybridPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid
-//Whether a library is static, shared or module.
-vtkHybridPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkHybridTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid
-//Whether a library is static, shared or module.
-vtkHybridTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkHybrid_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid
-//Whether a library is static, shared or module.
-vtkHybrid_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkIOPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO
-//Whether a library is static, shared or module.
-vtkIOPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkIOTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO
-//Whether a library is static, shared or module.
-vtkIOTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkIO_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO
-//Whether a library is static, shared or module.
-vtkIO_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkImagingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging
-//Whether a library is static, shared or module.
-vtkImagingPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkImagingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging
-//Whether a library is static, shared or module.
-vtkImagingTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkImaging_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging
-//Whether a library is static, shared or module.
-vtkImaging_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkNetCDF_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtknetcdf
-//Whether a library is static, shared or module.
-vtkNetCDF_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to an executable
-vtkParseOGLExt_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/ParseOGLExt
-//Path to a library
-vtkRenderingPythonTkWidgets_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering
-//Whether a library is static, shared or module.
-vtkRenderingPythonTkWidgets_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkRenderingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering
-//Whether a library is static, shared or module.
-vtkRenderingPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkRenderingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering
-//Whether a library is static, shared or module.
-vtkRenderingTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkRendering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering
-//Whether a library is static, shared or module.
-vtkRendering_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkVolumeRenderingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering
-//Whether a library is static, shared or module.
-vtkVolumeRenderingPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkVolumeRenderingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering
-//Whether a library is static, shared or module.
-vtkVolumeRenderingTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkVolumeRendering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering
-//Whether a library is static, shared or module.
-vtkVolumeRendering_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkWidgetsPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets
-//Whether a library is static, shared or module.
-vtkWidgetsPython_LIBRARY_TYPE:INTERNAL=MODULE
-//Path to a library
-vtkWidgetsTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets
-//Whether a library is static, shared or module.
-vtkWidgetsTCL_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkWidgets_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets
-//Whether a library is static, shared or module.
-vtkWidgets_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to an executable
-vtkWrapPythonInit_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping
-//Path to an executable
-vtkWrapPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping
-//Path to an executable
-vtkWrapTclInit_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping
-//Path to an executable
-vtkWrapTcl_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping
-//Path to an executable
-vtk_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping/Tcl
-//Path to a library
-vtkexoIIc_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkexodus2
-//Whether a library is static, shared or module.
-vtkexoIIc_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkexpat_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkexpat
-//Whether a library is static, shared or module.
-vtkexpat_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkfreetype_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkfreetype
-//Whether a library is static, shared or module.
-vtkfreetype_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkftgl_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/ftgl
-//Whether a library is static, shared or module.
-vtkftgl_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkjpeg_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkjpeg
-//Whether a library is static, shared or module.
-vtkjpeg_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkpng_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkpng
-//Whether a library is static, shared or module.
-vtkpng_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to an executable
-vtkpython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping/Python
-//Path to a library
-vtksys_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/kwsys
-//Whether a library is static, shared or module.
-vtksys_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtktiff_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtktiff
-//Whether a library is static, shared or module.
-vtktiff_LIBRARY_TYPE:INTERNAL=SHARED
-//Path to a library
-vtkzlib_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkzlib
-//Whether a library is static, shared or module.
-vtkzlib_LIBRARY_TYPE:INTERNAL=SHARED
-
diff --git a/exsrc/blas.sh b/exsrc/blas.sh
deleted file mode 100755
index 921446f3d..000000000
--- a/exsrc/blas.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-PACKAGE="blas"
-BUILD=`pwd`
-export BUILD
-. ./prolog.sh
-
-FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py`
-export FC
-if ( test $FC = "gfortran") then
-    CPPFLAGS="-DpgiFortran"; export CPPFLAGS
-fi
-if (test `uname` = "HP-UX") then
-    CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS
-elif (test `uname` = "Darwin") then
-(    CXX=""; export CXX \
-)
-fi
-
-cd blas*;\
-    # Add f77 support
-unset PGI; \
-    echo $FC ; \
-    env FORTRAN=${FC} make; cp libblas.a ${prefix}/Externals/lib; \
-    
-
diff --git a/exsrc/cairo.sh b/exsrc/cairo.sh
deleted file mode 100755
index 795491483..000000000
--- a/exsrc/cairo.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-PACKAGE="cairo"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-PKG_CONFIG=${prefix}/Externals/bin/pkg-config
-export PKG_CONFIG
-(cd cairo-* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/clean_script b/exsrc/clean_script
deleted file mode 100755
index 185cc2b0e..000000000
--- a/exsrc/clean_script
+++ /dev/null
@@ -1,2 +0,0 @@
-/bin/rm -fr build >/dev/null 2>&1
-find . -name 'config.cache' -print -exec rm {} \; 
diff --git a/exsrc/cmake.sh b/exsrc/cmake.sh
deleted file mode 100755
index 069754011..000000000
--- a/exsrc/cmake.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-PACKAGE="cmake"
-. ./prolog.sh
-(   cd cmake*; \
-   ./configure --prefix=${prefix}/Externals; \
-   make; make install
-)
diff --git a/exsrc/curl.sh b/exsrc/curl.sh
deleted file mode 100755
index 951fa4c53..000000000
--- a/exsrc/curl.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="curl"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd curl* ; ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install )
-
diff --git a/exsrc/detect_fortran.py b/exsrc/detect_fortran.py
deleted file mode 100644
index 17c0c5661..000000000
--- a/exsrc/detect_fortran.py
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/usr/bin/env python
-import os,sys
-
-def detect_fortran_compiler(full_path=True):
-
-
-  fortrans = """
-g77
-gfortran
-f90
-f95
-g95
-xlf90
-fort77
-pgf77
-pgf90
-cf77
-xlf
-ghf77
-"""
-  if os.environ.has_key('FC'):
-    return os.environ['FC']
-
-  for f in fortrans.split():
-    i,o=os.popen4('which '+f)
-    ln=o.readlines()
-    o.close()
-    i.close()
-    if (ln!=[]) and (not 'no' in ln[0].lower().split()) and (not 'not' in ln[0].lower().split()) :
-      if full_path :
-        return ln[0].strip()
-      else:
-        return f
-
-if __name__=="__main__":
-  print detect_fortran_compiler()
diff --git a/exsrc/ffmpeg.sh b/exsrc/ffmpeg.sh
deleted file mode 100755
index 50c6b5949..000000000
--- a/exsrc/ffmpeg.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-PACKAGE="ffmpeg"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-PKG_CONFIG=${prefix}/Externals/bin/pkg-config
-export PKG_CONFIG
-(cd ffmpeg ; ./configure --enable-pthreads --enable-gpl --enable-pp --enable-swscaler --enable-x11grab --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/find_site.py b/exsrc/find_site.py
deleted file mode 100644
index 39d76dbff..000000000
--- a/exsrc/find_site.py
+++ /dev/null
@@ -1,9 +0,0 @@
-# helper routine for installing Pmw since it has no installer.
-import sys, os
-for x in sys.path:
-    y = os.path.basename(x)
-    if y == 'site-packages':
-        print x
-        break
-else:  #If there is none such as on older windows versions
-    print sys.path[-1]
diff --git a/exsrc/fontconfig.sh b/exsrc/fontconfig.sh
deleted file mode 100755
index 060f335fb..000000000
--- a/exsrc/fontconfig.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh
-PACKAGE="fontconfig"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-PKG_CONFIG=${prefix}/Externals/bin/pkg-config
-export PKG_CONFIG
-
-(cd fontconfig-* ; ./configure --prefix=${prefix}/Externals --enable-libxml2 --with-freetype-config=${prefix}/Externals/bin/freetype-config ; make ; make install )
-
diff --git a/exsrc/freetype.sh b/exsrc/freetype.sh
deleted file mode 100755
index a540ae58f..000000000
--- a/exsrc/freetype.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="freetype"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd freetype-* ; ./configure --prefix=${prefix}/Externals ; make ; make install ; ln -s ${prefix}/Externals/include/freetype2/freetype ${prefix}/Externals/include/freetype )
-
diff --git a/exsrc/gdal.sh b/exsrc/gdal.sh
deleted file mode 100755
index 714a94bb5..000000000
--- a/exsrc/gdal.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="gdal"
-. ./prolog.sh
-(cd gdal* ; ./configure --with-libtiff=internal --with-gif=internal --without-cfitsio --prefix=${prefix}/Externals ; make ; make install; ${prefix}/${version}/bin/python setup.py install )
-
diff --git a/exsrc/ghostscript.sh b/exsrc/ghostscript.sh
deleted file mode 100755
index 0a100777b..000000000
--- a/exsrc/ghostscript.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="ghostscript"
-. ./prolog.sh
-(mkdir -p ${prefix}/Externals/share/ghostscript ; cd ghostscript-*; ln -s ../libpng-1.2.8 libpng ; ln -s ../jpeg-6b jpeg ; ./configure --prefix=${prefix}/Externals ; make ; make install ; mv ../fonts ${prefix}/Externals/share/ghostscript )
-
diff --git a/exsrc/gifmerge.sh b/exsrc/gifmerge.sh
deleted file mode 100755
index 85a4ac810..000000000
--- a/exsrc/gifmerge.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="gifmerge"
-. ./prolog.sh
-(cd gifmerge* ; make ; mv gifmerge ${prefix}/Externals/bin )
-
diff --git a/exsrc/gifsicle.sh b/exsrc/gifsicle.sh
deleted file mode 100755
index 6ebe09f5f..000000000
--- a/exsrc/gifsicle.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="gifsicle"
-. ./prolog.sh
-(cd gifsicle*; ./configure --prefix=${prefix}/Externals ; make install )
-
diff --git a/exsrc/gplot.sh b/exsrc/gplot.sh
deleted file mode 100755
index 2b588cd1f..000000000
--- a/exsrc/gplot.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-PACKAGE=gplot
-. ./prolog.sh
-d=`uname`
-(cd gplot; make -f Makefile.${d} ; mv gplot ${prefix}/Externals/bin )
-
diff --git a/exsrc/hdf.sh b/exsrc/hdf.sh
deleted file mode 100755
index f4a8cbf53..000000000
--- a/exsrc/hdf.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="HDF"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd HDF* ; env CFLAGS=-DHAVE_NETCDF CXXFLAGS=-DHAVE_NETCDF ./configure --enable-fortran=no --disable-shared --with-jpeg=${prefix}/External/HDF --prefix=${prefix}/Externals/HDF ; make ; make install ; cp -pf ${prefix}/Externals/HDF/bin/* ${prefix}/Externals/bin )
-
diff --git a/exsrc/install_script.obsolete b/exsrc/install_script.obsolete
deleted file mode 100755
index 109797609..000000000
--- a/exsrc/install_script.obsolete
+++ /dev/null
@@ -1,1154 +0,0 @@
-#!/bin/sh
-
-if [ -n "$PYTHONPATH" ]; then
-    echo "PYTHONPATH environment variable should not be set!"
-    exit 1
-fi
-
-if [ -n "$PYTHONHOME" ]; then
-   echo "PYTHONHOME environment variable should not be set!"
-   exit 1
-fi
-
-cdmsonly=no
-
-OS=`uname`
-NetCDF=yes
-dap=no
-hdf=no 
-freetype=yes
-numpy=yes
-scipy=yes
-ipython=yes
-cairo=yes
-ffmpeg=yes
-blas=yes
-lapack=yes
-lapack95=yes
-
-echo $OS
-if [ "$OS" = "Linux" ]; then
-    pbmplus=no
-    netpbm=no
-elif [ "$OS" = "Darwin" ]; then
-    pbmplus=no
-    netpbm=yes  
-elif [ "$OS" = "CYGWIN_NT-5.1" ]; then
-    pbmplus=no
-    netpbm=yes  
-elif [ "$OS" = "CYGWIN_NT-6.0" ]; then
-    pbmplus=no
-    netpbm=yes  
-else
-    netpbm=no
-    pbmplus=yes
-fi
-
-s=$1; shift;
-Pyfort=yes
-Numeric=no
-XGKS=yes
-Pmw=yes
-gplot=no
-gifsicle=yes
-R=no
-VTK=no
-ghostscript=no
-ioapi=no
-ncfortran=no
-
-while [ "$#" -ne 0 ]
-do
-  # Translate $1 to lowercase
-  MYOPT=`echo $1 | tr 'A-Z' 'a-z'`
-    if [ "$MYOPT" = "--help" ]; then
-        echo "  Builds external software required by CDAT."
-        echo "  Packages builds are:"
-	echo "          numpy 1.3.0.1 (on)"
-	echo "          scipy 0.5.2.1 (on)"
-	echo "          ipython 0.8 (off) (includes ipython1 and Twisted 2.5.0)"
-        echo "          freetype 2.3.4 (on)"
-        echo "          cairo 1.4.12 (on)"
-        echo "          ffmpeg (11/4/2007) (on)"
-        echo "          Pyfort 8.5.5 (on)"
-	echo "          jpeg 6b (on)"
-	echo "          libpng 1.2.8 (on)"
-	echo "          Ghostscript 8.50 with jpeg 6b and libpng 1.2.8 (on)"
-        echo "          NetCDF 3.6.1"
-        echo "          NetCDF-Fortran 3.6.1 (off) to build NetCDF with Fortran"
-        echo "          XGKS (on) with plug to freetype fonts"
-        echo "          Numeric 23.1 (on)"
-        echo "          Pmw 1.3 (on)"
-        echo "          gplot (off)"
-        echo "          gifsicle 1.35 (on)"
-        echo "          netpbm 10.27 (on Linux/Mac, off otherwise)"
-        echo "          pbmplus (off Linux/Mac, on otherwise)"
-        echo "          gifmerge (on)"
-        echo "          opendap 3.5: libdap 3.5.3 libnc-dap 3.5.2"
-        echo "          HDF 4.2.r1 (off)"
-        echo "          R 2.5.0 (off)"
-        echo "          ioapi 3.0 (off) will turn off opendap and on NetCDF-Fortran"
-        echo "		gdal 1.4.3 (off) turned on by ioapi"
-	echo "		proj 4.4.9 (off) turned on by ioapi"
-        echo "  Packages can be turned on/off using --enable-PACKAGE --disable-PACKAGE"
-        echo "  You can build a single Package by passing --PACKAGE-only"
-        echo "  If you already built externals before, or do not wish to build them because you think you already have them"
-        echo "  pass: --disable-externals-build"
-        echo "        This will only build python-based externals"
-        echo "  Notes:"
-        echo "          opendap is very unlikely to build on any non standard platform"
-                                                                                                                             
-
-	exit 1
-    fi
-    if [ "$MYOPT" = "--cdms-only" ]; then
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-	netpbm=no
-	gifmerge=no
-	ghostscript=no
-        freetype=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--ioapi-only" ]; then
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-	netpbm=no
-	gifmerge=no
-	ghostscript=no
-	ioapi=yes
-	ncfortran=yes
-	NetCDF=no
-	dap=no
-	Numeric=no
-	hdf=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--numeric-only" ]; then
-	Numeric=yes
-	dap=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-	netpbm=no
-	gifmerge=no
-        ghostscript=no
-	ioapi=no
-	hdf=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--opendap-only" ]; then
-	Numeric=no
-	dap=yes
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-        netpbm=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
- 	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--hdf4-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=yes
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-        netpbm=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
- 	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--netcdf-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=yes
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
- 	ffmpeg=no
- 	blas=no
-	lapack=no
-	lapack95=no
-  fi
-    if [ "$MYOPT" = "--netcdf-fortran-only" ]; then
-	Numeric=no
-	hdf=no
-	dap=no
-	NetCDF=no
-	ncfortran=yes
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
- 	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--pyfort-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=yes
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-        netpbm=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--xgks-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=yes
-	Pmw=no
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-        netpbm=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--pmw-only" ]; then
-	Numeric=no
-	dap=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=yes
-	gplot=no
-	gifsicle=no
-	pbmplus=no
-        netpbm=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--gplot-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=yes
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--gifsicle-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=yes
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--netpbm-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=yes
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--pbmplus-only"  ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=yes
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
- 	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--gifmerge-only"  ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=yes
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
- 	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--r-only" ]; then
-	Numeric=no
-	dap=no
-	hdf=no
-	NetCDF=no
-	Pyfort=no
-	XGKS=no
-	Pmw=no
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-	R=yes
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-#    if [ "$MYOPT" = "--vtk-only" ]; then
-#	Numeric=no
-#	dap=no
-#	hdf=no
-#	NetCDF=no
-#	Pyfort=no
-#	XGKS=no
-#	Pmw=no
-#	gplot=no
-#	gifsicle=no
-#        netpbm=no
-#	pbmplus=no
-#	gifmerge=no
-#	VTK=yes
-#        ghostscript=no
-#        freetype=no
-#	numpy=no
-#	scipy=no
-#	ipython=no
-#    fi
-    if [ "$MYOPT" = "--ghostscript-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=yes
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--freetype-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=yes
-	cairo=no
-	ffmpeg=no
-	numpy=no
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--numpy-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=yes
-	scipy=no
-	ipython=no
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--scipy-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=yes
-	ipython=no
-	cairo=no
-	ffmpeg=no
- 	blas=no
-	lapack=no
-	lapack95=no
-   fi
-    if [ "$MYOPT" = "--ipython-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	numpy=no
-	scipy=no
-	ipython=yes
-	cairo=no
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--cairo-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-        numpy=no
-        scipy=no
-        ipython=no
-	cairo=yes
-	ffmpeg=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--ffmpeg-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	cairo=no
-	ffmpeg=yes
-        numpy=no
-        scipy=no
-        ipython=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--blas-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	cairo=no
-	ffmpeg=no
-        numpy=no
-        scipy=no
-        ipython=no
-	blas=yes
-	lapack=no
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--lapack-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	cairo=no
-	ffmpeg=no
-        numpy=no
-        scipy=no
-        ipython=no
-	blas=no
-	lapack=yes
-	lapack95=no
-    fi
-    if [ "$MYOPT" = "--lapack95-only" ]; then
-        Numeric=no
-        dap=no
-	hdf=no
-        NetCDF=no
-        Pyfort=no
-        XGKS=no
-        Pmw=no
-        gplot=no
-        gifsicle=no
-        netpbm=no
-        pbmplus=no
-        gifmerge=no
-        ghostscript=no
-        freetype=no
-	cairo=no
-	ffmpeg=no
-        numpy=no
-        scipy=no
-        ipython=no
-	blas=no
-	lapack=no
-	lapack95=yes
-    fi
-# Turn Off Options.....
-    if [ "$MYOPT" = "--disable-opendap" ]; then
-	dap=no
-        if [ $ioapi = no ]; then
-	   NetCDF=yes
-	fi
-	echo "Turning opendap Off"
-    fi
-    if [ "$MYOPT" = "--disable-hdf4" ]; then
-	hdf=no
-	echo "Turning hdf4 Off"
-    fi
-    if [ "$MYOPT" = "--disable-ioapi" ]; then
-	ioapi=no
-	echo "Turning ioapi Off"
-    fi
-    if [ "$MYOPT" = "--disable-ghostscript" ]; then
-	ghostscript=no
-	echo "Turning ghostscript Off"
-    fi
-    if [ "$MYOPT" = "--disable-pyfort" ]; then
-        Pyfort=no
-        echo "Turning Pyfort Off"
-    fi
-    if [ "$MYOPT" = "--disable-numeric" ]; then
-	Numeric=no
-	echo "Turning Numeric Off"
-    fi
-    if [ "$MYOPT" = "--disable-xgks" ]; then
-	XGKS=no
-	echo "Turning XGKS Off"
-    fi
-    if [ "$MYOPT" = "--disable-pmw" ]; then
-	Pmw=no
-	echo "Turning Pmw Off"
-    fi
-    if [ "$MYOPT" = "--disable-gplot" ]; then
-	gplot=no
-	echo "Turning gplot Off"
-    fi
-    if [ "$MYOPT" = "--disable-gifsicle" ]; then
-	gifsicle=no
-	echo "Turning gifsicle Off"
-    fi
-    if [ "$MYOPT" = "--disable-netpbm" ]; then
-	netpbm=no
-	echo "Turning netpbm Off"
-    fi
-    if [ "$MYOPT" = "--disable-pbmplus" ]; then
-	pbmplus=no
-	echo "Turning pbmplus Off"
-    fi
-    if [ "$MYOPT" = "--disable-gifmerge" ]; then
-	gifmerge=no
-	echo "Turning gifmerge Off"
-    fi
-    if [ "$MYOPT" = "--disable-netcdf" ]; then
-	NetCDF=no
-	echo "Turning NetCDF Off"
-    fi
-    if [ "$MYOPT" = "--disable-r"  ]; then
-	R=no
-	echo "Turning R Off"
-    fi
-#    if [ "$MYOPT" = "--disable-vtk"  ]; then
-#	VTK=no
-#	echo "Turning VTK Off"
-#    fi
-    if [ "$MYOPT" = "--disable-freetype" ]; then
-	freetype=no
-	echo "Turning freetype Off"
-    fi
-    if [ "$MYOPT" = "--disable-numpy" ]; then
-	numpy=no
-	echo "Turning numpy Off"
-    fi
-    if [ "$MYOPT" = "--disable-scipy" ]; then
-	scipy=no
-	echo "Turning scipy Off"
-    fi
-    if [ "$MYOPT" = "--disable-ipython" ]; then
-	ipython=no
-	echo "Turning ipython Off"
-    fi
-    if [ "$MYOPT" = "--disable-cairo" ]; then
-	cairo=no
-	echo "Turning cairo Off"
-    fi
-    if [ "$MYOPT" = "--disable-ffmpeg" ]; then
-	ffmpeg=no
-	echo "Turning ffmpeg Off"
-    fi
-    if [ "$MYOPT" = "--disable-blas" ]; then
-	blas=no
-	echo "Turning blas Off"
-    fi
-    if [ "$MYOPT" = "--disable-lapack" ]; then
-	lapack=no
-	lapack95=no
-	echo "Turning lapack and lapack95 Off"
-    fi
-    if [ "$MYOPT" = "--disable-lapack95" ]; then
-	lapack95=no
-	echo "Turning lapack95 Off"
-    fi
-# Turn On Options.....
-    if [ "$MYOPT" = "--enable-ioapi" ]; then
-	ioapi=yes
-	NetCDF=no
-	ncfortran=yes
-	echo "Turning ioapi On"
-    fi
-    if [ "$MYOPT" = "--enable-opendap" ]; then
-	dap=yes
-	echo "Turning opendap On"
-    fi
-    if [ "$MYOPT" = "--enable-pyfort" ]; then
-	Pyfort=yes
-	echo "Turning Pyfort On"
-    fi
-    if [ "$MYOPT" = "--enable-ghostscript" ]; then
-        ghostscript=yes
-        echo "Turning Ghostscript On"
-    fi
-    if [ "$MYOPT" = "--enable-numeric" ]; then
-	Numeric=yes
-	echo "Turning Numeric On"
-    fi
-    if [ "$MYOPT" = "--enable-xgks" ]; then
-	XGKS=yes
-	echo "Turning XGKS On"
-    fi
-    if [ "$MYOPT" = "--enable-pmw" ]; then
-	Pmw=yes
-	echo "Turning Pmw On"
-    fi
-    if [ "$MYOPT" = "--enable-gplot" ]; then
-	gplot=yes
-	echo "Turning gplot On"
-    fi
-    if [ "$MYOPT" = "--enable-gifsicle" ]; then
-	gifsicle=yes
-	echo "Turning gifsicle On"
-    fi
-    if [ "$MYOPT" = "--enable-netpbm" ]; then
-	netpbm=yes
-	echo "Turning netpbm On"
-    fi
-    if [ "$MYOPT" = "--enable-pbmplus" ]; then
-	pbmplus=yes
-	echo "Turning pbmplus On"
-    fi
-    if [ "$MYOPT" = "--enable-gifmerge" ]; then
-	gifmerge=yes
-	echo "Turning gifmerge On"
-    fi
-    if [ "$MYOPT" = "--enable-netcdf" ]; then
-	NetCDF=yes
-	echo "Turning NetCDF On"
-    fi
-    if [ "$MYOPT" = "--enable-r" ]; then
-	R=yes
-	echo "Turning R On"
-    fi
-    if [ "$MYOPT" = "--enable-hdf4" ]; then
-	hdf=yes
-	echo "Turning hdf4 On"
-    fi
-#    if [ "$MYOPT" = "--enable-vtk" ]; then
-#	VTK=yes
-#	echo "Turning VTK On"
-#    fi
-    if [ "$MYOPT" = "--enable-freetype" ]; then
-	freetype=yes
-	echo "Turning freetype On"
-    fi
-    if [ "$MYOPT" = "--enable-numpy" ]; then
-	numpy=yes
-	echo "Turning numpy On"
-    fi
-    if [ "$MYOPT" = "--enable-scipy" ]; then
-	scipy=yes
-	echo "Turning scipy On, do not turn off blas and lapack if they're not on your system"
-    fi
-    if [ "$MYOPT" = "--enable-ipython" ]; then
-	ipython=yes
-	echo "Turning ipython On"
-    fi
-    if [ "$MYOPT" = "--enable-cairo" ]; then
-	cairo=yes
-	echo "Turning cairo On"
-    fi
-    if [ "$MYOPT" = "--enable-ffmpeg" ]; then
-	ffmpeg=yes
-	echo "Turning ffmpeg On"
-    fi
-    if [ "$MYOPT" = "--enable-blas" ]; then
-	blas=yes
-	echo "Turning blas On"
-    fi
-    if [ "$MYOPT" = "--enable-lapack" ]; then
-	lapack=yes
-	echo "Turning lapack On"
-    fi
-    if [ "$MYOPT" = "--enable-ffmpeg" ]; then
-	lapack=yes
-	lapack95=yes
-	echo "Turning lapack and lapack95 On"
-    fi
-    if [ "$MYOPT" = "--disable-externals-build" ]; then
-	gplot=no
-	gifsicle=no
-        netpbm=no
-	pbmplus=no
-	gifmerge=no
-        ghostscript=no
-        freetype=no
-	cairo=no
-	ffmpeg=no
-	XGKS=no
-        dap=no
-	hdf=no
-        NetCDF=no
-	blas=no
-	lapack=no
-	lapack95=no
-    fi
-    shift
-done
-
-## Make sure we don't build NetCDF if opendap is there...
-if [ "$dap" = "yes" ]; then
-    NetCDF=no
-fi
-
-d=`pwd`
-echo "Building external software that CDAT requires."
-echo "See $d/build for logs of the build."
-echo "Any Package can be NOT build by passing --disable-Package"
-echo "If you wish to build 1 Package only, pass --Package-only"
-echo "Packages are: netcdf, netcdf-fortran, opendap, pyfort, numeric, xgks, pmw, gplot, gifsicle,"
-echo "              netpbm, pbmplus, gifmerge, r, ghostscript, ioapi, hdf4, freetype, cairo"
-echo "Note R is not built by default: Pass --enable-r to build R library (Linux only)."
-#echo "Note VTK is not built by default: Pass --enable-vtk to build (linux only)."
-echo "opendap MIGHT work on solaris but probably won't, try to build separately"
-echo "Warning errors in these builds are expected."
-
-
-#(./prolog.sh $s) || (echo "Unpack of tar files failed."; exit 1)
-mkdir -p build
-if [ "$Pyfort" = "yes" ]; then
-    echo "Building Pyfort (Fortran/C interface)"
-    (./pyfort.sh $s 2>&1 | tee build/pyfort.LOG > ../logs/pyfort.LOG) ||  (echo "Build of Pyfort failed."; exit 1)
-fi
-if [ "$ghostscript" = "yes" ]; then
-    echo "Building Ghostscript"
-    (./ghostscript.sh $s 2>&1 | tee build/ghostscript.LOG > ../logs/ghostscript.LOG) ||  (echo "Build of ghostscript failed."; exit 1)
-fi
-if [ "$ffmpeg" = "yes" ]; then
-    echo "Building ffmpeg"
-    (./ffmpeg.sh $s 2>&1 | tee build/ffmpeg.LOG > ../logs/ffmpeg.LOG) ||  (echo "Build of ffmpeg failed."; exit 1)
-fi
-if [ "$freetype" = "yes" ]; then
-    echo "Building Freetype"
-    (./freetype.sh $s 2>&1 | tee build/freetype.LOG > ../logs/freetype.LOG) ||  (echo "Build of freetype failed."; exit 1)
-fi
-if [ "$cairo" = "yes" ]; then
-    echo "Building necessary libs for cairo"
-    echo "  Building xml"
-    (./libxml.sh $s 2>&1 | tee build/libxml.LOG > ../logs/libxml.LOG) ||  (echo "Build of libxml failed."; exit 1)
-    echo "  Building libpixman"
-    (./libpixman.sh $s 2>&1 | tee build/libpixman.LOG > ../logs/libpixman.LOG) ||  (echo "Build of libpixman failed."; exit 1)
-    echo "  Building libpng"
-    (./libpng.sh $s 2>&1 | tee build/libpng.LOG > ../logs/libpng.LOG) ||  (echo "Build of libpng failed."; exit 1)
-    echo "  Building pkgconfig"
-    (./pkgconfig.sh $s 2>&1 | tee build/pkgconfig.LOG > ../logs/pkgconfig.LOG) ||  (echo "Build of pkgconfig failed."; exit 1)
-    echo "  Building fontconfig"
-    (./fontconfig.sh $s 2>&1 | tee build/fontconfig.LOG > ../logs/fontconfig.LOG) ||  (echo "Build of fontconfig failed."; exit 1)
-    echo "Building Cairo"
-    (./cairo.sh $s 2>&1 | tee build/cairo.LOG > ../logs/cairo.LOG) ||  (echo "Build of cairo failed."; exit 1)
-fi
-if [ "$NetCDF" = "yes" ]; then
-    echo "Building netcdf without Fortran support"
-    (./netcdf.sh $s 2>&1 | tee build/netcdf.LOG > ../logs/netcdf.LOG ) || (echo "Build of netcdf without fortran failed."; exit 1)
-fi
-if [ "$hdf" = "yes" ]; then
-    echo "Building HDF"
-    echo "... Building JPEG library required by HDF"
-    (./jpeg.sh $s 2>&1 | tee build/jpeg.LOG > ../logs/jpeg.LOG ) || (echo "Build of jpeg."; exit 1)
-    echo "... Building HDF4 library"
-    (./hdf.sh $s 2>&1 | tee build/hdf.LOG > ../logs/hdf.LOG ) || (echo "Build of hdf."; exit 1)
-fi
-if [ "$ncfortran" = "yes" ]; then
-    echo "Building netcdf with Fortran support"
-    (./netcdf_fortran.sh $s 2>&1 | tee build/netcdf.LOG > ../logs/netcdf.LOG ) || (echo "Build of netcdf with fortran failed."; exit 1)
-fi
-if [ "$blas" = "yes" ]; then
-    echo "Building blas"
-    (./blas.sh $s 2>&1 | tee build/blas.LOG > ../logs/blas.LOG ) || (echo "Build of blas failed."; exit 1)
-fi
-if [ "$lapack" = "yes" ]; then
-    echo "Building lapack"
-    (./lapack.sh $s 2>&1 | tee build/lapack.LOG > ../logs/lapack.LOG ) || (echo "Build of lapack failed."; exit 1)
-fi
-if [ "$lapack95" = "yes" ]; then
-    echo "Building lapack95"
-    (./lapack95.sh $s 2>&1 | tee build/lapack95.LOG > ../logs/lapack95.LOG ) || (echo "Build of lapack95 failed."; exit 1)
-fi
-if [ "$numpy" = "yes" ]; then
-    if [ "$BLAS" = "" ]; then
-      BLAS=SETBLAS
-      export BLAS
-    fi
-    if [ "$LAPACK" = "" ]; then
-      LAPACK=SETLAPACK
-      export LAPACK
-    fi
-    echo "Building numpy" ${BLAS} ${LAPACK}
-    (./numpy.sh $s 2>&1 | tee build/numpy.LOG > ../logs/numpy.LOG ) || (echo "Build of numpy failed."; exit 1)
-fi
-if [ "$scipy" = "yes" ]; then
-    if [ "$BLAS" = "" ]; then
-      BLAS=SETBLAS
-      export BLAS
-    fi
-    if [ "$LAPACK" = "" ]; then
-      LAPACK=SETLAPACK
-      export LAPACK
-    fi
-    echo "Building scipy"
-    (./scipy.sh $s 2>&1 | tee build/scipy.LOG > ../logs/scipy.LOG ) || (echo "Build of scipy failed."; exit 1)
-fi
-if [ "$ipython" = "yes" ]; then
-    echo "Building ipython and its dependencies"
-    echo "... Building setuptools (with zope interface)"
-    (./setuptools.sh $s 2>&1 | tee build/setuptools.LOG > ../logs/setuptools.LOG ) || (echo "Build of setuptools failed."; exit 1)
-    echo "... Building Twisted (with zope interface)"
-    (./twisted.sh $s 2>&1 | tee build/twisted.LOG > ../logs/twisted.LOG ) || (echo "Build of Twisted failed."; exit 1)
-    echo "... Building ipython1"
-    (./ipython1.sh $s 2>&1 | tee build/ipython1.LOG > ../logs/ipython1.LOG ) || (echo "Build of ipython1 failed."; exit 1)
-    echo "... Building ipython"
-    (./ipython.sh $s 2>&1 | tee build/ipython.LOG > ../logs/ipython.LOG ) || (echo "Build of ipython failed."; exit 1)
-fi
-if [ "$ioapi" = "yes" ]; then
-    echo "Building IOAPI and its dependencies"
-    echo "... Building ioapi"
-    (./ioapi.sh $s 2>&1 | tee build/ioapi.LOG > ../logs/ioapi.LOG ) || (echo "Build of ioapi failed."; exit 1)
-    echo "... Building proj"
-    (./proj.sh $s 2>&1 | tee build/proj.LOG > ../logs/proj.LOG ) || (echo "Build of proj failed."; exit 1)
-    echo "... Building gdal"
-    (./gdal.sh $s 2>&1 | tee build/gdal.LOG > ../logs/gdal.LOG ) || (echo "Build of gdal failed."; exit 1)
-fi
-if [ "$XGKS" = "yes" ]; then
-    echo "Building xgks header files and fonts. (graphics display)"
-    (./xgks.sh $s 2>&1 | tee build/xgks.LOG > ../logs/xgks.LOG ) || (echo "Build of xgks failed."; exit 1)
-fi
-if [ "$Numeric" = "yes" ]; then
-    echo "Building Numeric (numerical operations, masked arrays, etc...)"
-    (./Numeric.sh $s 2>&1 | tee build/Numeric.LOG > ../logs/Numeric.LOG) || (echo "Build of Numeric failed."; exit 1)
-fi
-if [ "$dap" = "yes" ]; then
-    echo "Building opendap (client side only)"
-    echo "... Building curl required by opendap"
-    (./curl.sh $s 2>&1 | tee build/curl.LOG > ../logs/curl.LOG) || (echo "Build of curl failed";exit 1)
-    echo "... Building libxml required by opendap"
-    (./libxml.sh $s 2>&1 | tee build/libxml.LOG > ../logs/libxml.LOG) || (echo "Build of libxml failed";exit 1)
-    echo "... Building libdap required by opendap"
-    (./libdap.sh $s 2>&1 | tee build/libdap.LOG > ../logs/libdap.LOG) || (echo "Build of libdap failed";exit 1)
-    echo "... Building ncdap required by opendap, replaces standard netCDF libraries"
-    (./libnc-dap.sh $s 2>&1 | tee build/libnc-dap.LOG > ../logs/libnc-dap.LOG) || (echo "Build of libncdap failed";exit 1)
-fi
-if [ "$Pmw" = "yes" ]; then
-    echo "Building Pmw (Python Mega Widget, to design GUIs)"
-    (./Pmw.sh $s 2>&1 | tee build/Pmw.LOG > ../logs/Pmw.LOG) || (echo "Build of Pmw failed."; exit 1)
-fi
-if [ "$gplot" = "yes" ]; then
-    echo "Building gplot (postscript output)"
-    (./gplot.sh $s 2>&1 | tee build/gplot.LOG > ../logs/gplot.LOG) || (echo "Build of gplot failed, try manualy."; exit 1)
-fi
-if [ "$gifsicle" = "yes" ]; then
-    echo "Building gifsicle (for animated GIF output)"
-    (./gifsicle.sh $s 2>&1 | tee build/gifsicle.LOG > ../logs/gifsicle.LOG) || (echo "Build of gifsicle failed."; exit 1)
-fi
-if [ "$netpbm" = "yes" ]; then
-    echo "Building netpbm (for GIF output)"
-    (./netpbm.sh $s 2>&1 | tee build/netpbm.LOG > ../logs/netpbm.LOG) || (echo "Build of netpbm failed."; exit 1)
-fi
-if [ "$pbmplus" = "yes" ]; then
-    echo "Building pbmplus (for GIF output)"
-    (./pbmplus.sh $s 2>&1 | tee build/pbmplus.LOG > ../logs/pbmplus.LOG) || (echo "Build of pbmplus failed."; exit 1)
-fi
-if [ "$gifmerge" = "yes" ]; then
-    echo "Building gifmerge (for GIF output)"
-    (./gifmerge.sh $s 2>&1 | tee build/gifmerge.LOG > ../logs/gifmerge.LOG) || (echo "Build of gifmerge failed."; exit 1)
-fi
-if [ "$R" = "yes" ]; then
-    echo "Building R statistical library"
-    (./R.sh $s 2>&1 | tee build/R.LOG > ../logs/R.LOG) || (echo "Build of R failed";exit 1)
-fi
-#if [ "$VTK" = "yes" ]; then
-#    echo "Building cmake (required by VTK)"
-#    (./cmake.sh $s 2>&1 | tee build/cmake.LOG > ../logs/cmake.LOG) || (echo "Build of cmake failed";exit 1)
-#    echo "Building VTK"
-#    (./vtk.sh $s 2>&1 | tee build/VTK.LOG > ../logs/VTK.LOG) || (echo "Build of VTK failed";exit 1)
-#fi
-echo "Done with building the external software."
diff --git a/exsrc/ioapi.sh b/exsrc/ioapi.sh
deleted file mode 100755
index a2f973a90..000000000
--- a/exsrc/ioapi.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/sh
-BUILD=`pwd`
-export BUILD
-PACKAGE="ioapi"
-. ./prolog.sh
-FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py`
-export FC
-(cd ioapi*/ioapi; \
-    # build the library
-    make -f Makefile.nocpl; \
-    # go to the object/lib directory
-    # and run ranlib (only needed for Darwin)
-    # but doesn't effect the build
-    cd ../neutral_g77; \
-    ranlib libioapi.a; \
-
-    # copy the library to pyIoapi contrib package
-    # and the installation directory (prefix)
-#    echo "Copying IOAPI library to pyIoapi package" ; \
-#    cp libioapi.a ../../../../contrib/pyIoapi/Src/lib_external; \
-    cp libioapi.a ${prefix}/Externals/lib;
-)
diff --git a/exsrc/ipython.sh b/exsrc/ipython.sh
deleted file mode 100755
index 66166ce8a..000000000
--- a/exsrc/ipython.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="ipython"
-. ./prolog.sh 
-# ipython.
-(cd ipython-* ; ${prefix}/${version}/bin/python setup.py build ${D} install)
diff --git a/exsrc/ipython1.sh b/exsrc/ipython1.sh
deleted file mode 100755
index db6b6e84e..000000000
--- a/exsrc/ipython1.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-PACKAGE="ipython1"
-. ./prolog.sh 
-# ipython1.
-(cd ipython1*; ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/jpeg.sh b/exsrc/jpeg.sh
deleted file mode 100755
index 206570e3a..000000000
--- a/exsrc/jpeg.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="jpeg"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(mkdir ${prefix}/Externals/HDF ; mkdir ${prefix}/Externals/HDF/lib ; mkdir ${prefix}/Externals/HDF/include ; cd jpeg* ; ./configure --prefix=${prefix}/Externals/HDF ; make ; mv libjpeg.a ${prefix}/Externals/HDF/lib ; cp *.h ${prefix}/Externals/HDF/include )
-
diff --git a/exsrc/lapack.sh b/exsrc/lapack.sh
deleted file mode 100755
index 73df47e3d..000000000
--- a/exsrc/lapack.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-PACKAGE="lapack-lite"
-BUILD=`pwd`
-export BUILD
-. ./prolog.sh
-
-FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py`
-export FC
-if ( test $FC = "gfortran") then
-    CPPFLAGS="-DpgiFortran"; export CPPFLAGS
-fi
-if (test `uname` = "HP-UX") then
-    CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS
-elif (test `uname` = "Darwin") then
-(    CXX=""; export CXX \
-)
-fi
-
-cd lapack-lite*;\
-    # Add f77 support
-unset PGI; \
-    echo $FC ; \
-    env LOADER=${FC} FORTRAN=${FC} BLAS=${prefix}/Externals/libblas.a make; cp liblapack.a libtmglib.a ${prefix}/Externals/lib; \
-    
-
diff --git a/exsrc/lapack95.sh b/exsrc/lapack95.sh
deleted file mode 100755
index b4344cd6d..000000000
--- a/exsrc/lapack95.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-PACKAGE="lapack95"
-BUILD=`pwd`
-export BUILD
-. ./prolog.sh
-
-FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py`
-export FC
-if ( test $FC = "gfortran") then
-    CPPFLAGS="-DpgiFortran"; export CPPFLAGS
-fi
-if (test `uname` = "HP-UX") then
-    CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS
-elif (test `uname` = "Darwin") then
-(    CXX=""; export CXX \
-)
-fi
-
-cd lapack95*/SRC;\
-    # Add f77 support
-unset PGI; \
-    echo $FC ; \
-    env LAPACK_PATH=${prefix}/Externals/lib make; cp ../lapack95.a  ${prefix}/Externals/lib/liblapack95.a; cp ../lapack95_modules/* ${prefix}/Externals/include  \
-    
-
diff --git a/exsrc/libcf.sh b/exsrc/libcf.sh
deleted file mode 100755
index 5e0add5c3..000000000
--- a/exsrc/libcf.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/sh
-
-PACKAGE="libcf"
-BUILD=`pwd`
-export BUILD
-. ./prolog.sh
-
-NC4LOC=`grep NC4LOC ../config.log | sed 's/NC4LOC=//' | sed "s/'//"`
-HDF5LOC=`grep HDF5LOC ../config.log | sed 's/HDF5LOC=//' | sed "s/'//"`
-
-echo "prefix is ${prefix}"
-echo "using netcdf at $NC4LOC, using hdf5 at $HDF5LOC"
-
-(cd libcf*; \ 
-  mkdir ${prefix}/Externals/libcf ; \  
-  mkdir ${prefix}/Externals/NetCDF ; \
-  ./configure --prefix=${prefix}/Externals/NetCDF --with-netcdf=$NC4LOC --with-hdf5=$HDF5LOC --enable-shared; \
-  make; make install
-)
-
diff --git a/exsrc/libdap.sh b/exsrc/libdap.sh
deleted file mode 100755
index d79e566c8..000000000
--- a/exsrc/libdap.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-PACKAGE="libdap"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-  export CXX="g++ -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-  export CXX="g++ -fPIC"
-fi
-(cd libdap* ; env PATH=${prefix}/Externals/OpenDAP/bin\:${PATH} ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install )
-
diff --git a/exsrc/libnc-dap.sh b/exsrc/libnc-dap.sh
deleted file mode 100755
index de5bb66fc..000000000
--- a/exsrc/libnc-dap.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-PACKAGE="libnc-dap"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-  export CXX="g++ -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-  export CXX="g++ -fPIC"
-fi
-(cd libnc-dap* ; env PATH=${prefix}/Externals/OpenDAP/bin\:${PATH} ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install ; cp -pf ${prefix}/Externals/OpenDAP/bin/* ${prefix}/Externals/bin )
-
diff --git a/exsrc/libpixman.sh b/exsrc/libpixman.sh
deleted file mode 100755
index 2b8c09e00..000000000
--- a/exsrc/libpixman.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="pixman"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd pixman* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/libpng.sh b/exsrc/libpng.sh
deleted file mode 100755
index 2cb505cc3..000000000
--- a/exsrc/libpng.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="libpng"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd libpng* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/libxml.sh b/exsrc/libxml.sh
deleted file mode 100755
index de23dc8cb..000000000
--- a/exsrc/libxml.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="libxml"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd libxml2* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/netcdf.sh b/exsrc/netcdf.sh
deleted file mode 100755
index 6222460fd..000000000
--- a/exsrc/netcdf.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/sh
-PACKAGE="netcdf"
-. ./prolog.sh
-if (test `uname` = "HP-UX") then
-    CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS
-elif (test `uname` = "Darwin") then
-    CXX=""; export CXX
-fi
-
-echo "prefix is"${prefix}
-# Define compilation flags for itanium based NEC TX-7 (and gcc) -> ia64
-# Also define compilation flags for SGI Altrix (and gcc) -> ia64
-# Same for AMD Opteron based HP Proliant DL585                  -> x86_64
-# export CFLAGS="$CFLAGS -fpic -O"
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-    export CFLAGS="$CFLAGS -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-    export CFLAGS="$CFLAGS -fPIC"
-fi
-
-if (test `uname ` = "CYGWIN_NT-5.1") then
-(cd netcdf*; \
-    FC=''; export FC; \
-    F90='';export F90; \
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF ; \
-    ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \
-    make; make install
-)
-elif (test `uname ` = "CYGWIN_NT-6.0") then
-(cd netcdf*; \
-    FC=''; export FC; \
-    F90='';export F90; \
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF ; \
-    ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \
-    make; make install
-)
-else
-(cd netcdf*; \
-    FC=''; export FC; \
-    F90='';export F90; \
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF ; \
-    ./configure --prefix=${prefix}/Externals/NetCDF; \
-    make; make install
-)
-fi
diff --git a/exsrc/netcdf_fortran.sh b/exsrc/netcdf_fortran.sh
deleted file mode 100755
index bbf4c9886..000000000
--- a/exsrc/netcdf_fortran.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/sh
-PACKAGE="netcdf"
-BUILD=`pwd`
-export BUILD
-. ./prolog.sh
-
-FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py`
-export FC
-if ( test $FC = "gfortran") then
-    CPPFLAGS="-DpgiFortran"; export CPPFLAGS
-fi
-if (test `uname` = "HP-UX") then
-    CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS
-elif (test `uname` = "Darwin") then
-(    CXX=""; export CXX \
-)
-fi
-
-if (test `uname ` = "CYGWIN_NT-5.1") then 
-(cd netcdf*; \
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF; \
-    ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \
-    make; make install
-)
-elif (test `uname ` = "CYGWIN_NT-6.0") then 
-(cd netcdf*; \
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF; \
-    ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \
-    make; make install
-)
-else
-(cd netcdf*;\
-    # Add f77 support
-    unset PGI; \
-    mkdir ${prefix}/Externals/NetCDF; \
-    ./configure --prefix=${prefix}/Externals/NetCDF; \
-    make; make install; \
-)
-fi
-
diff --git a/exsrc/netpbm.input.conf b/exsrc/netpbm.input.conf
deleted file mode 100644
index a7f73f85f..000000000
--- a/exsrc/netpbm.input.conf
+++ /dev/null
@@ -1,19 +0,0 @@
-
-
-
-static
-
-none
-none
-none
-none
-
-
-
-
-
-
-
-
-
-
diff --git a/exsrc/netpbm.input.conf.Cygwin b/exsrc/netpbm.input.conf.Cygwin
deleted file mode 100644
index 5bd669a10..000000000
--- a/exsrc/netpbm.input.conf.Cygwin
+++ /dev/null
@@ -1,18 +0,0 @@
-
-gnu
-
-static
-
-none
-none
-none
-
-
-
-
-
-
-
-
-
-
diff --git a/exsrc/netpbm.input.conf.Darwin b/exsrc/netpbm.input.conf.Darwin
deleted file mode 100644
index 81ee29886..000000000
--- a/exsrc/netpbm.input.conf.Darwin
+++ /dev/null
@@ -1,19 +0,0 @@
-
-
-
-
-static
-
-none
-none
-none
-none
-
-
-
-
-
-
-
-
-
diff --git a/exsrc/netpbm.input.conf.sun b/exsrc/netpbm.input.conf.sun
deleted file mode 100644
index ae45aa38c..000000000
--- a/exsrc/netpbm.input.conf.sun
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-cc
-sun
-
-
-static
-none
-none
-none
-
-
-
-
-
-
-
-
-
-
diff --git a/exsrc/netpbm.input.inst b/exsrc/netpbm.input.inst
deleted file mode 100644
index c9167ec1e..000000000
--- a/exsrc/netpbm.input.inst
+++ /dev/null
@@ -1,9 +0,0 @@
-INST_PREFIX
-CDAT_PREFIX
-
-
-
-
-
-N
-
diff --git a/exsrc/netpbm.sh b/exsrc/netpbm.sh
deleted file mode 100755
index 1e5d07180..000000000
--- a/exsrc/netpbm.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/sh
-PACKAGE="netpbm"
-OS=`uname`
-if ( test "${OS}" = 'Darwin' ) then
-    echo "Darwin" ;
-    CONF_FILE=netpbm.input.conf.Darwin;
-elif ( test "${OS}" = 'sunOS' ) then
-    echo "Sun OS";
-    CONF_FILE=netpbm.input.conf.sun;
-elif ( test "${OS}" = 'Linux' ) then
-    echo "GNU Linux";
-    CONF_FILE=netpbm.input.conf;
-elif ( test "${OS}" = 'CYGWIN_NT-5.1' ) then
-    echo "GNU Build for Cygwin";
-    CONF_FILE=netpbm.input.conf.Cygwin;
-elif ( test "${OS}" = 'CYGWIN_NT-6.0' ) then
-    echo "GNU Build for Cygwin";
-    CONF_FILE=netpbm.input.conf.Cygwin;
-else
-    echo "Platform not tested, using GNU conf file";
-    echo "If hangs or fails try manually or use pbmplus";
-fi
-. ./prolog.sh
-(  
-   cd netpbm*; \
-   BUILD_DIR=`pwd`;\
-   sed -e 's@CDAT_PREFIX@'${prefix}'/Externals@g' \
-       -e 's@INST_PREFIX@'${BUILD_DIR}'/TMP@g' \
-        ../../netpbm.input.inst > netpbm.input.inst.feed ; \
-   ./configure < ../../${CONF_FILE} ; \
-   make ; \
-   make package pkgdir=${BUILD_DIR}/TMP; \
-   ./installnetpbm < netpbm.input.inst.feed ; \
-   rm -rf  ${BUILD_DIR}/TMP 
-)
diff --git a/exsrc/numpy.sh b/exsrc/numpy.sh
deleted file mode 100755
index a1560bcbb..000000000
--- a/exsrc/numpy.sh
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/bin/sh
-PACKAGE="numpy"
-. ./prolog.sh 
-# Handle x86_64 arch
-CDATARCH=`uname -m`
-if (test "${CDATARCH}" = "x86_64") then
-  cd numpy-*
-  cat >site.cfg <<EOF
-# Defaults
-#  ========
-# The settings given here will apply to all other sections if not overridden.
-# This is a good place to add general library and include directories like
-# /usr/local/{lib,include}
-#
-[DEFAULT]
-library_dirs = /usr/lib64
-EOF
-  cd ..
-fi
-if (test "${BLAS}" = "SETBLAS") then
-BLAS=${prefix}/Externals/lib/libblas.a
-export BLAS
-fi
-if (test "${LAPACK}" = "SETLAPACK") then
-LAPACK=${prefix}/Externals/lib/liblapack.a
-export LAPACK
-fi
-# Numpy.
-(cd numpy-*; ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/pbmplus.sh b/exsrc/pbmplus.sh
deleted file mode 100755
index 03f48e043..000000000
--- a/exsrc/pbmplus.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/sh
-PACKAGE="pbmplus"
-. ./prolog.sh
-d=`uname`
-if ( test "Linux" = "${d}" ) then
-( INSTALLBINARIES=${prefix}/Externals/bin; export INSTALLBINARIES; INSTALLMANUALS=${prefix}/Externals/man/mann ; export INSTALLMANUALS ; CC="gcc -ansi" ; export CC;cd pbmplus; make install )
-else
-( INSTALLBINARIES=${prefix}/Externals/bin; export INSTALLBINARIES ; INSTALLMANUALS=${prefix}/Externals/man/mann ; export INSTALLMANUALS ; cd pbmplus; make install )
-fi
diff --git a/exsrc/pixman.def b/exsrc/pixman.def
deleted file mode 100644
index 4b69464c5..000000000
--- a/exsrc/pixman.def
+++ /dev/null
@@ -1,62 +0,0 @@
-EXPORTS
-pixman_region_set_static_pointers
-pixman_region_init
-pixman_region_init_rect
-pixman_region_init_with_extents
-pixman_region_fini
-pixman_region_translate
-pixman_region_copy
-pixman_region_intersect
-pixman_region_union
-pixman_region_union_rect
-pixman_region_subtract
-pixman_region_inverse
-pixman_region_contains_point
-pixman_region_contains_rectangle
-pixman_region_not_empty
-pixman_region_extents
-pixman_region_n_rects
-pixman_region_rectangles
-pixman_region_equal
-pixman_region_selfcheck
-pixman_region_reset
-pixman_region_init_rects
-pixman_blt
-pixman_fill
-pixman_image_create_solid_fill
-pixman_image_create_linear_gradient
-pixman_image_create_radial_gradient
-pixman_image_create_conical_gradient
-pixman_image_create_bits
-pixman_image_ref
-pixman_image_unref
-pixman_image_set_clip_region
-pixman_image_set_has_client_clip
-pixman_image_set_transform
-pixman_image_set_repeat
-pixman_image_set_filter
-pixman_image_set_source_clipping
-pixman_image_set_alpha_map
-pixman_image_set_component_alpha
-pixman_image_set_accessors
-pixman_image_set_indexed
-pixman_image_get_data
-pixman_image_get_width
-pixman_image_get_height
-pixman_image_get_stride
-pixman_image_get_depth
-pixman_image_fill_rectangles
-pixman_compute_composite_region
-pixman_image_composite
-pixman_sample_ceil_y
-pixman_sample_floor_y
-pixman_edge_step
-pixman_edge_init
-pixman_line_fixed_edge_init
-pixman_rasterize_edges
-pixman_add_traps
-pixman_add_trapezoids
-pixman_rasterize_trapezoid
-pixman_format_supported_destination
-pixman_transform_point_3d
-LIBRARY libpixman-1.dll
diff --git a/exsrc/pkgconfig.sh b/exsrc/pkgconfig.sh
deleted file mode 100755
index b72b9fedd..000000000
--- a/exsrc/pkgconfig.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/sh
-PACKAGE="pkgconfig"
-. ./prolog.sh
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-(cd pkgconfig-* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/proj.sh b/exsrc/proj.sh
deleted file mode 100755
index 108cce340..000000000
--- a/exsrc/proj.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-PACKAGE="proj"
-. ./prolog.sh
-(cd proj* ; ./configure --prefix=${prefix}/Externals ; make ; make install )
-
diff --git a/exsrc/prolog.sh b/exsrc/prolog.sh
deleted file mode 100755
index 9d0e7b6d6..000000000
--- a/exsrc/prolog.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/bin/sh
-# superstition
-## Undoing superstition, let's be rational here.
-#unset PYTHONPATH
-#unset PYTHONSTARTUP
-#unset PYTHONHOME
-
-if (test "$1" = "--debug") then
-    D="--debug";
-    OPT=-g; 
-    shift
-else
-    D="";
-    OPT=${OPT:=-O}
-fi
-export OPT
-
-OS=`uname`
-if (test -z "$1") then
-    echo "Usage: $0 prefix";
-    exit 1
-fi
-version=`more ../version`
-
-if (test ! -d $1) then
-    echo -n "$1 is not a directory; create it? (y/[n])";
-    y='n'
-    read y;
-    if (test ${y} = 'y') then
-        mkdir -p $1/${version}/bin; mkdir $1/${version}/lib; mkdir $1/${version}/include ; mkdir -p $1/Externals/bin ; mkdir $!/Externals/lib ; mkdir $1/Externals/share ; mkdir $1/Externals/include
-        if (test ! -d $1) then
-            echo "Could not create $1, installation aborted.";
-            exit 1
-        fi
-    else
-        echo 'Installation aborted.';
-        exit 1
-    fi
-fi
-prefix=`(cd $1;pwd)`
-
-cp -f detect_fortran.py ${prefix}/${version}/bin/detect_fortran.py;
-chmod +x  ${prefix}/${version}/bin/detect_fortran.py;
-
-#mkdir -p build
-/bin/cp src/${PACKAGE}*gz build
-cd build
-chmod +w ${PACKAGE}*gz
-echo "untarring "${PACKAGE}
-
-for x in ${PACKAGE}*gz;
-    do
-        echo $x;
-        echo `basename $x .gz`;
-	gunzip -f $x;
-	tar xf `basename $x .gz`;
-	/bin/rm -f `basename $x .gz`;
-    done
-
-
-#if (test ! -d build) then
-#    # Unpack everything into build
-#    mkdir -p build
-#    /bin/cp src/*.gz build
-#    cd build
-#    chmod +w *.gz 
-#    for x in *.gz; 
-#    do 
-#        echo "$x"; 
-#        gunzip -f $x;
-#        tar xf `basename $x .gz`;
-#        /bin/rm -f `basename $x .gz`
-#    done
-#    cd ..
-#fi
-echo "Installation of ${PACKAGE} to ${prefix}"
diff --git a/exsrc/pyfort.sh b/exsrc/pyfort.sh
deleted file mode 100755
index 361e67ea0..000000000
--- a/exsrc/pyfort.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-PACKAGE="Pyfort"
-. ./prolog.sh
-(cd Pyfort*; ${prefix}/${version}/bin/python setup.py build ${D} install )
diff --git a/exsrc/setuptools.sh b/exsrc/setuptools.sh
deleted file mode 100755
index 5082091d3..000000000
--- a/exsrc/setuptools.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-PACKAGE="setuptools"
-. ./prolog.sh 
-# Twisted.
-(cd setuptools-* ; ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/src/CMakeCache.txt.linux.in b/exsrc/src/CMakeCache.txt.linux.in
deleted file mode 100644
index 15a056b28..000000000
--- a/exsrc/src/CMakeCache.txt.linux.in
+++ /dev/null
@@ -1,1965 +0,0 @@
-# This is the CMakeCache file.
-# For build in directory: @BUILD_DIR@/VTK-build
-# It was generated by CMake: cmake
-# You can edit this file to change values found and used by cmake.
-# If you do not want to change any of the values, simply exit the editor.
-# If you do want to change a value, simply edit, save, and exit the editor.
-# The syntax for the file is as follows:
-# KEY:TYPE=VALUE
-# KEY is the name of a variable in the cache.
-# TYPE is a hint to GUI's for the type of VALUE, DO NOT EDIT TYPE!.
-# VALUE is the current value for the KEY.
-
-########################
-# EXTERNAL cache entries
-########################
-
-//Build the documentation (Doxygen).
-BUILD_DOCUMENTATION:BOOL=OFF
-
-//Build VTK examples.
-BUILD_EXAMPLES:BOOL=ON
-
-//Build Verdict with shared libraries.
-BUILD_SHARED_LIBS:BOOL=ON
-
-//Build the testing tree.
-BUILD_TESTING:BOOL=OFF
-
-//Path to a program.
-BZRCOMMAND:FILEPATH=BZRCOMMAND-NOTFOUND
-
-//Path to a program.
-CMAKE_AR:FILEPATH=/usr/bin/ar
-
-//For backwards compatibility, what version of CMake commands and
-// syntax should this version of CMake try to support.
-CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.4
-
-//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or
-// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel.
-CMAKE_BUILD_TYPE:STRING=
-
-//Enable/Disable color output during build.
-CMAKE_COLOR_MAKEFILE:BOOL=ON
-
-//CXX compiler.
-CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/c++
-
-//Flags used by the compiler during all build types.
-CMAKE_CXX_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_CXX_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//C compiler.
-CMAKE_C_COMPILER:FILEPATH=/usr/bin/gcc
-
-//Flags used by the compiler during all build types.
-CMAKE_C_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_C_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//Flags used by the linker.
-CMAKE_EXE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Use HP pthreads.
-CMAKE_HP_PTHREADS:BOOL=
-
-//Path to a program.
-CMAKE_INSTALL_NAME_TOOL:FILEPATH=/usr/bin/install_name_tool
-
-//Install path prefix, prepended onto install directories.
-CMAKE_INSTALL_PREFIX:PATH=@PREFIX_PATH@
-
-//Path to a program.
-CMAKE_LINKER:FILEPATH=/usr/bin/ld
-
-//Path to a program.
-CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/make
-
-//Flags used by the linker during the creation of modules.
-CMAKE_MODULE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_NM:FILEPATH=/usr/bin/nm
-
-//Path to a program.
-CMAKE_OBJCOPY:FILEPATH=CMAKE_OBJCOPY-NOTFOUND
-
-//Path to a program.
-CMAKE_OBJDUMP:FILEPATH=CMAKE_OBJDUMP-NOTFOUND
-
-//Build architectures for OSX
-CMAKE_OSX_ARCHITECTURES:STRING=
-
-//Minimum OS X version to target for deployment (at runtime); newer
-// APIs weak linked. Set to empty string for default value.
-CMAKE_OSX_DEPLOYMENT_TARGET:STRING=10.5
-
-//The product will be built against the headers and libraries located
-// inside the indicated SDK.
-CMAKE_OSX_SYSROOT:PATH=/Developer/SDKs/MacOSX10.5.sdk
-
-//Value Computed by CMake
-CMAKE_PROJECT_NAME:STATIC=VTK
-
-//Path to a program.
-CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib
-
-//Flags used by the linker during the creation of dll's.
-CMAKE_SHARED_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_STRIP:FILEPATH=/usr/bin/strip
-
-//Thread library used.
-CMAKE_THREAD_LIBS:STRING=-lpthread
-
-//Use the pthreads library.
-CMAKE_USE_PTHREADS:BOOL=1
-
-//If true, cmake will use relative paths in makefiles and projects.
-CMAKE_USE_RELATIVE_PATHS:BOOL=OFF
-
-//Use sproc libs.
-CMAKE_USE_SPROC:BOOL=
-
-//Use the win32 thread library.
-CMAKE_USE_WIN32_THREADS:BOOL=
-
-//If this value is on, makefiles will be generated without the
-// .SILENT directive, and all commands will be echoed to the console
-// during the make.  This is useful for debugging only. With Visual
-// Studio IDE projects all commands are done without /nologo.
-CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE
-
-//X11 extra flags.
-CMAKE_X_CFLAGS:STRING=
-
-//Libraries and options used in X11 programs.
-CMAKE_X_LIBS:STRING=/usr/X11R6/lib/libSM.dylib;/usr/X11R6/lib/libICE.dylib;/usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-
-//Path to the coverage program that CTest uses for performing coverage
-// inspection
-COVERAGE_COMMAND:FILEPATH=/usr/bin/gcov
-
-//Path to a program.
-CVSCOMMAND:FILEPATH=/usr/bin/cvs
-
-//Options passed to the cvs update command.
-CVS_UPDATE_OPTIONS:STRING=-d -A -P
-
-//Maximum time allowed before CTest will kill the test.
-DART_TESTING_TIMEOUT:STRING=1500
-
-//Value Computed by CMake
-DICOMParser_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/DICOMParser
-
-//Value Computed by CMake
-DICOMParser_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/DICOMParser
-
-//Path to a program.
-HGCOMMAND:FILEPATH=HGCOMMAND-NOTFOUND
-
-//Command used to build entire project from the command line.
-MAKECOMMAND:STRING=/usr/bin/make -i
-
-//Path to the memory checking command, used for memory error detection.
-MEMORYCHECK_COMMAND:FILEPATH=/usr/local/bin/valgrind
-
-//File that contains suppressions for the memory checker
-MEMORYCHECK_SUPPRESSIONS_FILE:FILEPATH=
-
-//Value Computed by CMake
-MaterialLibrary_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary
-
-//Value Computed by CMake
-MaterialLibrary_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/MaterialLibrary
-
-//Include for OpenGL on OSX
-OPENGL_INCLUDE_DIR:PATH=/usr/X11R6/include
-
-//OpenGL lib for OSX
-OPENGL_gl_LIBRARY:FILEPATH=/usr/X11R6/lib/libGL.dylib
-
-//AGL lib for OSX
-OPENGL_glu_LIBRARY:FILEPATH=/usr/X11R6/lib/libGLU.dylib
-
-//Path to a file.
-OPENGL_xmesa_INCLUDE_DIR:PATH=/usr/X11R6/include
-
-//Does an external project define proj_list or should libproj4
-// define it?
-PROJ_LIST_EXTERNAL:BOOL=OFF
-
-//Should libproj4 include projection code that relies on GSL?
-PROJ_USE_GSL:BOOL=OFF
-
-//Should libproj4 be built as a thread-friendly library?
-PROJ_USE_PTHREADS:BOOL=OFF
-
-//Path to a library.
-PYTHON_DEBUG_LIBRARY:FILEPATH=/Library/Frameworks/python.framework
-
-//Add module vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython:BOOL=ON
-
-//Add module vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython:BOOL=ON
-
-//Add module vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython:BOOL=ON
-
-//Add module vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython:BOOL=ON
-
-//Add module vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython:BOOL=ON
-
-//Add module vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython:BOOL=ON
-
-//Add module vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython:BOOL=ON
-
-//Add module vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython:BOOL=ON
-
-//Add module vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython:BOOL=ON
-
-//Add module vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython:BOOL=ON
-
-//Add module vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython:BOOL=ON
-
-//Add module vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython:BOOL=ON
-
-//Add module vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython:BOOL=ON
-
-//Path to a program.
-PYTHON_EXECUTABLE:FILEPATH=@PREFIX_PATH@/bin/python@PYVER@
-
-//Extra libraries to link when linking to python (such as "z" for
-// zlib).  Separate multiple libraries with semicolons.
-PYTHON_EXTRA_LIBS:STRING=
-
-//Path to a file.
-PYTHON_INCLUDE_PATH:PATH=@PREFIX_PATH@/include/python@PYVER@
-
-//Path to a library.
-PYTHON_LIBRARY:FILEPATH=@PREFIX_PATH@/lib/libpython@PYVER@.dylib
-
-//Add module vtkCommonPython shared
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkFilteringPython shared
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGenericFilteringPython shared
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGeovisPython shared
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGraphicsPython shared
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkHybridPython shared
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkIOPython shared
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkImagingPython shared
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkInfovisPython shared
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkRenderingPython shared
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkViewsPython shared
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkVolumeRenderingPython shared
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkWidgetsPython shared
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED:BOOL=ON
-
-//Utility library needed for vtkpython
-PYTHON_UTIL_LIBRARY:FILEPATH=/usr/lib/libutil.dylib
-
-//Path to scp command, used by CTest for submitting results to
-// a Dart server
-SCPCOMMAND:FILEPATH=/usr/bin/scp
-
-//Name of the computer/site where compile is being run
-SITE:STRING=omar
-
-//Path to the SLURM sbatch executable
-SLURM_SBATCH_COMMAND:FILEPATH=SLURM_SBATCH_COMMAND-NOTFOUND
-
-//Path to the SLURM srun executable
-SLURM_SRUN_COMMAND:FILEPATH=SLURM_SRUN_COMMAND-NOTFOUND
-
-//Path to a program.
-SVNCOMMAND:FILEPATH=svn
-
-//Path to a file.
-TCL_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//Path to a library.
-TCL_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib
-
-//Path to a program.
-TCL_TCLSH:FILEPATH=@EXTERNALS@/bin/tclsh@TCLTK_VERSION@
-
-//Path to a file.
-TK_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//The path to the Tk internal headers (tkMacOSXDefault.h).
-TK_INTERNAL_PATH:PATH=@BUILD_DIR@/VTK/Utilities/TclTk/internals/tk8.4
-
-//Path to a library.
-TK_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib
-
-//Build the 2007 Verdict User Manual
-VERDICT_BUILD_DOC:BOOL=OFF
-
-//Should tests of the VERDICT library be built?
-VERDICT_ENABLE_TESTING:BOOL=OFF
-
-//Mangle verdict names for inclusion in a larger library?
-VERDICT_MANGLE:BOOL=ON
-
-//VTK requires the verdict prefix to be vtk
-VERDICT_MANGLE_PREFIX:STRING=vtk
-
-//VTK requires doubles
-VERDICT_USE_FLOAT:BOOL=OFF
-
-//Path to a library.
-VLI_LIBRARY_FOR_VP1000:FILEPATH=VLI_LIBRARY_FOR_VP1000-NOTFOUND
-
-//Value Computed by CMake
-VTKEXPAT_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKEXPAT_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKFREETYPE_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFREETYPE_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFTGL_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/ftgl
-
-//Value Computed by CMake
-VTKFTGL_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/ftgl
-
-//Value Computed by CMake
-VTKJPEG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKJPEG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKNETCDF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKNETCDF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKPNG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKPNG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKTIFF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKTIFF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKZLIB_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTKZLIB_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTK_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build
-
-//Path to a file.
-VTK_DATA_ROOT:PATH=@BUILD_DIR@/VTKData
-
-//Build leak checking support into VTK.
-VTK_DEBUG_LEAKS:BOOL=OFF
-
-//Location of the OpenGL extensions header file (glext.h).
-VTK_GLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glext.h
-
-//Location of the GLX extensions header file (glxext.h).
-VTK_GLXEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glxext.h
-
-//Remove all legacy code completely.
-VTK_LEGACY_REMOVE:BOOL=OFF
-
-//Silence all legacy code messages.
-VTK_LEGACY_SILENT:BOOL=OFF
-
-//; separated directories to search for materials/shaders
-VTK_MATERIALS_DIRS:STRING=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-
-//Disable multithreading support in the Python bindings
-VTK_NO_PYTHON_THREADS:BOOL=OFF
-
-//The opengl library being used supports off screen Mesa calls.
-VTK_OPENGL_HAS_OSMESA:BOOL=OFF
-
-//Arguments passed to "python setup.py install ..." during installation.
-VTK_PYTHON_SETUP_ARGS:STRING=--prefix="${CMAKE_INSTALL_PREFIX}"
-
-//Value Computed by CMake
-VTK_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK
-
-//VTK tests call vtkFloatingPointExceptions::Enable()
-VTK_TESTING_USE_FPE:BOOL=ON
-
-//Build VTK with 64 bit ids
-VTK_USE_64BIT_IDS:BOOL=OFF
-
-//Use Boost libraries for graph algorithms - www.boost.org.
-VTK_USE_BOOST:BOOL=OFF
-
-//Build classes using Carbon API.
-VTK_USE_CARBON:BOOL=OFF
-
-//Build pixel and vertex shader support for Cg.
-VTK_USE_CG_SHADERS:BOOL=OFF
-
-//Build classes using Cocoa API.
-VTK_USE_COCOA:BOOL=OFF
-
-//Turn this option off and tests and warning/error macros will
-// not popup windows
-VTK_USE_DISPLAY:BOOL=ON
-
-//If the FFMPEG library is available, should VTK use it for saving
-// .avi animation files?
-VTK_USE_FFMPEG_ENCODER:BOOL=OFF
-
-//Build the vtkGeovis kit.  Needed for performing geographic visualization.
-VTK_USE_GEOVIS:BOOL=ON
-
-//Build VTK with gl2ps support.
-VTK_USE_GL2PS:BOOL=ON
-
-//Build pixel and vertex shader support for GLSL.
-VTK_USE_GLSL_SHADERS:BOOL=ON
-
-//Build VTK with GUI Support
-VTK_USE_GUISUPPORT:BOOL=OFF
-
-//Build the vtkInfovis kit.  Needed for performing information
-// visualization.
-VTK_USE_INFOVIS:BOOL=ON
-
-//Use mangled Mesa with OpenGL.
-VTK_USE_MANGLED_MESA:BOOL=OFF
-
-//Build metaio
-VTK_USE_METAIO:BOOL=ON
-
-//Enable use of the patented mpeg2 library. You are solely responsible
-// for any legal issues associated with using patented code in
-// your software.
-VTK_USE_MPEG2_ENCODER:BOOL=OFF
-
-//Build the MySQL driver for vtkSQLDatabase.
-VTK_USE_MYSQL:BOOL=OFF
-
-//Add support for arbitrary-dimension sparse and dense arrays.
-VTK_USE_N_WAY_ARRAYS:BOOL=OFF
-
-//Build the ODBC database interface
-VTK_USE_ODBC:BOOL=OFF
-
-//Build the vtkParallel kit.
-VTK_USE_PARALLEL:BOOL=OFF
-
-//Build the PostgreSQL driver for vtkSQLDatabase.
-VTK_USE_POSTGRES:BOOL=OFF
-
-//Build the vtkRendering kit.  Needed for displaying data or using
-// widgets.
-VTK_USE_RENDERING:BOOL=ON
-
-//Build shared libraries with rpath.  This makes it easy to run
-// executables from the build tree when using shared libraries,
-// but removes install support.
-VTK_USE_RPATH:BOOL=ON
-
-//Use the system's expat library.
-VTK_USE_SYSTEM_EXPAT:BOOL=OFF
-
-//Use the system's freetype library.
-VTK_USE_SYSTEM_FREETYPE:BOOL=OFF
-
-//Use the system's jpeg library.
-VTK_USE_SYSTEM_JPEG:BOOL=OFF
-
-//Use the system's proj4 library.
-VTK_USE_SYSTEM_LIBPROJ4:BOOL=OFF
-
-//Use the system's libxml2 library.
-VTK_USE_SYSTEM_LIBXML2:BOOL=OFF
-
-//Use the system's png library.
-VTK_USE_SYSTEM_PNG:BOOL=OFF
-
-//Use the system's tiff library.
-VTK_USE_SYSTEM_TIFF:BOOL=OFF
-
-//Use the system's zlib library.
-VTK_USE_SYSTEM_ZLIB:BOOL=OFF
-
-//Build VTK with Tk support
-VTK_USE_TK:BOOL=ON
-
-//Build the vtkViews kit.  Needed for creating packaged and linked
-// views.
-VTK_USE_VIEWS:BOOL=ON
-
-//Enable support for VolumePro 1000.
-VTK_USE_VOLUMEPRO_1000:BOOL=OFF
-
-//Build classes for the X11 window system.
-VTK_USE_X:BOOL=ON
-
-//Location of the WGL extensions header file (wglext.h).
-VTK_WGLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/wglext.h
-
-//Path to a file.
-VTK_WRAP_HINTS:FILEPATH=@BUILD_DIR@/VTK/Wrapping/hints
-
-//Wrap VTK classes into the Java language.
-VTK_WRAP_JAVA:BOOL=OFF
-
-//Wrap VTK classes into the Python language.
-VTK_WRAP_PYTHON:BOOL=ON
-
-//Wrap VTK classes into the TCL language.
-VTK_WRAP_TCL:BOOL=OFF
-
-//Path to a file.
-X11_ICE_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_ICE_LIB:FILEPATH=/usr/X11R6/lib/libICE.dylib
-
-//Path to a library.
-X11_SM_LIB:FILEPATH=/usr/X11R6/lib/libSM.dylib
-
-//Path to a file.
-X11_X11_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_X11_LIB:FILEPATH=/usr/X11R6/lib/libX11.dylib
-
-//Path to a file.
-X11_XShm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_XTest_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_XTest_LIB:FILEPATH=/usr/X11R6/lib/libXtst.dylib
-
-//Path to a file.
-X11_Xaccessrules_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xaccessstr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xau_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xau_LIB:FILEPATH=/usr/X11R6/lib/libXau.dylib
-
-//Path to a file.
-X11_Xcomposite_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcomposite_LIB:FILEPATH=/usr/X11R6/lib/libXcomposite.dylib
-
-//Path to a file.
-X11_Xcursor_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcursor_LIB:FILEPATH=/usr/X11R6/lib/libXcursor.dylib
-
-//Path to a file.
-X11_Xdamage_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdamage_LIB:FILEPATH=/usr/X11R6/lib/libXdamage.dylib
-
-//Path to a file.
-X11_Xdmcp_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdmcp_LIB:FILEPATH=/usr/X11R6/lib/libXdmcp.dylib
-
-//Path to a library.
-X11_Xext_LIB:FILEPATH=/usr/X11R6/lib/libXext.dylib
-
-//Path to a file.
-X11_Xfixes_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xfixes_LIB:FILEPATH=/usr/X11R6/lib/libXfixes.dylib
-
-//Path to a file.
-X11_Xft_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xft_LIB:FILEPATH=/usr/X11R6/lib/libXft.dylib
-
-//Path to a file.
-X11_Xinerama_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinerama_LIB:FILEPATH=/usr/X11R6/lib/libXinerama.dylib
-
-//Path to a file.
-X11_Xinput_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinput_LIB:FILEPATH=/usr/X11R6/lib/libXi.dylib
-
-//Path to a file.
-X11_Xkb_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xkblib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xlib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xpm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xpm_LIB:FILEPATH=/usr/X11R6/lib/libXpm.dylib
-
-//Path to a file.
-X11_Xrandr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrandr_LIB:FILEPATH=/usr/X11R6/lib/libXrandr.dylib
-
-//Path to a file.
-X11_Xrender_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrender_LIB:FILEPATH=/usr/X11R6/lib/libXrender.dylib
-
-//Path to a file.
-X11_Xscreensaver_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xscreensaver_LIB:FILEPATH=/usr/X11R6/lib/libXss.dylib
-
-//Path to a file.
-X11_Xshape_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xt_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xt_LIB:FILEPATH=/usr/X11R6/lib/libXt.dylib
-
-//Path to a file.
-X11_Xutil_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xv_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xv_LIB:FILEPATH=/usr/X11R6/lib/libXv.dylib
-
-//Path to a library.
-X11_Xxf86misc_LIB:FILEPATH=/usr/X11R6/lib/libXxf86misc.dylib
-
-//Path to a file.
-X11_dpms_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_fontconfig_LIB:FILEPATH=/usr/X11R6/lib/libfontconfig.dylib
-
-//Path to a file.
-X11_xf86misc_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_xf86vmode_INCLUDE_PATH:PATH=/usr/include
-
-//Value Computed by CMake
-alglib_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkalglib
-
-//Value Computed by CMake
-alglib_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkalglib
-
-//Value Computed by CMake
-libproj4_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibproj4
-
-//Value Computed by CMake
-libproj4_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibproj4
-
-//Value Computed by CMake
-verdict_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/verdict
-
-//Value Computed by CMake
-verdict_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/verdict
-
-//Dependencies for the target
-vtkCommonPythonD_LIB_DEPENDS:STATIC=general;vtkCommon;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkCommonPython_LIB_DEPENDS:STATIC=general;vtkCommonPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkCommon_LIB_DEPENDS:STATIC=general;vtksys;general;-lpthread;general;-lm;
-
-//Dependencies for target
-vtkDICOMParser_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtkExodus2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexodus2
-
-//Value Computed by CMake
-vtkExodus2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexodus2
-
-//Dependencies for the target
-vtkFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkCommonPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkFilteringPython_LIB_DEPENDS:STATIC=general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkFiltering_LIB_DEPENDS:STATIC=general;vtkCommon;
-
-//Dependencies for the target
-vtkGenericFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkGenericFiltering;general;vtkFilteringPythonD;general;vtkGraphicsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGenericFilteringPython_LIB_DEPENDS:STATIC=general;vtkGenericFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGenericFiltering_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkGraphics;
-
-//Dependencies for the target
-vtkGeovisPythonD_LIB_DEPENDS:STATIC=general;vtkGeovis;general;vtkWidgetsPythonD;general;vtkViewsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGeovisPython_LIB_DEPENDS:STATIC=general;vtkGeovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGeovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkViews;general;vtkproj4;
-
-//Dependencies for the target
-vtkGraphicsPythonD_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGraphicsPython_LIB_DEPENDS:STATIC=general;vtkGraphicsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGraphics_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkverdict;
-
-//Dependencies for the target
-vtkHybridPythonD_LIB_DEPENDS:STATIC=general;vtkHybrid;general;vtkRenderingPythonD;general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkHybridPython_LIB_DEPENDS:STATIC=general;vtkHybridPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkHybrid_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;general;vtkexoIIc;
-
-//Dependencies for the target
-vtkIOPythonD_LIB_DEPENDS:STATIC=general;vtkIO;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkIOPython_LIB_DEPENDS:STATIC=general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkIO_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkDICOMParser;general;vtkNetCDF;general;vtkmetaio;general;vtksqlite;general;vtkpng;general;vtkzlib;general;vtkjpeg;general;vtktiff;general;vtkexpat;general;vtksys;
-
-//Dependencies for the target
-vtkImagingPythonD_LIB_DEPENDS:STATIC=general;vtkImaging;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkImagingPython_LIB_DEPENDS:STATIC=general;vtkImagingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkImaging_LIB_DEPENDS:STATIC=general;vtkFiltering;
-
-//Dependencies for the target
-vtkInfovisPythonD_LIB_DEPENDS:STATIC=general;vtkInfovis;general;vtkWidgetsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkInfovisPython_LIB_DEPENDS:STATIC=general;vtkInfovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkInfovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtklibxml2;general;vtkalglib;
-
-//Dependencies for target
-vtkNetCDF_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkGraphicsPythonD;general;vtkImagingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkRenderingPythonTkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib;general;@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib;general;m;
-
-//Dependencies for the target
-vtkRenderingPython_LIB_DEPENDS:STATIC=general;vtkRenderingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkRendering_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkImaging;general;vtkIO;general;vtkftgl;general;vtkfreetype;general;vtkzlib;general;vtkpng;general;/usr/X11R6/lib/libXt.dylib;general;/usr/X11R6/lib/libSM.dylib;general;/usr/X11R6/lib/libICE.dylib;general;/usr/X11R6/lib/libX11.dylib;general;/usr/X11R6/lib/libXext.dylib;general;/usr/X11R6/lib/libXss.dylib;general;/usr/X11R6/lib/libXft.dylib;general;/usr/X11R6/lib/libfontconfig.dylib;
-
-//Dependencies for the target
-vtkViewsPythonD_LIB_DEPENDS:STATIC=general;vtkViews;general;vtkInfovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkViewsPython_LIB_DEPENDS:STATIC=general;vtkViewsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkViews_LIB_DEPENDS:STATIC=general;vtkInfovis;
-
-//Dependencies for the target
-vtkVolumeRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkVolumeRendering;general;vtkRenderingPythonD;general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkVolumeRenderingPython_LIB_DEPENDS:STATIC=general;vtkVolumeRenderingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkVolumeRendering_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;
-
-//Dependencies for the target
-vtkWidgetsPythonD_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkRenderingPythonD;general;vtkHybridPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkWidgetsPython_LIB_DEPENDS:STATIC=general;vtkWidgetsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkHybrid;
-
-//Dependencies for target
-vtkalglib_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkexoIIc_LIB_DEPENDS:STATIC=general;vtkNetCDF;
-
-//Dependencies for target
-vtkexpat_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkfreetype_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkftgl_LIB_DEPENDS:STATIC=general;/usr/X11R6/lib/libGL.dylib;general;vtkfreetype;
-
-//Dependencies for target
-vtkjpeg_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtklibxml2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibxml2
-
-//Dependencies for the target
-vtklibxml2_LIB_DEPENDS:STATIC=general;vtkzlib;general;dl;general;-lpthread;general;m;
-
-//Value Computed by CMake
-vtklibxml2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibxml2
-
-//Value Computed by CMake
-vtkmetaio_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkmetaio_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtksys;
-
-//Value Computed by CMake
-vtkmetaio_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkpng_LIB_DEPENDS:STATIC=general;vtkzlib;
-
-//Dependencies for the target
-vtkproj4_LIB_DEPENDS:STATIC=general;m;
-
-//Dependencies for target
-vtksqlite_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/kwsys
-
-//Dependencies for target
-vtksys_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/kwsys
-
-//Dependencies for the target
-vtktiff_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtkjpeg;
-
-//Dependencies for target
-vtkverdict_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkzlib_LIB_DEPENDS:STATIC=
-
-
-########################
-# INTERNAL cache entries
-########################
-
-ALGLIB_SHARED_LIB:INTERNAL=ON
-//ADVANCED property for variable: BUILD_DOCUMENTATION
-BUILD_DOCUMENTATION-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: BUILD_EXAMPLES
-BUILD_EXAMPLES-MODIFIED:INTERNAL=ON
-//MODIFIED property for variable: BUILD_SHARED_LIBS
-BUILD_SHARED_LIBS-MODIFIED:INTERNAL=ON
-//MODIFIED property for variable: BUILD_TESTING
-BUILD_TESTING-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: BZRCOMMAND
-BZRCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_ANSI_FOR_SCOPE:INTERNAL=TRUE
-//Have include iostream
-CMAKE_ANSI_STREAM_HEADERS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_AR
-CMAKE_AR-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_BUILD_TOOL
-CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1
-//What is the target build tool cmake is generating for.
-CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/make
-//This is the directory where this CMakeCache.txt was created
-CMAKE_CACHEFILE_DIR:INTERNAL=@BUILD_DIR@/VTK-build
-//Major version of cmake used to create the current loaded cache
-CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2
-//Minor version of cmake used to create the current loaded cache
-CMAKE_CACHE_MINOR_VERSION:INTERNAL=8
-//Patch version of cmake used to create the current loaded cache
-CMAKE_CACHE_PATCH_VERSION:INTERNAL=0
-//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE
-CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1
-//Path to CMake executable.
-CMAKE_COMMAND:INTERNAL=cmake
-//Path to cpack program executable.
-CMAKE_CPACK_COMMAND:INTERNAL=cpack
-//Path to ctest program executable.
-CMAKE_CTEST_COMMAND:INTERNAL=ctest
-//ADVANCED property for variable: CMAKE_CXX_COMPILER
-CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_CXX_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS
-CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG
-CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL
-CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE
-CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO
-CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_COMPILER
-CMAKE_C_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_C_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS
-CMAKE_C_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG
-CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL
-CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE
-CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO
-CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_CXX_ABI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_C_ABI_COMPILED:INTERNAL=TRUE
-//Path to cache edit program executable.
-CMAKE_EDIT_COMMAND:INTERNAL=ccmake
-//Executable file format
-CMAKE_EXECUTABLE_FORMAT:INTERNAL=Unknown
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS
-CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG
-CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE
-CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Name of generator.
-CMAKE_GENERATOR:INTERNAL=Unix Makefiles
-//Result of TRY_COMPILE
-CMAKE_HAS_ANSI_STRING_STREAM:INTERNAL=TRUE
-//Is X11 around.
-CMAKE_HAS_X:INTERNAL=1
-//Have function connect
-CMAKE_HAVE_CONNECT:INTERNAL=1
-//Have function gethostbyname
-CMAKE_HAVE_GETHOSTBYNAME:INTERNAL=1
-//Have include CMAKE_HAVE_LIMITS_H
-CMAKE_HAVE_LIMITS_H:INTERNAL=1
-//Have library pthreads
-CMAKE_HAVE_PTHREADS_CREATE:INTERNAL=
-//Have library pthread
-CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1
-//Have include CMAKE_HAVE_PTHREAD_H
-CMAKE_HAVE_PTHREAD_H:INTERNAL=1
-//Have function remove
-CMAKE_HAVE_REMOVE:INTERNAL=1
-//Have function shmat
-CMAKE_HAVE_SHMAT:INTERNAL=1
-//Have includes CMAKE_HAVE_SYS_PRCTL_H
-CMAKE_HAVE_SYS_PRCTL_H:INTERNAL=
-//Have include CMAKE_HAVE_UNISTD_H
-CMAKE_HAVE_UNISTD_H:INTERNAL=1
-//Start directory with the top level CMakeLists.txt file for this
-// project
-CMAKE_HOME_DIRECTORY:INTERNAL=@BUILD_DIR@/VTK
-//ADVANCED property for variable: CMAKE_HP_PTHREADS
-CMAKE_HP_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_INSTALL_NAME_TOOL
-CMAKE_INSTALL_NAME_TOOL-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: CMAKE_INSTALL_PREFIX
-CMAKE_INSTALL_PREFIX-MODIFIED:INTERNAL=ON
-//Have library ICE
-CMAKE_LIB_ICE_HAS_ICECONNECTIONNUMBER:INTERNAL=1
-//ADVANCED property for variable: CMAKE_LINKER
-CMAKE_LINKER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MAKE_PROGRAM
-CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS
-CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG
-CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE
-CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_NM
-CMAKE_NM-ADVANCED:INTERNAL=1
-//Does the compiler support ansi for scope.
-CMAKE_NO_ANSI_FOR_SCOPE:INTERNAL=0
-//ADVANCED property for variable: CMAKE_NO_ANSI_STREAM_HEADERS
-CMAKE_NO_ANSI_STREAM_HEADERS-ADVANCED:INTERNAL=1
-//Does the compiler support headers like iostream.
-CMAKE_NO_ANSI_STREAM_HEADERS:INTERNAL=0
-//Does the compiler support sstream
-CMAKE_NO_ANSI_STRING_STREAM:INTERNAL=0
-//Does the compiler support std::.
-CMAKE_NO_STD_NAMESPACE:INTERNAL=0
-//number of local generators
-CMAKE_NUMBER_OF_LOCAL_GENERATORS:INTERNAL=44
-//ADVANCED property for variable: CMAKE_OBJCOPY
-CMAKE_OBJCOPY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_OBJDUMP
-CMAKE_OBJDUMP-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_RANLIB
-CMAKE_RANLIB-ADVANCED:INTERNAL=1
-//Test Support for 64 bit file systems
-CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=1
-//Path to CMake installation.
-CMAKE_ROOT:INTERNAL=@EXTERNALS@/share/cmake-2.8
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS
-CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG
-CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE
-CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_VOID_P:INTERNAL=4
-//ADVANCED property for variable: CMAKE_SKIP_RPATH
-CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1
-//Whether to build with rpath.
-CMAKE_SKIP_RPATH:INTERNAL=0
-//Result of TRY_COMPILE
-CMAKE_STD_NAMESPACE:INTERNAL=TRUE
-//ADVANCED property for variable: CMAKE_STRIP
-CMAKE_STRIP-ADVANCED:INTERNAL=1
-//Suppress Warnings that are meant for the author of the CMakeLists.txt
-// files.
-CMAKE_SUPPRESS_DEVELOPER_WARNINGS:INTERNAL=TRUE
-//ADVANCED property for variable: CMAKE_THREAD_LIBS
-CMAKE_THREAD_LIBS-ADVANCED:INTERNAL=1
-//uname command
-CMAKE_UNAME:INTERNAL=/usr/bin/uname
-//ADVANCED property for variable: CMAKE_USE_PTHREADS
-CMAKE_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_RELATIVE_PATHS
-CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_SPROC
-CMAKE_USE_SPROC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_WIN32_THREADS
-CMAKE_USE_WIN32_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE
-CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1
-//Result of TEST_BIG_ENDIAN
-CMAKE_WORDS_BIGENDIAN:INTERNAL=0
-//ADVANCED property for variable: CMAKE_X_CFLAGS
-CMAKE_X_CFLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_X_LIBS
-CMAKE_X_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: COVERAGE_COMMAND
-COVERAGE_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVSCOMMAND
-CVSCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVS_UPDATE_OPTIONS
-CVS_UPDATE_OPTIONS-ADVANCED:INTERNAL=1
-//CXX compiler accepts flag -no-cpp-precomp
-CXX_HAS_CPP_PRECOMP_FLAG:INTERNAL=TRUE
-//ADVANCED property for variable: DART_TESTING_TIMEOUT
-DART_TESTING_TIMEOUT-ADVANCED:INTERNAL=1
-//Single output directory for building all executables.
-EXECUTABLE_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//Have include malloc.h
-EX_HAVE_MALLOC_H:INTERNAL=
-//Details about finding PythonInterp
-FIND_PACKAGE_MESSAGE_DETAILS_PythonInterp:INTERNAL=[@PREFIX_PATH@/bin/python@PYVER@]
-//Details about finding PythonLibs
-FIND_PACKAGE_MESSAGE_DETAILS_PythonLibs:INTERNAL=[@PREFIX_PATH@/lib/libpython@PYVER@.dylib][@PREFIX_PATH@/include/python@PYVER@]
-//Details about finding TCL
-FIND_PACKAGE_MESSAGE_DETAILS_TCL:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TCLTK
-FIND_PACKAGE_MESSAGE_DETAILS_TCLTK:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include][@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TK
-FIND_PACKAGE_MESSAGE_DETAILS_TK:INTERNAL=[@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding Tclsh
-FIND_PACKAGE_MESSAGE_DETAILS_Tclsh:INTERNAL=[@EXTERNALS@/bin/tclsh@TCLTK_VERSION@]
-//Details about finding Threads
-FIND_PACKAGE_MESSAGE_DETAILS_Threads:INTERNAL=[TRUE]
-//Details about finding X11
-FIND_PACKAGE_MESSAGE_DETAILS_X11:INTERNAL=[/usr/X11R6/lib/libX11.dylib][/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include]
-//Have symbol alloca
-HAVE_ALLOCA:INTERNAL=1
-//Have include HAVE_ALLOCA_H
-HAVE_ALLOCA_H:INTERNAL=1
-//Have includes HAVE_ANSIDECL_H
-HAVE_ANSIDECL_H:INTERNAL=
-//Have include HAVE_ARPA_INET_H
-HAVE_ARPA_INET_H:INTERNAL=1
-//Have include HAVE_ARPA_NAMESER_H
-HAVE_ARPA_NAMESER_H:INTERNAL=1
-//Have include HAVE_ASSERT_H
-HAVE_ASSERT_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_VOID_P:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_WORDS_BIGENDIAN:INTERNAL=TRUE
-//Have include HAVE_CTYPE_H
-HAVE_CTYPE_H:INTERNAL=1
-//Have include HAVE_DIRENT_H
-HAVE_DIRENT_H:INTERNAL=1
-//Have include HAVE_DLFCN_H
-HAVE_DLFCN_H:INTERNAL=1
-//Have library dl;-lpthread;m
-HAVE_DLOPEN:INTERNAL=1
-//Have includes HAVE_DL_H
-HAVE_DL_H:INTERNAL=
-//Have include HAVE_ERRNO_H
-HAVE_ERRNO_H:INTERNAL=1
-//Have include HAVE_FCNTL_H
-HAVE_FCNTL_H:INTERNAL=1
-//Have symbol finite
-HAVE_FINITE:INTERNAL=1
-//Have include HAVE_FLOAT_H
-HAVE_FLOAT_H:INTERNAL=1
-//Have function floor
-HAVE_FLOOR:INTERNAL=1
-//Have symbol fpclass
-HAVE_FPCLASS:INTERNAL=
-//Have symbol fprintf
-HAVE_FPRINTF:INTERNAL=1
-//Have symbol fp_class
-HAVE_FP_CLASS:INTERNAL=
-//Have includes HAVE_FP_CLASS_H
-HAVE_FP_CLASS_H:INTERNAL=
-//Have symbol ftime
-HAVE_FTIME:INTERNAL=1
-//NetCDF test 
-HAVE_FTRUNCATE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_GETADDRINFO_COMPILED:INTERNAL=TRUE
-//Have function getopt
-HAVE_GETOPT:INTERNAL=1
-//Have symbol gettimeofday
-HAVE_GETTIMEOFDAY:INTERNAL=1
-//Have includes HAVE_IEEEFP_H
-HAVE_IEEEFP_H:INTERNAL=
-//Have include HAVE_INTTYPES_H
-HAVE_INTTYPES_H:INTERNAL=1
-//Have function isascii
-HAVE_ISASCII:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF___INT64:INTERNAL=FALSE
-//Have library dl;
-HAVE_LIBDL:INTERNAL=1
-//Have include HAVE_LIMITS_H
-HAVE_LIMITS_H:INTERNAL=1
-//Have symbol localtime
-HAVE_LOCALTIME:INTERNAL=1
-//Have includes HAVE_MALLOC_H
-HAVE_MALLOC_H:INTERNAL=
-//Have include HAVE_MATH_H
-HAVE_MATH_H:INTERNAL=1
-//Have function memmove
-HAVE_MEMMOVE:INTERNAL=1
-//Have include HAVE_MEMORY_H
-HAVE_MEMORY_H:INTERNAL=1
-//Have function memset
-HAVE_MEMSET:INTERNAL=1
-//Have function mmap
-HAVE_MMAP:INTERNAL=1
-//Have includes HAVE_NAN_H
-HAVE_NAN_H:INTERNAL=
-//Have includes HAVE_NDIR_H
-HAVE_NDIR_H:INTERNAL=
-//Have include HAVE_NETDB_H
-HAVE_NETDB_H:INTERNAL=1
-//Have include HAVE_NETINET_IN_H
-HAVE_NETINET_IN_H:INTERNAL=1
-//Have function pow
-HAVE_POW:INTERNAL=1
-//Have symbol printf
-HAVE_PRINTF:INTERNAL=1
-//Have include HAVE_PTHREAD_H
-HAVE_PTHREAD_H:INTERNAL=1
-//Have include HAVE_RESOLV_H
-HAVE_RESOLV_H:INTERNAL=1
-//Have library dld;dl
-HAVE_SHLLOAD:INTERNAL=
-//Have symbol signal
-HAVE_SIGNAL:INTERNAL=1
-//Have include HAVE_SIGNAL_H
-HAVE_SIGNAL_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_OFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_PTRDIFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SSIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_UCHAR:INTERNAL=FALSE
-//Have symbol snprintf
-HAVE_SNPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SOCKLEN_T_COMPILED:INTERNAL=TRUE
-//Have symbol sprintf
-HAVE_SPRINTF:INTERNAL=1
-//Have function sqrt
-HAVE_SQRT:INTERNAL=1
-//Have symbol sscanf
-HAVE_SSCANF:INTERNAL=1
-//Have symbol stat
-HAVE_STAT:INTERNAL=1
-//Have include HAVE_STDARG_H
-HAVE_STDARG_H:INTERNAL=1
-//Have include stddef.h
-HAVE_STDDEF_H:INTERNAL=1
-//Have include stdint.h
-HAVE_STDINT_H:INTERNAL=1
-//Have include HAVE_STDIO_H
-HAVE_STDIO_H:INTERNAL=1
-//Have include HAVE_STDLIB_H
-HAVE_STDLIB_H:INTERNAL=1
-//Have function strcasecmp
-HAVE_STRCASECMP:INTERNAL=1
-//Have function strchr
-HAVE_STRCHR:INTERNAL=1
-//Have symbol strdup
-HAVE_STRDUP:INTERNAL=1
-//Have symbol strerror
-HAVE_STRERROR:INTERNAL=1
-//Have symbol strftime
-HAVE_STRFTIME:INTERNAL=1
-//Have include HAVE_STRINGS_H
-HAVE_STRINGS_H:INTERNAL=1
-//Have include HAVE_STRING_H
-HAVE_STRING_H:INTERNAL=1
-//Have symbol strndup
-HAVE_STRNDUP:INTERNAL=
-//Have function strrchr
-HAVE_STRRCHR:INTERNAL=1
-//Have function strstr
-HAVE_STRSTR:INTERNAL=1
-//Have function strtol
-HAVE_STRTOL:INTERNAL=1
-//Have function areroul
-HAVE_STRTOUL:INTERNAL=
-//NetCDF test 
-HAVE_ST_BLKSIZE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_DIR_H_COMPILED:INTERNAL=TRUE
-//Have include HAVE_SYS_MMAN_H
-HAVE_SYS_MMAN_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_NDIR_H_COMPILED:INTERNAL=FALSE
-//Have include HAVE_SYS_SELECT_H
-HAVE_SYS_SELECT_H:INTERNAL=1
-//Have include HAVE_SYS_SOCKET_H
-HAVE_SYS_SOCKET_H:INTERNAL=1
-//Have include HAVE_SYS_STAT_H
-HAVE_SYS_STAT_H:INTERNAL=1
-//Have include HAVE_SYS_TIMEB_H
-HAVE_SYS_TIMEB_H:INTERNAL=1
-//Have include HAVE_SYS_TIME_H
-HAVE_SYS_TIME_H:INTERNAL=1
-//Have include sys/types.h
-HAVE_SYS_TYPES_H:INTERNAL=1
-//Have include HAVE_TIME_H
-HAVE_TIME_H:INTERNAL=1
-//Have include HAVE_UNISTD_H
-HAVE_UNISTD_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VA_COPY_COMPILED:INTERNAL=TRUE
-//Have symbol vfprintf
-HAVE_VFPRINTF:INTERNAL=1
-//Have symbol vsnprintf
-HAVE_VSNPRINTF:INTERNAL=1
-//Have symbol vsprintf
-HAVE_VSPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF___INT64:INTERNAL=FALSE
-//Have includes HAVE_WINDOWS_H
-HAVE_WINDOWS_H:INTERNAL=
-//Have symbol _stat
-HAVE__STAT:INTERNAL=
-//Result of TRY_COMPILE
-HAVE___VA_COPY_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: HGCOMMAND
-HGCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-KWSYS_CHAR_IS_SIGNED:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_ARGUMENT_DEPENDENT_LOOKUP_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_CSTDDEF_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_FULL_SPECIALIZATION_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_MEMBER_TEMPLATES_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_NULL_TEMPLATE_ARGS_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_PTRDIFF_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_SSIZE_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_ANSI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_SSTREAM_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-KWSYS_LFS_WORKS:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_LFS_WORKS_COMPILED:INTERNAL=TRUE
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_LONG_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF___INT64:INTERNAL=
-//Result of TRY_COMPILE
-KWSYS_STAT_HAS_ST_MTIM_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_MAX_SIZE_ARGUMENT_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_OBJECTS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_REBIND_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_TEMPLATE_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ITERATOR_TRAITS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_STRING_HAVE_NEQ_CHAR_COMPILED:INTERNAL=TRUE
-//Single output directory for building all libraries.
-LIBRARY_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//ADVANCED property for variable: MAKECOMMAND
-MAKECOMMAND-ADVANCED:INTERNAL=1
-//Path to vtkMaterialLibraryMacro.h
-MATERIAL_LIBRARY_MATERIAL_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkMaterialLibraryMacro.h
-//Path to vtkShaderCodeLibraryMacro.h
-MATERIAL_LIBRARY_SHADER_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkShaderCodeLibraryMacro.h
-//ADVANCED property for variable: MEMORYCHECK_COMMAND
-MEMORYCHECK_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: MEMORYCHECK_SUPPRESSIONS_FILE
-MEMORYCHECK_SUPPRESSIONS_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_xmesa_INCLUDE_DIR
-OPENGL_xmesa_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//Have symbol atanh
-PROJ_HAVE_ATANH:INTERNAL=1
-//Have symbol csin
-PROJ_HAVE_COMPLEX:INTERNAL=1
-//ADVANCED property for variable: PROJ_LIST_EXTERNAL
-PROJ_LIST_EXTERNAL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_GSL
-PROJ_USE_GSL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_PTHREADS
-PROJ_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_DEBUG_LIBRARY
-PYTHON_DEBUG_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXECUTABLE
-PYTHON_EXECUTABLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXTRA_LIBS
-PYTHON_EXTRA_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_INCLUDE_PATH
-PYTHON_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: PYTHON_INCLUDE_PATH
-PYTHON_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: PYTHON_MODULE_vtkCommonPython_BUILD_SHARED
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkHybridPython_BUILD_SHARED
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkIOPython_BUILD_SHARED
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkImagingPython_BUILD_SHARED
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkViewsPython_BUILD_SHARED
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_UTIL_LIBRARY
-PYTHON_UTIL_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SCPCOMMAND
-SCPCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-SHARED_LIBRARY_PATH_INFO_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-SHARED_LIBRARY_PATH_TYPE:INTERNAL=0
-//runtime library path variable name.
-SHARED_LIBRARY_PATH_VAR_NAME:INTERNAL=DYLD_LIBRARY_PATH
-//ADVANCED property for variable: SITE
-SITE-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_OFF_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_PTRDIFF_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SIZE_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SSIZE_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_UCHAR:INTERNAL=
-//ADVANCED property for variable: SLURM_SBATCH_COMMAND
-SLURM_SBATCH_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SLURM_SRUN_COMMAND
-SLURM_SRUN_COMMAND-ADVANCED:INTERNAL=1
-//Have include STDC_HEADERS
-STDC_HEADERS:INTERNAL=1
-//Result of TRY_COMPILE
-SUPPORT_IP6_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: SVNCOMMAND
-SVNCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_LIBRARY
-TCL_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_LIBRARY
-TCL_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_TCLSH
-TCL_TCLSH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_TCLSH
-TCL_TCLSH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INTERNAL_PATH
-TK_INTERNAL_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: TK_LIBRARY
-TK_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_LIBRARY
-TK_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_WISH
-TK_WISH-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TK_WISH:INTERNAL=/usr/bin/wish
-//ADVANCED property for variable: VERDICT_BUILD_DOC
-VERDICT_BUILD_DOC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_ENABLE_TESTING
-VERDICT_ENABLE_TESTING-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE
-VERDICT_MANGLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE_PREFIX
-VERDICT_MANGLE_PREFIX-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_USE_FLOAT
-VERDICT_USE_FLOAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VLI_LIBRARY_FOR_VP1000
-VLI_LIBRARY_FOR_VP1000-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_ANSI_STREAM_EOF_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-VTK_ANSI_STREAM_EOF_RESULT:INTERNAL=0
-//Support for C++ type bool
-VTK_COMPILER_HAS_BOOL:INTERNAL=1
-//Support for full template specialization syntax
-VTK_COMPILER_HAS_FULL_SPECIALIZATION:INTERNAL=1
-//ADVANCED property for variable: VTK_DEBUG_LEAKS
-VTK_DEBUG_LEAKS-ADVANCED:INTERNAL=1
-//The directory in which code for Shaders is provided.
-VTK_DEFAULT_SHADERS_DIR:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-//String encoder.
-VTK_ENCODESTRING_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkEncodeString
-//Support for C++ explict templates
-VTK_EXPLICIT_TEMPLATES:INTERNAL=1
-//ADVANCED property for variable: VTK_GLEXT_FILE
-VTK_GLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_GLXEXT_FILE
-VTK_GLXEXT_FILE-ADVANCED:INTERNAL=1
-//Have include iosfwd
-VTK_HAVE_ANSI_STREAMS:INTERNAL=1
-//Support for getsockname with socklen_t
-VTK_HAVE_GETSOCKNAME_WITH_SOCKLEN_T:INTERNAL=1
-//Have library socket
-VTK_HAVE_LIBSOCKET:INTERNAL=
-//Have include iostream.h
-VTK_HAVE_OLD_STREAMS:INTERNAL=1
-//Have include strstream.h
-VTK_HAVE_OLD_STRSTREAM_H:INTERNAL=
-//Have include strstrea.h
-VTK_HAVE_OLD_STRSTREA_H:INTERNAL=
-//Have symbol SO_REUSEADDR
-VTK_HAVE_SO_REUSEADDR:INTERNAL=1
-//Whether istream supports long long
-VTK_ISTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_REMOVE
-VTK_LEGACY_REMOVE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_SILENT
-VTK_LEGACY_SILENT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_MATERIALS_DIRS
-VTK_MATERIALS_DIRS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_NO_PYTHON_THREADS
-VTK_NO_PYTHON_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_OPENGL_HAS_OSMESA
-VTK_OPENGL_HAS_OSMESA-ADVANCED:INTERNAL=1
-//Whether ostream supports long long
-VTK_OSTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//OpenGL extensions parser.
-VTK_PARSEOGLEXT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkParseOGLExt
-//Install directory for Python .py and .pyc files
-VTK_PYTHON_MODULE_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//Install directory for Python binary modules
-VTK_PYTHON_MODULE_PLATFORM_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//ADVANCED property for variable: VTK_PYTHON_SETUP_ARGS
-VTK_PYTHON_SETUP_ARGS-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF_LONG_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF___INT64:INTERNAL=
-//Very few users should worry about this option. If VTK is built
-// against a static Tcl/Tk lib (see VTK_TCL_TK_STATIC) or a shared
-// Tcl/Tk bundled inside a project with no library support files
-// (ex: ParaViewComplete), this variable should be set to ON and
-// both VTK_TCL_SUPPORT_LIBRARY_PATH and VTK_TK_SUPPORT_LIBRARY_PATH
-// should point to the directories that hold those files (typically,
-// lib/tcl8.4 and lib/tk8.4 for a typical Tcl/Tk installation,
-// or tcl8.4.5/library and tk8.4.5/library for a Tcl/Tk source
-// repository). Once this variable is set to ON, support files
-// will automatically be copied to the build directory and the
-// executables will try to use that location to initialize Tcl/Tk.
-VTK_TCL_TK_COPY_SUPPORT_LIBRARY:INTERNAL=
-//ADVANCED property for variable: VTK_TESTING_USE_FPE
-VTK_TESTING_USE_FPE-ADVANCED:INTERNAL=1
-//Whether char is signed.
-VTK_TYPE_CHAR_IS_SIGNED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_TYPE_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: VTK_USE_64BIT_IDS
-VTK_USE_64BIT_IDS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_BOOST
-VTK_USE_BOOST-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_CG_SHADERS
-VTK_USE_CG_SHADERS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_COCOA
-VTK_USE_COCOA-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_DISPLAY
-VTK_USE_DISPLAY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_FFMPEG_ENCODER
-VTK_USE_FFMPEG_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_GLSL_SHADERS
-VTK_USE_GLSL_SHADERS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GUISUPPORT
-VTK_USE_GUISUPPORT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MANGLED_MESA
-VTK_USE_MANGLED_MESA-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_METAIO
-VTK_USE_METAIO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MPEG2_ENCODER
-VTK_USE_MPEG2_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MYSQL
-VTK_USE_MYSQL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_ODBC
-VTK_USE_ODBC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_POSTGRES
-VTK_USE_POSTGRES-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_SYSTEM_EXPAT
-VTK_USE_SYSTEM_EXPAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_FREETYPE
-VTK_USE_SYSTEM_FREETYPE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_JPEG
-VTK_USE_SYSTEM_JPEG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBPROJ4
-VTK_USE_SYSTEM_LIBPROJ4-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBXML2
-VTK_USE_SYSTEM_LIBXML2-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_PNG
-VTK_USE_SYSTEM_PNG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_TIFF
-VTK_USE_SYSTEM_TIFF-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_ZLIB
-VTK_USE_SYSTEM_ZLIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_TK
-VTK_USE_TK-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_VOLUMEPRO_1000
-VTK_USE_VOLUMEPRO_1000-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_X
-VTK_USE_X-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_WGLEXT_FILE
-VTK_WGLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_WRAP_HINTS
-VTK_WRAP_HINTS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_WRAP_PYTHON
-VTK_WRAP_PYTHON-MODIFIED:INTERNAL=ON
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPython
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_INIT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPythonInit
-//ADVANCED property for variable: X11_ICE_INCLUDE_PATH
-X11_ICE_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_ICE_LIB
-X11_ICE_LIB-ADVANCED:INTERNAL=1
-//Have library /usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-X11_LIB_X11_SOLO:INTERNAL=1
-//ADVANCED property for variable: X11_SM_LIB
-X11_SM_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_INCLUDE_PATH
-X11_X11_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_LIB
-X11_X11_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XShm_INCLUDE_PATH
-X11_XShm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_INCLUDE_PATH
-X11_XTest_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_LIB
-X11_XTest_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessrules_INCLUDE_PATH
-X11_Xaccessrules_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessstr_INCLUDE_PATH
-X11_Xaccessstr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_INCLUDE_PATH
-X11_Xau_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_LIB
-X11_Xau_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_INCLUDE_PATH
-X11_Xcomposite_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_LIB
-X11_Xcomposite_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_INCLUDE_PATH
-X11_Xcursor_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_LIB
-X11_Xcursor_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_INCLUDE_PATH
-X11_Xdamage_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_LIB
-X11_Xdamage_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_INCLUDE_PATH
-X11_Xdmcp_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_LIB
-X11_Xdmcp_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xext_LIB
-X11_Xext_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_INCLUDE_PATH
-X11_Xfixes_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_LIB
-X11_Xfixes_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_INCLUDE_PATH
-X11_Xft_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_LIB
-X11_Xft_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_INCLUDE_PATH
-X11_Xinerama_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_LIB
-X11_Xinerama_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_INCLUDE_PATH
-X11_Xinput_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_LIB
-X11_Xinput_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkb_INCLUDE_PATH
-X11_Xkb_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkblib_INCLUDE_PATH
-X11_Xkblib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xlib_INCLUDE_PATH
-X11_Xlib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_INCLUDE_PATH
-X11_Xpm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_LIB
-X11_Xpm_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_INCLUDE_PATH
-X11_Xrandr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_LIB
-X11_Xrandr_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_INCLUDE_PATH
-X11_Xrender_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_LIB
-X11_Xrender_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_INCLUDE_PATH
-X11_Xscreensaver_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_LIB
-X11_Xscreensaver_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xshape_INCLUDE_PATH
-X11_Xshape_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_INCLUDE_PATH
-X11_Xt_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_LIB
-X11_Xt_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xutil_INCLUDE_PATH
-X11_Xutil_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_INCLUDE_PATH
-X11_Xv_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_LIB
-X11_Xv_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xxf86misc_LIB
-X11_Xxf86misc_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_dpms_INCLUDE_PATH
-X11_dpms_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_fontconfig_LIB
-X11_fontconfig_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86misc_INCLUDE_PATH
-X11_xf86misc_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86vmode_INCLUDE_PATH
-X11_xf86vmode_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Already complained about update type.
-__CTEST_UPDATE_TYPE_COMPLAINED:INTERNAL=1
-
diff --git a/exsrc/src/CMakeCache.txt.mac.Framework.in b/exsrc/src/CMakeCache.txt.mac.Framework.in
deleted file mode 100644
index 9764e5a52..000000000
--- a/exsrc/src/CMakeCache.txt.mac.Framework.in
+++ /dev/null
@@ -1,2066 +0,0 @@
-# This is the CMakeCache file.
-# For build in directory: @BUILD_DIR@/VTK-build
-# It was generated by CMake: @EXTERNALS@/bin/cmake
-# You can edit this file to change values found and used by cmake.
-# If you do not want to change any of the values, simply exit the editor.
-# If you do want to change a value, simply edit, save, and exit the editor.
-# The syntax for the file is as follows:
-# KEY:TYPE=VALUE
-# KEY is the name of a variable in the cache.
-# TYPE is a hint to GUI's for the type of VALUE, DO NOT EDIT TYPE!.
-# VALUE is the current value for the KEY.
-
-########################
-# EXTERNAL cache entries
-########################
-
-//Path to a library.
-ApplicationServices:FILEPATH=/System/Library/Frameworks/ApplicationServices.framework
-
-//Build the documentation (Doxygen).
-BUILD_DOCUMENTATION:BOOL=OFF
-
-//Build VTK examples.
-BUILD_EXAMPLES:BOOL=OFF
-
-//Build Verdict with shared libraries.
-BUILD_SHARED_LIBS:BOOL=ON
-
-//Build the testing tree.
-BUILD_TESTING:BOOL=ON
-
-//Path to a program.
-BZRCOMMAND:FILEPATH=BZRCOMMAND-NOTFOUND
-
-//Path to a program.
-CMAKE_AR:FILEPATH=/usr/bin/ar
-
-//For backwards compatibility, what version of CMake commands and
-// syntax should this version of CMake try to support.
-CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.4
-
-//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or
-// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel.
-CMAKE_BUILD_TYPE:STRING=
-
-//Enable/Disable color output during build.
-CMAKE_COLOR_MAKEFILE:BOOL=ON
-
-//CXX compiler.
-CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/c++
-
-//Flags used by the compiler during all build types.
-CMAKE_CXX_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_CXX_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//C compiler.
-CMAKE_C_COMPILER:FILEPATH=/usr/bin/gcc
-
-//Flags used by the compiler during all build types.
-CMAKE_C_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_C_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//Flags used by the linker.
-CMAKE_EXE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Use HP pthreads.
-CMAKE_HP_PTHREADS:BOOL=
-
-//Path to a program.
-CMAKE_INSTALL_NAME_TOOL:FILEPATH=/usr/bin/install_name_tool
-
-//Install path prefix, prepended onto install directories.
-CMAKE_INSTALL_PREFIX:PATH=@PREFIX_PATH@
-
-//Path to a program.
-CMAKE_LINKER:FILEPATH=/usr/bin/ld
-
-//Path to a program.
-CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/make
-
-//Flags used by the linker during the creation of modules.
-CMAKE_MODULE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_NM:FILEPATH=/usr/bin/nm
-
-//Path to a program.
-CMAKE_OBJCOPY:FILEPATH=CMAKE_OBJCOPY-NOTFOUND
-
-//Path to a program.
-CMAKE_OBJDUMP:FILEPATH=CMAKE_OBJDUMP-NOTFOUND
-
-//Build architectures for OSX
-CMAKE_OSX_ARCHITECTURES:STRING=
-
-//Minimum OS X version to target for deployment (at runtime); newer
-// APIs weak linked. Set to empty string for default value.
-CMAKE_OSX_DEPLOYMENT_TARGET:STRING=10.6
-
-//The product will be built against the headers and libraries located
-// inside the indicated SDK.
-CMAKE_OSX_SYSROOT:PATH=/Developer/SDKs/MacOSX10.6.sdk
-
-//Value Computed by CMake
-CMAKE_PROJECT_NAME:STATIC=VTK
-
-//Path to a program.
-CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib
-
-//Flags used by the linker during the creation of dll's.
-CMAKE_SHARED_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_STRIP:FILEPATH=/usr/bin/strip
-
-//Thread library used.
-CMAKE_THREAD_LIBS:STRING=-lpthread
-
-//Use the pthreads library.
-CMAKE_USE_PTHREADS:BOOL=1
-
-//If true, cmake will use relative paths in makefiles and projects.
-CMAKE_USE_RELATIVE_PATHS:BOOL=OFF
-
-//Use sproc libs.
-CMAKE_USE_SPROC:BOOL=
-
-//Use the win32 thread library.
-CMAKE_USE_WIN32_THREADS:BOOL=
-
-//If this value is on, makefiles will be generated without the
-// .SILENT directive, and all commands will be echoed to the console
-// during the make.  This is useful for debugging only. With Visual
-// Studio IDE projects all commands are done without /nologo.
-CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE
-
-//X11 extra flags.
-CMAKE_X_CFLAGS:STRING=
-
-//Libraries and options used in X11 programs.
-CMAKE_X_LIBS:STRING=/usr/X11R6/lib/libSM.dylib;/usr/X11R6/lib/libICE.dylib;/usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-
-//Path to the coverage program that CTest uses for performing coverage
-// inspection
-COVERAGE_COMMAND:FILEPATH=/usr/bin/gcov
-
-//Path to a program.
-CVSCOMMAND:FILEPATH=/usr/bin/cvs
-
-//Options passed to the cvs update command.
-CVS_UPDATE_OPTIONS:STRING=-d -A -P
-
-//Maximum time allowed before CTest will kill the test.
-DART_TESTING_TIMEOUT:STRING=1500
-
-//Value Computed by CMake
-DICOMParser_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/DICOMParser
-
-//Value Computed by CMake
-DICOMParser_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/DICOMParser
-
-//Path to a program.
-HGCOMMAND:FILEPATH=HGCOMMAND-NOTFOUND
-
-//Path to a library.
-IOKit:FILEPATH=/System/Library/Frameworks/IOKit.framework
-
-//Command used to build entire project from the command line.
-MAKECOMMAND:STRING=/usr/bin/make -i -j 16
-
-//Value Computed by CMake
-MAPREDUCE_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/mrmpi
-
-//Value Computed by CMake
-MAPREDUCE_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/mrmpi
-
-//Path to the memory checking command, used for memory error detection.
-MEMORYCHECK_COMMAND:FILEPATH=MEMORYCHECK_COMMAND-NOTFOUND
-
-//File that contains suppressions for the memory checker
-MEMORYCHECK_SUPPRESSIONS_FILE:FILEPATH=
-
-//Dependencies for the target
-MapReduceMPI_LIB_DEPENDS:STATIC=general;mpistubs;
-
-//Value Computed by CMake
-MaterialLibrary_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary
-
-//Value Computed by CMake
-MaterialLibrary_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/MaterialLibrary
-
-//Include for OpenGL on OSX
-OPENGL_INCLUDE_DIR:PATH=/System/Library/Frameworks/OpenGL.framework
-
-//OpenGL lib for OSX
-OPENGL_gl_LIBRARY:FILEPATH=/System/Library/Frameworks/OpenGL.framework
-
-//AGL lib for OSX
-OPENGL_glu_LIBRARY:FILEPATH=/System/Library/Frameworks/AGL.framework
-
-//Does an external project define proj_list or should libproj4
-// define it?
-PROJ_LIST_EXTERNAL:BOOL=OFF
-
-//Should libproj4 include projection code that relies on GSL?
-PROJ_USE_GSL:BOOL=OFF
-
-//Should libproj4 be built as a thread-friendly library?
-PROJ_USE_PTHREADS:BOOL=OFF
-
-//Add module vtkChartsPython
-PYTHON_ENABLE_MODULE_vtkChartsPython:BOOL=ON
-
-//Add module vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython:BOOL=ON
-
-//Add module vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython:BOOL=ON
-
-//Add module vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython:BOOL=ON
-
-//Add module vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython:BOOL=ON
-
-//Add module vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython:BOOL=ON
-
-//Add module vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython:BOOL=ON
-
-//Add module vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython:BOOL=ON
-
-//Add module vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython:BOOL=ON
-
-//Add module vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython:BOOL=ON
-
-//Add module vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython:BOOL=ON
-
-//Add module vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython:BOOL=ON
-
-//Add module vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython:BOOL=ON
-
-//Add module vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython:BOOL=ON
-
-//Path to a program.
-PYTHON_EXECUTABLE:FILEPATH=@PREFIX_PATH@/bin/python@PYVER@
-
-//Extra libraries to link when linking to python (such as "z" for
-// zlib).  Separate multiple libraries with semicolons.
-PYTHON_EXTRA_LIBS:STRING=
-
-//Path to a file.
-PYTHON_INCLUDE_DIR:PATH=@PREFIX_PATH@/Python.framework/Headers
-
-//Path to a library.
-PYTHON_LIBRARY:FILEPATH=@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib
-
-//Add module vtkChartsPython shared
-PYTHON_MODULE_vtkChartsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkCommonPython shared
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkFilteringPython shared
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGenericFilteringPython shared
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGeovisPython shared
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGraphicsPython shared
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkHybridPython shared
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkIOPython shared
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkImagingPython shared
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkInfovisPython shared
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkRenderingPython shared
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkViewsPython shared
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkVolumeRenderingPython shared
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkWidgetsPython shared
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED:BOOL=ON
-
-//Utility library needed for vtkpython
-PYTHON_UTIL_LIBRARY:FILEPATH=/usr/lib/libutil.dylib
-
-//Path to scp command, used by CTest for submitting results to
-// a Dart server
-SCPCOMMAND:FILEPATH=/usr/bin/scp
-
-//Name of the computer/site where compile is being run
-SITE:STRING=meryem.llnl.gov
-
-//Path to the SLURM sbatch executable
-SLURM_SBATCH_COMMAND:FILEPATH=SLURM_SBATCH_COMMAND-NOTFOUND
-
-//Path to the SLURM srun executable
-SLURM_SRUN_COMMAND:FILEPATH=SLURM_SRUN_COMMAND-NOTFOUND
-
-//Path to a program.
-SVNCOMMAND:FILEPATH=/usr/bin/svn
-
-//Path to a file.
-TCL_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//Path to a library.
-TCL_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib
-
-//Path to a program.
-TCL_TCLSH:FILEPATH=@EXTERNALS@/bin/tclsh@TCLTK_VERSION@
-
-//Path to a file.
-TK_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//The path to the Tk internal headers (tkMacOSXDefault.h).
-TK_INTERNAL_PATH:PATH=@EXTERNALS@/include
-
-//Path to a library.
-TK_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib
-
-//Build the 2007 Verdict User Manual
-VERDICT_BUILD_DOC:BOOL=OFF
-
-//Should tests of the VERDICT library be built?
-VERDICT_ENABLE_TESTING:BOOL=OFF
-
-//Mangle verdict names for inclusion in a larger library?
-VERDICT_MANGLE:BOOL=ON
-
-//VTK requires the verdict prefix to be vtk
-VERDICT_MANGLE_PREFIX:STRING=vtk
-
-//VTK requires doubles
-VERDICT_USE_FLOAT:BOOL=OFF
-
-//Path to a library.
-VLI_LIBRARY_FOR_VP1000:FILEPATH=VLI_LIBRARY_FOR_VP1000-NOTFOUND
-
-//Value Computed by CMake
-VTKEXPAT_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKEXPAT_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKFREETYPE_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFREETYPE_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFTGL_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/ftgl
-
-//Value Computed by CMake
-VTKFTGL_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/ftgl
-
-//Value Computed by CMake
-VTKJPEG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKJPEG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKNETCDF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKNETCDF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKPNG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKPNG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKTIFF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKTIFF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKZLIB_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTKZLIB_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTK_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build
-
-//The repository for data used for testing.  To obtain from CVS:
-// "cvs -d :pserver:anoncvs@www.vtk.org:/cvsroot/VTK co VTKData"
-VTK_DATA_ROOT:PATH=VTK_DATA_ROOT-NOTFOUND
-
-//Build leak checking support into VTK.
-VTK_DEBUG_LEAKS:BOOL=OFF
-
-//Add compiler flags to do stricter checking when building debug.
-VTK_EXTRA_COMPILER_WARNINGS:BOOL=OFF
-
-//Location of the OpenGL extensions header file (glext.h).
-VTK_GLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glext.h
-
-//Location of the GLX extensions header file (glxext.h).
-VTK_GLXEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glxext.h
-
-//The repository for large data used for testing.  To check out
-// this repository from CVS, first run
-//\n
-//\ncvs -d :pserver:anonymous@public.kitware.com:/cvsroot/VTK login
-//\n
-//\n(respond with password vtk) and then run
-//\n
-//\ncvs -d :pserver:anonymous@public.kitware.com:/cvsroot/VTK checkout
-// VTKLargeData
-VTK_LARGE_DATA_ROOT:PATH=VTK_LARGE_DATA_ROOT-NOTFOUND
-
-//Remove all legacy code completely.
-VTK_LEGACY_REMOVE:BOOL=OFF
-
-//Silence all legacy code messages.
-VTK_LEGACY_SILENT:BOOL=OFF
-
-//; separated directories to search for materials/shaders
-VTK_MATERIALS_DIRS:STRING=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-
-//Disable multithreading support in the Python bindings
-VTK_NO_PYTHON_THREADS:BOOL=OFF
-
-//The opengl library being used supports off screen Mesa calls.
-VTK_OPENGL_HAS_OSMESA:BOOL=OFF
-
-//Arguments passed to "python setup.py install ..." during installation.
-VTK_PYTHON_SETUP_ARGS:STRING=--prefix="${CMAKE_INSTALL_PREFIX}"
-
-//Extra flags for Objective C compilation
-VTK_REQUIRED_OBJCXX_FLAGS:STRING=-fobjc-gc
-
-//Value Computed by CMake
-VTK_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK
-
-//Locale for VTK C++ tests. Example fr_FR.utf8, empty string for
-// env variable.
-VTK_TESTING_LOCALE:STRING=
-
-//VTK tests call vtkFloatingPointExceptions::Enable()
-VTK_TESTING_USE_FPE:BOOL=ON
-
-//VTK c++ tests will start with specified locale.
-VTK_TESTING_USE_LOCALE:BOOL=OFF
-
-//Build VTK with 64 bit ids
-VTK_USE_64BIT_IDS:BOOL=ON
-
-//Use Boost libraries for graph algorithms - www.boost.org.
-VTK_USE_BOOST:BOOL=OFF
-
-//Build classes using Carbon API.
-VTK_USE_CARBON:BOOL=OFF
-
-//Build pixel and vertex shader support for Cg.
-VTK_USE_CG_SHADERS:BOOL=OFF
-
-//Build VTK chart support (OpenGL based)
-VTK_USE_CHARTS:BOOL=ON
-
-//Build classes using Cocoa API.
-VTK_USE_COCOA:BOOL=ON
-
-//Turn this option off and tests and warning/error macros will
-// not popup windows
-VTK_USE_DISPLAY:BOOL=ON
-
-//If the FFMPEG library is available, should VTK use it for saving
-// .avi animation files?
-VTK_USE_FFMPEG_ENCODER:BOOL=OFF
-
-//Use GCC visibility support if available.
-VTK_USE_GCC_VISIBILITY:BOOL=ON
-
-//Build the vtkGeovis kit.  Needed for performing geographic visualization.
-VTK_USE_GEOVIS:BOOL=ON
-
-//Build VTK with gl2ps support.
-VTK_USE_GL2PS:BOOL=ON
-
-//Build pixel and vertex shader support for GLSL.
-VTK_USE_GLSL_SHADERS:BOOL=ON
-
-//Use Gnu R interface for VTK.  Requires Gnu R installation.
-VTK_USE_GNU_R:BOOL=OFF
-
-//Build VTK with GUI Support
-VTK_USE_GUISUPPORT:BOOL=OFF
-
-//Build the vtkInfovis kit.  Needed for performing information
-// visualization.
-VTK_USE_INFOVIS:BOOL=ON
-
-//Use mangled Mesa with OpenGL.
-VTK_USE_MANGLED_MESA:BOOL=OFF
-
-//Use Matlab Engine and Matlab Mex files.  Requires valid Matlab
-// installation.
-VTK_USE_MATLAB_MEX:BOOL=OFF
-
-//Build metaio
-VTK_USE_METAIO:BOOL=ON
-
-//Enable use of the patented mpeg2 library. You are solely responsible
-// for any legal issues associated with using patented code in
-// your software.
-VTK_USE_MPEG2_ENCODER:BOOL=OFF
-
-//Build the MySQL driver for vtkSQLDatabase.
-VTK_USE_MYSQL:BOOL=OFF
-
-//Add support for arbitrary-dimension sparse and dense arrays.
-VTK_USE_N_WAY_ARRAYS:BOOL=ON
-
-//Build the ODBC database interface
-VTK_USE_ODBC:BOOL=OFF
-
-//Build experimental Ogg/Theora support
-VTK_USE_OGGTHEORA_ENCODER:BOOL=OFF
-
-//Build openfoam reader
-VTK_USE_OPENFOAM:BOOL=ON
-
-//Build the vtkParallel kit.
-VTK_USE_PARALLEL:BOOL=OFF
-
-//Build the PostgreSQL driver for vtkSQLDatabase.
-VTK_USE_POSTGRES:BOOL=OFF
-
-//Build Qt support
-VTK_USE_QT:BOOL=OFF
-
-//Build the vtkRendering kit.  Needed for displaying data or using
-// widgets.
-VTK_USE_RENDERING:BOOL=ON
-
-//Build shared libraries with rpath.  This makes it easy to run
-// executables from the build tree when using shared libraries,
-// but removes install support.
-VTK_USE_RPATH:BOOL=ON
-
-//Use the system's expat library.
-VTK_USE_SYSTEM_EXPAT:BOOL=OFF
-
-//Use the system's freetype library.
-VTK_USE_SYSTEM_FREETYPE:BOOL=OFF
-
-//Use the system's gl2ps library.
-VTK_USE_SYSTEM_GL2PS:BOOL=OFF
-
-//Use the system's jpeg library.
-VTK_USE_SYSTEM_JPEG:BOOL=OFF
-
-//Use the system's proj4 library.
-VTK_USE_SYSTEM_LIBPROJ4:BOOL=OFF
-
-//Use the system's libxml2 library.
-VTK_USE_SYSTEM_LIBXML2:BOOL=OFF
-
-//Use the system's png library.
-VTK_USE_SYSTEM_PNG:BOOL=OFF
-
-//Use the system's tiff library.
-VTK_USE_SYSTEM_TIFF:BOOL=OFF
-
-//Use the system's zlib library.
-VTK_USE_SYSTEM_ZLIB:BOOL=OFF
-
-//Use 3Dconnexion device
-VTK_USE_TDX:BOOL=OFF
-
-//Build the vtkTextAnalysis kit.  Needed for performing text analysis.
-VTK_USE_TEXT_ANALYSIS:BOOL=OFF
-
-//Build VTK with Tk support
-VTK_USE_TK:BOOL=OFF
-
-//Build the vtkViews kit.  Needed for creating packaged and linked
-// views.
-VTK_USE_VIEWS:BOOL=ON
-
-//Enable support for VolumePro 1000.
-VTK_USE_VOLUMEPRO_1000:BOOL=OFF
-
-//Build classes for the X11 window system.
-VTK_USE_X:BOOL=OFF
-
-//Location of the WGL extensions header file (wglext.h).
-VTK_WGLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/wglext.h
-
-//Path to a file.
-VTK_WRAP_HINTS:FILEPATH=@BUILD_DIR@/VTK/Wrapping/hints
-
-//Wrap VTK classes into the Java language.
-VTK_WRAP_JAVA:BOOL=OFF
-
-//Wrap VTK classes into the Python language.
-VTK_WRAP_PYTHON:BOOL=ON
-
-//Wrap VTK classes into the TCL language.
-VTK_WRAP_TCL:BOOL=OFF
-
-//Path to a file.
-X11_ICE_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_ICE_LIB:FILEPATH=/usr/X11R6/lib/libICE.dylib
-
-//Path to a library.
-X11_SM_LIB:FILEPATH=/usr/X11R6/lib/libSM.dylib
-
-//Path to a file.
-X11_X11_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_X11_LIB:FILEPATH=/usr/X11R6/lib/libX11.dylib
-
-//Path to a file.
-X11_XShm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_XTest_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_XTest_LIB:FILEPATH=/usr/X11R6/lib/libXtst.dylib
-
-//Path to a file.
-X11_Xaccessrules_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xaccessstr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xau_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xau_LIB:FILEPATH=/usr/X11R6/lib/libXau.dylib
-
-//Path to a file.
-X11_Xcomposite_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcomposite_LIB:FILEPATH=/usr/X11R6/lib/libXcomposite.dylib
-
-//Path to a file.
-X11_Xcursor_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcursor_LIB:FILEPATH=/usr/X11R6/lib/libXcursor.dylib
-
-//Path to a file.
-X11_Xdamage_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdamage_LIB:FILEPATH=/usr/X11R6/lib/libXdamage.dylib
-
-//Path to a file.
-X11_Xdmcp_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdmcp_LIB:FILEPATH=/usr/X11R6/lib/libXdmcp.dylib
-
-//Path to a library.
-X11_Xext_LIB:FILEPATH=/usr/X11R6/lib/libXext.dylib
-
-//Path to a file.
-X11_Xfixes_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xfixes_LIB:FILEPATH=/usr/X11R6/lib/libXfixes.dylib
-
-//Path to a file.
-X11_Xft_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xft_LIB:FILEPATH=/usr/X11R6/lib/libXft.dylib
-
-//Path to a file.
-X11_Xinerama_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinerama_LIB:FILEPATH=/usr/X11R6/lib/libXinerama.dylib
-
-//Path to a file.
-X11_Xinput_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinput_LIB:FILEPATH=/usr/X11R6/lib/libXi.dylib
-
-//Path to a file.
-X11_Xkb_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xkblib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xlib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xpm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xpm_LIB:FILEPATH=/usr/X11R6/lib/libXpm.dylib
-
-//Path to a file.
-X11_Xrandr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrandr_LIB:FILEPATH=/usr/X11R6/lib/libXrandr.dylib
-
-//Path to a file.
-X11_Xrender_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrender_LIB:FILEPATH=/usr/X11R6/lib/libXrender.dylib
-
-//Path to a file.
-X11_Xscreensaver_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xscreensaver_LIB:FILEPATH=/usr/X11R6/lib/libXss.dylib
-
-//Path to a file.
-X11_Xshape_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xt_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xt_LIB:FILEPATH=/usr/X11R6/lib/libXt.dylib
-
-//Path to a file.
-X11_Xutil_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xv_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xv_LIB:FILEPATH=/usr/X11R6/lib/libXv.dylib
-
-//Path to a library.
-X11_Xxf86misc_LIB:FILEPATH=/usr/X11R6/lib/libXxf86misc.dylib
-
-//Path to a file.
-X11_dpms_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_xf86misc_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_xf86vmode_INCLUDE_PATH:PATH=/usr/include
-
-//Value Computed by CMake
-alglib_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkalglib
-
-//Value Computed by CMake
-alglib_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkalglib
-
-//Value Computed by CMake
-libproj4_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibproj4
-
-//Value Computed by CMake
-libproj4_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibproj4
-
-//Dependencies for the target
-mpistubs_LIB_DEPENDS:STATIC=general;vtksys;
-
-//Value Computed by CMake
-verdict_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/verdict
-
-//Value Computed by CMake
-verdict_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/verdict
-
-//Dependencies for the target
-vtkChartsPythonD_LIB_DEPENDS:STATIC=general;vtkCharts;general;vtkHybridPythonD;general;vtkViewsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkChartsPython_LIB_DEPENDS:STATIC=general;vtkChartsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkCharts_LIB_DEPENDS:STATIC=general;vtkHybrid;general;vtkViews;general;vtkIO;general;vtkftgl;general;vtkfreetype;
-
-//Dependencies for the target
-vtkCommonPythonD_LIB_DEPENDS:STATIC=general;vtkCommon;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkCommonPython_LIB_DEPENDS:STATIC=general;vtkCommonPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkCommon_LIB_DEPENDS:STATIC=general;vtksys;general;-lm;general;-lpthread;
-
-//Dependencies for target
-vtkDICOMParser_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtkExodus2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexodus2
-
-//Value Computed by CMake
-vtkExodus2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexodus2
-
-//Dependencies for the target
-vtkFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkCommonPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkFilteringPython_LIB_DEPENDS:STATIC=general;vtkFilteringPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkFiltering_LIB_DEPENDS:STATIC=general;vtkCommon;
-
-//Dependencies for the target
-vtkGenericFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkGenericFiltering;general;vtkFilteringPythonD;general;vtkGraphicsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGenericFilteringPython_LIB_DEPENDS:STATIC=general;vtkGenericFilteringPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGenericFiltering_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkGraphics;
-
-//Dependencies for the target
-vtkGeovisPythonD_LIB_DEPENDS:STATIC=general;vtkGeovis;general;vtkWidgetsPythonD;general;vtkViewsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGeovisPython_LIB_DEPENDS:STATIC=general;vtkGeovisPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGeovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkViews;general;vtkproj4;general;/System/Library/Frameworks/AGL.framework;general;/System/Library/Frameworks/OpenGL.framework;
-
-//Dependencies for the target
-vtkGraphicsPythonD_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkFilteringPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGraphicsPython_LIB_DEPENDS:STATIC=general;vtkGraphicsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkGraphics_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkverdict;
-
-//Dependencies for the target
-vtkHybridPythonD_LIB_DEPENDS:STATIC=general;vtkHybrid;general;vtkRenderingPythonD;general;vtkIOPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkHybridPython_LIB_DEPENDS:STATIC=general;vtkHybridPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkHybrid_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;general;vtkexoIIc;general;vtkftgl;
-
-//Dependencies for the target
-vtkIOPythonD_LIB_DEPENDS:STATIC=general;vtkIO;general;vtkFilteringPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkIOPython_LIB_DEPENDS:STATIC=general;vtkIOPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkIO_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkDICOMParser;general;vtkNetCDF;general;vtkmetaio;general;vtksqlite;general;vtkpng;general;vtkzlib;general;vtkjpeg;general;vtktiff;general;vtkexpat;general;vtksys;
-
-//Dependencies for the target
-vtkImagingPythonD_LIB_DEPENDS:STATIC=general;vtkImaging;general;vtkFilteringPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkImagingPython_LIB_DEPENDS:STATIC=general;vtkImagingPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkImaging_LIB_DEPENDS:STATIC=general;vtkFiltering;
-
-//Dependencies for the target
-vtkInfovisPythonD_LIB_DEPENDS:STATIC=general;vtkInfovis;general;vtkWidgetsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkInfovisPython_LIB_DEPENDS:STATIC=general;vtkInfovisPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkInfovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtklibxml2;general;vtkalglib;
-
-//Dependencies for target
-vtkNetCDF_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkGraphicsPythonD;general;vtkImagingPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkRenderingPythonTkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib;general;@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib;general;m;
-
-//Dependencies for the target
-vtkRenderingPython_LIB_DEPENDS:STATIC=general;vtkRenderingPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkRendering_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkImaging;general;objc;general;vtkIO;general;vtkftgl;general;vtkfreetype;general;vtkzlib;general;vtkpng;general;/System/Library/Frameworks/OpenGL.framework;general;-framework Cocoa;general;/System/Library/Frameworks/ApplicationServices.framework;general;/System/Library/Frameworks/IOKit.framework;
-
-//Dependencies for the target
-vtkViewsPythonD_LIB_DEPENDS:STATIC=general;vtkViews;general;vtkInfovisPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkViewsPython_LIB_DEPENDS:STATIC=general;vtkViewsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkViews_LIB_DEPENDS:STATIC=general;vtkInfovis;
-
-//Dependencies for the target
-vtkVolumeRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkVolumeRendering;general;vtkRenderingPythonD;general;vtkIOPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkVolumeRenderingPython_LIB_DEPENDS:STATIC=general;vtkVolumeRenderingPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkVolumeRendering_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;general;/System/Library/Frameworks/OpenGL.framework;
-
-//Dependencies for the target
-vtkWidgetsPythonD_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkRenderingPythonD;general;vtkHybridPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkWidgetsPython_LIB_DEPENDS:STATIC=general;vtkWidgetsPythonD;general;@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib;
-
-//Dependencies for the target
-vtkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkHybrid;general;/System/Library/Frameworks/OpenGL.framework;
-
-//Dependencies for target
-vtkalglib_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkexoIIc_LIB_DEPENDS:STATIC=general;vtkNetCDF;
-
-//Dependencies for target
-vtkexpat_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkfreetype_LIB_DEPENDS:STATIC=general;-framework ApplicationServices -framework CoreServices;
-
-//Dependencies for the target
-vtkftgl_LIB_DEPENDS:STATIC=general;/System/Library/Frameworks/OpenGL.framework;general;vtkfreetype;
-
-//Dependencies for target
-vtkjpeg_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtklibxml2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibxml2
-
-//Dependencies for the target
-vtklibxml2_LIB_DEPENDS:STATIC=general;vtkzlib;general;dl;general;-lpthread;general;m;
-
-//Value Computed by CMake
-vtklibxml2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibxml2
-
-//Value Computed by CMake
-vtkmetaio_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkmetaio_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtksys;
-
-//Value Computed by CMake
-vtkmetaio_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkpng_LIB_DEPENDS:STATIC=general;vtkzlib;general;-lm;
-
-//Dependencies for the target
-vtkproj4_LIB_DEPENDS:STATIC=general;m;
-
-//Dependencies for the target
-vtksqlite_LIB_DEPENDS:STATIC=general;-lpthread;
-
-//Value Computed by CMake
-vtksys_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/kwsys
-
-//Dependencies for target
-vtksys_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/kwsys
-
-//Dependencies for the target
-vtktiff_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtkjpeg;general;-lm;
-
-//Dependencies for target
-vtkverdict_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkzlib_LIB_DEPENDS:STATIC=
-
-
-########################
-# INTERNAL cache entries
-########################
-
-ALGLIB_SHARED_LIB:INTERNAL=ON
-//ADVANCED property for variable: BUILD_DOCUMENTATION
-BUILD_DOCUMENTATION-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: BUILD_SHARED_LIBS
-BUILD_SHARED_LIBS-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: BZRCOMMAND
-BZRCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_ANSI_FOR_SCOPE:INTERNAL=TRUE
-//Have include iostream
-CMAKE_ANSI_STREAM_HEADERS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_AR
-CMAKE_AR-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_BUILD_TOOL
-CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1
-//What is the target build tool cmake is generating for.
-CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/make
-//This is the directory where this CMakeCache.txt was created
-CMAKE_CACHEFILE_DIR:INTERNAL=@BUILD_DIR@/VTK-build
-//Major version of cmake used to create the current loaded cache
-CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2
-//Minor version of cmake used to create the current loaded cache
-CMAKE_CACHE_MINOR_VERSION:INTERNAL=8
-//Patch version of cmake used to create the current loaded cache
-CMAKE_CACHE_PATCH_VERSION:INTERNAL=0
-//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE
-CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1
-//Path to CMake executable.
-CMAKE_COMMAND:INTERNAL=@EXTERNALS@/bin/cmake
-//Path to cpack program executable.
-CMAKE_CPACK_COMMAND:INTERNAL=@EXTERNALS@/bin/cpack
-//ADVANCED property for variable: CMAKE_CTEST_COMMAND
-CMAKE_CTEST_COMMAND-ADVANCED:INTERNAL=1
-//Path to ctest program executable.
-CMAKE_CTEST_COMMAND:INTERNAL=@EXTERNALS@/bin/ctest
-//ADVANCED property for variable: CMAKE_CXX_COMPILER
-CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_CXX_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS
-CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG
-CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL
-CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE
-CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO
-CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_COMPILER
-CMAKE_C_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_C_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS
-CMAKE_C_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG
-CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL
-CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE
-CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO
-CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_CXX_ABI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_C_ABI_COMPILED:INTERNAL=TRUE
-//Path to cache edit program executable.
-CMAKE_EDIT_COMMAND:INTERNAL=@EXTERNALS@/bin/ccmake
-//Executable file format
-CMAKE_EXECUTABLE_FORMAT:INTERNAL=Unknown
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS
-CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG
-CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE
-CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Name of generator.
-CMAKE_GENERATOR:INTERNAL=Unix Makefiles
-//Result of TRY_COMPILE
-CMAKE_HAS_ANSI_STRING_STREAM:INTERNAL=TRUE
-//Is X11 around.
-CMAKE_HAS_X:INTERNAL=1
-//Have function connect
-CMAKE_HAVE_CONNECT:INTERNAL=1
-//Have function gethostbyname
-CMAKE_HAVE_GETHOSTBYNAME:INTERNAL=1
-//Have include CMAKE_HAVE_LIMITS_H
-CMAKE_HAVE_LIMITS_H:INTERNAL=1
-//Have library pthreads
-CMAKE_HAVE_PTHREADS_CREATE:INTERNAL=
-//Have library pthread
-CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1
-//Have include CMAKE_HAVE_PTHREAD_H
-CMAKE_HAVE_PTHREAD_H:INTERNAL=1
-//Have function remove
-CMAKE_HAVE_REMOVE:INTERNAL=1
-//Have function shmat
-CMAKE_HAVE_SHMAT:INTERNAL=1
-//Have includes CMAKE_HAVE_SYS_PRCTL_H
-CMAKE_HAVE_SYS_PRCTL_H:INTERNAL=
-//Have include CMAKE_HAVE_UNISTD_H
-CMAKE_HAVE_UNISTD_H:INTERNAL=1
-//Start directory with the top level CMakeLists.txt file for this
-// project
-CMAKE_HOME_DIRECTORY:INTERNAL=@BUILD_DIR@/VTK
-//ADVANCED property for variable: CMAKE_HP_PTHREADS
-CMAKE_HP_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_INSTALL_NAME_TOOL
-CMAKE_INSTALL_NAME_TOOL-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: CMAKE_INSTALL_PREFIX
-CMAKE_INSTALL_PREFIX-MODIFIED:INTERNAL=ON
-//Have library ICE
-CMAKE_LIB_ICE_HAS_ICECONNECTIONNUMBER:INTERNAL=1
-//ADVANCED property for variable: CMAKE_LINKER
-CMAKE_LINKER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MAKE_PROGRAM
-CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS
-CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG
-CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE
-CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_NM
-CMAKE_NM-ADVANCED:INTERNAL=1
-//Does the compiler support ansi for scope.
-CMAKE_NO_ANSI_FOR_SCOPE:INTERNAL=0
-//ADVANCED property for variable: CMAKE_NO_ANSI_STREAM_HEADERS
-CMAKE_NO_ANSI_STREAM_HEADERS-ADVANCED:INTERNAL=1
-//Does the compiler support headers like iostream.
-CMAKE_NO_ANSI_STREAM_HEADERS:INTERNAL=0
-//Does the compiler support sstream
-CMAKE_NO_ANSI_STRING_STREAM:INTERNAL=0
-//Does the compiler support std::.
-CMAKE_NO_STD_NAMESPACE:INTERNAL=0
-//number of local generators
-CMAKE_NUMBER_OF_LOCAL_GENERATORS:INTERNAL=83
-//ADVANCED property for variable: CMAKE_OBJCOPY
-CMAKE_OBJCOPY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_OBJDUMP
-CMAKE_OBJDUMP-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_RANLIB
-CMAKE_RANLIB-ADVANCED:INTERNAL=1
-//Test Support for 64 bit file systems
-CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=1
-//Path to CMake installation.
-CMAKE_ROOT:INTERNAL=@EXTERNALS@/share/cmake-2.8
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS
-CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG
-CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE
-CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_VOID_P:INTERNAL=8
-//ADVANCED property for variable: CMAKE_SKIP_RPATH
-CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1
-//Whether to build with rpath.
-CMAKE_SKIP_RPATH:INTERNAL=0
-//Result of TRY_COMPILE
-CMAKE_STD_NAMESPACE:INTERNAL=TRUE
-//ADVANCED property for variable: CMAKE_STRIP
-CMAKE_STRIP-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_THREAD_LIBS
-CMAKE_THREAD_LIBS-ADVANCED:INTERNAL=1
-//uname command
-CMAKE_UNAME:INTERNAL=/usr/bin/uname
-//ADVANCED property for variable: CMAKE_USE_PTHREADS
-CMAKE_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_RELATIVE_PATHS
-CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_SPROC
-CMAKE_USE_SPROC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_WIN32_THREADS
-CMAKE_USE_WIN32_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE
-CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1
-//Result of TEST_BIG_ENDIAN
-CMAKE_WORDS_BIGENDIAN:INTERNAL=0
-//ADVANCED property for variable: CMAKE_X_CFLAGS
-CMAKE_X_CFLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_X_LIBS
-CMAKE_X_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: COVERAGE_COMMAND
-COVERAGE_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVSCOMMAND
-CVSCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVS_UPDATE_OPTIONS
-CVS_UPDATE_OPTIONS-ADVANCED:INTERNAL=1
-//CXX compiler accepts flag -no-cpp-precomp
-CXX_HAS_CPP_PRECOMP_FLAG:INTERNAL=TRUE
-//ADVANCED property for variable: DART_TESTING_TIMEOUT
-DART_TESTING_TIMEOUT-ADVANCED:INTERNAL=1
-//Single output directory for building all executables.
-EXECUTABLE_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//Have include malloc.h
-EX_HAVE_MALLOC_H:INTERNAL=
-//Details about finding PythonLibs
-FIND_PACKAGE_MESSAGE_DETAILS_PythonLibs:INTERNAL=[@PREFIX_PATH@/Python.framework/Versions/Current/lib/python@PYVER@/config/libpython@PYVER@.dylib][@PREFIX_PATH@/Python.framework/Headers]
-//Details about finding TCL
-FIND_PACKAGE_MESSAGE_DETAILS_TCL:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TCLTK
-FIND_PACKAGE_MESSAGE_DETAILS_TCLTK:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include][@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TK
-FIND_PACKAGE_MESSAGE_DETAILS_TK:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding Tclsh
-FIND_PACKAGE_MESSAGE_DETAILS_Tclsh:INTERNAL=[@EXTERNALS@/bin/tclsh@TCLTK_VERSION@]
-//Details about finding Threads
-FIND_PACKAGE_MESSAGE_DETAILS_Threads:INTERNAL=[TRUE]
-//Details about finding X11
-FIND_PACKAGE_MESSAGE_DETAILS_X11:INTERNAL=[/usr/X11R6/lib/libX11.dylib][/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include]
-//Have symbol alloca
-HAVE_ALLOCA:INTERNAL=1
-//Have include HAVE_ALLOCA_H
-HAVE_ALLOCA_H:INTERNAL=1
-//Have includes HAVE_ANSIDECL_H
-HAVE_ANSIDECL_H:INTERNAL=
-//Have include HAVE_ARPA_INET_H
-HAVE_ARPA_INET_H:INTERNAL=1
-//Have include HAVE_ARPA_NAMESER_H
-HAVE_ARPA_NAMESER_H:INTERNAL=1
-//Have include HAVE_ASSERT_H
-HAVE_ASSERT_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_VOID_P:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_WORDS_BIGENDIAN:INTERNAL=TRUE
-//Have include HAVE_CTYPE_H
-HAVE_CTYPE_H:INTERNAL=1
-//Have include HAVE_DIRENT_H
-HAVE_DIRENT_H:INTERNAL=1
-//Have include HAVE_DLFCN_H
-HAVE_DLFCN_H:INTERNAL=1
-//Have library dl;-lpthread;m
-HAVE_DLOPEN:INTERNAL=1
-//Have includes HAVE_DL_H
-HAVE_DL_H:INTERNAL=
-//Have include HAVE_ERRNO_H
-HAVE_ERRNO_H:INTERNAL=1
-//Have include HAVE_FCNTL_H
-HAVE_FCNTL_H:INTERNAL=1
-//Have include fenv.h
-HAVE_FENV_H:INTERNAL=1
-//Have symbol finite
-HAVE_FINITE:INTERNAL=1
-//Have include HAVE_FLOAT_H
-HAVE_FLOAT_H:INTERNAL=1
-//Have function floor
-HAVE_FLOOR:INTERNAL=1
-//Have symbol fpclass
-HAVE_FPCLASS:INTERNAL=
-//Have symbol fprintf
-HAVE_FPRINTF:INTERNAL=1
-//Have symbol fp_class
-HAVE_FP_CLASS:INTERNAL=
-//Have includes HAVE_FP_CLASS_H
-HAVE_FP_CLASS_H:INTERNAL=
-//Have symbol ftime
-HAVE_FTIME:INTERNAL=1
-//NetCDF test 
-HAVE_FTRUNCATE:INTERNAL=1
-//Test HAVE_GCC_ERROR_RETURN_TYPE
-HAVE_GCC_ERROR_RETURN_TYPE:INTERNAL=1
-//Test HAVE_GCC_VISIBILITY
-HAVE_GCC_VISIBILITY:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_GETADDRINFO_COMPILED:INTERNAL=TRUE
-//Have function getopt
-HAVE_GETOPT:INTERNAL=1
-//Have symbol gettimeofday
-HAVE_GETTIMEOFDAY:INTERNAL=1
-//Have includes HAVE_IEEEFP_H
-HAVE_IEEEFP_H:INTERNAL=
-//Have include HAVE_INTTYPES_H
-HAVE_INTTYPES_H:INTERNAL=1
-//Have function isascii
-HAVE_ISASCII:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF___INT64:INTERNAL=FALSE
-//Have library dl;
-HAVE_LIBDL:INTERNAL=1
-//Have include HAVE_LIMITS_H
-HAVE_LIMITS_H:INTERNAL=1
-//Have symbol localtime
-HAVE_LOCALTIME:INTERNAL=1
-//Have includes HAVE_MALLOC_H
-HAVE_MALLOC_H:INTERNAL=
-//Have include HAVE_MATH_H
-HAVE_MATH_H:INTERNAL=1
-//Have function memmove
-HAVE_MEMMOVE:INTERNAL=1
-//Have include HAVE_MEMORY_H
-HAVE_MEMORY_H:INTERNAL=1
-//Have function memset
-HAVE_MEMSET:INTERNAL=1
-//Have function mmap
-HAVE_MMAP:INTERNAL=1
-//Have includes HAVE_NAN_H
-HAVE_NAN_H:INTERNAL=
-//Have includes HAVE_NDIR_H
-HAVE_NDIR_H:INTERNAL=
-//Have include HAVE_NETDB_H
-HAVE_NETDB_H:INTERNAL=1
-//Have include HAVE_NETINET_IN_H
-HAVE_NETINET_IN_H:INTERNAL=1
-//Have function pow
-HAVE_POW:INTERNAL=1
-//Have symbol printf
-HAVE_PRINTF:INTERNAL=1
-//Have include HAVE_PTHREAD_H
-HAVE_PTHREAD_H:INTERNAL=1
-//Have include HAVE_RESOLV_H
-HAVE_RESOLV_H:INTERNAL=1
-//Have library dld;dl
-HAVE_SHLLOAD:INTERNAL=
-//Have symbol signal
-HAVE_SIGNAL:INTERNAL=1
-//Have include HAVE_SIGNAL_H
-HAVE_SIGNAL_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_OFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_PTRDIFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SSIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_UCHAR:INTERNAL=FALSE
-//Have symbol snprintf
-HAVE_SNPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SOCKLEN_T_COMPILED:INTERNAL=TRUE
-//Have symbol sprintf
-HAVE_SPRINTF:INTERNAL=1
-//Have function sqrt
-HAVE_SQRT:INTERNAL=1
-//Have symbol sscanf
-HAVE_SSCANF:INTERNAL=1
-//Have symbol stat
-HAVE_STAT:INTERNAL=1
-//Have include HAVE_STDARG_H
-HAVE_STDARG_H:INTERNAL=1
-//Have include stddef.h
-HAVE_STDDEF_H:INTERNAL=1
-//Have include stdint.h
-HAVE_STDINT_H:INTERNAL=1
-//Have include HAVE_STDIO_H
-HAVE_STDIO_H:INTERNAL=1
-//Have include HAVE_STDLIB_H
-HAVE_STDLIB_H:INTERNAL=1
-//Have function strcasecmp
-HAVE_STRCASECMP:INTERNAL=1
-//Have function strchr
-HAVE_STRCHR:INTERNAL=1
-//Have symbol strdup
-HAVE_STRDUP:INTERNAL=1
-//Have symbol strerror
-HAVE_STRERROR:INTERNAL=1
-//Have symbol strftime
-HAVE_STRFTIME:INTERNAL=1
-//Have include HAVE_STRINGS_H
-HAVE_STRINGS_H:INTERNAL=1
-//Have include HAVE_STRING_H
-HAVE_STRING_H:INTERNAL=1
-//Have symbol strndup
-HAVE_STRNDUP:INTERNAL=
-//Have function strrchr
-HAVE_STRRCHR:INTERNAL=1
-//Have function strstr
-HAVE_STRSTR:INTERNAL=1
-//Have function strtol
-HAVE_STRTOL:INTERNAL=1
-//Have function areroul
-HAVE_STRTOUL:INTERNAL=
-//NetCDF test 
-HAVE_ST_BLKSIZE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_DIR_H_COMPILED:INTERNAL=TRUE
-//Have include HAVE_SYS_MMAN_H
-HAVE_SYS_MMAN_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_NDIR_H_COMPILED:INTERNAL=FALSE
-//Have include HAVE_SYS_SELECT_H
-HAVE_SYS_SELECT_H:INTERNAL=1
-//Have include HAVE_SYS_SOCKET_H
-HAVE_SYS_SOCKET_H:INTERNAL=1
-//Have include HAVE_SYS_STAT_H
-HAVE_SYS_STAT_H:INTERNAL=1
-//Have include HAVE_SYS_TIMEB_H
-HAVE_SYS_TIMEB_H:INTERNAL=1
-//Have include HAVE_SYS_TIME_H
-HAVE_SYS_TIME_H:INTERNAL=1
-//Have include sys/types.h
-HAVE_SYS_TYPES_H:INTERNAL=1
-//Have include HAVE_TIME_H
-HAVE_TIME_H:INTERNAL=1
-//Have include HAVE_UNISTD_H
-HAVE_UNISTD_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VA_COPY_COMPILED:INTERNAL=TRUE
-//Have symbol vfprintf
-HAVE_VFPRINTF:INTERNAL=1
-//Have symbol vsnprintf
-HAVE_VSNPRINTF:INTERNAL=1
-//Have symbol vsprintf
-HAVE_VSPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF___INT64:INTERNAL=FALSE
-//Result of TRY_COMPILE
-HAVE_VTK_UINTPTR_T:INTERNAL=TRUE
-//Have includes HAVE_WINDOWS_H
-HAVE_WINDOWS_H:INTERNAL=
-//Have symbol _stat
-HAVE__STAT:INTERNAL=
-//Result of TRY_COMPILE
-HAVE___VA_COPY_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: HGCOMMAND
-HGCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-KWSYS_CHAR_IS_SIGNED:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_ARGUMENT_DEPENDENT_LOOKUP_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_CSTDDEF_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_CSTDIO_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_FULL_SPECIALIZATION_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_MEMBER_TEMPLATES_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_NULL_TEMPLATE_ARGS_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_PTRDIFF_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_SSIZE_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_C_TYPE_MACROS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_HAVE_BINARY_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_ANSI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_SSTREAM_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-KWSYS_LFS_WORKS:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_LFS_WORKS_COMPILED:INTERNAL=TRUE
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF___INT64:INTERNAL=
-//Result of TRY_COMPILE
-KWSYS_STAT_HAS_ST_MTIM_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_MAX_SIZE_ARGUMENT_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_OBJECTS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_REBIND_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_TEMPLATE_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ITERATOR_TRAITS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_STRING_HAVE_NEQ_CHAR_COMPILED:INTERNAL=TRUE
-//Single output directory for building all libraries.
-LIBRARY_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//ADVANCED property for variable: MAKECOMMAND
-MAKECOMMAND-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: MAKECOMMAND
-MAKECOMMAND-MODIFIED:INTERNAL=ON
-//Path to vtkMaterialLibraryMacro.h
-MATERIAL_LIBRARY_MATERIAL_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkMaterialLibraryMacro.h
-//Path to vtkShaderCodeLibraryMacro.h
-MATERIAL_LIBRARY_SHADER_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkShaderCodeLibraryMacro.h
-//ADVANCED property for variable: MEMORYCHECK_COMMAND
-MEMORYCHECK_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: MEMORYCHECK_SUPPRESSIONS_FILE
-MEMORYCHECK_SUPPRESSIONS_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-ADVANCED:INTERNAL=1
-//Have symbol atanh
-PROJ_HAVE_ATANH:INTERNAL=1
-//Have symbol csin
-PROJ_HAVE_COMPLEX:INTERNAL=1
-//ADVANCED property for variable: PROJ_LIST_EXTERNAL
-PROJ_LIST_EXTERNAL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_GSL
-PROJ_USE_GSL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_PTHREADS
-PROJ_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkChartsPython
-PYTHON_ENABLE_MODULE_vtkChartsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXECUTABLE
-PYTHON_EXECUTABLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXTRA_LIBS
-PYTHON_EXTRA_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_INCLUDE_DIR
-PYTHON_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//Path to where Python.h is found (deprecated)
-PYTHON_INCLUDE_PATH:INTERNAL=@PREFIX_PATH@/Python.framework/Headers
-//ADVANCED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: PYTHON_MODULE_vtkChartsPython_BUILD_SHARED
-PYTHON_MODULE_vtkChartsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkCommonPython_BUILD_SHARED
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkHybridPython_BUILD_SHARED
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkIOPython_BUILD_SHARED
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkImagingPython_BUILD_SHARED
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkViewsPython_BUILD_SHARED
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_UTIL_LIBRARY
-PYTHON_UTIL_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SCPCOMMAND
-SCPCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-SHARED_LIBRARY_PATH_INFO_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-SHARED_LIBRARY_PATH_TYPE:INTERNAL=0
-//runtime library path variable name.
-SHARED_LIBRARY_PATH_VAR_NAME:INTERNAL=DYLD_LIBRARY_PATH
-//ADVANCED property for variable: SITE
-SITE-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_OFF_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_PTRDIFF_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SIZE_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SSIZE_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_UCHAR:INTERNAL=
-//ADVANCED property for variable: SLURM_SBATCH_COMMAND
-SLURM_SBATCH_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SLURM_SRUN_COMMAND
-SLURM_SRUN_COMMAND-ADVANCED:INTERNAL=1
-//Have include STDC_HEADERS
-STDC_HEADERS:INTERNAL=1
-//Result of TRY_COMPILE
-SUPPORT_IP6_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: SVNCOMMAND
-SVNCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_LIBRARY
-TCL_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_LIBRARY
-TCL_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_TCLSH
-TCL_TCLSH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_TCLSH
-TCL_TCLSH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INTERNAL_PATH
-TK_INTERNAL_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_INTERNAL_PATH
-TK_INTERNAL_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_LIBRARY
-TK_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_LIBRARY
-TK_LIBRARY-MODIFIED:INTERNAL=ON
-//This value is not used by VTK.
-TK_WISH:INTERNAL=/usr/bin/wish
-//ADVANCED property for variable: VERDICT_BUILD_DOC
-VERDICT_BUILD_DOC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_ENABLE_TESTING
-VERDICT_ENABLE_TESTING-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE
-VERDICT_MANGLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE_PREFIX
-VERDICT_MANGLE_PREFIX-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_USE_FLOAT
-VERDICT_USE_FLOAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VLI_LIBRARY_FOR_VP1000
-VLI_LIBRARY_FOR_VP1000-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_ANSI_STREAM_EOF_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-VTK_ANSI_STREAM_EOF_RESULT:INTERNAL=0
-//Support for C++ type bool
-VTK_COMPILER_HAS_BOOL:INTERNAL=1
-//Support for full template specialization syntax
-VTK_COMPILER_HAS_FULL_SPECIALIZATION:INTERNAL=1
-//ADVANCED property for variable: VTK_DEBUG_LEAKS
-VTK_DEBUG_LEAKS-ADVANCED:INTERNAL=1
-//The directory in which code for Shaders is provided.
-VTK_DEFAULT_SHADERS_DIR:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-//String encoder.
-VTK_ENCODESTRING_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkEncodeString
-//Support for C++ explict templates
-VTK_EXPLICIT_TEMPLATES:INTERNAL=1
-//ADVANCED property for variable: VTK_GLEXT_FILE
-VTK_GLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_GLXEXT_FILE
-VTK_GLXEXT_FILE-ADVANCED:INTERNAL=1
-//Have symbol feenableexcept
-VTK_HAS_FEENABLEEXCEPT:INTERNAL=
-//Have symbol isinf
-VTK_HAS_ISINF:INTERNAL=
-//Have symbol isnan
-VTK_HAS_ISNAN:INTERNAL=
-//Have symbol _isnan
-VTK_HAS__ISNAN:INTERNAL=
-//Have include iosfwd
-VTK_HAVE_ANSI_STREAMS:INTERNAL=1
-//Support for getsockname with socklen_t
-VTK_HAVE_GETSOCKNAME_WITH_SOCKLEN_T:INTERNAL=1
-//Have library socket
-VTK_HAVE_LIBSOCKET:INTERNAL=
-//Have include iostream.h
-VTK_HAVE_OLD_STREAMS:INTERNAL=1
-//Have include strstream.h
-VTK_HAVE_OLD_STRSTREAM_H:INTERNAL=
-//Have include strstrea.h
-VTK_HAVE_OLD_STRSTREA_H:INTERNAL=
-//Have symbol SO_REUSEADDR
-VTK_HAVE_SO_REUSEADDR:INTERNAL=1
-//Whether istream supports long long
-VTK_ISTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_REMOVE
-VTK_LEGACY_REMOVE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_SILENT
-VTK_LEGACY_SILENT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_MATERIALS_DIRS
-VTK_MATERIALS_DIRS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_NO_PYTHON_THREADS
-VTK_NO_PYTHON_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_OPENGL_HAS_OSMESA
-VTK_OPENGL_HAS_OSMESA-ADVANCED:INTERNAL=1
-//Whether ostream supports long long
-VTK_OSTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//OpenGL extensions parser.
-VTK_PARSEOGLEXT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkParseOGLExt
-//Install directory for Python .py and .pyc files
-VTK_PYTHON_MODULE_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//Install directory for Python binary modules
-VTK_PYTHON_MODULE_PLATFORM_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//ADVANCED property for variable: VTK_PYTHON_SETUP_ARGS
-VTK_PYTHON_SETUP_ARGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_REQUIRED_OBJCXX_FLAGS
-VTK_REQUIRED_OBJCXX_FLAGS-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF_LONG_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF___INT64:INTERNAL=
-//Very few users should worry about this option. If VTK is built
-// against a static Tcl/Tk lib (see VTK_TCL_TK_STATIC) or a shared
-// Tcl/Tk bundled inside a project with no library support files
-// (ex: ParaViewComplete), this variable should be set to ON and
-// both VTK_TCL_SUPPORT_LIBRARY_PATH and VTK_TK_SUPPORT_LIBRARY_PATH
-// should point to the directories that hold those files (typically,
-// lib/tcl8.4 and lib/tk8.4 for a typical Tcl/Tk installation,
-// or tcl8.4.5/library and tk8.4.5/library for a Tcl/Tk source
-// repository). Once this variable is set to ON, support files
-// will automatically be copied to the build directory and the
-// executables will try to use that location to initialize Tcl/Tk.
-VTK_TCL_TK_COPY_SUPPORT_LIBRARY:INTERNAL=
-//ADVANCED property for variable: VTK_TESTING_LOCALE
-VTK_TESTING_LOCALE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_TESTING_USE_FPE
-VTK_TESTING_USE_FPE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_TESTING_USE_LOCALE
-VTK_TESTING_USE_LOCALE-ADVANCED:INTERNAL=1
-//Whether char is signed.
-VTK_TYPE_CHAR_IS_SIGNED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_TYPE_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//Result of CHECK_TYPE_SIZE
-VTK_UINTPTR_T:INTERNAL=8
-//ADVANCED property for variable: VTK_USE_64BIT_IDS
-VTK_USE_64BIT_IDS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_BOOST
-VTK_USE_BOOST-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_CG_SHADERS
-VTK_USE_CG_SHADERS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_DISPLAY
-VTK_USE_DISPLAY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_FFMPEG_ENCODER
-VTK_USE_FFMPEG_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GCC_VISIBILITY
-VTK_USE_GCC_VISIBILITY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_GLSL_SHADERS
-VTK_USE_GLSL_SHADERS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GNU_R
-VTK_USE_GNU_R-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GUISUPPORT
-VTK_USE_GUISUPPORT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MANGLED_MESA
-VTK_USE_MANGLED_MESA-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MATLAB_MEX
-VTK_USE_MATLAB_MEX-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_METAIO
-VTK_USE_METAIO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MPEG2_ENCODER
-VTK_USE_MPEG2_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MYSQL
-VTK_USE_MYSQL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_ODBC
-VTK_USE_ODBC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_OGGTHEORA_ENCODER
-VTK_USE_OGGTHEORA_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_OPENFOAM
-VTK_USE_OPENFOAM-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_POSTGRES
-VTK_USE_POSTGRES-ADVANCED:INTERNAL=1
-//Build QVTK widget and plugin for Qt
-VTK_USE_QVTK:INTERNAL=OFF
-//ADVANCED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_SYSTEM_EXPAT
-VTK_USE_SYSTEM_EXPAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_FREETYPE
-VTK_USE_SYSTEM_FREETYPE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_GL2PS
-VTK_USE_SYSTEM_GL2PS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_JPEG
-VTK_USE_SYSTEM_JPEG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBPROJ4
-VTK_USE_SYSTEM_LIBPROJ4-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBXML2
-VTK_USE_SYSTEM_LIBXML2-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_PNG
-VTK_USE_SYSTEM_PNG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_TIFF
-VTK_USE_SYSTEM_TIFF-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_ZLIB
-VTK_USE_SYSTEM_ZLIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_TDX
-VTK_USE_TDX-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_TK
-VTK_USE_TK-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_TK
-VTK_USE_TK-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_VOLUMEPRO_1000
-VTK_USE_VOLUMEPRO_1000-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_WGLEXT_FILE
-VTK_WGLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_WRAP_HINTS
-VTK_WRAP_HINTS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_WRAP_PYTHON
-VTK_WRAP_PYTHON-MODIFIED:INTERNAL=ON
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPython
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_INIT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPythonInit
-//ADVANCED property for variable: X11_ICE_INCLUDE_PATH
-X11_ICE_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_ICE_LIB
-X11_ICE_LIB-ADVANCED:INTERNAL=1
-//Have library /usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-X11_LIB_X11_SOLO:INTERNAL=1
-//ADVANCED property for variable: X11_SM_LIB
-X11_SM_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_INCLUDE_PATH
-X11_X11_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_LIB
-X11_X11_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XShm_INCLUDE_PATH
-X11_XShm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_INCLUDE_PATH
-X11_XTest_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_LIB
-X11_XTest_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessrules_INCLUDE_PATH
-X11_Xaccessrules_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessstr_INCLUDE_PATH
-X11_Xaccessstr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_INCLUDE_PATH
-X11_Xau_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_LIB
-X11_Xau_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_INCLUDE_PATH
-X11_Xcomposite_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_LIB
-X11_Xcomposite_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_INCLUDE_PATH
-X11_Xcursor_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_LIB
-X11_Xcursor_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_INCLUDE_PATH
-X11_Xdamage_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_LIB
-X11_Xdamage_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_INCLUDE_PATH
-X11_Xdmcp_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_LIB
-X11_Xdmcp_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xext_LIB
-X11_Xext_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_INCLUDE_PATH
-X11_Xfixes_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_LIB
-X11_Xfixes_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_INCLUDE_PATH
-X11_Xft_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_LIB
-X11_Xft_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_INCLUDE_PATH
-X11_Xinerama_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_LIB
-X11_Xinerama_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_INCLUDE_PATH
-X11_Xinput_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_LIB
-X11_Xinput_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkb_INCLUDE_PATH
-X11_Xkb_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkblib_INCLUDE_PATH
-X11_Xkblib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xlib_INCLUDE_PATH
-X11_Xlib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_INCLUDE_PATH
-X11_Xpm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_LIB
-X11_Xpm_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_INCLUDE_PATH
-X11_Xrandr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_LIB
-X11_Xrandr_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_INCLUDE_PATH
-X11_Xrender_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_LIB
-X11_Xrender_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_INCLUDE_PATH
-X11_Xscreensaver_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_LIB
-X11_Xscreensaver_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xshape_INCLUDE_PATH
-X11_Xshape_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_INCLUDE_PATH
-X11_Xt_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_LIB
-X11_Xt_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xutil_INCLUDE_PATH
-X11_Xutil_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_INCLUDE_PATH
-X11_Xv_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_LIB
-X11_Xv_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xxf86misc_LIB
-X11_Xxf86misc_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_dpms_INCLUDE_PATH
-X11_dpms_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86misc_INCLUDE_PATH
-X11_xf86misc_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86vmode_INCLUDE_PATH
-X11_xf86vmode_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Already complained about update type.
-__CTEST_UPDATE_TYPE_COMPLAINED:INTERNAL=1
-//Have function _isinf
-float.h:INTERNAL=
-
diff --git a/exsrc/src/CMakeCache.txt.mac.in b/exsrc/src/CMakeCache.txt.mac.in
deleted file mode 100644
index 15a056b28..000000000
--- a/exsrc/src/CMakeCache.txt.mac.in
+++ /dev/null
@@ -1,1965 +0,0 @@
-# This is the CMakeCache file.
-# For build in directory: @BUILD_DIR@/VTK-build
-# It was generated by CMake: cmake
-# You can edit this file to change values found and used by cmake.
-# If you do not want to change any of the values, simply exit the editor.
-# If you do want to change a value, simply edit, save, and exit the editor.
-# The syntax for the file is as follows:
-# KEY:TYPE=VALUE
-# KEY is the name of a variable in the cache.
-# TYPE is a hint to GUI's for the type of VALUE, DO NOT EDIT TYPE!.
-# VALUE is the current value for the KEY.
-
-########################
-# EXTERNAL cache entries
-########################
-
-//Build the documentation (Doxygen).
-BUILD_DOCUMENTATION:BOOL=OFF
-
-//Build VTK examples.
-BUILD_EXAMPLES:BOOL=ON
-
-//Build Verdict with shared libraries.
-BUILD_SHARED_LIBS:BOOL=ON
-
-//Build the testing tree.
-BUILD_TESTING:BOOL=OFF
-
-//Path to a program.
-BZRCOMMAND:FILEPATH=BZRCOMMAND-NOTFOUND
-
-//Path to a program.
-CMAKE_AR:FILEPATH=/usr/bin/ar
-
-//For backwards compatibility, what version of CMake commands and
-// syntax should this version of CMake try to support.
-CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.4
-
-//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or
-// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel.
-CMAKE_BUILD_TYPE:STRING=
-
-//Enable/Disable color output during build.
-CMAKE_COLOR_MAKEFILE:BOOL=ON
-
-//CXX compiler.
-CMAKE_CXX_COMPILER:FILEPATH=/usr/bin/c++
-
-//Flags used by the compiler during all build types.
-CMAKE_CXX_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_CXX_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//C compiler.
-CMAKE_C_COMPILER:FILEPATH=/usr/bin/gcc
-
-//Flags used by the compiler during all build types.
-CMAKE_C_FLAGS:STRING=
-
-//Flags used by the compiler during debug builds.
-CMAKE_C_FLAGS_DEBUG:STRING=-g
-
-//Flags used by the compiler during release minsize builds.
-CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os -DNDEBUG
-
-//Flags used by the compiler during release builds (/MD /Ob1 /Oi
-// /Ot /Oy /Gs will produce slightly less optimized but smaller
-// files).
-CMAKE_C_FLAGS_RELEASE:STRING=-O3 -DNDEBUG
-
-//Flags used by the compiler during Release with Debug Info builds.
-CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g
-
-//Flags used by the linker.
-CMAKE_EXE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Use HP pthreads.
-CMAKE_HP_PTHREADS:BOOL=
-
-//Path to a program.
-CMAKE_INSTALL_NAME_TOOL:FILEPATH=/usr/bin/install_name_tool
-
-//Install path prefix, prepended onto install directories.
-CMAKE_INSTALL_PREFIX:PATH=@PREFIX_PATH@
-
-//Path to a program.
-CMAKE_LINKER:FILEPATH=/usr/bin/ld
-
-//Path to a program.
-CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/make
-
-//Flags used by the linker during the creation of modules.
-CMAKE_MODULE_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_NM:FILEPATH=/usr/bin/nm
-
-//Path to a program.
-CMAKE_OBJCOPY:FILEPATH=CMAKE_OBJCOPY-NOTFOUND
-
-//Path to a program.
-CMAKE_OBJDUMP:FILEPATH=CMAKE_OBJDUMP-NOTFOUND
-
-//Build architectures for OSX
-CMAKE_OSX_ARCHITECTURES:STRING=
-
-//Minimum OS X version to target for deployment (at runtime); newer
-// APIs weak linked. Set to empty string for default value.
-CMAKE_OSX_DEPLOYMENT_TARGET:STRING=10.5
-
-//The product will be built against the headers and libraries located
-// inside the indicated SDK.
-CMAKE_OSX_SYSROOT:PATH=/Developer/SDKs/MacOSX10.5.sdk
-
-//Value Computed by CMake
-CMAKE_PROJECT_NAME:STATIC=VTK
-
-//Path to a program.
-CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib
-
-//Flags used by the linker during the creation of dll's.
-CMAKE_SHARED_LINKER_FLAGS:STRING=
-
-//Flags used by the linker during debug builds.
-CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING=
-
-//Flags used by the linker during release minsize builds.
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING=
-
-//Flags used by the linker during release builds.
-CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING=
-
-//Flags used by the linker during Release with Debug Info builds.
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING=
-
-//Path to a program.
-CMAKE_STRIP:FILEPATH=/usr/bin/strip
-
-//Thread library used.
-CMAKE_THREAD_LIBS:STRING=-lpthread
-
-//Use the pthreads library.
-CMAKE_USE_PTHREADS:BOOL=1
-
-//If true, cmake will use relative paths in makefiles and projects.
-CMAKE_USE_RELATIVE_PATHS:BOOL=OFF
-
-//Use sproc libs.
-CMAKE_USE_SPROC:BOOL=
-
-//Use the win32 thread library.
-CMAKE_USE_WIN32_THREADS:BOOL=
-
-//If this value is on, makefiles will be generated without the
-// .SILENT directive, and all commands will be echoed to the console
-// during the make.  This is useful for debugging only. With Visual
-// Studio IDE projects all commands are done without /nologo.
-CMAKE_VERBOSE_MAKEFILE:BOOL=FALSE
-
-//X11 extra flags.
-CMAKE_X_CFLAGS:STRING=
-
-//Libraries and options used in X11 programs.
-CMAKE_X_LIBS:STRING=/usr/X11R6/lib/libSM.dylib;/usr/X11R6/lib/libICE.dylib;/usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-
-//Path to the coverage program that CTest uses for performing coverage
-// inspection
-COVERAGE_COMMAND:FILEPATH=/usr/bin/gcov
-
-//Path to a program.
-CVSCOMMAND:FILEPATH=/usr/bin/cvs
-
-//Options passed to the cvs update command.
-CVS_UPDATE_OPTIONS:STRING=-d -A -P
-
-//Maximum time allowed before CTest will kill the test.
-DART_TESTING_TIMEOUT:STRING=1500
-
-//Value Computed by CMake
-DICOMParser_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/DICOMParser
-
-//Value Computed by CMake
-DICOMParser_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/DICOMParser
-
-//Path to a program.
-HGCOMMAND:FILEPATH=HGCOMMAND-NOTFOUND
-
-//Command used to build entire project from the command line.
-MAKECOMMAND:STRING=/usr/bin/make -i
-
-//Path to the memory checking command, used for memory error detection.
-MEMORYCHECK_COMMAND:FILEPATH=/usr/local/bin/valgrind
-
-//File that contains suppressions for the memory checker
-MEMORYCHECK_SUPPRESSIONS_FILE:FILEPATH=
-
-//Value Computed by CMake
-MaterialLibrary_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary
-
-//Value Computed by CMake
-MaterialLibrary_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/MaterialLibrary
-
-//Include for OpenGL on OSX
-OPENGL_INCLUDE_DIR:PATH=/usr/X11R6/include
-
-//OpenGL lib for OSX
-OPENGL_gl_LIBRARY:FILEPATH=/usr/X11R6/lib/libGL.dylib
-
-//AGL lib for OSX
-OPENGL_glu_LIBRARY:FILEPATH=/usr/X11R6/lib/libGLU.dylib
-
-//Path to a file.
-OPENGL_xmesa_INCLUDE_DIR:PATH=/usr/X11R6/include
-
-//Does an external project define proj_list or should libproj4
-// define it?
-PROJ_LIST_EXTERNAL:BOOL=OFF
-
-//Should libproj4 include projection code that relies on GSL?
-PROJ_USE_GSL:BOOL=OFF
-
-//Should libproj4 be built as a thread-friendly library?
-PROJ_USE_PTHREADS:BOOL=OFF
-
-//Path to a library.
-PYTHON_DEBUG_LIBRARY:FILEPATH=/Library/Frameworks/python.framework
-
-//Add module vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython:BOOL=ON
-
-//Add module vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython:BOOL=ON
-
-//Add module vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython:BOOL=ON
-
-//Add module vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython:BOOL=ON
-
-//Add module vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython:BOOL=ON
-
-//Add module vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython:BOOL=ON
-
-//Add module vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython:BOOL=ON
-
-//Add module vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython:BOOL=ON
-
-//Add module vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython:BOOL=ON
-
-//Add module vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython:BOOL=ON
-
-//Add module vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython:BOOL=ON
-
-//Add module vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython:BOOL=ON
-
-//Add module vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython:BOOL=ON
-
-//Path to a program.
-PYTHON_EXECUTABLE:FILEPATH=@PREFIX_PATH@/bin/python@PYVER@
-
-//Extra libraries to link when linking to python (such as "z" for
-// zlib).  Separate multiple libraries with semicolons.
-PYTHON_EXTRA_LIBS:STRING=
-
-//Path to a file.
-PYTHON_INCLUDE_PATH:PATH=@PREFIX_PATH@/include/python@PYVER@
-
-//Path to a library.
-PYTHON_LIBRARY:FILEPATH=@PREFIX_PATH@/lib/libpython@PYVER@.dylib
-
-//Add module vtkCommonPython shared
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkFilteringPython shared
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGenericFilteringPython shared
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGeovisPython shared
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkGraphicsPython shared
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkHybridPython shared
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkIOPython shared
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkImagingPython shared
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkInfovisPython shared
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkRenderingPython shared
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkViewsPython shared
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkVolumeRenderingPython shared
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED:BOOL=ON
-
-//Add module vtkWidgetsPython shared
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED:BOOL=ON
-
-//Utility library needed for vtkpython
-PYTHON_UTIL_LIBRARY:FILEPATH=/usr/lib/libutil.dylib
-
-//Path to scp command, used by CTest for submitting results to
-// a Dart server
-SCPCOMMAND:FILEPATH=/usr/bin/scp
-
-//Name of the computer/site where compile is being run
-SITE:STRING=omar
-
-//Path to the SLURM sbatch executable
-SLURM_SBATCH_COMMAND:FILEPATH=SLURM_SBATCH_COMMAND-NOTFOUND
-
-//Path to the SLURM srun executable
-SLURM_SRUN_COMMAND:FILEPATH=SLURM_SRUN_COMMAND-NOTFOUND
-
-//Path to a program.
-SVNCOMMAND:FILEPATH=svn
-
-//Path to a file.
-TCL_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//Path to a library.
-TCL_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib
-
-//Path to a program.
-TCL_TCLSH:FILEPATH=@EXTERNALS@/bin/tclsh@TCLTK_VERSION@
-
-//Path to a file.
-TK_INCLUDE_PATH:PATH=@EXTERNALS@/include
-
-//The path to the Tk internal headers (tkMacOSXDefault.h).
-TK_INTERNAL_PATH:PATH=@BUILD_DIR@/VTK/Utilities/TclTk/internals/tk8.4
-
-//Path to a library.
-TK_LIBRARY:FILEPATH=@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib
-
-//Build the 2007 Verdict User Manual
-VERDICT_BUILD_DOC:BOOL=OFF
-
-//Should tests of the VERDICT library be built?
-VERDICT_ENABLE_TESTING:BOOL=OFF
-
-//Mangle verdict names for inclusion in a larger library?
-VERDICT_MANGLE:BOOL=ON
-
-//VTK requires the verdict prefix to be vtk
-VERDICT_MANGLE_PREFIX:STRING=vtk
-
-//VTK requires doubles
-VERDICT_USE_FLOAT:BOOL=OFF
-
-//Path to a library.
-VLI_LIBRARY_FOR_VP1000:FILEPATH=VLI_LIBRARY_FOR_VP1000-NOTFOUND
-
-//Value Computed by CMake
-VTKEXPAT_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKEXPAT_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexpat
-
-//Value Computed by CMake
-VTKFREETYPE_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFREETYPE_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkfreetype
-
-//Value Computed by CMake
-VTKFTGL_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/ftgl
-
-//Value Computed by CMake
-VTKFTGL_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/ftgl
-
-//Value Computed by CMake
-VTKJPEG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKJPEG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkjpeg
-
-//Value Computed by CMake
-VTKNETCDF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKNETCDF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtknetcdf
-
-//Value Computed by CMake
-VTKPNG_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKPNG_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkpng
-
-//Value Computed by CMake
-VTKTIFF_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKTIFF_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtktiff
-
-//Value Computed by CMake
-VTKZLIB_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTKZLIB_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkzlib
-
-//Value Computed by CMake
-VTK_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build
-
-//Path to a file.
-VTK_DATA_ROOT:PATH=@BUILD_DIR@/VTKData
-
-//Build leak checking support into VTK.
-VTK_DEBUG_LEAKS:BOOL=OFF
-
-//Location of the OpenGL extensions header file (glext.h).
-VTK_GLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glext.h
-
-//Location of the GLX extensions header file (glxext.h).
-VTK_GLXEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/glxext.h
-
-//Remove all legacy code completely.
-VTK_LEGACY_REMOVE:BOOL=OFF
-
-//Silence all legacy code messages.
-VTK_LEGACY_SILENT:BOOL=OFF
-
-//; separated directories to search for materials/shaders
-VTK_MATERIALS_DIRS:STRING=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-
-//Disable multithreading support in the Python bindings
-VTK_NO_PYTHON_THREADS:BOOL=OFF
-
-//The opengl library being used supports off screen Mesa calls.
-VTK_OPENGL_HAS_OSMESA:BOOL=OFF
-
-//Arguments passed to "python setup.py install ..." during installation.
-VTK_PYTHON_SETUP_ARGS:STRING=--prefix="${CMAKE_INSTALL_PREFIX}"
-
-//Value Computed by CMake
-VTK_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK
-
-//VTK tests call vtkFloatingPointExceptions::Enable()
-VTK_TESTING_USE_FPE:BOOL=ON
-
-//Build VTK with 64 bit ids
-VTK_USE_64BIT_IDS:BOOL=OFF
-
-//Use Boost libraries for graph algorithms - www.boost.org.
-VTK_USE_BOOST:BOOL=OFF
-
-//Build classes using Carbon API.
-VTK_USE_CARBON:BOOL=OFF
-
-//Build pixel and vertex shader support for Cg.
-VTK_USE_CG_SHADERS:BOOL=OFF
-
-//Build classes using Cocoa API.
-VTK_USE_COCOA:BOOL=OFF
-
-//Turn this option off and tests and warning/error macros will
-// not popup windows
-VTK_USE_DISPLAY:BOOL=ON
-
-//If the FFMPEG library is available, should VTK use it for saving
-// .avi animation files?
-VTK_USE_FFMPEG_ENCODER:BOOL=OFF
-
-//Build the vtkGeovis kit.  Needed for performing geographic visualization.
-VTK_USE_GEOVIS:BOOL=ON
-
-//Build VTK with gl2ps support.
-VTK_USE_GL2PS:BOOL=ON
-
-//Build pixel and vertex shader support for GLSL.
-VTK_USE_GLSL_SHADERS:BOOL=ON
-
-//Build VTK with GUI Support
-VTK_USE_GUISUPPORT:BOOL=OFF
-
-//Build the vtkInfovis kit.  Needed for performing information
-// visualization.
-VTK_USE_INFOVIS:BOOL=ON
-
-//Use mangled Mesa with OpenGL.
-VTK_USE_MANGLED_MESA:BOOL=OFF
-
-//Build metaio
-VTK_USE_METAIO:BOOL=ON
-
-//Enable use of the patented mpeg2 library. You are solely responsible
-// for any legal issues associated with using patented code in
-// your software.
-VTK_USE_MPEG2_ENCODER:BOOL=OFF
-
-//Build the MySQL driver for vtkSQLDatabase.
-VTK_USE_MYSQL:BOOL=OFF
-
-//Add support for arbitrary-dimension sparse and dense arrays.
-VTK_USE_N_WAY_ARRAYS:BOOL=OFF
-
-//Build the ODBC database interface
-VTK_USE_ODBC:BOOL=OFF
-
-//Build the vtkParallel kit.
-VTK_USE_PARALLEL:BOOL=OFF
-
-//Build the PostgreSQL driver for vtkSQLDatabase.
-VTK_USE_POSTGRES:BOOL=OFF
-
-//Build the vtkRendering kit.  Needed for displaying data or using
-// widgets.
-VTK_USE_RENDERING:BOOL=ON
-
-//Build shared libraries with rpath.  This makes it easy to run
-// executables from the build tree when using shared libraries,
-// but removes install support.
-VTK_USE_RPATH:BOOL=ON
-
-//Use the system's expat library.
-VTK_USE_SYSTEM_EXPAT:BOOL=OFF
-
-//Use the system's freetype library.
-VTK_USE_SYSTEM_FREETYPE:BOOL=OFF
-
-//Use the system's jpeg library.
-VTK_USE_SYSTEM_JPEG:BOOL=OFF
-
-//Use the system's proj4 library.
-VTK_USE_SYSTEM_LIBPROJ4:BOOL=OFF
-
-//Use the system's libxml2 library.
-VTK_USE_SYSTEM_LIBXML2:BOOL=OFF
-
-//Use the system's png library.
-VTK_USE_SYSTEM_PNG:BOOL=OFF
-
-//Use the system's tiff library.
-VTK_USE_SYSTEM_TIFF:BOOL=OFF
-
-//Use the system's zlib library.
-VTK_USE_SYSTEM_ZLIB:BOOL=OFF
-
-//Build VTK with Tk support
-VTK_USE_TK:BOOL=ON
-
-//Build the vtkViews kit.  Needed for creating packaged and linked
-// views.
-VTK_USE_VIEWS:BOOL=ON
-
-//Enable support for VolumePro 1000.
-VTK_USE_VOLUMEPRO_1000:BOOL=OFF
-
-//Build classes for the X11 window system.
-VTK_USE_X:BOOL=ON
-
-//Location of the WGL extensions header file (wglext.h).
-VTK_WGLEXT_FILE:FILEPATH=@BUILD_DIR@/VTK/Utilities/ParseOGLExt/headers/wglext.h
-
-//Path to a file.
-VTK_WRAP_HINTS:FILEPATH=@BUILD_DIR@/VTK/Wrapping/hints
-
-//Wrap VTK classes into the Java language.
-VTK_WRAP_JAVA:BOOL=OFF
-
-//Wrap VTK classes into the Python language.
-VTK_WRAP_PYTHON:BOOL=ON
-
-//Wrap VTK classes into the TCL language.
-VTK_WRAP_TCL:BOOL=OFF
-
-//Path to a file.
-X11_ICE_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_ICE_LIB:FILEPATH=/usr/X11R6/lib/libICE.dylib
-
-//Path to a library.
-X11_SM_LIB:FILEPATH=/usr/X11R6/lib/libSM.dylib
-
-//Path to a file.
-X11_X11_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_X11_LIB:FILEPATH=/usr/X11R6/lib/libX11.dylib
-
-//Path to a file.
-X11_XShm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_XTest_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_XTest_LIB:FILEPATH=/usr/X11R6/lib/libXtst.dylib
-
-//Path to a file.
-X11_Xaccessrules_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xaccessstr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xau_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xau_LIB:FILEPATH=/usr/X11R6/lib/libXau.dylib
-
-//Path to a file.
-X11_Xcomposite_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcomposite_LIB:FILEPATH=/usr/X11R6/lib/libXcomposite.dylib
-
-//Path to a file.
-X11_Xcursor_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xcursor_LIB:FILEPATH=/usr/X11R6/lib/libXcursor.dylib
-
-//Path to a file.
-X11_Xdamage_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdamage_LIB:FILEPATH=/usr/X11R6/lib/libXdamage.dylib
-
-//Path to a file.
-X11_Xdmcp_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xdmcp_LIB:FILEPATH=/usr/X11R6/lib/libXdmcp.dylib
-
-//Path to a library.
-X11_Xext_LIB:FILEPATH=/usr/X11R6/lib/libXext.dylib
-
-//Path to a file.
-X11_Xfixes_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xfixes_LIB:FILEPATH=/usr/X11R6/lib/libXfixes.dylib
-
-//Path to a file.
-X11_Xft_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xft_LIB:FILEPATH=/usr/X11R6/lib/libXft.dylib
-
-//Path to a file.
-X11_Xinerama_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinerama_LIB:FILEPATH=/usr/X11R6/lib/libXinerama.dylib
-
-//Path to a file.
-X11_Xinput_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xinput_LIB:FILEPATH=/usr/X11R6/lib/libXi.dylib
-
-//Path to a file.
-X11_Xkb_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xkblib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xlib_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xpm_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xpm_LIB:FILEPATH=/usr/X11R6/lib/libXpm.dylib
-
-//Path to a file.
-X11_Xrandr_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrandr_LIB:FILEPATH=/usr/X11R6/lib/libXrandr.dylib
-
-//Path to a file.
-X11_Xrender_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xrender_LIB:FILEPATH=/usr/X11R6/lib/libXrender.dylib
-
-//Path to a file.
-X11_Xscreensaver_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xscreensaver_LIB:FILEPATH=/usr/X11R6/lib/libXss.dylib
-
-//Path to a file.
-X11_Xshape_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xt_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xt_LIB:FILEPATH=/usr/X11R6/lib/libXt.dylib
-
-//Path to a file.
-X11_Xutil_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_Xv_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_Xv_LIB:FILEPATH=/usr/X11R6/lib/libXv.dylib
-
-//Path to a library.
-X11_Xxf86misc_LIB:FILEPATH=/usr/X11R6/lib/libXxf86misc.dylib
-
-//Path to a file.
-X11_dpms_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a library.
-X11_fontconfig_LIB:FILEPATH=/usr/X11R6/lib/libfontconfig.dylib
-
-//Path to a file.
-X11_xf86misc_INCLUDE_PATH:PATH=/usr/include
-
-//Path to a file.
-X11_xf86vmode_INCLUDE_PATH:PATH=/usr/include
-
-//Value Computed by CMake
-alglib_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkalglib
-
-//Value Computed by CMake
-alglib_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkalglib
-
-//Value Computed by CMake
-libproj4_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibproj4
-
-//Value Computed by CMake
-libproj4_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibproj4
-
-//Value Computed by CMake
-verdict_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/verdict
-
-//Value Computed by CMake
-verdict_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/verdict
-
-//Dependencies for the target
-vtkCommonPythonD_LIB_DEPENDS:STATIC=general;vtkCommon;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkCommonPython_LIB_DEPENDS:STATIC=general;vtkCommonPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkCommon_LIB_DEPENDS:STATIC=general;vtksys;general;-lpthread;general;-lm;
-
-//Dependencies for target
-vtkDICOMParser_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtkExodus2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkexodus2
-
-//Value Computed by CMake
-vtkExodus2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkexodus2
-
-//Dependencies for the target
-vtkFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkCommonPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkFilteringPython_LIB_DEPENDS:STATIC=general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkFiltering_LIB_DEPENDS:STATIC=general;vtkCommon;
-
-//Dependencies for the target
-vtkGenericFilteringPythonD_LIB_DEPENDS:STATIC=general;vtkGenericFiltering;general;vtkFilteringPythonD;general;vtkGraphicsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGenericFilteringPython_LIB_DEPENDS:STATIC=general;vtkGenericFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGenericFiltering_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkGraphics;
-
-//Dependencies for the target
-vtkGeovisPythonD_LIB_DEPENDS:STATIC=general;vtkGeovis;general;vtkWidgetsPythonD;general;vtkViewsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGeovisPython_LIB_DEPENDS:STATIC=general;vtkGeovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGeovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkViews;general;vtkproj4;
-
-//Dependencies for the target
-vtkGraphicsPythonD_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGraphicsPython_LIB_DEPENDS:STATIC=general;vtkGraphicsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkGraphics_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkverdict;
-
-//Dependencies for the target
-vtkHybridPythonD_LIB_DEPENDS:STATIC=general;vtkHybrid;general;vtkRenderingPythonD;general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkHybridPython_LIB_DEPENDS:STATIC=general;vtkHybridPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkHybrid_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;general;vtkexoIIc;
-
-//Dependencies for the target
-vtkIOPythonD_LIB_DEPENDS:STATIC=general;vtkIO;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkIOPython_LIB_DEPENDS:STATIC=general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkIO_LIB_DEPENDS:STATIC=general;vtkFiltering;general;vtkDICOMParser;general;vtkNetCDF;general;vtkmetaio;general;vtksqlite;general;vtkpng;general;vtkzlib;general;vtkjpeg;general;vtktiff;general;vtkexpat;general;vtksys;
-
-//Dependencies for the target
-vtkImagingPythonD_LIB_DEPENDS:STATIC=general;vtkImaging;general;vtkFilteringPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkImagingPython_LIB_DEPENDS:STATIC=general;vtkImagingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkImaging_LIB_DEPENDS:STATIC=general;vtkFiltering;
-
-//Dependencies for the target
-vtkInfovisPythonD_LIB_DEPENDS:STATIC=general;vtkInfovis;general;vtkWidgetsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkInfovisPython_LIB_DEPENDS:STATIC=general;vtkInfovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkInfovis_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtklibxml2;general;vtkalglib;
-
-//Dependencies for target
-vtkNetCDF_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkGraphicsPythonD;general;vtkImagingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkRenderingPythonTkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib;general;@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib;general;m;
-
-//Dependencies for the target
-vtkRenderingPython_LIB_DEPENDS:STATIC=general;vtkRenderingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkRendering_LIB_DEPENDS:STATIC=general;vtkGraphics;general;vtkImaging;general;vtkIO;general;vtkftgl;general;vtkfreetype;general;vtkzlib;general;vtkpng;general;/usr/X11R6/lib/libXt.dylib;general;/usr/X11R6/lib/libSM.dylib;general;/usr/X11R6/lib/libICE.dylib;general;/usr/X11R6/lib/libX11.dylib;general;/usr/X11R6/lib/libXext.dylib;general;/usr/X11R6/lib/libXss.dylib;general;/usr/X11R6/lib/libXft.dylib;general;/usr/X11R6/lib/libfontconfig.dylib;
-
-//Dependencies for the target
-vtkViewsPythonD_LIB_DEPENDS:STATIC=general;vtkViews;general;vtkInfovisPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkViewsPython_LIB_DEPENDS:STATIC=general;vtkViewsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkViews_LIB_DEPENDS:STATIC=general;vtkInfovis;
-
-//Dependencies for the target
-vtkVolumeRenderingPythonD_LIB_DEPENDS:STATIC=general;vtkVolumeRendering;general;vtkRenderingPythonD;general;vtkIOPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkVolumeRenderingPython_LIB_DEPENDS:STATIC=general;vtkVolumeRenderingPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkVolumeRendering_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkIO;
-
-//Dependencies for the target
-vtkWidgetsPythonD_LIB_DEPENDS:STATIC=general;vtkWidgets;general;vtkRenderingPythonD;general;vtkHybridPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkWidgetsPython_LIB_DEPENDS:STATIC=general;vtkWidgetsPythonD;optimized;@PREFIX_PATH@/lib/libpython@PYVER@.dylib;debug;/Library/Frameworks/python.framework;
-
-//Dependencies for the target
-vtkWidgets_LIB_DEPENDS:STATIC=general;vtkRendering;general;vtkHybrid;
-
-//Dependencies for target
-vtkalglib_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkexoIIc_LIB_DEPENDS:STATIC=general;vtkNetCDF;
-
-//Dependencies for target
-vtkexpat_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkfreetype_LIB_DEPENDS:STATIC=
-
-//Dependencies for the target
-vtkftgl_LIB_DEPENDS:STATIC=general;/usr/X11R6/lib/libGL.dylib;general;vtkfreetype;
-
-//Dependencies for target
-vtkjpeg_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtklibxml2_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtklibxml2
-
-//Dependencies for the target
-vtklibxml2_LIB_DEPENDS:STATIC=general;vtkzlib;general;dl;general;-lpthread;general;m;
-
-//Value Computed by CMake
-vtklibxml2_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtklibxml2
-
-//Value Computed by CMake
-vtkmetaio_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkmetaio_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtksys;
-
-//Value Computed by CMake
-vtkmetaio_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/vtkmetaio
-
-//Dependencies for the target
-vtkpng_LIB_DEPENDS:STATIC=general;vtkzlib;
-
-//Dependencies for the target
-vtkproj4_LIB_DEPENDS:STATIC=general;m;
-
-//Dependencies for target
-vtksqlite_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_BINARY_DIR:STATIC=@BUILD_DIR@/VTK-build/Utilities/kwsys
-
-//Dependencies for target
-vtksys_LIB_DEPENDS:STATIC=
-
-//Value Computed by CMake
-vtksys_SOURCE_DIR:STATIC=@BUILD_DIR@/VTK/Utilities/kwsys
-
-//Dependencies for the target
-vtktiff_LIB_DEPENDS:STATIC=general;vtkzlib;general;vtkjpeg;
-
-//Dependencies for target
-vtkverdict_LIB_DEPENDS:STATIC=
-
-//Dependencies for target
-vtkzlib_LIB_DEPENDS:STATIC=
-
-
-########################
-# INTERNAL cache entries
-########################
-
-ALGLIB_SHARED_LIB:INTERNAL=ON
-//ADVANCED property for variable: BUILD_DOCUMENTATION
-BUILD_DOCUMENTATION-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: BUILD_EXAMPLES
-BUILD_EXAMPLES-MODIFIED:INTERNAL=ON
-//MODIFIED property for variable: BUILD_SHARED_LIBS
-BUILD_SHARED_LIBS-MODIFIED:INTERNAL=ON
-//MODIFIED property for variable: BUILD_TESTING
-BUILD_TESTING-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: BZRCOMMAND
-BZRCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_ANSI_FOR_SCOPE:INTERNAL=TRUE
-//Have include iostream
-CMAKE_ANSI_STREAM_HEADERS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_AR
-CMAKE_AR-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_BUILD_TOOL
-CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1
-//What is the target build tool cmake is generating for.
-CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/make
-//This is the directory where this CMakeCache.txt was created
-CMAKE_CACHEFILE_DIR:INTERNAL=@BUILD_DIR@/VTK-build
-//Major version of cmake used to create the current loaded cache
-CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2
-//Minor version of cmake used to create the current loaded cache
-CMAKE_CACHE_MINOR_VERSION:INTERNAL=8
-//Patch version of cmake used to create the current loaded cache
-CMAKE_CACHE_PATCH_VERSION:INTERNAL=0
-//ADVANCED property for variable: CMAKE_COLOR_MAKEFILE
-CMAKE_COLOR_MAKEFILE-ADVANCED:INTERNAL=1
-//Path to CMake executable.
-CMAKE_COMMAND:INTERNAL=cmake
-//Path to cpack program executable.
-CMAKE_CPACK_COMMAND:INTERNAL=cpack
-//Path to ctest program executable.
-CMAKE_CTEST_COMMAND:INTERNAL=ctest
-//ADVANCED property for variable: CMAKE_CXX_COMPILER
-CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_CXX_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS
-CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_DEBUG
-CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_MINSIZEREL
-CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELEASE
-CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO
-CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_COMPILER
-CMAKE_C_COMPILER-ADVANCED:INTERNAL=1
-CMAKE_C_COMPILER_WORKS:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS
-CMAKE_C_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_DEBUG
-CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_MINSIZEREL
-CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELEASE
-CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_C_FLAGS_RELWITHDEBINFO
-CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_CXX_ABI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-CMAKE_DETERMINE_C_ABI_COMPILED:INTERNAL=TRUE
-//Path to cache edit program executable.
-CMAKE_EDIT_COMMAND:INTERNAL=ccmake
-//Executable file format
-CMAKE_EXECUTABLE_FORMAT:INTERNAL=Unknown
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS
-CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG
-CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL
-CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE
-CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Name of generator.
-CMAKE_GENERATOR:INTERNAL=Unix Makefiles
-//Result of TRY_COMPILE
-CMAKE_HAS_ANSI_STRING_STREAM:INTERNAL=TRUE
-//Is X11 around.
-CMAKE_HAS_X:INTERNAL=1
-//Have function connect
-CMAKE_HAVE_CONNECT:INTERNAL=1
-//Have function gethostbyname
-CMAKE_HAVE_GETHOSTBYNAME:INTERNAL=1
-//Have include CMAKE_HAVE_LIMITS_H
-CMAKE_HAVE_LIMITS_H:INTERNAL=1
-//Have library pthreads
-CMAKE_HAVE_PTHREADS_CREATE:INTERNAL=
-//Have library pthread
-CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1
-//Have include CMAKE_HAVE_PTHREAD_H
-CMAKE_HAVE_PTHREAD_H:INTERNAL=1
-//Have function remove
-CMAKE_HAVE_REMOVE:INTERNAL=1
-//Have function shmat
-CMAKE_HAVE_SHMAT:INTERNAL=1
-//Have includes CMAKE_HAVE_SYS_PRCTL_H
-CMAKE_HAVE_SYS_PRCTL_H:INTERNAL=
-//Have include CMAKE_HAVE_UNISTD_H
-CMAKE_HAVE_UNISTD_H:INTERNAL=1
-//Start directory with the top level CMakeLists.txt file for this
-// project
-CMAKE_HOME_DIRECTORY:INTERNAL=@BUILD_DIR@/VTK
-//ADVANCED property for variable: CMAKE_HP_PTHREADS
-CMAKE_HP_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_INSTALL_NAME_TOOL
-CMAKE_INSTALL_NAME_TOOL-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: CMAKE_INSTALL_PREFIX
-CMAKE_INSTALL_PREFIX-MODIFIED:INTERNAL=ON
-//Have library ICE
-CMAKE_LIB_ICE_HAS_ICECONNECTIONNUMBER:INTERNAL=1
-//ADVANCED property for variable: CMAKE_LINKER
-CMAKE_LINKER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MAKE_PROGRAM
-CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS
-CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG
-CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL
-CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE
-CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_NM
-CMAKE_NM-ADVANCED:INTERNAL=1
-//Does the compiler support ansi for scope.
-CMAKE_NO_ANSI_FOR_SCOPE:INTERNAL=0
-//ADVANCED property for variable: CMAKE_NO_ANSI_STREAM_HEADERS
-CMAKE_NO_ANSI_STREAM_HEADERS-ADVANCED:INTERNAL=1
-//Does the compiler support headers like iostream.
-CMAKE_NO_ANSI_STREAM_HEADERS:INTERNAL=0
-//Does the compiler support sstream
-CMAKE_NO_ANSI_STRING_STREAM:INTERNAL=0
-//Does the compiler support std::.
-CMAKE_NO_STD_NAMESPACE:INTERNAL=0
-//number of local generators
-CMAKE_NUMBER_OF_LOCAL_GENERATORS:INTERNAL=44
-//ADVANCED property for variable: CMAKE_OBJCOPY
-CMAKE_OBJCOPY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_OBJDUMP
-CMAKE_OBJDUMP-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_RANLIB
-CMAKE_RANLIB-ADVANCED:INTERNAL=1
-//Test Support for 64 bit file systems
-CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=1
-//Path to CMake installation.
-CMAKE_ROOT:INTERNAL=@EXTERNALS@/share/cmake-2.8
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS
-CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG
-CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL
-CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE
-CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO
-CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-CMAKE_SIZEOF_VOID_P:INTERNAL=4
-//ADVANCED property for variable: CMAKE_SKIP_RPATH
-CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1
-//Whether to build with rpath.
-CMAKE_SKIP_RPATH:INTERNAL=0
-//Result of TRY_COMPILE
-CMAKE_STD_NAMESPACE:INTERNAL=TRUE
-//ADVANCED property for variable: CMAKE_STRIP
-CMAKE_STRIP-ADVANCED:INTERNAL=1
-//Suppress Warnings that are meant for the author of the CMakeLists.txt
-// files.
-CMAKE_SUPPRESS_DEVELOPER_WARNINGS:INTERNAL=TRUE
-//ADVANCED property for variable: CMAKE_THREAD_LIBS
-CMAKE_THREAD_LIBS-ADVANCED:INTERNAL=1
-//uname command
-CMAKE_UNAME:INTERNAL=/usr/bin/uname
-//ADVANCED property for variable: CMAKE_USE_PTHREADS
-CMAKE_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_RELATIVE_PATHS
-CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_SPROC
-CMAKE_USE_SPROC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_USE_WIN32_THREADS
-CMAKE_USE_WIN32_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_VERBOSE_MAKEFILE
-CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1
-//Result of TEST_BIG_ENDIAN
-CMAKE_WORDS_BIGENDIAN:INTERNAL=0
-//ADVANCED property for variable: CMAKE_X_CFLAGS
-CMAKE_X_CFLAGS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CMAKE_X_LIBS
-CMAKE_X_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: COVERAGE_COMMAND
-COVERAGE_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVSCOMMAND
-CVSCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: CVS_UPDATE_OPTIONS
-CVS_UPDATE_OPTIONS-ADVANCED:INTERNAL=1
-//CXX compiler accepts flag -no-cpp-precomp
-CXX_HAS_CPP_PRECOMP_FLAG:INTERNAL=TRUE
-//ADVANCED property for variable: DART_TESTING_TIMEOUT
-DART_TESTING_TIMEOUT-ADVANCED:INTERNAL=1
-//Single output directory for building all executables.
-EXECUTABLE_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//Have include malloc.h
-EX_HAVE_MALLOC_H:INTERNAL=
-//Details about finding PythonInterp
-FIND_PACKAGE_MESSAGE_DETAILS_PythonInterp:INTERNAL=[@PREFIX_PATH@/bin/python@PYVER@]
-//Details about finding PythonLibs
-FIND_PACKAGE_MESSAGE_DETAILS_PythonLibs:INTERNAL=[@PREFIX_PATH@/lib/libpython@PYVER@.dylib][@PREFIX_PATH@/include/python@PYVER@]
-//Details about finding TCL
-FIND_PACKAGE_MESSAGE_DETAILS_TCL:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TCLTK
-FIND_PACKAGE_MESSAGE_DETAILS_TCLTK:INTERNAL=[@EXTERNALS@/lib/libtcl@TCLTK_VERSION@.dylib][@EXTERNALS@/include][@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding TK
-FIND_PACKAGE_MESSAGE_DETAILS_TK:INTERNAL=[@EXTERNALS@/lib/libtk@TCLTK_VERSION@.dylib][@EXTERNALS@/include]
-//Details about finding Tclsh
-FIND_PACKAGE_MESSAGE_DETAILS_Tclsh:INTERNAL=[@EXTERNALS@/bin/tclsh@TCLTK_VERSION@]
-//Details about finding Threads
-FIND_PACKAGE_MESSAGE_DETAILS_Threads:INTERNAL=[TRUE]
-//Details about finding X11
-FIND_PACKAGE_MESSAGE_DETAILS_X11:INTERNAL=[/usr/X11R6/lib/libX11.dylib][/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include;/usr/include]
-//Have symbol alloca
-HAVE_ALLOCA:INTERNAL=1
-//Have include HAVE_ALLOCA_H
-HAVE_ALLOCA_H:INTERNAL=1
-//Have includes HAVE_ANSIDECL_H
-HAVE_ANSIDECL_H:INTERNAL=
-//Have include HAVE_ARPA_INET_H
-HAVE_ARPA_INET_H:INTERNAL=1
-//Have include HAVE_ARPA_NAMESER_H
-HAVE_ARPA_NAMESER_H:INTERNAL=1
-//Have include HAVE_ASSERT_H
-HAVE_ASSERT_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_CMAKE_REQUIRE_LARGE_FILE_SUPPORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_UNSIGNED_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_SIZEOF_VOID_P:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_CMAKE_WORDS_BIGENDIAN:INTERNAL=TRUE
-//Have include HAVE_CTYPE_H
-HAVE_CTYPE_H:INTERNAL=1
-//Have include HAVE_DIRENT_H
-HAVE_DIRENT_H:INTERNAL=1
-//Have include HAVE_DLFCN_H
-HAVE_DLFCN_H:INTERNAL=1
-//Have library dl;-lpthread;m
-HAVE_DLOPEN:INTERNAL=1
-//Have includes HAVE_DL_H
-HAVE_DL_H:INTERNAL=
-//Have include HAVE_ERRNO_H
-HAVE_ERRNO_H:INTERNAL=1
-//Have include HAVE_FCNTL_H
-HAVE_FCNTL_H:INTERNAL=1
-//Have symbol finite
-HAVE_FINITE:INTERNAL=1
-//Have include HAVE_FLOAT_H
-HAVE_FLOAT_H:INTERNAL=1
-//Have function floor
-HAVE_FLOOR:INTERNAL=1
-//Have symbol fpclass
-HAVE_FPCLASS:INTERNAL=
-//Have symbol fprintf
-HAVE_FPRINTF:INTERNAL=1
-//Have symbol fp_class
-HAVE_FP_CLASS:INTERNAL=
-//Have includes HAVE_FP_CLASS_H
-HAVE_FP_CLASS_H:INTERNAL=
-//Have symbol ftime
-HAVE_FTIME:INTERNAL=1
-//NetCDF test 
-HAVE_FTRUNCATE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_GETADDRINFO_COMPILED:INTERNAL=TRUE
-//Have function getopt
-HAVE_GETOPT:INTERNAL=1
-//Have symbol gettimeofday
-HAVE_GETTIMEOFDAY:INTERNAL=1
-//Have includes HAVE_IEEEFP_H
-HAVE_IEEEFP_H:INTERNAL=
-//Have include HAVE_INTTYPES_H
-HAVE_INTTYPES_H:INTERNAL=1
-//Have function isascii
-HAVE_ISASCII:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_CHAR:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_KWSYS_SIZEOF___INT64:INTERNAL=FALSE
-//Have library dl;
-HAVE_LIBDL:INTERNAL=1
-//Have include HAVE_LIMITS_H
-HAVE_LIMITS_H:INTERNAL=1
-//Have symbol localtime
-HAVE_LOCALTIME:INTERNAL=1
-//Have includes HAVE_MALLOC_H
-HAVE_MALLOC_H:INTERNAL=
-//Have include HAVE_MATH_H
-HAVE_MATH_H:INTERNAL=1
-//Have function memmove
-HAVE_MEMMOVE:INTERNAL=1
-//Have include HAVE_MEMORY_H
-HAVE_MEMORY_H:INTERNAL=1
-//Have function memset
-HAVE_MEMSET:INTERNAL=1
-//Have function mmap
-HAVE_MMAP:INTERNAL=1
-//Have includes HAVE_NAN_H
-HAVE_NAN_H:INTERNAL=
-//Have includes HAVE_NDIR_H
-HAVE_NDIR_H:INTERNAL=
-//Have include HAVE_NETDB_H
-HAVE_NETDB_H:INTERNAL=1
-//Have include HAVE_NETINET_IN_H
-HAVE_NETINET_IN_H:INTERNAL=1
-//Have function pow
-HAVE_POW:INTERNAL=1
-//Have symbol printf
-HAVE_PRINTF:INTERNAL=1
-//Have include HAVE_PTHREAD_H
-HAVE_PTHREAD_H:INTERNAL=1
-//Have include HAVE_RESOLV_H
-HAVE_RESOLV_H:INTERNAL=1
-//Have library dld;dl
-HAVE_SHLLOAD:INTERNAL=
-//Have symbol signal
-HAVE_SIGNAL:INTERNAL=1
-//Have include HAVE_SIGNAL_H
-HAVE_SIGNAL_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SIZEOF_DOUBLE:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_FLOAT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_INT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_OFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_PTRDIFF_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SHORT:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_SSIZE_T:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_SIZEOF_UCHAR:INTERNAL=FALSE
-//Have symbol snprintf
-HAVE_SNPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SOCKLEN_T_COMPILED:INTERNAL=TRUE
-//Have symbol sprintf
-HAVE_SPRINTF:INTERNAL=1
-//Have function sqrt
-HAVE_SQRT:INTERNAL=1
-//Have symbol sscanf
-HAVE_SSCANF:INTERNAL=1
-//Have symbol stat
-HAVE_STAT:INTERNAL=1
-//Have include HAVE_STDARG_H
-HAVE_STDARG_H:INTERNAL=1
-//Have include stddef.h
-HAVE_STDDEF_H:INTERNAL=1
-//Have include stdint.h
-HAVE_STDINT_H:INTERNAL=1
-//Have include HAVE_STDIO_H
-HAVE_STDIO_H:INTERNAL=1
-//Have include HAVE_STDLIB_H
-HAVE_STDLIB_H:INTERNAL=1
-//Have function strcasecmp
-HAVE_STRCASECMP:INTERNAL=1
-//Have function strchr
-HAVE_STRCHR:INTERNAL=1
-//Have symbol strdup
-HAVE_STRDUP:INTERNAL=1
-//Have symbol strerror
-HAVE_STRERROR:INTERNAL=1
-//Have symbol strftime
-HAVE_STRFTIME:INTERNAL=1
-//Have include HAVE_STRINGS_H
-HAVE_STRINGS_H:INTERNAL=1
-//Have include HAVE_STRING_H
-HAVE_STRING_H:INTERNAL=1
-//Have symbol strndup
-HAVE_STRNDUP:INTERNAL=
-//Have function strrchr
-HAVE_STRRCHR:INTERNAL=1
-//Have function strstr
-HAVE_STRSTR:INTERNAL=1
-//Have function strtol
-HAVE_STRTOL:INTERNAL=1
-//Have function strtoul
-HAVE_STRTOUL:INTERNAL=
-//NetCDF test 
-HAVE_ST_BLKSIZE:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_DIR_H_COMPILED:INTERNAL=TRUE
-//Have include HAVE_SYS_MMAN_H
-HAVE_SYS_MMAN_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_SYS_NDIR_H_COMPILED:INTERNAL=FALSE
-//Have include HAVE_SYS_SELECT_H
-HAVE_SYS_SELECT_H:INTERNAL=1
-//Have include HAVE_SYS_SOCKET_H
-HAVE_SYS_SOCKET_H:INTERNAL=1
-//Have include HAVE_SYS_STAT_H
-HAVE_SYS_STAT_H:INTERNAL=1
-//Have include HAVE_SYS_TIMEB_H
-HAVE_SYS_TIMEB_H:INTERNAL=1
-//Have include HAVE_SYS_TIME_H
-HAVE_SYS_TIME_H:INTERNAL=1
-//Have include sys/types.h
-HAVE_SYS_TYPES_H:INTERNAL=1
-//Have include HAVE_TIME_H
-HAVE_TIME_H:INTERNAL=1
-//Have include HAVE_UNISTD_H
-HAVE_UNISTD_H:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VA_COPY_COMPILED:INTERNAL=TRUE
-//Have symbol vfprintf
-HAVE_VFPRINTF:INTERNAL=1
-//Have symbol vsnprintf
-HAVE_VSNPRINTF:INTERNAL=1
-//Have symbol vsprintf
-HAVE_VSPRINTF:INTERNAL=1
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF_LONG_LONG:INTERNAL=TRUE
-//Result of TRY_COMPILE
-HAVE_VTK_SIZEOF___INT64:INTERNAL=FALSE
-//Have includes HAVE_WINDOWS_H
-HAVE_WINDOWS_H:INTERNAL=
-//Have symbol _stat
-HAVE__STAT:INTERNAL=
-//Result of TRY_COMPILE
-HAVE___VA_COPY_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: HGCOMMAND
-HGCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_RUN
-KWSYS_CHAR_IS_SIGNED:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_ARGUMENT_DEPENDENT_LOOKUP_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_CSTDDEF_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_FULL_SPECIALIZATION_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_MEMBER_TEMPLATES_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_CXX_HAS_NULL_TEMPLATE_ARGS_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_PTRDIFF_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_C_HAS_SSIZE_T_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_ANSI_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_IOS_USE_SSTREAM_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-KWSYS_LFS_WORKS:INTERNAL=0
-//Result of TRY_COMPILE
-KWSYS_LFS_WORKS_COMPILED:INTERNAL=TRUE
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_CHAR:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_LONG_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-KWSYS_SIZEOF___INT64:INTERNAL=
-//Result of TRY_COMPILE
-KWSYS_STAT_HAS_ST_MTIM_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_MAX_SIZE_ARGUMENT_COMPILED:INTERNAL=FALSE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_OBJECTS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_REBIND_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ALLOCATOR_TEMPLATE_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAS_ITERATOR_TRAITS_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_HAVE_STD_COMPILED:INTERNAL=TRUE
-//Result of TRY_COMPILE
-KWSYS_STL_STRING_HAVE_NEQ_CHAR_COMPILED:INTERNAL=TRUE
-//Single output directory for building all libraries.
-LIBRARY_OUTPUT_PATH:INTERNAL=@BUILD_DIR@/VTK-build/bin
-//ADVANCED property for variable: MAKECOMMAND
-MAKECOMMAND-ADVANCED:INTERNAL=1
-//Path to vtkMaterialLibraryMacro.h
-MATERIAL_LIBRARY_MATERIAL_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkMaterialLibraryMacro.h
-//Path to vtkShaderCodeLibraryMacro.h
-MATERIAL_LIBRARY_SHADER_MACRO_HEADER:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/vtkShaderCodeLibraryMacro.h
-//ADVANCED property for variable: MEMORYCHECK_COMMAND
-MEMORYCHECK_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: MEMORYCHECK_SUPPRESSIONS_FILE
-MEMORYCHECK_SUPPRESSIONS_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_INCLUDE_DIR
-OPENGL_INCLUDE_DIR-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_gl_LIBRARY
-OPENGL_gl_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: OPENGL_glu_LIBRARY
-OPENGL_glu_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: OPENGL_xmesa_INCLUDE_DIR
-OPENGL_xmesa_INCLUDE_DIR-ADVANCED:INTERNAL=1
-//Have symbol atanh
-PROJ_HAVE_ATANH:INTERNAL=1
-//Have symbol csin
-PROJ_HAVE_COMPLEX:INTERNAL=1
-//ADVANCED property for variable: PROJ_LIST_EXTERNAL
-PROJ_LIST_EXTERNAL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_GSL
-PROJ_USE_GSL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PROJ_USE_PTHREADS
-PROJ_USE_PTHREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_DEBUG_LIBRARY
-PYTHON_DEBUG_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkCommonPython
-PYTHON_ENABLE_MODULE_vtkCommonPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkFilteringPython
-PYTHON_ENABLE_MODULE_vtkFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGenericFilteringPython
-PYTHON_ENABLE_MODULE_vtkGenericFilteringPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGeovisPython
-PYTHON_ENABLE_MODULE_vtkGeovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkGraphicsPython
-PYTHON_ENABLE_MODULE_vtkGraphicsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkHybridPython
-PYTHON_ENABLE_MODULE_vtkHybridPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkIOPython
-PYTHON_ENABLE_MODULE_vtkIOPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkImagingPython
-PYTHON_ENABLE_MODULE_vtkImagingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkInfovisPython
-PYTHON_ENABLE_MODULE_vtkInfovisPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkRenderingPython
-PYTHON_ENABLE_MODULE_vtkRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkViewsPython
-PYTHON_ENABLE_MODULE_vtkViewsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython
-PYTHON_ENABLE_MODULE_vtkVolumeRenderingPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_ENABLE_MODULE_vtkWidgetsPython
-PYTHON_ENABLE_MODULE_vtkWidgetsPython-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXECUTABLE
-PYTHON_EXECUTABLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_EXTRA_LIBS
-PYTHON_EXTRA_LIBS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_INCLUDE_PATH
-PYTHON_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: PYTHON_INCLUDE_PATH
-PYTHON_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: PYTHON_LIBRARY
-PYTHON_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: PYTHON_MODULE_vtkCommonPython_BUILD_SHARED
-PYTHON_MODULE_vtkCommonPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED
-PYTHON_MODULE_vtkGenericFilteringPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkGeovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED
-PYTHON_MODULE_vtkGraphicsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkHybridPython_BUILD_SHARED
-PYTHON_MODULE_vtkHybridPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkIOPython_BUILD_SHARED
-PYTHON_MODULE_vtkIOPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkImagingPython_BUILD_SHARED
-PYTHON_MODULE_vtkImagingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED
-PYTHON_MODULE_vtkInfovisPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkViewsPython_BUILD_SHARED
-PYTHON_MODULE_vtkViewsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED
-PYTHON_MODULE_vtkVolumeRenderingPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED
-PYTHON_MODULE_vtkWidgetsPython_BUILD_SHARED-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: PYTHON_UTIL_LIBRARY
-PYTHON_UTIL_LIBRARY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SCPCOMMAND
-SCPCOMMAND-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-SHARED_LIBRARY_PATH_INFO_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-SHARED_LIBRARY_PATH_TYPE:INTERNAL=0
-//runtime library path variable name.
-SHARED_LIBRARY_PATH_VAR_NAME:INTERNAL=DYLD_LIBRARY_PATH
-//ADVANCED property for variable: SITE
-SITE-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-SIZEOF_DOUBLE:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_FLOAT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_INT:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_LONG:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_OFF_T:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-SIZEOF_PTRDIFF_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SHORT:INTERNAL=2
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SIZE_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_SSIZE_T:INTERNAL=4
-//Result of CHECK_TYPE_SIZE
-SIZEOF_UCHAR:INTERNAL=
-//ADVANCED property for variable: SLURM_SBATCH_COMMAND
-SLURM_SBATCH_COMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: SLURM_SRUN_COMMAND
-SLURM_SRUN_COMMAND-ADVANCED:INTERNAL=1
-//Have include STDC_HEADERS
-STDC_HEADERS:INTERNAL=1
-//Result of TRY_COMPILE
-SUPPORT_IP6_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: SVNCOMMAND
-SVNCOMMAND-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_INCLUDE_PATH
-TCL_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_LIBRARY
-TCL_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_LIBRARY
-TCL_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TCL_TCLSH
-TCL_TCLSH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TCL_TCLSH
-TCL_TCLSH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_INCLUDE_PATH
-TK_INCLUDE_PATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_INTERNAL_PATH
-TK_INTERNAL_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: TK_LIBRARY
-TK_LIBRARY-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: TK_LIBRARY
-TK_LIBRARY-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: TK_WISH
-TK_WISH-ADVANCED:INTERNAL=1
-//This value is not used by VTK.
-TK_WISH:INTERNAL=/usr/bin/wish
-//ADVANCED property for variable: VERDICT_BUILD_DOC
-VERDICT_BUILD_DOC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_ENABLE_TESTING
-VERDICT_ENABLE_TESTING-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE
-VERDICT_MANGLE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_MANGLE_PREFIX
-VERDICT_MANGLE_PREFIX-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VERDICT_USE_FLOAT
-VERDICT_USE_FLOAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VLI_LIBRARY_FOR_VP1000
-VLI_LIBRARY_FOR_VP1000-ADVANCED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_ANSI_STREAM_EOF_COMPILED:INTERNAL=TRUE
-//Result of TRY_RUN
-VTK_ANSI_STREAM_EOF_RESULT:INTERNAL=0
-//Support for C++ type bool
-VTK_COMPILER_HAS_BOOL:INTERNAL=1
-//Support for full template specialization syntax
-VTK_COMPILER_HAS_FULL_SPECIALIZATION:INTERNAL=1
-//ADVANCED property for variable: VTK_DEBUG_LEAKS
-VTK_DEBUG_LEAKS-ADVANCED:INTERNAL=1
-//The directory in which code for Shaders is provided.
-VTK_DEFAULT_SHADERS_DIR:INTERNAL=@BUILD_DIR@/VTK-build/Utilities/MaterialLibrary/Repository
-//String encoder.
-VTK_ENCODESTRING_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkEncodeString
-//Support for C++ explict templates
-VTK_EXPLICIT_TEMPLATES:INTERNAL=1
-//ADVANCED property for variable: VTK_GLEXT_FILE
-VTK_GLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_GLXEXT_FILE
-VTK_GLXEXT_FILE-ADVANCED:INTERNAL=1
-//Have include iosfwd
-VTK_HAVE_ANSI_STREAMS:INTERNAL=1
-//Support for getsockname with socklen_t
-VTK_HAVE_GETSOCKNAME_WITH_SOCKLEN_T:INTERNAL=1
-//Have library socket
-VTK_HAVE_LIBSOCKET:INTERNAL=
-//Have include iostream.h
-VTK_HAVE_OLD_STREAMS:INTERNAL=1
-//Have include strstream.h
-VTK_HAVE_OLD_STRSTREAM_H:INTERNAL=
-//Have include strstrea.h
-VTK_HAVE_OLD_STRSTREA_H:INTERNAL=
-//Have symbol SO_REUSEADDR
-VTK_HAVE_SO_REUSEADDR:INTERNAL=1
-//Whether istream supports long long
-VTK_ISTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_REMOVE
-VTK_LEGACY_REMOVE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_LEGACY_SILENT
-VTK_LEGACY_SILENT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_MATERIALS_DIRS
-VTK_MATERIALS_DIRS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_NO_PYTHON_THREADS
-VTK_NO_PYTHON_THREADS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_OPENGL_HAS_OSMESA
-VTK_OPENGL_HAS_OSMESA-ADVANCED:INTERNAL=1
-//Whether ostream supports long long
-VTK_OSTREAM_SUPPORTS_LONG_LONG:INTERNAL=1
-//OpenGL extensions parser.
-VTK_PARSEOGLEXT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkParseOGLExt
-//Install directory for Python .py and .pyc files
-VTK_PYTHON_MODULE_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//Install directory for Python binary modules
-VTK_PYTHON_MODULE_PLATFORM_INSTALL_DIR:INTERNAL=${CMAKE_INSTALL_PREFIX}/lib/python@PYVER@/site-packages
-//ADVANCED property for variable: VTK_PYTHON_SETUP_ARGS
-VTK_PYTHON_SETUP_ARGS-ADVANCED:INTERNAL=1
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF_LONG_LONG:INTERNAL=8
-//Result of CHECK_TYPE_SIZE
-VTK_SIZEOF___INT64:INTERNAL=
-//Very few users should worry about this option. If VTK is built
-// against a static Tcl/Tk lib (see VTK_TCL_TK_STATIC) or a shared
-// Tcl/Tk bundled inside a project with no library support files
-// (ex: ParaViewComplete), this variable should be set to ON and
-// both VTK_TCL_SUPPORT_LIBRARY_PATH and VTK_TK_SUPPORT_LIBRARY_PATH
-// should point to the directories that hold those files (typically,
-// lib/tcl8.4 and lib/tk8.4 for a typical Tcl/Tk installation,
-// or tcl8.4.5/library and tk8.4.5/library for a Tcl/Tk source
-// repository). Once this variable is set to ON, support files
-// will automatically be copied to the build directory and the
-// executables will try to use that location to initialize Tcl/Tk.
-VTK_TCL_TK_COPY_SUPPORT_LIBRARY:INTERNAL=
-//ADVANCED property for variable: VTK_TESTING_USE_FPE
-VTK_TESTING_USE_FPE-ADVANCED:INTERNAL=1
-//Whether char is signed.
-VTK_TYPE_CHAR_IS_SIGNED:INTERNAL=1
-//Result of TRY_COMPILE
-VTK_TYPE_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE
-//ADVANCED property for variable: VTK_USE_64BIT_IDS
-VTK_USE_64BIT_IDS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_BOOST
-VTK_USE_BOOST-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_CG_SHADERS
-VTK_USE_CG_SHADERS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_COCOA
-VTK_USE_COCOA-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_DISPLAY
-VTK_USE_DISPLAY-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_FFMPEG_ENCODER
-VTK_USE_FFMPEG_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_GL2PS
-VTK_USE_GL2PS-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_GLSL_SHADERS
-VTK_USE_GLSL_SHADERS-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_GUISUPPORT
-VTK_USE_GUISUPPORT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MANGLED_MESA
-VTK_USE_MANGLED_MESA-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_METAIO
-VTK_USE_METAIO-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MPEG2_ENCODER
-VTK_USE_MPEG2_ENCODER-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_MYSQL
-VTK_USE_MYSQL-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_ODBC
-VTK_USE_ODBC-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_POSTGRES
-VTK_USE_POSTGRES-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_RPATH
-VTK_USE_RPATH-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_USE_SYSTEM_EXPAT
-VTK_USE_SYSTEM_EXPAT-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_FREETYPE
-VTK_USE_SYSTEM_FREETYPE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_JPEG
-VTK_USE_SYSTEM_JPEG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBPROJ4
-VTK_USE_SYSTEM_LIBPROJ4-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_LIBXML2
-VTK_USE_SYSTEM_LIBXML2-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_PNG
-VTK_USE_SYSTEM_PNG-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_TIFF
-VTK_USE_SYSTEM_TIFF-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_SYSTEM_ZLIB
-VTK_USE_SYSTEM_ZLIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_TK
-VTK_USE_TK-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_USE_VOLUMEPRO_1000
-VTK_USE_VOLUMEPRO_1000-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_USE_X
-VTK_USE_X-MODIFIED:INTERNAL=ON
-//ADVANCED property for variable: VTK_WGLEXT_FILE
-VTK_WGLEXT_FILE-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: VTK_WRAP_HINTS
-VTK_WRAP_HINTS-ADVANCED:INTERNAL=1
-//MODIFIED property for variable: VTK_WRAP_PYTHON
-VTK_WRAP_PYTHON-MODIFIED:INTERNAL=ON
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPython
-//Location of program to do Python wrapping
-VTK_WRAP_PYTHON_INIT_EXE:INTERNAL=@BUILD_DIR@/VTK-build/bin/vtkWrapPythonInit
-//ADVANCED property for variable: X11_ICE_INCLUDE_PATH
-X11_ICE_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_ICE_LIB
-X11_ICE_LIB-ADVANCED:INTERNAL=1
-//Have library /usr/X11R6/lib/libX11.dylib;/usr/X11R6/lib/libXext.dylib
-X11_LIB_X11_SOLO:INTERNAL=1
-//ADVANCED property for variable: X11_SM_LIB
-X11_SM_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_INCLUDE_PATH
-X11_X11_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_X11_LIB
-X11_X11_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XShm_INCLUDE_PATH
-X11_XShm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_INCLUDE_PATH
-X11_XTest_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_XTest_LIB
-X11_XTest_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessrules_INCLUDE_PATH
-X11_Xaccessrules_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xaccessstr_INCLUDE_PATH
-X11_Xaccessstr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_INCLUDE_PATH
-X11_Xau_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xau_LIB
-X11_Xau_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_INCLUDE_PATH
-X11_Xcomposite_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcomposite_LIB
-X11_Xcomposite_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_INCLUDE_PATH
-X11_Xcursor_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xcursor_LIB
-X11_Xcursor_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_INCLUDE_PATH
-X11_Xdamage_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdamage_LIB
-X11_Xdamage_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_INCLUDE_PATH
-X11_Xdmcp_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xdmcp_LIB
-X11_Xdmcp_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xext_LIB
-X11_Xext_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_INCLUDE_PATH
-X11_Xfixes_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xfixes_LIB
-X11_Xfixes_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_INCLUDE_PATH
-X11_Xft_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xft_LIB
-X11_Xft_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_INCLUDE_PATH
-X11_Xinerama_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinerama_LIB
-X11_Xinerama_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_INCLUDE_PATH
-X11_Xinput_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xinput_LIB
-X11_Xinput_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkb_INCLUDE_PATH
-X11_Xkb_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xkblib_INCLUDE_PATH
-X11_Xkblib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xlib_INCLUDE_PATH
-X11_Xlib_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_INCLUDE_PATH
-X11_Xpm_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xpm_LIB
-X11_Xpm_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_INCLUDE_PATH
-X11_Xrandr_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrandr_LIB
-X11_Xrandr_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_INCLUDE_PATH
-X11_Xrender_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xrender_LIB
-X11_Xrender_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_INCLUDE_PATH
-X11_Xscreensaver_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xscreensaver_LIB
-X11_Xscreensaver_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xshape_INCLUDE_PATH
-X11_Xshape_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_INCLUDE_PATH
-X11_Xt_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xt_LIB
-X11_Xt_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xutil_INCLUDE_PATH
-X11_Xutil_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_INCLUDE_PATH
-X11_Xv_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xv_LIB
-X11_Xv_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_Xxf86misc_LIB
-X11_Xxf86misc_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_dpms_INCLUDE_PATH
-X11_dpms_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_fontconfig_LIB
-X11_fontconfig_LIB-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86misc_INCLUDE_PATH
-X11_xf86misc_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//ADVANCED property for variable: X11_xf86vmode_INCLUDE_PATH
-X11_xf86vmode_INCLUDE_PATH-ADVANCED:INTERNAL=1
-//Already complained about update type.
-__CTEST_UPDATE_TYPE_COMPLAINED:INTERNAL=1
-
diff --git a/exsrc/src/cmake/multi.c b/exsrc/src/cmake/multi.c
deleted file mode 100644
index e4edb4d46..000000000
--- a/exsrc/src/cmake/multi.c
+++ /dev/null
@@ -1,1978 +0,0 @@
-/***************************************************************************
- *                                  _   _ ____  _
- *  Project                     ___| | | |  _ \| |
- *                             / __| | | | |_) | |
- *                            | (__| |_| |  _ <| |___
- *                             \___|\___/|_| \_\_____|
- *
- * Copyright (C) 1998 - 2007, Daniel Stenberg, <daniel@haxx.se>, et al.
- *
- * This software is licensed as described in the file COPYING, which
- * you should have received as part of this distribution. The terms
- * are also available at http://curl.haxx.se/docs/copyright.html.
- *
- * You may opt to use, copy, modify, merge, publish, distribute and/or sell
- * copies of the Software, and permit persons to whom the Software is
- * furnished to do so, under the terms of the COPYING file.
- *
- * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
- * KIND, either express or implied.
- *
- * $Id: multi.c,v 1.2 2007-03-15 19:22:13 andy Exp $
- ***************************************************************************/
-
-#include "setup.h"
-#include <stdlib.h>
-#include <string.h>
-
-#ifdef HAVE_SYS_TYPES_H
-#include <sys/types.h>
-#endif
-#ifdef HAVE_SYS_SOCKET_H
-#include <sys/socket.h>
-#endif
-#ifdef HAVE_UNISTD_H
-#include <unistd.h>
-#endif
-
-#include <curl/curl.h>
-
-#include "urldata.h"
-#include "transfer.h"
-#include "url.h"
-#include "connect.h"
-#include "progress.h"
-#include "memory.h"
-#include "easyif.h"
-#include "multiif.h"
-#include "sendf.h"
-#include "timeval.h"
-
-/* The last #include file should be: */
-#include "memdebug.h"
-
-struct Curl_message {
-  /* the 'CURLMsg' is the part that is visible to the external user */
-  struct CURLMsg extmsg;
-  struct Curl_message *next;
-};
-
-typedef enum {
-  CURLM_STATE_INIT,        /* start in this state */
-  CURLM_STATE_CONNECT,     /* resolve/connect has been sent off */
-  CURLM_STATE_WAITRESOLVE, /* awaiting the resolve to finalize */
-  CURLM_STATE_WAITCONNECT, /* awaiting the connect to finalize */
-  CURLM_STATE_PROTOCONNECT, /* completing the protocol-specific connect
-                               phase */
-  CURLM_STATE_WAITDO,      /* wait for our turn to send the request */
-  CURLM_STATE_DO,          /* start send off the request (part 1) */
-  CURLM_STATE_DOING,       /* sending off the request (part 1) */
-  CURLM_STATE_DO_MORE,     /* send off the request (part 2) */
-  CURLM_STATE_DO_DONE,     /* done sending off request */
-  CURLM_STATE_WAITPERFORM, /* wait for our turn to read the response */
-  CURLM_STATE_PERFORM,     /* transfer data */
-  CURLM_STATE_TOOFAST,     /* wait because limit-rate exceeded */
-  CURLM_STATE_DONE,        /* post data transfer operation */
-  CURLM_STATE_COMPLETED,   /* operation complete */
-  CURLM_STATE_CANCELLED,   /* cancelled */
-
-  CURLM_STATE_LAST /* not a true state, never use this */
-} CURLMstate;
-
-/* we support N sockets per easy handle. Set the corresponding bit to what
-   action we should wait for */
-#define MAX_SOCKSPEREASYHANDLE 5
-#define GETSOCK_READABLE (0x00ff)
-#define GETSOCK_WRITABLE (0xff00)
-
-struct closure {
-  struct closure *next; /* a simple one-way list of structs */
-  struct SessionHandle *easy_handle;
-};
-
-struct Curl_one_easy {
-  /* first, two fields for the linked list of these */
-  struct Curl_one_easy *next;
-  struct Curl_one_easy *prev;
-
-  struct SessionHandle *easy_handle; /* the easy handle for this unit */
-  struct connectdata *easy_conn;     /* the "unit's" connection */
-
-  CURLMstate state;  /* the handle's state */
-  CURLcode result;   /* previous result */
-
-  struct Curl_message *msg; /* A pointer to one single posted message.
-                               Cleanup should be done on this pointer NOT on
-                               the linked list in Curl_multi.  This message
-                               will be deleted when this handle is removed
-                               from the multi-handle */
-  int msg_num; /* number of messages left in 'msg' to return */
-
-  /* Array with the plain socket numbers this handle takes care of, in no
-     particular order. Note that all sockets are added to the sockhash, where
-     the state etc are also kept. This array is mostly used to detect when a
-     socket is to be removed from the hash. See singlesocket(). */
-  curl_socket_t sockets[MAX_SOCKSPEREASYHANDLE];
-  int numsocks;
-};
-
-#define CURL_MULTI_HANDLE 0x000bab1e
-
-#define GOOD_MULTI_HANDLE(x) \
-  ((x)&&(((struct Curl_multi *)x)->type == CURL_MULTI_HANDLE))
-#define GOOD_EASY_HANDLE(x) \
- (((struct SessionHandle *)x)->magic == CURLEASY_MAGIC_NUMBER)
-
-/* This is the struct known as CURLM on the outside */
-struct Curl_multi {
-  /* First a simple identifier to easier detect if a user mix up
-     this multi handle with an easy handle. Set this to CURL_MULTI_HANDLE. */
-  long type;
-
-  /* We have a linked list with easy handles */
-  struct Curl_one_easy easy;
-
-  int num_easy; /* amount of entries in the linked list above. */
-  int num_msgs; /* amount of messages in the easy handles */
-  int num_alive; /* amount of easy handles that are added but have not yet
-                    reached COMPLETE state */
-
-  /* callback function and user data pointer for the *socket() API */
-  curl_socket_callback socket_cb;
-  void *socket_userp;
-
-  /* Hostname cache */
-  struct curl_hash *hostcache;
-
-  /* timetree points to the splay-tree of time nodes to figure out expire
-     times of all currently set timers */
-  struct Curl_tree *timetree;
-
-  /* 'sockhash' is the lookup hash for socket descriptor => easy handles (note
-     the pluralis form, there can be more than one easy handle waiting on the
-     same actual socket) */
-  struct curl_hash *sockhash;
-
-  /* Whether pipelining is enabled for this multi handle */
-  bool pipelining_enabled;
-
-  /* shared connection cache */
-  struct conncache *connc;
-
-  /* list of easy handles kept around for doing nice connection closures */
-  struct closure *closure;
-
-  /* timer callback and user data pointer for the *socket() API */
-  curl_multi_timer_callback timer_cb;
-  void *timer_userp;
-  time_t timer_lastcall; /* the fixed time for the timeout for the previous
-                            callback */
-};
-
-static bool multi_conn_using(struct Curl_multi *multi,
-                             struct SessionHandle *data);
-static void singlesocket(struct Curl_multi *multi,
-                         struct Curl_one_easy *easy);
-static void add_closure(struct Curl_multi *multi,
-                        struct SessionHandle *data);
-static int update_timer(struct Curl_multi *multi);
-
-#ifdef CURLDEBUG
-static const char *statename[]={
-  "INIT",
-  "CONNECT",
-  "WAITRESOLVE",
-  "WAITCONNECT",
-  "PROTOCONNECT",
-  "WAITDO",
-  "DO",
-  "DOING",
-  "DO_MORE",
-  "DO_DONE",
-  "WAITPERFORM",
-  "PERFORM",
-  "TOOFAST",
-  "DONE",
-  "COMPLETED",
-  "CANCELLED"
-};
-
-void curl_multi_dump(CURLM *multi_handle);
-#endif
-
-/* always use this function to change state, to make debugging easier */
-static void multistate(struct Curl_one_easy *easy, CURLMstate state)
-{
-#ifdef CURLDEBUG
-  long index = -1;
-#endif
-  CURLMstate oldstate = easy->state;
-
-  if(oldstate == state)
-    /* don't bother when the new state is the same as the old state */
-    return;
-
-  easy->state = state;
-
-#ifdef CURLDEBUG
-  if(easy->state > CURLM_STATE_CONNECT &&
-     easy->state < CURLM_STATE_COMPLETED)
-    index = easy->easy_conn->connectindex;
-
-  infof(easy->easy_handle,
-        "STATE: %s => %s handle %p; (connection #%ld) \n",
-        statename[oldstate], statename[easy->state],
-        (char *)easy, index);
-#endif
-  if(state == CURLM_STATE_COMPLETED)
-    /* changing to COMPLETED means there's one less easy handle 'alive' */
-    easy->easy_handle->multi->num_alive--;
-}
-
-/*
- * We add one of these structs to the sockhash for a particular socket
- */
-
-struct Curl_sh_entry {
-  struct SessionHandle *easy;
-  time_t timestamp;
-  long inuse;
-  int action;  /* what action READ/WRITE this socket waits for */
-  curl_socket_t socket; /* mainly to ease debugging */
-  void *socketp; /* settable by users with curl_multi_assign() */
-};
-/* bits for 'action' having no bits means this socket is not expecting any
-   action */
-#define SH_READ  1
-#define SH_WRITE 2
-
-/* make sure this socket is present in the hash for this handle */
-static struct Curl_sh_entry *sh_addentry(struct curl_hash *sh,
-                                         curl_socket_t s,
-                                         struct SessionHandle *data)
-{
-  struct Curl_sh_entry *there =
-    Curl_hash_pick(sh, (char *)&s, sizeof(curl_socket_t));
-  struct Curl_sh_entry *check;
-
-  if(there)
-    /* it is present, return fine */
-    return there;
-
-  /* not present, add it */
-  check = calloc(sizeof(struct Curl_sh_entry), 1);
-  if(!check)
-    return NULL; /* major failure */
-  check->easy = data;
-  check->socket = s;
-
-  /* make/add new hash entry */
-  if(NULL == Curl_hash_add(sh, (char *)&s, sizeof(curl_socket_t), check)) {
-    free(check);
-    return NULL; /* major failure */
-  }
-
-  return check; /* things are good in sockhash land */
-}
-
-
-/* delete the given socket + handle from the hash */
-static void sh_delentry(struct curl_hash *sh, curl_socket_t s)
-{
-  struct Curl_sh_entry *there =
-    Curl_hash_pick(sh, (char *)&s, sizeof(curl_socket_t));
-
-  if(there) {
-    /* this socket is in the hash */
-    /* We remove the hash entry. (This'll end up in a call to
-       sh_freeentry().) */
-    Curl_hash_delete(sh, (char *)&s, sizeof(curl_socket_t));
-  }
-}
-
-/*
- * free a sockhash entry
- */
-static void sh_freeentry(void *freethis)
-{
-  struct Curl_sh_entry *p = (struct Curl_sh_entry *) freethis;
-
-  free(p);
-}
-
-/*
- * sh_init() creates a new socket hash and returns the handle for it.
- *
- * Quote from README.multi_socket:
- *
- * "Some tests at 7000 and 9000 connections showed that the socket hash lookup
- * is somewhat of a bottle neck. Its current implementation may be a bit too
- * limiting. It simply has a fixed-size array, and on each entry in the array
- * it has a linked list with entries. So the hash only checks which list to
- * scan through. The code I had used so for used a list with merely 7 slots
- * (as that is what the DNS hash uses) but with 7000 connections that would
- * make an average of 1000 nodes in each list to run through. I upped that to
- * 97 slots (I believe a prime is suitable) and noticed a significant speed
- * increase.  I need to reconsider the hash implementation or use a rather
- * large default value like this. At 9000 connections I was still below 10us
- * per call."
- *
- */
-static struct curl_hash *sh_init(void)
-{
-  return Curl_hash_alloc(97, sh_freeentry);
-}
-
-CURLM *curl_multi_init(void)
-{
-  struct Curl_multi *multi = (void *)calloc(sizeof(struct Curl_multi), 1);
-
-  if(!multi)
-    return NULL;
-
-  multi->type = CURL_MULTI_HANDLE;
-
-  multi->hostcache = Curl_mk_dnscache();
-  if(!multi->hostcache) {
-    /* failure, free mem and bail out */
-    free(multi);
-    return NULL;
-  }
-
-  multi->sockhash = sh_init();
-  if(!multi->sockhash) {
-    /* failure, free mem and bail out */
-    Curl_hash_destroy(multi->hostcache);
-    free(multi);
-    return NULL;
-  }
-
-  multi->connc = Curl_mk_connc(CONNCACHE_MULTI, -1);
-  if(!multi->connc) {
-    Curl_hash_destroy(multi->hostcache);
-    free(multi);
-    return NULL;
-  }
-
-  return (CURLM *) multi;
-}
-
-CURLMcode curl_multi_add_handle(CURLM *multi_handle,
-                                CURL *easy_handle)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-  struct closure *cl;
-  struct closure *prev=NULL;
-
-  /* First, make some basic checks that the CURLM handle is a good handle */
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  /* Verify that we got a somewhat good easy handle too */
-  if(!GOOD_EASY_HANDLE(easy_handle))
-    return CURLM_BAD_EASY_HANDLE;
-
-  /* Prevent users to add the same handle more than once! */
-  if(((struct SessionHandle *)easy_handle)->multi)
-    /* possibly we should create a new unique error code for this condition */
-    return CURLM_BAD_EASY_HANDLE;
-
-  /* Now, time to add an easy handle to the multi stack */
-  easy = (struct Curl_one_easy *)calloc(sizeof(struct Curl_one_easy), 1);
-  if(!easy)
-    return CURLM_OUT_OF_MEMORY;
-
-  cl = multi->closure;
-  while(cl) {
-    struct closure *next = cl->next;
-    if(cl->easy_handle == (struct SessionHandle *)easy_handle) {
-      /* remove this handle from the closure list */
-      free(cl);
-      if(prev)
-        prev->next = next;
-      else
-        multi->closure = next;
-      break; /* no need to continue since this handle can only be present once
-                in the list */
-    }
-    cl = next;
-  }
-
-  /* set the easy handle */
-  easy->easy_handle = easy_handle;
-  multistate(easy, CURLM_STATE_INIT);
-
-  /* for multi interface connections, we share DNS cache automatically if the
-     easy handle's one is currently private. */
-  if (easy->easy_handle->dns.hostcache &&
-      (easy->easy_handle->dns.hostcachetype == HCACHE_PRIVATE)) {
-    Curl_hash_destroy(easy->easy_handle->dns.hostcache);
-    easy->easy_handle->dns.hostcache = NULL;
-    easy->easy_handle->dns.hostcachetype = HCACHE_NONE;
-  }
-
-  if (!easy->easy_handle->dns.hostcache ||
-      (easy->easy_handle->dns.hostcachetype == HCACHE_NONE)) {
-    easy->easy_handle->dns.hostcache = multi->hostcache;
-    easy->easy_handle->dns.hostcachetype = HCACHE_MULTI;
-  }
-
-  if(easy->easy_handle->state.connc) {
-    if(easy->easy_handle->state.connc->type == CONNCACHE_PRIVATE) {
-      /* kill old private version */
-      Curl_rm_connc(easy->easy_handle->state.connc);
-      /* point out our shared one instead */
-      easy->easy_handle->state.connc = multi->connc;
-    }
-    /* else it is already using multi? */
-  }
-  else
-    /* point out our shared one */
-    easy->easy_handle->state.connc = multi->connc;
-
-  /* Make sure the type is setup correctly */
-  easy->easy_handle->state.connc->type = CONNCACHE_MULTI;
-
-  /* We add this new entry first in the list. We make our 'next' point to the
-     previous next and our 'prev' point back to the 'first' struct */
-  easy->next = multi->easy.next;
-  easy->prev = &multi->easy;
-
-  /* make 'easy' the first node in the chain */
-  multi->easy.next = easy;
-
-  /* if there was a next node, make sure its 'prev' pointer links back to
-     the new node */
-  if(easy->next)
-    easy->next->prev = easy;
-
-  Curl_easy_addmulti(easy_handle, multi_handle);
-
-  /* make the SessionHandle struct refer back to this struct */
-  easy->easy_handle->set.one_easy = easy;
-
-  /* increase the node-counter */
-  multi->num_easy++;
-
-  if((multi->num_easy * 4) > multi->connc->num) {
-    /* We want the connection cache to have plenty room. Before we supported
-       the shared cache every single easy handle had 5 entries in their cache
-       by default. */
-    CURLcode res = Curl_ch_connc(easy_handle, multi->connc,
-                                 multi->connc->num*4);
-    if(res != CURLE_OK)
-      /* TODO: we need to do some cleaning up here! */
-      return CURLM_OUT_OF_MEMORY;
-  }
-
-  /* increase the alive-counter */
-  multi->num_alive++;
-
-  update_timer(multi);
-  return CURLM_OK;
-}
-
-#if 0
-/* Debug-function, used like this:
- *
- * Curl_hash_print(multi->sockhash, debug_print_sock_hash);
- *
- * Enable the hash print function first by editing hash.c
- */
-static void debug_print_sock_hash(void *p)
-{
-  struct Curl_sh_entry *sh = (struct Curl_sh_entry *)p;
-
-  fprintf(stderr, " [easy %p/magic %x/socket %d]",
-          (void *)sh->easy, sh->easy->magic, sh->socket);
-}
-#endif
-
-CURLMcode curl_multi_remove_handle(CURLM *multi_handle,
-                                   CURL *curl_handle)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-
-  /* First, make some basic checks that the CURLM handle is a good handle */
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  /* Verify that we got a somewhat good easy handle too */
-  if(!GOOD_EASY_HANDLE(curl_handle))
-    return CURLM_BAD_EASY_HANDLE;
-
-  /* scan through the list and remove the 'curl_handle' */
-  easy = multi->easy.next;
-  while(easy) {
-    if(easy->easy_handle == (struct SessionHandle *)curl_handle)
-      break;
-    easy=easy->next;
-  }
-
-  if(easy) {
-    bool premature = (bool)(easy->state != CURLM_STATE_COMPLETED);
-
-    /* If the 'state' is not INIT or COMPLETED, we might need to do something
-       nice to put the easy_handle in a good known state when this returns. */
-    if(premature)
-      /* this handle is "alive" so we need to count down the total number of
-         alive connections when this is removed */
-      multi->num_alive--;
-
-    if (easy->easy_handle->state.is_in_pipeline &&
-        easy->state > CURLM_STATE_DO) {
-      /* If the handle is in a pipeline and has finished sending off its
-         request, we need to remember the fact that we want to remove this
-         handle but do the actual removal at a later time */
-      easy->easy_handle->state.cancelled = TRUE;
-      return CURLM_OK;
-    }
-
-    /* The timer must be shut down before easy->multi is set to NULL,
-       else the timenode will remain in the splay tree after
-       curl_easy_cleanup is called. */
-    Curl_expire(easy->easy_handle, 0);
-
-    if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) {
-      /* clear out the usage of the shared DNS cache */
-      easy->easy_handle->dns.hostcache = NULL;
-      easy->easy_handle->dns.hostcachetype = HCACHE_NONE;
-    }
-
-    /* if we have a connection we must call Curl_done() here so that we
-       don't leave a half-baked one around */
-    if(easy->easy_conn) {
-      /* Set up the association right */
-      easy->easy_conn->data = easy->easy_handle;
-
-      /* Curl_done() clears the conn->data field to lose the association
-         between the easy handle and the connection */
-      Curl_done(&easy->easy_conn, easy->result, premature);
-
-      if(easy->easy_conn)
-        /* the connection is still alive, set back the association to enable
-           the check below to trigger TRUE */
-        easy->easy_conn->data = easy->easy_handle;
-    }
-
-    /* If this easy_handle was the last one in charge for one or more
-       connections a the shared connection cache, we might need to keep this
-       handle around until either A) the connection is closed and killed
-       properly, or B) another easy_handle uses the connection.
-
-       The reason why we need to have a easy_handle associated with a live
-       connection is simply that some connections will need a handle to get
-       closed down properly. Currently, the only connections that need to keep
-       a easy_handle handle around are using FTP(S). Such connections have
-       the PROT_CLOSEACTION bit set.
-
-       Thus, we need to check for all connections in the shared cache that
-       points to this handle and are using PROT_CLOSEACTION. If there's any,
-       we need to add this handle to the list of "easy handles kept around for
-       nice connection closures".
-     */
-    if(multi_conn_using(multi, easy->easy_handle)) {
-      /* There's at least one connection using this handle so we must keep
-         this handle around. We also keep the connection cache pointer
-         pointing to the shared one since that will be used on close as
-         well. */
-      easy->easy_handle->state.shared_conn = multi;
-
-      /* this handle is still being used by a shared connection cache and
-         thus we leave it around for now */
-      add_closure(multi, easy->easy_handle);
-    }
-
-    if(easy->easy_handle->state.connc->type == CONNCACHE_MULTI) {
-      /* if this was using the shared connection cache we clear the pointer
-         to that since we're not part of that handle anymore */
-      easy->easy_handle->state.connc = NULL;
-
-      /* and modify the connectindex since this handle can't point to the
-         connection cache anymore */
-      if(easy->easy_conn)
-        easy->easy_conn->connectindex = -1;
-    }
-
-    /* change state without using multistate(), only to make singlesocket() do
-       what we want */
-    easy->state = CURLM_STATE_COMPLETED;
-    singlesocket(multi, easy); /* to let the application know what sockets
-                                  that vanish with this handle */
-
-    Curl_easy_addmulti(easy->easy_handle, NULL); /* clear the association
-                                                    to this multi handle */
-
-    /* make the previous node point to our next */
-    if(easy->prev)
-      easy->prev->next = easy->next;
-    /* make our next point to our previous node */
-    if(easy->next)
-      easy->next->prev = easy->prev;
-
-    easy->easy_handle->set.one_easy = NULL; /* detached */
-
-    /* NOTE NOTE NOTE
-       We do not touch the easy handle here! */
-    if (easy->msg)
-      free(easy->msg);
-    free(easy);
-
-    multi->num_easy--; /* one less to care about now */
-
-    update_timer(multi);
-    return CURLM_OK;
-  }
-  else
-    return CURLM_BAD_EASY_HANDLE; /* twasn't found */
-}
-
-bool Curl_multi_canPipeline(struct Curl_multi* multi)
-{
-  return multi->pipelining_enabled;
-}
-
-static int waitconnect_getsock(struct connectdata *conn,
-                               curl_socket_t *sock,
-                               int numsocks)
-{
-  if(!numsocks)
-    return GETSOCK_BLANK;
-
-  sock[0] = conn->sock[FIRSTSOCKET];
-  return GETSOCK_WRITESOCK(0);
-}
-
-static int domore_getsock(struct connectdata *conn,
-                          curl_socket_t *sock,
-                          int numsocks)
-{
-  if(!numsocks)
-    return GETSOCK_BLANK;
-
-  /* When in DO_MORE state, we could be either waiting for us
-     to connect to a remote site, or we could wait for that site
-     to connect to us. It makes a difference in the way: if we
-     connect to the site we wait for the socket to become writable, if
-     the site connects to us we wait for it to become readable */
-  sock[0] = conn->sock[SECONDARYSOCKET];
-
-  return GETSOCK_WRITESOCK(0);
-}
-
-/* returns bitmapped flags for this handle and its sockets */
-static int multi_getsock(struct Curl_one_easy *easy,
-                         curl_socket_t *socks, /* points to numsocks number
-                                                 of sockets */
-                         int numsocks)
-{
-  if (easy->easy_handle->state.pipe_broke) {
-    return 0;
-  }
-
-  if (easy->state > CURLM_STATE_CONNECT &&
-      easy->state < CURLM_STATE_COMPLETED) {
-    /* Set up ownership correctly */
-    easy->easy_conn->data = easy->easy_handle;
-  }
-
-  switch(easy->state) {
-  case CURLM_STATE_TOOFAST:  /* returns 0, so will not select. */
-  default:
-    /* this will get called with CURLM_STATE_COMPLETED when a handle is
-       removed */
-    return 0;
-
-  case CURLM_STATE_WAITRESOLVE:
-    return Curl_resolv_getsock(easy->easy_conn, socks, numsocks);
-
-  case CURLM_STATE_PROTOCONNECT:
-    return Curl_protocol_getsock(easy->easy_conn, socks, numsocks);
-
-  case CURLM_STATE_DOING:
-    return Curl_doing_getsock(easy->easy_conn, socks, numsocks);
-
-  case CURLM_STATE_WAITCONNECT:
-    return waitconnect_getsock(easy->easy_conn, socks, numsocks);
-
-  case CURLM_STATE_DO_MORE:
-    return domore_getsock(easy->easy_conn, socks, numsocks);
-
-  case CURLM_STATE_PERFORM:
-  case CURLM_STATE_WAITPERFORM:
-    return Curl_single_getsock(easy->easy_conn, socks, numsocks);
-  }
-
-}
-
-CURLMcode curl_multi_fdset(CURLM *multi_handle,
-                           fd_set *read_fd_set, fd_set *write_fd_set,
-                           fd_set *exc_fd_set, int *max_fd)
-{
-  /* Scan through all the easy handles to get the file descriptors set.
-     Some easy handles may not have connected to the remote host yet,
-     and then we must make sure that is done. */
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-  int this_max_fd=-1;
-  curl_socket_t sockbunch[MAX_SOCKSPEREASYHANDLE];
-  int bitmap;
-  int i;
-  (void)exc_fd_set; /* not used */
-
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  easy=multi->easy.next;
-  while(easy) {
-    bitmap = multi_getsock(easy, sockbunch, MAX_SOCKSPEREASYHANDLE);
-
-    for(i=0; i< MAX_SOCKSPEREASYHANDLE; i++) {
-      curl_socket_t s = CURL_SOCKET_BAD;
-
-      if(bitmap & GETSOCK_READSOCK(i)) {
-        FD_SET(sockbunch[i], read_fd_set);
-        s = sockbunch[i];
-      }
-      if(bitmap & GETSOCK_WRITESOCK(i)) {
-        FD_SET(sockbunch[i], write_fd_set);
-        s = sockbunch[i];
-      }
-      if(s == CURL_SOCKET_BAD)
-        /* this socket is unused, break out of loop */
-        break;
-      else {
-        if((int)s > this_max_fd)
-          this_max_fd = (int)s;
-      }
-    }
-
-    easy = easy->next; /* check next handle */
-  }
-
-  *max_fd = this_max_fd;
-
-  return CURLM_OK;
-}
-
-static CURLMcode multi_runsingle(struct Curl_multi *multi,
-                                 struct Curl_one_easy *easy)
-{
-  struct Curl_message *msg = NULL;
-  bool connected;
-  bool async;
-  bool protocol_connect;
-  bool dophase_done;
-  bool done;
-  CURLMcode result = CURLM_OK;
-  struct Curl_transfer_keeper *k;
-
-  do {
-
-    if(!GOOD_EASY_HANDLE(easy->easy_handle))
-      return CURLM_BAD_EASY_HANDLE;
-
-    if (easy->easy_handle->state.pipe_broke) {
-      infof(easy->easy_handle, "Pipe broke: handle 0x%x, url = %s\n",
-            easy, easy->easy_handle->reqdata.path);
-      if(easy->easy_handle->state.is_in_pipeline) {
-        /* Head back to the CONNECT state */
-        multistate(easy, CURLM_STATE_CONNECT);
-        result = CURLM_CALL_MULTI_PERFORM;
-        easy->result = CURLE_OK;
-      } else {
-        easy->result = CURLE_COULDNT_CONNECT;
-        multistate(easy, CURLM_STATE_COMPLETED);
-      }
-
-      easy->easy_handle->state.pipe_broke = FALSE;
-      easy->easy_conn = NULL;
-      break;
-    }
-
-    if (easy->state > CURLM_STATE_CONNECT &&
-        easy->state < CURLM_STATE_COMPLETED) {
-      /* Make sure we set the connection's current owner */
-      easy->easy_conn->data = easy->easy_handle;
-    }
-
-    if (CURLM_STATE_WAITCONNECT <= easy->state &&
-        easy->state <= CURLM_STATE_DO &&
-        easy->easy_handle->change.url_changed) {
-      char *gotourl;
-      Curl_posttransfer(easy->easy_handle);
-
-      easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE);
-      /* We make sure that the pipe broken flag is reset
-         because in this case, it isn't an actual break */
-      easy->easy_handle->state.pipe_broke = FALSE;
-      if(CURLE_OK == easy->result) {
-        gotourl = strdup(easy->easy_handle->change.url);
-        if(gotourl) {
-          easy->easy_handle->change.url_changed = FALSE;
-          easy->result = Curl_follow(easy->easy_handle, gotourl, FALSE);
-          if(CURLE_OK == easy->result)
-            multistate(easy, CURLM_STATE_CONNECT);
-          else
-            free(gotourl);
-        }
-        else {
-          easy->result = CURLE_OUT_OF_MEMORY;
-          multistate(easy, CURLM_STATE_COMPLETED);
-          break;
-        }
-      }
-    }
-
-    easy->easy_handle->change.url_changed = FALSE;
-
-    switch(easy->state) {
-    case CURLM_STATE_INIT:
-      /* init this transfer. */
-      easy->result=Curl_pretransfer(easy->easy_handle);
-
-      if(CURLE_OK == easy->result) {
-        /* after init, go CONNECT */
-        multistate(easy, CURLM_STATE_CONNECT);
-        result = CURLM_CALL_MULTI_PERFORM;
-
-        easy->easy_handle->state.used_interface = Curl_if_multi;
-      }
-      break;
-
-    case CURLM_STATE_CONNECT:
-      /* Connect. We get a connection identifier filled in. */
-      Curl_pgrsTime(easy->easy_handle, TIMER_STARTSINGLE);
-      easy->result = Curl_connect(easy->easy_handle, &easy->easy_conn,
-                                  &async, &protocol_connect);
-
-      if(CURLE_OK == easy->result) {
-        /* Add this handle to the send pipeline */
-        Curl_addHandleToPipeline(easy->easy_handle,
-                                 easy->easy_conn->send_pipe);
-
-        if(async)
-          /* We're now waiting for an asynchronous name lookup */
-          multistate(easy, CURLM_STATE_WAITRESOLVE);
-        else {
-          /* after the connect has been sent off, go WAITCONNECT unless the
-             protocol connect is already done and we can go directly to
-             WAITDO! */
-          result = CURLM_CALL_MULTI_PERFORM;
-
-          if(protocol_connect) {
-            multistate(easy, CURLM_STATE_WAITDO);
-          } else {
-            multistate(easy, CURLM_STATE_WAITCONNECT);
-          }
-        }
-      }
-      break;
-
-    case CURLM_STATE_WAITRESOLVE:
-      /* awaiting an asynch name resolve to complete */
-    {
-      struct Curl_dns_entry *dns = NULL;
-
-      /* check if we have the name resolved by now */
-      easy->result = Curl_is_resolved(easy->easy_conn, &dns);
-
-      if(dns) {
-        /* Perform the next step in the connection phase, and then move on
-           to the WAITCONNECT state */
-        easy->result = Curl_async_resolved(easy->easy_conn,
-                                           &protocol_connect);
-
-        if(CURLE_OK != easy->result)
-          /* if Curl_async_resolved() returns failure, the connection struct
-             is already freed and gone */
-          easy->easy_conn = NULL;           /* no more connection */
-        else {
-          /* call again please so that we get the next socket setup */
-          result = CURLM_CALL_MULTI_PERFORM;
-          if(protocol_connect)
-            multistate(easy, CURLM_STATE_DO);
-          else
-            multistate(easy, CURLM_STATE_WAITCONNECT);
-        }
-      }
-
-      if(CURLE_OK != easy->result) {
-        /* failure detected */
-        Curl_disconnect(easy->easy_conn); /* disconnect properly */
-        easy->easy_conn = NULL;           /* no more connection */
-        break;
-      }
-    }
-    break;
-
-    case CURLM_STATE_WAITCONNECT:
-      /* awaiting a completion of an asynch connect */
-      easy->result = Curl_is_connected(easy->easy_conn,
-                                       FIRSTSOCKET,
-                                       &connected);
-      if(connected)
-        easy->result = Curl_protocol_connect(easy->easy_conn,
-                                             &protocol_connect);
-
-      if(CURLE_OK != easy->result) {
-        /* failure detected */
-        Curl_disconnect(easy->easy_conn); /* close the connection */
-        easy->easy_conn = NULL;           /* no more connection */
-        break;
-      }
-
-      if(connected) {
-        if(!protocol_connect) {
-          /* We have a TCP connection, but 'protocol_connect' may be false
-             and then we continue to 'STATE_PROTOCONNECT'. If protocol
-             connect is TRUE, we move on to STATE_DO. */
-          multistate(easy, CURLM_STATE_PROTOCONNECT);
-        }
-        else {
-          /* after the connect has completed, go WAITDO */
-          multistate(easy, CURLM_STATE_WAITDO);
-
-          result = CURLM_CALL_MULTI_PERFORM;
-        }
-      }
-      break;
-
-    case CURLM_STATE_PROTOCONNECT:
-      /* protocol-specific connect phase */
-      easy->result = Curl_protocol_connecting(easy->easy_conn,
-                                              &protocol_connect);
-      if(protocol_connect) {
-        /* after the connect has completed, go WAITDO */
-        multistate(easy, CURLM_STATE_WAITDO);
-        result = CURLM_CALL_MULTI_PERFORM;
-      }
-      else if(easy->result) {
-        /* failure detected */
-        Curl_posttransfer(easy->easy_handle);
-        Curl_done(&easy->easy_conn, easy->result, FALSE);
-        Curl_disconnect(easy->easy_conn); /* close the connection */
-        easy->easy_conn = NULL;           /* no more connection */
-      }
-      break;
-
-    case CURLM_STATE_WAITDO:
-      /* Wait for our turn to DO when we're pipelining requests */
-#ifdef CURLDEBUG
-      infof(easy->easy_handle, "Conn %d send pipe %d inuse %d athead %d\n",
-            easy->easy_conn->connectindex,
-            easy->easy_conn->send_pipe->size,
-            easy->easy_conn->writechannel_inuse,
-            Curl_isHandleAtHead(easy->easy_handle,
-                                easy->easy_conn->send_pipe));
-#endif
-      if (!easy->easy_conn->writechannel_inuse &&
-          Curl_isHandleAtHead(easy->easy_handle,
-                              easy->easy_conn->send_pipe)) {
-        /* Grab the channel */
-        easy->easy_conn->writechannel_inuse = TRUE;
-        multistate(easy, CURLM_STATE_DO);
-        result = CURLM_CALL_MULTI_PERFORM;
-      }
-      break;
-
-    case CURLM_STATE_DO:
-      if(easy->easy_handle->set.connect_only) {
-        /* keep connection open for application to use the socket */
-        easy->easy_conn->bits.close = FALSE;
-        multistate(easy, CURLM_STATE_DONE);
-        easy->result = CURLE_OK;
-        result = CURLM_OK;
-      }
-      else {
-        /* Perform the protocol's DO action */
-        easy->result = Curl_do(&easy->easy_conn,
-                               &dophase_done);
-
-        if(CURLE_OK == easy->result) {
-
-          if(!dophase_done) {
-            /* DO was not completed in one function call, we must continue
-               DOING... */
-            multistate(easy, CURLM_STATE_DOING);
-            result = CURLM_OK;
-          }
-
-          /* after DO, go DO_DONE... or DO_MORE */
-          else if(easy->easy_conn->bits.do_more) {
-            /* we're supposed to do more, but we need to sit down, relax
-               and wait a little while first */
-            multistate(easy, CURLM_STATE_DO_MORE);
-            result = CURLM_OK;
-          }
-          else {
-            /* we're done with the DO, now DO_DONE */
-            easy->result = Curl_readwrite_init(easy->easy_conn);
-            if(CURLE_OK == easy->result) {
-              multistate(easy, CURLM_STATE_DO_DONE);
-              result = CURLM_CALL_MULTI_PERFORM;
-            }
-          }
-        }
-        else {
-          /* failure detected */
-          Curl_posttransfer(easy->easy_handle);
-          Curl_done(&easy->easy_conn, easy->result, FALSE);
-          Curl_disconnect(easy->easy_conn); /* close the connection */
-          easy->easy_conn = NULL;           /* no more connection */
-        }
-      }
-      break;
-
-    case CURLM_STATE_DOING:
-      /* we continue DOING until the DO phase is complete */
-      easy->result = Curl_protocol_doing(easy->easy_conn,
-                                         &dophase_done);
-      if(CURLE_OK == easy->result) {
-        if(dophase_done) {
-          /* after DO, go PERFORM... or DO_MORE */
-          if(easy->easy_conn->bits.do_more) {
-            /* we're supposed to do more, but we need to sit down, relax
-               and wait a little while first */
-            multistate(easy, CURLM_STATE_DO_MORE);
-            result = CURLM_OK;
-          }
-          else {
-            /* we're done with the DO, now DO_DONE */
-            easy->result = Curl_readwrite_init(easy->easy_conn);
-            if(CURLE_OK == easy->result) {
-              multistate(easy, CURLM_STATE_DO_DONE);
-              result = CURLM_CALL_MULTI_PERFORM;
-            }
-          }
-        } /* dophase_done */
-      }
-      else {
-        /* failure detected */
-        Curl_posttransfer(easy->easy_handle);
-        Curl_done(&easy->easy_conn, easy->result, FALSE);
-        Curl_disconnect(easy->easy_conn); /* close the connection */
-        easy->easy_conn = NULL;           /* no more connection */
-      }
-      break;
-
-    case CURLM_STATE_DO_MORE:
-      /* Ready to do more? */
-      easy->result = Curl_is_connected(easy->easy_conn,
-                                       SECONDARYSOCKET,
-                                       &connected);
-      if(connected) {
-        /*
-         * When we are connected, DO MORE and then go DO_DONE
-         */
-        easy->result = Curl_do_more(easy->easy_conn);
-
-        if(CURLE_OK == easy->result)
-          easy->result = Curl_readwrite_init(easy->easy_conn);
-        else
-          /* Remove ourselves from the send pipeline */
-          Curl_removeHandleFromPipeline(easy->easy_handle,
-                                        easy->easy_conn->send_pipe);
-
-        if(CURLE_OK == easy->result) {
-          multistate(easy, CURLM_STATE_DO_DONE);
-          result = CURLM_CALL_MULTI_PERFORM;
-        }
-      }
-      break;
-
-    case CURLM_STATE_DO_DONE:
-      /* Remove ourselves from the send pipeline */
-      Curl_removeHandleFromPipeline(easy->easy_handle,
-                                    easy->easy_conn->send_pipe);
-      /* Add ourselves to the recv pipeline */
-      Curl_addHandleToPipeline(easy->easy_handle,
-                               easy->easy_conn->recv_pipe);
-      multistate(easy, CURLM_STATE_WAITPERFORM);
-      result = CURLM_CALL_MULTI_PERFORM;
-      break;
-
-    case CURLM_STATE_WAITPERFORM:
-#ifdef CURLDEBUG
-      infof(easy->easy_handle, "Conn %d recv pipe %d inuse %d athead %d\n",
-            easy->easy_conn->connectindex,
-            easy->easy_conn->recv_pipe->size,
-            easy->easy_conn->readchannel_inuse,
-            Curl_isHandleAtHead(easy->easy_handle,
-                                easy->easy_conn->recv_pipe));
-#endif
-      /* Wait for our turn to PERFORM */
-      if (!easy->easy_conn->readchannel_inuse &&
-          Curl_isHandleAtHead(easy->easy_handle,
-                              easy->easy_conn->recv_pipe)) {
-        /* Grab the channel */
-        easy->easy_conn->readchannel_inuse = TRUE;
-        multistate(easy, CURLM_STATE_PERFORM);
-        result = CURLM_CALL_MULTI_PERFORM;
-      }
-      break;
-
-    case CURLM_STATE_TOOFAST: /* limit-rate exceeded in either direction */
-      /* if both rates are within spec, resume transfer */
-      Curl_pgrsUpdate(easy->easy_conn);
-      if ( ( ( easy->easy_handle->set.max_send_speed == 0 ) ||
-             ( easy->easy_handle->progress.ulspeed <
-               easy->easy_handle->set.max_send_speed ) )  &&
-           ( ( easy->easy_handle->set.max_recv_speed == 0 ) ||
-             ( easy->easy_handle->progress.dlspeed <
-               easy->easy_handle->set.max_recv_speed ) )
-        )
-      multistate(easy, CURLM_STATE_PERFORM);
-      break;
-
-    case CURLM_STATE_PERFORM:
-      /* check if over speed */
-      if ( (  ( easy->easy_handle->set.max_send_speed > 0 ) &&
-              ( easy->easy_handle->progress.ulspeed >
-                easy->easy_handle->set.max_send_speed ) )  ||
-           (  ( easy->easy_handle->set.max_recv_speed > 0 ) &&
-              ( easy->easy_handle->progress.dlspeed >
-                easy->easy_handle->set.max_recv_speed ) )
-        ) {
-        /* Transfer is over the speed limit. Change state.  TODO: Call
-         * Curl_expire() with the time left until we're targeted to be below
-         * the speed limit again. */
-        multistate(easy, CURLM_STATE_TOOFAST );
-        break;
-      }
-
-      /* read/write data if it is ready to do so */
-      easy->result = Curl_readwrite(easy->easy_conn, &done);
-
-      k = &easy->easy_handle->reqdata.keep;
-
-      if (!(k->keepon & KEEP_READ)) {
-          /* We're done reading */
-          easy->easy_conn->readchannel_inuse = FALSE;
-      }
-
-      if (!(k->keepon & KEEP_WRITE)) {
-          /* We're done writing */
-          easy->easy_conn->writechannel_inuse = FALSE;
-      }
-
-      if(easy->result)  {
-        /* The transfer phase returned error, we mark the connection to get
-         * closed to prevent being re-used. This is becasue we can't
-         * possibly know if the connection is in a good shape or not now. */
-        easy->easy_conn->bits.close = TRUE;
-
-        if(CURL_SOCKET_BAD != easy->easy_conn->sock[SECONDARYSOCKET]) {
-          /* if we failed anywhere, we must clean up the secondary socket if
-             it was used */
-          sclose(easy->easy_conn->sock[SECONDARYSOCKET]);
-          easy->easy_conn->sock[SECONDARYSOCKET] = CURL_SOCKET_BAD;
-        }
-        Curl_posttransfer(easy->easy_handle);
-        Curl_done(&easy->easy_conn, easy->result, FALSE);
-      }
-      else if(TRUE == done) {
-        char *newurl;
-        bool retry = Curl_retry_request(easy->easy_conn, &newurl);
-
-        /* call this even if the readwrite function returned error */
-        Curl_posttransfer(easy->easy_handle);
-
-        /* When we follow redirects, must to go back to the CONNECT state */
-        if(easy->easy_handle->reqdata.newurl || retry) {
-          Curl_removeHandleFromPipeline(easy->easy_handle,
-                                        easy->easy_conn->recv_pipe);
-          if(!retry) {
-            /* if the URL is a follow-location and not just a retried request
-               then figure out the URL here */
-            newurl = easy->easy_handle->reqdata.newurl;
-            easy->easy_handle->reqdata.newurl = NULL;
-          }
-          easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE);
-          if(easy->result == CURLE_OK)
-            easy->result = Curl_follow(easy->easy_handle, newurl, retry);
-          if(CURLE_OK == easy->result) {
-            multistate(easy, CURLM_STATE_CONNECT);
-            result = CURLM_CALL_MULTI_PERFORM;
-          }
-          else
-            /* Since we "took it", we are in charge of freeing this on
-               failure */
-            free(newurl);
-        }
-        else {
-          /* after the transfer is done, go DONE */
-          multistate(easy, CURLM_STATE_DONE);
-          result = CURLM_CALL_MULTI_PERFORM;
-        }
-      }
-
-      break;
-
-    case CURLM_STATE_DONE:
-      /* Remove ourselves from the receive pipeline */
-      Curl_removeHandleFromPipeline(easy->easy_handle,
-                                    easy->easy_conn->recv_pipe);
-      easy->easy_handle->state.is_in_pipeline = FALSE;
-
-      if (easy->easy_conn->bits.stream_was_rewound) {
-          /* This request read past its response boundary so we quickly
-             let the other requests consume those bytes since there is no
-             guarantee that the socket will become active again */
-          result = CURLM_CALL_MULTI_PERFORM;
-      }
-
-      if (!easy->easy_handle->state.cancelled) {
-        /* post-transfer command */
-        easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE);
-
-        /* after we have DONE what we're supposed to do, go COMPLETED, and
-           it doesn't matter what the Curl_done() returned! */
-        multistate(easy, CURLM_STATE_COMPLETED);
-      }
-
-      break;
-
-    case CURLM_STATE_COMPLETED:
-      if (easy->easy_handle->state.cancelled)
-        /* Go into the CANCELLED state if we were cancelled */
-        multistate(easy, CURLM_STATE_CANCELLED);
-
-      /* this is a completed transfer, it is likely to still be connected */
-
-      /* This node should be delinked from the list now and we should post
-         an information message that we are complete. */
-      break;
-
-    case CURLM_STATE_CANCELLED:
-      /* Cancelled transfer, wait to be cleaned up */
-      break;
-
-    default:
-      return CURLM_INTERNAL_ERROR;
-    }
-
-    if(CURLM_STATE_COMPLETED != easy->state) {
-      if(CURLE_OK != easy->result) {
-        /*
-         * If an error was returned, and we aren't in completed state now,
-         * then we go to completed and consider this transfer aborted.
-         */
-        easy->easy_handle->state.is_in_pipeline = FALSE;
-        easy->easy_handle->state.pipe_broke = FALSE;
-
-        if(easy->easy_conn) {
-          /* if this has a connection, unsubscribe from the pipelines */
-          easy->easy_conn->writechannel_inuse = FALSE;
-          easy->easy_conn->readchannel_inuse = FALSE;
-        }
-        multistate(easy, CURLM_STATE_COMPLETED);
-      }
-    }
-
-  } while (easy->easy_handle->change.url_changed);
-
-  if ((CURLM_STATE_COMPLETED == easy->state) && !easy->msg) {
-    if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) {
-      /* clear out the usage of the shared DNS cache */
-      easy->easy_handle->dns.hostcache = NULL;
-      easy->easy_handle->dns.hostcachetype = HCACHE_NONE;
-    }
-
-    /* now add a node to the Curl_message linked list with this info */
-    msg = (struct Curl_message *)malloc(sizeof(struct Curl_message));
-
-    if(!msg)
-      return CURLM_OUT_OF_MEMORY;
-
-    msg->extmsg.msg = CURLMSG_DONE;
-    msg->extmsg.easy_handle = easy->easy_handle;
-    msg->extmsg.data.result = easy->result;
-    msg->next = NULL;
-
-    easy->msg = msg;
-    easy->msg_num = 1; /* there is one unread message here */
-
-    multi->num_msgs++; /* increase message counter */
-  }
-
-  return result;
-}
-
-
-CURLMcode curl_multi_perform(CURLM *multi_handle, int *running_handles)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-  CURLMcode returncode=CURLM_OK;
-  struct Curl_tree *t;
-
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  easy=multi->easy.next;
-  while(easy) {
-    CURLMcode result;
-
-    if (easy->easy_handle->state.cancelled &&
-        easy->state == CURLM_STATE_CANCELLED) {
-      /* Remove cancelled handles once it's safe to do so */
-      Curl_multi_rmeasy(multi_handle, easy->easy_handle);
-      easy->easy_handle = NULL;
-      easy = easy->next;
-      continue;
-    }
-
-    result = multi_runsingle(multi, easy);
-    if(result)
-      returncode = result;
-
-    easy = easy->next; /* operate on next handle */
-  }
-
-  /*
-   * Simply remove all expired timers from the splay since handles are dealt
-   * with unconditionally by this function and curl_multi_timeout() requires
-   * that already passed/handled expire times are removed from the splay.
-   */
-  do {
-    struct timeval now = Curl_tvnow();
-    int key = now.tv_sec; /* drop the usec part */
-
-    multi->timetree = Curl_splaygetbest(key, multi->timetree, &t);
-    if (t) {
-      struct SessionHandle *d = t->payload;
-      struct timeval* tv = &d->state.expiretime;
-
-      /* clear the expire times within the handles that we remove from the
-         splay tree */
-      tv->tv_sec = 0;
-      tv->tv_usec = 0;
-    }
-
-  } while(t);
-
-  *running_handles = multi->num_alive;
-
-  if ( CURLM_OK == returncode )
-    update_timer(multi);
-  return returncode;
-}
-
-/* This is called when an easy handle is cleanup'ed that is part of a multi
-   handle */
-void Curl_multi_rmeasy(void *multi_handle, CURL *easy_handle)
-{
-  curl_multi_remove_handle(multi_handle, easy_handle);
-}
-
-
-CURLMcode curl_multi_cleanup(CURLM *multi_handle)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-  struct Curl_one_easy *nexteasy;
-  int i;
-  struct closure *cl;
-  struct closure *n;
-
-  if(GOOD_MULTI_HANDLE(multi)) {
-    multi->type = 0; /* not good anymore */
-    Curl_hash_destroy(multi->hostcache);
-    Curl_hash_destroy(multi->sockhash);
-
-    /* go over all connections that have close actions */
-    for(i=0; i< multi->connc->num; i++) {
-      if(multi->connc->connects[i] &&
-         multi->connc->connects[i]->protocol & PROT_CLOSEACTION) {
-        Curl_disconnect(multi->connc->connects[i]);
-        multi->connc->connects[i] = NULL;
-      }
-    }
-    /* now walk through the list of handles we kept around only to be
-       able to close connections "properly" */
-    cl = multi->closure;
-    while(cl) {
-      cl->easy_handle->state.shared_conn = NULL; /* no more shared */
-      if(cl->easy_handle->state.closed)
-        /* close handle only if curl_easy_cleanup() already has been called
-           for this easy handle */
-        Curl_close(cl->easy_handle);
-      n = cl->next;
-      free(cl);
-      cl= n;
-    }
-
-    Curl_rm_connc(multi->connc);
-
-    /* remove all easy handles */
-    easy = multi->easy.next;
-    while(easy) {
-      nexteasy=easy->next;
-      if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) {
-        /* clear out the usage of the shared DNS cache */
-        easy->easy_handle->dns.hostcache = NULL;
-        easy->easy_handle->dns.hostcachetype = HCACHE_NONE;
-      }
-
-      /* Clear the pointer to the connection cache */
-      easy->easy_handle->state.connc = NULL;
-
-      Curl_easy_addmulti(easy->easy_handle, NULL); /* clear the association */
-
-      if (easy->msg)
-        free(easy->msg);
-      free(easy);
-      easy = nexteasy;
-    }
-
-    free(multi);
-
-    return CURLM_OK;
-  }
-  else
-    return CURLM_BAD_HANDLE;
-}
-
-CURLMsg *curl_multi_info_read(CURLM *multi_handle, int *msgs_in_queue)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-
-  *msgs_in_queue = 0; /* default to none */
-
-  if(GOOD_MULTI_HANDLE(multi)) {
-    struct Curl_one_easy *easy;
-
-    if(!multi->num_msgs)
-      return NULL; /* no messages left to return */
-
-    easy=multi->easy.next;
-    while(easy) {
-      if(easy->msg_num) {
-        easy->msg_num--;
-        break;
-      }
-      easy = easy->next;
-    }
-    if(!easy)
-      return NULL; /* this means internal count confusion really */
-
-    multi->num_msgs--;
-    *msgs_in_queue = multi->num_msgs;
-
-    return &easy->msg->extmsg;
-  }
-  else
-    return NULL;
-}
-
-/*
- * singlesocket() checks what sockets we deal with and their "action state"
- * and if we have a different state in any of those sockets from last time we
- * call the callback accordingly.
- */
-static void singlesocket(struct Curl_multi *multi,
-                         struct Curl_one_easy *easy)
-{
-  curl_socket_t socks[MAX_SOCKSPEREASYHANDLE];
-  int i;
-  struct Curl_sh_entry *entry;
-  curl_socket_t s;
-  int num;
-  unsigned int curraction;
-
-  memset(&socks, 0, sizeof(socks));
-  for(i=0; i< MAX_SOCKSPEREASYHANDLE; i++)
-    socks[i] = CURL_SOCKET_BAD;
-
-  /* Fill in the 'current' struct with the state as it is now: what sockets to
-     supervise and for what actions */
-  curraction = multi_getsock(easy, socks, MAX_SOCKSPEREASYHANDLE);
-
-  /* We have 0 .. N sockets already and we get to know about the 0 .. M
-     sockets we should have from now on. Detect the differences, remove no
-     longer supervised ones and add new ones */
-
-  /* walk over the sockets we got right now */
-  for(i=0; (i< MAX_SOCKSPEREASYHANDLE) &&
-        (curraction & (GETSOCK_READSOCK(i) | GETSOCK_WRITESOCK(i)));
-      i++) {
-    int action = CURL_POLL_NONE;
-
-    s = socks[i];
-
-    /* get it from the hash */
-    entry = Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s));
-
-    if(curraction & GETSOCK_READSOCK(i))
-      action |= CURL_POLL_IN;
-    if(curraction & GETSOCK_WRITESOCK(i))
-      action |= CURL_POLL_OUT;
-
-    if(entry) {
-      /* yeps, already present so check if it has the same action set */
-      if(entry->action == action)
-        /* same, continue */
-        continue;
-    }
-    else {
-      /* this is a socket we didn't have before, add it! */
-      entry = sh_addentry(multi->sockhash, s, easy->easy_handle);
-      if(!entry)
-        /* fatal */
-        return;
-    }
-
-    multi->socket_cb(easy->easy_handle,
-                     s,
-                     action,
-                     multi->socket_userp,
-                     entry ? entry->socketp : NULL);
-
-    entry->action = action; /* store the current action state */
-  }
-
-  num = i; /* number of sockets */
-
-  /* when we've walked over all the sockets we should have right now, we must
-     make sure to detect sockets that are removed */
-  for(i=0; i< easy->numsocks; i++) {
-    int j;
-    s = easy->sockets[i];
-    for(j=0; j<num; j++) {
-      if(s == socks[j]) {
-        /* this is still supervised */
-        s = CURL_SOCKET_BAD;
-        break;
-      }
-    }
-    if(s != CURL_SOCKET_BAD) {
-      /* this socket has been removed. Remove it */
-
-      entry = Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s));
-      if(entry) {
-        /* just a precaution, this socket really SHOULD be in the hash already
-           but in case it isn't, we don't have to tell the app to remove it
-           either since it never got to know about it */
-        multi->socket_cb(easy->easy_handle,
-                         s,
-                         CURL_POLL_REMOVE,
-                         multi->socket_userp,
-                         entry ? entry->socketp : NULL);
-
-        sh_delentry(multi->sockhash, s);
-      }
-    }
-  }
-
-  memcpy(easy->sockets, socks, num*sizeof(curl_socket_t));
-  easy->numsocks = num;
-}
-
-static CURLMcode multi_socket(struct Curl_multi *multi,
-                              bool checkall,
-                              curl_socket_t s,
-                              int *running_handles)
-{
-  CURLMcode result = CURLM_OK;
-  struct SessionHandle *data = NULL;
-  struct Curl_tree *t;
-
-  if(checkall) {
-    struct Curl_one_easy *easyp;
-    /* *perform() deals with running_handles on its own */
-    result = curl_multi_perform(multi, running_handles);
-
-    /* walk through each easy handle and do the socket state change magic
-       and callbacks */
-    easyp=multi->easy.next;
-    while(easyp) {
-      singlesocket(multi, easyp);
-      easyp = easyp->next;
-    }
-
-    /* or should we fall-through and do the timer-based stuff? */
-    return result;
-  }
-  else if (s != CURL_SOCKET_TIMEOUT) {
-
-    struct Curl_sh_entry *entry =
-      Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s));
-
-    if(!entry)
-      /* unmatched socket, major problemo! */
-      return CURLM_BAD_SOCKET; /* better return code? */
-
-    data = entry->easy;
-
-    if(data->magic != CURLEASY_MAGIC_NUMBER)
-      /* bad bad bad bad bad bad bad */
-      return CURLM_INTERNAL_ERROR;
-
-    result = multi_runsingle(multi, data->set.one_easy);
-
-    if(result == CURLM_OK)
-      /* get the socket(s) and check if the state has been changed since
-         last */
-      singlesocket(multi, data->set.one_easy);
-
-    /* Now we fall-through and do the timer-based stuff, since we don't want
-       to force the user to have to deal with timeouts as long as at least one
-       connection in fact has traffic. */
-
-    data = NULL; /* set data to NULL again to avoid calling multi_runsingle()
-                    in case there's no need to */
-  }
-
-  /*
-   * The loop following here will go on as long as there are expire-times left
-   * to process in the splay and 'data' will be re-assigned for every expired
-   * handle we deal with.
-   */
-  do {
-    int key;
-    struct timeval now;
-
-    /* the first loop lap 'data' can be NULL */
-    if(data) {
-      result = multi_runsingle(multi, data->set.one_easy);
-
-      if(result == CURLM_OK)
-        /* get the socket(s) and check if the state has been changed since
-           last */
-        singlesocket(multi, data->set.one_easy);
-    }
-
-    /* Check if there's one (more) expired timer to deal with! This function
-       extracts a matching node if there is one */
-
-    now = Curl_tvnow();
-    key = now.tv_sec; /* drop the usec part */
-
-    multi->timetree = Curl_splaygetbest(key, multi->timetree, &t);
-    if(t) {
-      /* assign 'data' to be the easy handle we just removed from the splay
-         tree */
-      data = t->payload;
-      /* clear the expire time within the handle we removed from the
-         splay tree */
-      data->state.expiretime.tv_sec = 0;
-      data->state.expiretime.tv_usec = 0;
-    }
-
-  } while(t);
-
-  *running_handles = multi->num_alive;
-  return result;
-}
-
-CURLMcode curl_multi_setopt(CURLM *multi_handle,
-                            CURLMoption option, ...)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  CURLMcode res = CURLM_OK;
-  va_list param;
-
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  va_start(param, option);
-
-  switch(option) {
-  case CURLMOPT_SOCKETFUNCTION:
-    multi->socket_cb = va_arg(param, curl_socket_callback);
-    break;
-  case CURLMOPT_SOCKETDATA:
-    multi->socket_userp = va_arg(param, void *);
-    break;
-  case CURLMOPT_PIPELINING:
-    multi->pipelining_enabled = (bool)(0 != va_arg(param, long));
-    break;
-  case CURLMOPT_TIMERFUNCTION:
-    multi->timer_cb = va_arg(param, curl_multi_timer_callback);
-    break;
-  case CURLMOPT_TIMERDATA:
-    multi->timer_userp = va_arg(param, void *);
-    break;
-  default:
-    res = CURLM_UNKNOWN_OPTION;
-    break;
-  }
-  va_end(param);
-  return res;
-}
-
-
-CURLMcode curl_multi_socket_all(CURLM *multi_handle, int *running_handles)
-
-{
-  CURLMcode result = multi_socket((struct Curl_multi *)multi_handle,
-                                  TRUE, CURL_SOCKET_BAD, running_handles);
-  if (CURLM_OK == result)
-    update_timer((struct Curl_multi *)multi_handle);
-  return result;
-}
-
-static CURLMcode multi_timeout(struct Curl_multi *multi,
-                               long *timeout_ms)
-{
-  if(multi->timetree) {
-    /* we have a tree of expire times */
-    struct timeval now = Curl_tvnow();
-
-    /* splay the lowest to the bottom */
-    multi->timetree = Curl_splay(0, multi->timetree);
-
-    /* At least currently, the splay key is a time_t for the expire time */
-    *timeout_ms = (multi->timetree->key - now.tv_sec) * 1000 -
-      now.tv_usec/1000;
-    if(*timeout_ms < 0)
-      /* 0 means immediately */
-      *timeout_ms = 0;
-  }
-  else
-    *timeout_ms = -1;
-
-  return CURLM_OK;
-}
-
-CURLMcode curl_multi_timeout(CURLM *multi_handle,
-                             long *timeout_ms)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-
-  /* First, make some basic checks that the CURLM handle is a good handle */
-  if(!GOOD_MULTI_HANDLE(multi))
-    return CURLM_BAD_HANDLE;
-
-  return multi_timeout(multi, timeout_ms);
-}
-
-/*
- * Tell the application it should update its timers, if it subscribes to the
- * update timer callback.
- */
-static int update_timer(struct Curl_multi *multi)
-{
-  long timeout_ms;
-  if (!multi->timer_cb)
-    return 0;
-  if ( multi_timeout(multi, &timeout_ms) != CURLM_OK )
-    return -1;
-  if ( timeout_ms < 0 )
-    return 0;
-
-  /* When multi_timeout() is done, multi->timetree points to the node with the
-   * timeout we got the (relative) time-out time for. We can thus easily check
-   * if this is the same (fixed) time as we got in a previous call and then
-   * avoid calling the callback again. */
-  if(multi->timetree->key == multi->timer_lastcall)
-    return 0;
-
-  multi->timer_lastcall = multi->timetree->key;
-
-  return multi->timer_cb((CURLM*)multi, timeout_ms, multi->timer_userp);
-}
-
-/* given a number of milliseconds from now to use to set the 'act before
-   this'-time for the transfer, to be extracted by curl_multi_timeout() */
-void Curl_expire(struct SessionHandle *data, long milli)
-{
-  struct Curl_multi *multi = data->multi;
-  struct timeval *nowp = &data->state.expiretime;
-  int rc;
-
-  /* this is only interesting for multi-interface using libcurl, and only
-     while there is still a multi interface struct remaining! */
-  if(!multi)
-    return;
-
-  if(!milli) {
-    /* No timeout, clear the time data. */
-    if(nowp->tv_sec) {
-      /* Since this is an cleared time, we must remove the previous entry from
-         the splay tree */
-      rc = Curl_splayremovebyaddr(multi->timetree,
-                                  &data->state.timenode,
-                                  &multi->timetree);
-      if(rc)
-        infof(data, "Internal error clearing splay node = %d\n", rc);
-      infof(data, "Expire cleared\n");
-      nowp->tv_sec = 0;
-      nowp->tv_usec = 0;
-    }
-  }
-  else {
-    struct timeval set;
-    int rest;
-
-    set = Curl_tvnow();
-    set.tv_sec += milli/1000;
-    set.tv_usec += (milli%1000)*1000;
-
-    rest = (int)(set.tv_usec - 1000000);
-    if(rest > 0) {
-      /* bigger than a full microsec */
-      set.tv_sec++;
-      set.tv_usec -= 1000000;
-    }
-
-    if(nowp->tv_sec) {
-      /* This means that the struct is added as a node in the splay tree.
-         Compare if the new time is earlier, and only remove-old/add-new if it
-         is. */
-      long diff = curlx_tvdiff(set, *nowp);
-      if(diff > 0)
-        /* the new expire time was later so we don't change this */
-        return;
-
-      /* Since this is an updated time, we must remove the previous entry from
-         the splay tree first and then re-add the new value */
-      rc = Curl_splayremovebyaddr(multi->timetree,
-                                  &data->state.timenode,
-                                  &multi->timetree);
-      if(rc)
-        infof(data, "Internal error removing splay node = %d\n", rc);
-    }
-
-    *nowp = set;
-#if 0
-    infof(data, "Expire at %ld / %ld (%ldms)\n",
-          (long)nowp->tv_sec, (long)nowp->tv_usec, milli);
-#endif
-    data->state.timenode.payload = data;
-    multi->timetree = Curl_splayinsert((int)nowp->tv_sec,
-                                       multi->timetree,
-                                       &data->state.timenode);
-  }
-#if 0
-  Curl_splayprint(multi->timetree, 0, TRUE);
-#endif
-}
-
-CURLMcode curl_multi_assign(CURLM *multi_handle,
-                            curl_socket_t s, void *hashp)
-{
-  struct Curl_sh_entry *there = NULL;
-  struct Curl_multi *multi = (struct Curl_multi *)multi_handle;
-
-  if(s != CURL_SOCKET_BAD)
-    there = Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(curl_socket_t));
-
-  if(!there)
-    return CURLM_BAD_SOCKET;
-
-  there->socketp = hashp;
-
-  return CURLM_OK;
-}
-
-static bool multi_conn_using(struct Curl_multi *multi,
-                             struct SessionHandle *data)
-{
-  /* any live CLOSEACTION-connections pointing to the give 'data' ? */
-  int i;
-
-  for(i=0; i< multi->connc->num; i++) {
-    if(multi->connc->connects[i] &&
-       (multi->connc->connects[i]->data == data) &&
-       multi->connc->connects[i]->protocol & PROT_CLOSEACTION)
-      return TRUE;
-  }
-
-  return FALSE;
-}
-
-/* Add the given data pointer to the list of 'closure handles' that are kept
-   around only to be able to close some connections nicely - just make sure
-   that this handle isn't already added, like for the cases when an easy
-   handle is removed, added and removed again... */
-static void add_closure(struct Curl_multi *multi,
-                        struct SessionHandle *data)
-{
-  int i;
-  struct closure *cl = (struct closure *)calloc(sizeof(struct closure), 1);
-  struct closure *p=NULL;
-  struct closure *n;
-  if(cl) {
-    cl->easy_handle = data;
-    cl->next = multi->closure;
-    multi->closure = cl;
-  }
-
-  p = multi->closure;
-  cl = p->next; /* start immediately on the second since the first is the one
-                   we just added and it is _very_ likely to actually exist
-                   used in the cache since that's the whole purpose of adding
-                   it to this list! */
-
-  /* When adding, scan through all the other currently kept handles and see if
-     there are any connections still referring to them and kill them if not. */
-  while(cl) {
-    bool inuse = FALSE;
-    for(i=0; i< multi->connc->num; i++) {
-      if(multi->connc->connects[i] &&
-         (multi->connc->connects[i]->data == cl->easy_handle)) {
-        inuse = TRUE;
-        break;
-      }
-    }
-
-    n = cl->next;
-
-    if(!inuse) {
-      /* cl->easy_handle is now killable */
-      infof(data, "Delayed kill of easy handle %p\n", cl->easy_handle);
-      /* unmark it as not having a connection around that uses it anymore */
-      cl->easy_handle->state.shared_conn= NULL;
-      Curl_close(cl->easy_handle);
-      if(p)
-        p->next = n;
-      else
-        multi->closure = n;
-      free(cl);
-    }
-    else
-      p = cl;
-
-    cl = n;
-  }
-
-}
-
-#ifdef CURLDEBUG
-void curl_multi_dump(CURLM *multi_handle)
-{
-  struct Curl_multi *multi=(struct Curl_multi *)multi_handle;
-  struct Curl_one_easy *easy;
-  int i;
-  fprintf(stderr, "* Multi status: %d handles, %d alive\n",
-          multi->num_easy, multi->num_alive);
-  for(easy=multi->easy.next; easy; easy = easy->next) {
-    if(easy->state != CURLM_STATE_COMPLETED) {
-      /* only display handles that are not completed */
-      fprintf(stderr, "handle %p, state %s, %d sockets\n",
-              (void *)easy->easy_handle,
-              statename[easy->state], easy->numsocks);
-      for(i=0; i < easy->numsocks; i++) {
-        curl_socket_t s = easy->sockets[i];
-        struct Curl_sh_entry *entry =
-          Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s));
-
-        fprintf(stderr, "%d ", (int)s);
-        if(!entry) {
-          fprintf(stderr, "INTERNAL CONFUSION\n");
-          continue;
-        }
-        fprintf(stderr, "[%s %s] ",
-                entry->action&CURL_POLL_IN?"RECVING":"",
-                entry->action&CURL_POLL_OUT?"SENDING":"");
-      }
-      if(easy->numsocks)
-        fprintf(stderr, "\n");
-    }
-  }
-}
-#endif
diff --git a/exsrc/src/h5diff_correct_ansi.c b/exsrc/src/h5diff_correct_ansi.c
deleted file mode 100644
index a15e3ff27..000000000
--- a/exsrc/src/h5diff_correct_ansi.c
+++ /dev/null
@@ -1,2222 +0,0 @@
-/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
- * Copyright by The HDF Group.                                               *
- * Copyright by the Board of Trustees of the University of Illinois.         *
- * All rights reserved.                                                      *
- *                                                                           *
- * This file is part of HDF5.  The full HDF5 copyright notice, including     *
- * terms governing use, modification, and redistribution, is contained in    *
- * the files COPYING and Copyright.html.  COPYING can be found at the root   *
- * of the source code distribution tree; Copyright.html can be found at the  *
- * root level of an installed copy of the electronic HDF5 document set and   *
- * is linked from the top-level documents page.  It can also be found at     *
- * http://hdfgroup.org/HDF5/doc/Copyright.html.  If you do not have          *
- * access to either file, you may request a copy from help@hdfgroup.org.     *
- * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */
-
-#include <stdlib.h>
-
-#include "H5private.h"
-#include "h5tools.h"
-#include "h5tools_utils.h"
-#include "h5diff.h"
-#include "ph5diff.h"
-
-/*
- * Debug printf macros. The prefix allows output filtering by test scripts.
- */
-#ifdef H5DIFF_DEBUG
-#define h5diffdebug(x) fprintf(stderr, "h5diff debug: " x)
-#define h5diffdebug2(x1, x2) fprintf(stderr, "h5diff debug: " x1, x2)
-#define h5diffdebug3(x1, x2, x3) fprintf(stderr, "h5diff debug: " x1, x2, x3)
-#define h5diffdebug4(x1, x2, x3, x4) fprintf(stderr, "h5diff debug: " x1, x2, x3, x4)
-#define h5diffdebug5(x1, x2, x3, x4, x5) fprintf(stderr, "h5diff debug: " x1, x2, x3, x4, x5)
-#else
-#define h5diffdebug(x)
-#define h5diffdebug2(x1, x2)
-#define h5diffdebug3(x1, x2, x3)
-#define h5diffdebug4(x1, x2, x3, x4)
-#define h5diffdebug5(x1, x2, x3, x4, x5)
-#endif
-
-
-/*-------------------------------------------------------------------------
- * Function: print_objname
- *
- * Purpose: check if object name is to be printed, only when:
- *  1) verbose mode
- *  2) when diff was found (normal mode)
- *-------------------------------------------------------------------------
- */
-int print_objname (diff_opt_t * options, hsize_t nfound)
-{
-    return ((options->m_verbose || nfound) && !options->m_quiet) ? 1 : 0;
-}
-
-/*-------------------------------------------------------------------------
- * Function: do_print_objname
- *
- * Purpose: print object name
- *
- *-------------------------------------------------------------------------
- */
-void do_print_objname (const char *OBJ, const char *path1, const char *path2, diff_opt_t * opts)
-{
-    /* if verbose level is higher than 0, put space line before
-     * displaying any object or symbolic links. This improves
-     * readability of the output. 
-     */
-    if (opts->m_verbose_level >= 1)
-        parallel_print("\n");
-    parallel_print("%-7s: <%s> and <%s>\n", OBJ, path1, path2);
-}
-
-/*-------------------------------------------------------------------------
- * Function: do_print_attrname
- *
- * Purpose: print attribute name
- *
- *-------------------------------------------------------------------------
- */
-void
-do_print_attrname (const char *attr, const char *path1, const char *path2)
-{
-    parallel_print("%-7s: <%s> and <%s>\n", attr, path1, path2);
-}
-
-/*-------------------------------------------------------------------------
- * Function: print_warn
- *
- * Purpose: check print warning condition.
- * Return: 
- *    1 if verbose mode
- *    0 if not verbos mode
- * Programmer: Jonathan Kim
- * Date: Feb 4, 2010
- *-------------------------------------------------------------------------
- */
-static int print_warn(diff_opt_t *options)
-{
-    return ((options->m_verbose))?1:0;
-}
-
-
-#ifdef H5_HAVE_PARALLEL
-/*-------------------------------------------------------------------------
- * Function: phdiff_dismiss_workers
- *
- * Purpose: tell all workers to end.
- *
- * Return: none
- *
- * Programmer: Albert Cheng
- *
- * Date: Feb 6, 2005
- *
- *-------------------------------------------------------------------------
- */
-void phdiff_dismiss_workers(void)
-{
-    int i;
-    for(i=1; i<g_nTasks; i++)
-        MPI_Send(NULL, 0, MPI_BYTE, i, MPI_TAG_END, MPI_COMM_WORLD);
-}
-
-
-/*-------------------------------------------------------------------------
- * Function: print_manager_output
- *
- * Purpose: special function that prints any output accumulated by the
- *      manager task.
- *
- * Return: none
- *
- * Programmer: Leon Arber
- *
- * Date: Feb 7, 2005
- *
- *-------------------------------------------------------------------------
- */
-void print_manager_output(void)
-{
-    /* If there was something we buffered, let's print it now */
-    if( (outBuffOffset>0) && g_Parallel)
-    {
-        printf("%s", outBuff);
-
-        if(overflow_file)
-        {
-            int     tmp;
-            rewind(overflow_file);
-            while((tmp = getc(overflow_file)) >= 0)
-                putchar(tmp);
-            fclose(overflow_file);
-            overflow_file = NULL;
-        }
-
-        fflush(stdout);
-        memset(outBuff, 0, OUTBUFF_SIZE);
-        outBuffOffset = 0;
-    }
-    else if( (outBuffOffset>0) && !g_Parallel)
-    {
-        fprintf(stderr, "h5diff error: outBuffOffset>0, but we're not in parallel!\n");
-    }
-}
-
-/*-------------------------------------------------------------------------
- * Function: print_incoming_data
- *
- * Purpose: special function that prints any output that has been sent to the manager
- *      and is currently sitting in the incoming message queue
- *
- * Return: none
- *
- * Programmer: Leon Arber
- *
- * Date: March 7, 2005
- *
- *-------------------------------------------------------------------------
- */
-
-static void print_incoming_data(void)
-{
-    char data[PRINT_DATA_MAX_SIZE+1];
-    int  incomingMessage;
-    MPI_Status Status;
-
-    do
-    {
-        MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &incomingMessage, &Status);
-        if(incomingMessage)
-        {
-            memset(data, 0, PRINT_DATA_MAX_SIZE+1);
-            MPI_Recv(data, PRINT_DATA_MAX_SIZE, MPI_CHAR, Status.MPI_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &Status);
-
-            printf("%s", data);
-        }
-    } while(incomingMessage);
-}
-#endif
-
-/*-------------------------------------------------------------------------
- * Function: is_valid_options
- *
- * Purpose: check if options are valid
- *
- * Return: 
- *  1 : Valid
- *  0 : Not valid
- *
- * Programmer: Jonathan Kim
- *
- * Date: Feb 17, 2010
- *
- *------------------------------------------------------------------------*/
-static int is_valid_options(diff_opt_t *options)
-{
-    int ret=1; /* init to valid */
-
-    /*-----------------------------------------------
-     * no -q(quiet) with -v (verbose) or -r (report) */
-    if(options->m_quiet && (options->m_verbose || options->m_report))
-    {
-        parallel_print("Error: -q (quiet mode) cannot be added to verbose or report modes\n");
-        options->err_stat=1;
-        ret = 0;
-        goto out;
-    }
-
-    /* -------------------------------------------------------
-     * only allow --no-dangling-links along with --follow-symlinks */
-    if(options->no_dangle_links && !options->follow_links)
-    {
-        parallel_print("Error: --no-dangling-links must be used along with --follow-symlinks option.\n");
-        options->err_stat=1;
-        ret = 0;
-        goto out;
-    }
-
-out:
-
-    return ret;
-}
-
-/*-------------------------------------------------------------------------
- * Function: is_exclude_path
- *
- * Purpose: check if 'paths' are part of exclude path list
- *
- * Return:  
- *   1 - excluded path
- *   0 - not excluded path
- * 
- * Programmer: Jonathan Kim
- * Date: Aug 23, 2010
- *------------------------------------------------------------------------*/
-static int is_exclude_path (char * path, h5trav_type_t type, diff_opt_t *options)
-{
-    struct exclude_path_list * exclude_path_ptr;
-    int ret_cmp;
-    int ret = 0;
-    int len_grp;
-
-    /* check if exclude path option is given */
-    if (!options->exclude_path)
-        goto out;
-
-    /* assign to local exclude list pointer */
-    exclude_path_ptr = options->exclude;
-
-    /* search objects in exclude list */
-    while (NULL != exclude_path_ptr)
-    {
-        /* if given object is group, exclude its members as well */
-        if (exclude_path_ptr->obj_type == H5TRAV_TYPE_GROUP)
-        {
-            ret_cmp = HDstrncmp(exclude_path_ptr->obj_path, path,
-                                strlen(exclude_path_ptr->obj_path));
-            if (ret_cmp == 0)
-            {
-                /* check if given path belong to an excluding group, if so 
-                 * exclude it as well.
-                 * This verifies if “/grp1/dset1” is only under “/grp1”, but
-                 * not under “/grp1xxx/” group.  
-                 */ 
-                len_grp = HDstrlen(exclude_path_ptr->obj_path);
-                if (path[len_grp] == '/')
-                {
-                    /* belong to excluded group! */
-                    ret = 1;
-                    break;  /* while */
-                }
-            }
-        }
-        /* exclude target is not group, just exclude the object */
-        else  
-        {
-            ret_cmp = HDstrcmp(exclude_path_ptr->obj_path, path);
-            if (ret_cmp == 0)
-            {
-                /* excluded non-group object */
-                ret = 1;
-                /* assign type as scan progress, which is sufficient to 
-                 * determine type for excluding groups from the above if. */
-                exclude_path_ptr->obj_type = type;
-                break; /* while */
-            }
-        }
-        exclude_path_ptr = exclude_path_ptr->next;
-    }
-
-out:
-    return  ret;
-}
-
-
-/*-------------------------------------------------------------------------
- * Function: free_exclude_path_list
- *
- * Purpose: free exclud object list from diff options
- *
- * Programmer: Jonathan Kim
- * Date: Aug 23, 2010
- *------------------------------------------------------------------------*/
-static void free_exclude_path_list(diff_opt_t *options)
-{
-    struct exclude_path_list * curr = options->exclude;
-    struct exclude_path_list * next;
-
-    while (NULL != curr)
-    {
-        next = curr->next;
-        HDfree(curr);
-        curr = next;
-    }
-}
-
-/*-------------------------------------------------------------------------
- * Function: build_match_list
- *
- * Purpose: get list of matching path_name from info1 and info2
- *
- * Note:
- *  Find common objects; the algorithm used for this search is the
- *  cosequential match algorithm and is described in
- *  Folk, Michael; Zoellick, Bill. (1992). File Structures. Addison-Wesley.
- *  Moved out from diff_match() to make code more flexible.
- *
- * Parameter:
- *  table_out [OUT] : return the list
- *
- * Programmer: Jonathan Kim
- *
- * Date: Aug 18, 2010
- *------------------------------------------------------------------------*/
-static void build_match_list (const char *objname1, trav_info_t *info1, const char *objname2, trav_info_t *info2, trav_table_t ** table_out, diff_opt_t *options)
-{
-    unsigned i;
-    size_t curr1 = 0;
-    size_t curr2 = 0;
-    unsigned infile[2];
-    char * path1_lp;
-    char * path2_lp;
-    h5trav_type_t type1_l;
-    h5trav_type_t type2_l;
-    int path1_offset = 0;
-    int path2_offset = 0;
-    int cmp;
-    trav_table_t *table;
-    size_t  idx;
-
-    /* init */
-    trav_table_init( &table );
-
-    /*
-     * This is necessary for the case that given objects are group and
-     * have different names (ex: obj1 is /grp1 and obj2 is /grp5).
-     * All the objects belong to given groups are the cadidates.
-     * So prepare to compare paths without the group names.
-     */
-    /* if obj1 is not root */
-    if (HDstrcmp (objname1,"/") != 0)
-        path1_offset = HDstrlen(objname1);
-    /* if obj2 is not root */
-    if (HDstrcmp (objname2,"/") != 0)
-        path2_offset = HDstrlen(objname2);
-
-    /*--------------------------------------------------
-    * build the list
-    */
-    while(curr1 < info1->nused && curr2 < info2->nused)
-    {
-        
-        path1_lp = (info1->paths[curr1].path) + path1_offset;
-        path2_lp = (info2->paths[curr2].path) + path2_offset;
-        type1_l = info1->paths[curr1].type;
-        type2_l = info2->paths[curr2].type;
-        
-        /* criteria is string compare */
-        cmp = HDstrcmp(path1_lp, path2_lp);
-
-        if(cmp == 0) {
-            if(!is_exclude_path(path1_lp, type1_l, options))
-            {
-                infile[0] = 1;
-                infile[1] = 1;
-                trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table);
-                /* if the two point to the same target object,
-                 * mark that in table */
-                if (info1->paths[curr1].fileno == info2->paths[curr2].fileno &&
-                    info1->paths[curr1].objno == info2->paths[curr2].objno )
-                {
-                    idx = table->nobjs - 1;
-                    table->objs[idx].is_same_trgobj = 1;
-                }
-            }
-            curr1++;
-            curr2++;
-        } /* end if */
-        else if(cmp < 0)
-        {
-            if(!is_exclude_path(path1_lp, type1_l, options))
-            {
-                infile[0] = 1;
-                infile[1] = 0;
-                trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table);
-            }
-            curr1++;
-        } /* end else-if */
-        else
-        {
-            if (!is_exclude_path(path2_lp, type2_l, options))
-            {
-                infile[0] = 0;
-                infile[1] = 1;
-                trav_table_addflags(infile, path2_lp, info2->paths[curr2].type, table);
-            }
-            curr2++;
-        } /* end else */
-    } /* end while */
-
-    /* list1 did not end */
-    infile[0] = 1;
-    infile[1] = 0;
-    while(curr1 < info1->nused)
-    {
-        if(!is_exclude_path(path1_lp, type1_l, options))
-        {
-            path1_lp = (info1->paths[curr1].path) + path1_offset;
-            trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table);
-        }
-        curr1++;
-    } /* end while */
-
-    /* list2 did not end */
-    infile[0] = 0;
-    infile[1] = 1;
-    while(curr2 < info2->nused)
-    {
-        if (!is_exclude_path(path2_lp, type2_l, options))
-        {
-            path2_lp = (info2->paths[curr2].path) + path2_offset;
-            trav_table_addflags(infile, path2_lp, info2->paths[curr2].type, table);
-        } 
-        curr2++;
-    } /* end while */
-
-    free_exclude_path_list (options);
-   /*------------------------------------------------------
-    * print the list
-    */
-    if(options->m_verbose)
-    {
-        parallel_print("\n");
-        /* if given objects is group under root */
-        if (HDstrcmp (objname1,"/") || HDstrcmp (objname2,"/"))
-            parallel_print("group1   group2\n");
-        else
-            parallel_print("file1     file2\n");
-        parallel_print("---------------------------------------\n");
-        for(i = 0; i < table->nobjs; i++) 
-        {
-            char c1, c2;
-            c1 = (table->objs[i].flags[0]) ? 'x' : ' ';
-            c2 = (table->objs[i].flags[1]) ? 'x' : ' ';
-            parallel_print("%5c %6c    %-15s\n", c1, c2, table->objs[i].name);
-        } /* end for */
-        parallel_print ("\n");
-    } /* end if */
-
-    *table_out = table;
-}
-
-
-/*-------------------------------------------------------------------------
- * Function: trav_grp_objs
- *
- * Purpose: 
- *  Call back function from h5trav_visit(). 
- *
- * Programmer: Jonathan Kim
- *
- * Date: Aug 16, 2010
- *------------------------------------------------------------------------*/
-static herr_t trav_grp_objs(const char *path, const H5O_info_t *oinfo,
-    const char *already_visited, void *udata)
-{
-    trav_info_visit_obj(path, oinfo, already_visited, udata);
-
-    return 0;
-} 
-
-/*-------------------------------------------------------------------------
- * Function: trav_grp_symlinks
- *
- * Purpose: 
- *  Call back function from h5trav_visit(). 
- *  Track and extra checkings while visiting all symbolic-links.
- *
- * Programmer: Jonathan Kim
- *
- * Date: Aug 16, 2010
- *------------------------------------------------------------------------*/
-static herr_t trav_grp_symlinks(const char *path, const H5L_info_t *linfo, 
-                               void *udata)
-{                               
-    trav_info_t *tinfo = (trav_info_t *)udata;
-    diff_opt_t *opts = (diff_opt_t *)tinfo->opts;
-    int ret;
-    h5tool_link_info_t lnk_info;
-    const char *ext_fname;
-    const char *ext_path;
-
-    /* init linkinfo struct */
-    memset(&lnk_info, 0, sizeof(h5tool_link_info_t));
-
-    if (!opts->follow_links)
-    {
-        trav_info_visit_lnk(path, linfo, tinfo);
-        goto done;
-    }
-
-    switch(linfo->type)
-    {
-    case H5L_TYPE_SOFT:
-        ret = H5tools_get_symlink_info(tinfo->fid, path, &lnk_info, opts->follow_links);
-        /* error */
-        if (ret < 0)
-            goto done;
-        /* no dangling link option given and detect dangling link */
-        else if (ret == 0)
-        {
-            tinfo->symlink_visited.dangle_link = TRUE;
-            trav_info_visit_lnk(path, linfo, tinfo);
-            if (opts->no_dangle_links)
-                opts->err_stat = 1; /* make dgangling link is error */
-            goto done;
-        }
-
-        /* check if already visit the target object */        
-        if(symlink_is_visited( &(tinfo->symlink_visited), linfo->type, NULL, lnk_info.trg_path)) 
-            goto done;
-
-        /* add this link as visited link */
-        if(symlink_visit_add( &(tinfo->symlink_visited), linfo->type, NULL, lnk_info.trg_path) < 0) 
-            goto done;
-                
-        if(h5trav_visit(tinfo->fid, path, TRUE, TRUE,
-                     trav_grp_objs,trav_grp_symlinks, tinfo) < 0)
-        {
-            parallel_print("Error: Could not get file contents\n");
-            opts->err_stat = 1;
-            goto done;
-        }
-        break;
-    
-    case H5L_TYPE_EXTERNAL:    
-        ret = H5tools_get_symlink_info(tinfo->fid, path, &lnk_info, opts->follow_links);
-        /* error */
-        if (ret < 0)
-            goto done;
-        /* no dangling link option given and detect dangling link */
-        else if (ret == 0)
-        {
-            tinfo->symlink_visited.dangle_link = TRUE;
-            trav_info_visit_lnk(path, linfo, tinfo);
-            if (opts->no_dangle_links)
-                opts->err_stat = 1; /* make dgangling link is error */
-            goto done;
-        }
-
-        if(H5Lunpack_elink_val(lnk_info.trg_path, linfo->u.val_size, NULL, &ext_fname, &ext_path) < 0) 
-            goto done;
-
-        /* check if already visit the target object */        
-        if(symlink_is_visited( &(tinfo->symlink_visited), linfo->type, ext_fname, ext_path)) 
-            goto done;
-
-        /* add this link as visited link */
-        if(symlink_visit_add( &(tinfo->symlink_visited), linfo->type, ext_fname, ext_path) < 0) 
-            goto done;
-                
-        if(h5trav_visit(tinfo->fid, path, TRUE, TRUE,
-                        trav_grp_objs,trav_grp_symlinks, tinfo) < 0)
-        {
-            parallel_print("Error: Could not get file contents\n");
-            opts->err_stat = 1;
-            goto done;
-        }
-        break;
-    default:
-        ;
-        break;
-    } /* end of switch */
-
-done:    
-    if (lnk_info.trg_path)
-        HDfree(lnk_info.trg_path);
-    return 0;
-}    
-
-
-/*-------------------------------------------------------------------------
- * Function: h5diff
- *
- * Purpose: public function, can be called in an application program.
- *   return differences between 2 HDF5 files
- *
- * Return: Number of differences found.
- *
- * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu
- *
- * Date: October 22, 2003
- *
- *-------------------------------------------------------------------------
- */
-hsize_t h5diff(const char *fname1,
-               const char *fname2,
-               const char *objname1,
-               const char *objname2,
-               diff_opt_t *options)
-{
-    hid_t        file1_id = (-1);
-    hid_t        file2_id = (-1);
-    char         filenames[2][MAX_FILENAME];
-    hsize_t      nfound = 0;
-    int i;
-    int l_ret;
-    const char * obj1fullname = NULL;
-    const char * obj2fullname = NULL;
-    /* init to group type */
-    h5trav_type_t obj1type = H5TRAV_TYPE_GROUP;
-    h5trav_type_t obj2type = H5TRAV_TYPE_GROUP;
-    /* for single object */
-    H5O_info_t oinfo1, oinfo2; /* object info */
-    trav_info_t  *info1_obj = NULL;
-    trav_info_t  *info2_obj = NULL;
-    /* for group object */
-    trav_info_t  *info1_grp = NULL;
-    trav_info_t  *info2_grp = NULL;
-    /* local pointer */
-    trav_info_t  *info1_lp;
-    trav_info_t  *info2_lp;
-    /* link info from specified object */
-    H5L_info_t src_linfo1;
-    H5L_info_t src_linfo2;
-    /* link info from member object */
-    h5tool_link_info_t trg_linfo1;
-    h5tool_link_info_t trg_linfo2;
-    /* list for common objects */
-    trav_table_t *match_list = NULL;
-
-    /* init filenames */
-    HDmemset(filenames, 0, MAX_FILENAME * 2);
-    /* init link info struct */
-    HDmemset(&trg_linfo1, 0, sizeof(h5tool_link_info_t));
-    HDmemset(&trg_linfo2, 0, sizeof(h5tool_link_info_t));
-
-   /*-------------------------------------------------------------------------
-    * check invalid combination of options
-    *-----------------------------------------------------------------------*/
-    if(!is_valid_options(options))
-        goto out;
-
-    options->cmn_objs = 1; /* eliminate warning */
-
-    /*-------------------------------------------------------------------------
-    * open the files first; if they are not valid, no point in continuing
-    *-------------------------------------------------------------------------
-    */
-
-    /* disable error reporting */
-    H5E_BEGIN_TRY
-    {
-        /* open file 1 */
-        if((file1_id = h5tools_fopen(fname1, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, (size_t)0)) < 0) 
-        {
-            parallel_print("h5diff: <%s>: unable to open file\n", fname1);
-            options->err_stat = 1;
-            goto out;
-        } /* end if */
-
-
-        /* open file 2 */
-        if((file2_id = h5tools_fopen(fname2, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, (size_t)0)) < 0) 
-        {
-            parallel_print("h5diff: <%s>: unable to open file\n", fname2);
-            options->err_stat = 1;
-            goto out;
-        } /* end if */
-    /* enable error reporting */
-    } H5E_END_TRY;
-
-    /*-------------------------------------------------------------------------
-    * Initialize the info structs
-    *-------------------------------------------------------------------------
-    */
-    trav_info_init(fname1, file1_id, &info1_obj);
-    trav_info_init(fname2, file2_id, &info2_obj);
-
-    /* if any object is specified */
-    if (objname1)
-    {
-        /* malloc 2 more for "/" and end-of-line */
-        obj1fullname = (char*)HDcalloc(HDstrlen(objname1) + 2, sizeof(char));
-        obj2fullname = (char*)HDcalloc(HDstrlen(objname2) + 2, sizeof(char));
-
-        /* make the given object1 fullpath, start with "/"  */
-        if (HDstrncmp(objname1, "/", 1))
-        {
-            HDstrcpy(obj1fullname, "/");
-            HDstrcat(obj1fullname, objname1);
-        }
-        else
-            HDstrcpy(obj1fullname, objname1);
-
-        /* make the given object2 fullpath, start with "/" */
-        if (HDstrncmp(objname2, "/", 1))
-        {
-            HDstrcpy(obj2fullname, "/");
-            HDstrcat(obj2fullname, objname2);
-        }
-        else
-            HDstrcpy(obj2fullname, objname2);
-
-        /*----------------------------------------------------------
-         * check if obj1 is root, group, single object or symlink
-         */
-        if(!HDstrcmp(obj1fullname, "/"))
-        {
-            obj1type = H5TRAV_TYPE_GROUP;
-        }
-        else
-        {
-            /* check if link itself exist */
-            if(H5Lexists(file1_id, obj1fullname, H5P_DEFAULT) <= 0) 
-            {
-                parallel_print ("Object <%s> could not be found in <%s>\n", obj1fullname, fname1);
-                options->err_stat = 1;
-                goto out;
-            }
-            /* get info from link */
-            if(H5Lget_info(file1_id, obj1fullname, &src_linfo1, H5P_DEFAULT) < 0) 
-            {
-                parallel_print("Unable to get link info from <%s>\n", obj1fullname);
-                goto out;
-            }
-
-            info1_lp = info1_obj;
-
-            /* 
-             * check the type of specified path for hard and symbolic links
-             */
-            if(src_linfo1.type == H5L_TYPE_HARD)
-            {
-                /* optional data pass */
-                info1_obj->opts = (diff_opt_t*)options;
-
-                if(H5Oget_info_by_name(file1_id, obj1fullname, &oinfo1, H5P_DEFAULT) < 0)
-                {
-                    parallel_print("Error: Could not get file contents\n");
-                    options->err_stat = 1;
-                    goto out;
-                }
-                obj1type = oinfo1.type;
-                trav_info_add(info1_obj, obj1fullname, obj1type);
-            }
-            else if (src_linfo1.type == H5L_TYPE_SOFT)
-            {
-                obj1type = H5TRAV_TYPE_LINK;
-                trav_info_add(info1_obj, obj1fullname, obj1type);
-            }
-            else if (src_linfo1.type == H5L_TYPE_EXTERNAL)
-            {
-                obj1type = H5TRAV_TYPE_UDLINK;
-                trav_info_add(info1_obj, obj1fullname, obj1type);
-            }
-        }
-
-        /*----------------------------------------------------------
-         * check if obj2 is root, group, single object or symlink
-         */
-        if(!HDstrcmp(obj2fullname, "/"))
-        {
-            obj2type = H5TRAV_TYPE_GROUP;
-        }
-        else
-        {
-            /* check if link itself exist */
-            if(H5Lexists(file2_id, obj2fullname, H5P_DEFAULT) <= 0) 
-            {
-                parallel_print ("Object <%s> could not be found in <%s>\n", obj2fullname, fname2);
-                options->err_stat = 1;
-                goto out;
-            }
-            /* get info from link */
-            if(H5Lget_info(file2_id, obj2fullname, &src_linfo2, H5P_DEFAULT) < 0) 
-            {
-                parallel_print("Unable to get link info from <%s>\n", obj2fullname);
-                goto out;
-            }
-
-            info2_lp = info2_obj;
-
-            /* 
-             * check the type of specified path for hard and symbolic links
-             */
-            if(src_linfo2.type == H5L_TYPE_HARD)
-            {
-                /* optional data pass */
-                info2_obj->opts = (diff_opt_t*)options;
-
-                if(H5Oget_info_by_name(file2_id, obj2fullname, &oinfo2, H5P_DEFAULT) < 0)
-                {
-                    parallel_print("Error: Could not get file contents\n");
-                    options->err_stat = 1;
-                    goto out;
-                }
-                obj2type = oinfo2.type;
-                trav_info_add(info2_obj, obj2fullname, obj2type);
-            }
-            else if (src_linfo2.type == H5L_TYPE_SOFT)
-            {
-                obj2type = H5TRAV_TYPE_LINK;
-                trav_info_add(info2_obj, obj2fullname, obj2type);
-            }
-            else if (src_linfo2.type == H5L_TYPE_EXTERNAL)
-            {
-                obj2type = H5TRAV_TYPE_UDLINK;
-                trav_info_add(info2_obj, obj2fullname, obj2type);
-            }
-        }           
-    }
-    /* if no object specified */
-    else
-    {
-        /* set root group */
-        obj1fullname = (char*)HDcalloc(2, sizeof(char));
-        HDstrcat(obj1fullname, "/");
-        obj2fullname = (char*)HDcalloc(2, sizeof(char));
-        HDstrcat(obj2fullname, "/");
-    }
-
-   /* 
-    * If verbose options is used, need to traverse thorugh the list of objects 
-    * in the group to print out objects information.
-    * Use h5tools_is_obj_same() to improve performance by skipping 
-    * comparing details of same objects. 
-    */
-    if(!(options->m_verbose || options->m_report))
-    {
-        if (h5tools_is_obj_same(file1_id,obj1fullname,file2_id,obj2fullname)!=0)
-            goto out;
-    }
-
-    /*---------------------------------------------
-     * check for following symlinks 
-     */
-    if (options->follow_links)
-    {
-        /* pass how to handle printing warning to linkinfo option */
-        if(print_warn(options))
-            trg_linfo1.opt.msg_mode = trg_linfo2.opt.msg_mode = 1;
-
-        /*-------------------------------
-         * check symbolic link (object1)
-         */
-        l_ret = H5tools_get_symlink_info(file1_id, obj1fullname, &trg_linfo1, TRUE);
-        /* dangling link */
-        if (l_ret == 0)
-        {
-            if (options->no_dangle_links)
-            {
-                /* gangling link is error */
-                if(options->m_verbose)
-                    parallel_print("Warning: <%s> is a dangling link.\n", obj1fullname);
-                options->err_stat = 1;
-                goto out;
-            }
-            else
-            {
-                if(options->m_verbose)
-                    parallel_print("obj1 <%s> is a dangling link.\n", obj1fullname);
-                nfound++;
-                print_found(nfound);
-                goto out;
-            }
-        }
-        else if(l_ret < 0) /* fail */
-        {
-            parallel_print ("Object <%s> could not be found in <%s>\n", obj1fullname, fname1);
-            options->err_stat = 1;
-            goto out;
-        }
-        else if(l_ret != 2) /* symbolic link */
-            obj1type = trg_linfo1.trg_type;
-
-        /*-------------------------------
-         * check symbolic link (object2)
-         */
-        l_ret = H5tools_get_symlink_info(file2_id, obj2fullname, &trg_linfo2, TRUE);
-        /* dangling link */
-        if (l_ret == 0)
-        {
-            if (options->no_dangle_links)
-            {
-                /* gangling link is error */
-                if(options->m_verbose)
-                    parallel_print("Warning: <%s> is a dangling link.\n", obj2fullname);
-                options->err_stat = 1;
-                goto out;
-            }
-            else
-            {
-                if(options->m_verbose)
-                    parallel_print("obj2 <%s> is a dangling link.\n", obj2fullname);
-                nfound++;
-                print_found(nfound);
-                goto out;
-            }
-        }
-        else if(l_ret < 0) /* fail */ 
-        {
-            parallel_print ("Object <%s> could not be found in <%s>\n", obj2fullname, fname2);
-            options->err_stat = 1;
-            goto out;
-        }
-        else if(l_ret != 2)  /* symbolic link */
-            obj2type = trg_linfo2.trg_type;
-    } /* end of if follow symlinks */
-
-
-    /* if both obj1 and obj2 are group */
-    if (obj1type == H5TRAV_TYPE_GROUP && obj2type == H5TRAV_TYPE_GROUP)
-    {
-
-        /* 
-         * traverse group1 
-         */
-        trav_info_init(fname1, file1_id, &info1_grp);
-        /* optional data pass */
-        info1_grp->opts = (diff_opt_t*)options;
-
-        if(h5trav_visit(file1_id,obj1fullname,TRUE,TRUE,
-                        trav_grp_objs,trav_grp_symlinks, info1_grp) < 0)
-        {
-            parallel_print("Error: Could not get file contents\n");
-            options->err_stat = 1;
-            goto out;
-        }
-        info1_lp = info1_grp;
-
-        /* 
-         * traverse group2 
-         */
-        trav_info_init(fname2, file2_id, &info2_grp);
-        /* optional data pass */
-        info2_grp->opts = (diff_opt_t*)options;
-
-        if(h5trav_visit(file2_id,obj2fullname,TRUE,TRUE,
-                        trav_grp_objs,trav_grp_symlinks, info2_grp) < 0)
-        {
-            parallel_print("Error: Could not get file contents\n");
-            options->err_stat = 1;
-            goto out;
-        } /* end if */
-        info2_lp = info2_grp;
-
-
-#ifdef H5_HAVE_PARALLEL
-        if(g_Parallel)
-        {
-            if((HDstrlen(fname1) > MAX_FILENAME) || 
-               (HDstrlen(fname2) > MAX_FILENAME))
-            {
-                fprintf(stderr, "The parallel diff only supports path names up to %d characters\n", MAX_FILENAME);
-                MPI_Abort(MPI_COMM_WORLD, 0);
-            } /* end if */
-
-            HDstrcpy(filenames[0], fname1);
-            HDstrcpy(filenames[1], fname2);
-
-            /* Alert the worker tasks that there's going to be work. */
-            for(i = 1; i < g_nTasks; i++)
-                MPI_Send(filenames, (MAX_FILENAME * 2), MPI_CHAR, i, MPI_TAG_PARALLEL, MPI_COMM_WORLD);
-        } /* end if */
-#endif
-        build_match_list (obj1fullname, info1_lp, obj2fullname, info2_lp, 
-                         &match_list, options);
-        nfound = diff_match(file1_id, obj1fullname, info1_lp, 
-                            file2_id, obj2fullname, info2_lp, 
-                            match_list, options); 
-    }
-    else
-    {
-#ifdef H5_HAVE_PARALLEL
-        if(g_Parallel)
-            /* Only single object diff, parallel workers won't be needed */
-            phdiff_dismiss_workers();
-#endif
-
-        nfound = diff_compare(file1_id, fname1, obj1fullname, info1_lp,
-                              file2_id, fname2, obj2fullname, info2_lp,
-                              options);
-    }
-
-out:
-#ifdef H5_HAVE_PARALLEL
-    if(g_Parallel)
-        /* All done at this point, let tasks know that they won't be needed */
-        phdiff_dismiss_workers();
-#endif
-    /* free buffers in trav_info structures */
-    if (info1_obj)
-        trav_info_free(info1_obj);
-    if (info2_obj)
-        trav_info_free(info2_obj);
-
-    if (info1_grp)
-        trav_info_free(info1_grp);
-    if (info2_grp)
-        trav_info_free(info2_grp);
-
-    /* free buffers */
-    if (obj1fullname)
-        HDfree(obj1fullname);
-    if (obj2fullname)
-        HDfree(obj2fullname);
-
-    /* free link info buffer */
-    if (trg_linfo1.trg_path)
-        HDfree(trg_linfo1.trg_path);
-    if (trg_linfo2.trg_path)
-        HDfree(trg_linfo2.trg_path);
-
-    /* close */
-    H5E_BEGIN_TRY
-    {
-        H5Fclose(file1_id);
-        H5Fclose(file2_id);
-    } H5E_END_TRY;
-
-    return nfound;
-}
-
-
-
-/*-------------------------------------------------------------------------
- * Function: diff_match
- *
- * Purpose: 
- *  Compare common objects in given groups according to table structure. 
- *  The table structure has flags which can be used to find common objects 
- *  and will be compared. 
- *  Common object means same name (absolute path) objects in both location.
- *
- * Return: Number of differences found
- *
- * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu
- *
- * Date: May 9, 2003
- *
- * Modifications: Jan 2005 Leon Arber, larber@uiuc.edu
- *    Added support for parallel diffing
- *
- * Pedro Vicente, pvn@hdfgroup.org, Nov 4, 2008
- *    Compare the graph and make h5diff return 1 for difference if
- * 1) the number of objects in file1 is not the same as in file2
- * 2) the graph does not match, i.e same names (absolute path)
- * 3) objects with the same name are not of the same type
- *-------------------------------------------------------------------------
- */
-hsize_t diff_match(hid_t file1_id, const char *grp1, trav_info_t *info1,
-                   hid_t file2_id, const char *grp2, trav_info_t *info2,
-                   trav_table_t *table, diff_opt_t *options)
-{
-    hsize_t      nfound = 0;
-    unsigned     i;
-
-    char * grp1_path = "";
-    char * grp2_path = "";
-    char * obj1_fullpath = NULL;
-    char * obj2_fullpath = NULL;
-    h5trav_type_t objtype;
-    diff_args_t argdata;
-
-
-    /* 
-     * if not root, prepare object name to be pre-appended to group path to
-     * make full path
-     */
-    if (HDstrcmp (grp1, "/"))
-        grp1_path = grp1;
-    if (HDstrcmp (grp2, "/"))
-        grp2_path = grp2;
-
-    /*-------------------------------------------------------------------------
-    * regarding the return value of h5diff (0, no difference in files, 1 difference )
-    * 1) the number of objects in file1 must be the same as in file2
-    * 2) the graph must match, i.e same names (absolute path)
-    * 3) objects with the same name must be of the same type
-    *-------------------------------------------------------------------------
-    */     
-       
-    /* not valid compare nused when --exclude-path option is used */
-    if (!options->exclude_path)
-    {
-        /* number of different objects */
-        if ( info1->nused != info2->nused )
-        {
-            options->contents = 0;
-        }
-    }
-    
-    /* objects in one file and not the other */
-    for( i = 0; i < table->nobjs; i++)
-    {
-        if( table->objs[i].flags[0] != table->objs[i].flags[1] )
-        {
-            options->contents = 0;
-            break;
-        }
-    }
-
-    /* objects with the same name but different HDF5 types */
-    for( i = 0; i < table->nobjs; i++) 
-    {
-        if ( table->objs[i].flags[0] && table->objs[i].flags[1] )
-        {
-            if ( table->objs[i].type != table->objs[i].type )
-            {
-                options->contents = 0;
-            }
-        }
-    }
-
-    /*-------------------------------------------------------------------------
-    * do the diff for common objects
-    *-------------------------------------------------------------------------
-    */
-#ifdef H5_HAVE_PARALLEL
-    {
-    char *workerTasks = (char*)HDmalloc((g_nTasks - 1) * sizeof(char));
-    int n;
-    int busyTasks = 0;
-    struct diffs_found nFoundbyWorker;
-    struct diff_mpi_args args;
-    int havePrintToken = 1;
-    MPI_Status Status;
-
-    /*set all tasks as free */
-    HDmemset(workerTasks, 1, (g_nTasks - 1));
-#endif
-
-    for(i = 0; i < table->nobjs; i++)
-    {
-        if( table->objs[i].flags[0] && table->objs[i].flags[1])
-        {
-            objtype = table->objs[i].type;
-            /* make full path for obj1 */
-            obj1_fullpath = (char*)HDcalloc (strlen(grp1_path) + strlen (table->objs[i].name) + 1, sizeof (char));
-            HDstrcpy(obj1_fullpath, grp1_path);
-            HDstrcat(obj1_fullpath, table->objs[i].name);
-
-            /* make full path for obj2 */
-            obj2_fullpath = (char*)HDcalloc (strlen(grp2_path) + strlen (table->objs[i].name) + 1, sizeof (char));
-            HDstrcpy(obj2_fullpath, grp2_path);
-            HDstrcat(obj2_fullpath, table->objs[i].name);
-
-            /* Set argdata to pass other args into diff() */
-            argdata.type = objtype;
-            argdata.is_same_trgobj = table->objs[i].is_same_trgobj;
-
-            options->cmn_objs = 1;
-            if(!g_Parallel)
-            {
-                nfound += diff(file1_id, obj1_fullpath,
-                               file2_id, obj2_fullpath, 
-                               options, &argdata);
-            } /* end if */
-#ifdef H5_HAVE_PARALLEL
-            else
-            {
-                int workerFound = 0;
-
-                h5diffdebug("beginning of big else block\n");
-                /* We're in parallel mode */
-                /* Since the data type of diff value is hsize_t which can
-                * be arbitary large such that there is no MPI type that
-                * matches it, the value is passed between processes as
-                * an array of bytes in order to be portable.  But this
-                * may not work in non-homogeneous MPI environments.
-                */
-
-                /*Set up args to pass to worker task. */
-                if(HDstrlen(obj1_fullpath) > 255 || 
-                   HDstrlen(obj2_fullpath) > 255)
-                {
-                    printf("The parallel diff only supports object names up to 255 characters\n");
-                    MPI_Abort(MPI_COMM_WORLD, 0);
-                } /* end if */
-
-                /* set args struct to pass */
-                HDstrcpy(args.name1, obj1_fullpath);
-                HDstrcpy(args.name2, obj2_fullpath);
-                args.options = *options;
-                args.argdata.type = objtype;
-                args.argdata.is_same_trgobj = table->objs[i].is_same_trgobj;
-
-                h5diffdebug2("busyTasks=%d\n", busyTasks);
-                /* if there are any outstanding print requests, let's handle one. */
-                if(busyTasks > 0)
-                {
-                    int incomingMessage;
-
-                    /* check if any tasks freed up, and didn't need to print. */
-                    MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &incomingMessage, &Status);
-
-                    /* first block*/
-                    if(incomingMessage)
-                    {
-                        workerTasks[Status.MPI_SOURCE - 1] = 1;
-                        MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status);
-                        nfound += nFoundbyWorker.nfound;
-                        options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                        busyTasks--;
-                    } /* end if */
-
-                    /* check to see if the print token was returned. */
-                    if(!havePrintToken)
-                    {
-                        /* If we don't have the token, someone is probably sending us output */
-                        print_incoming_data();
-
-                        /* check incoming queue for token */
-                        MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status);
-
-                        /* incoming token implies free task. */
-                        if(incomingMessage) {
-                            workerTasks[Status.MPI_SOURCE - 1] = 1;
-                            MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                            nfound += nFoundbyWorker.nfound;
-                            options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                            busyTasks--;
-                            havePrintToken = 1;
-                        } /* end if */
-                    } /* end if */
-
-                    /* check to see if anyone needs the print token. */
-                    if(havePrintToken)
-                    {
-                        /* check incoming queue for print token requests */
-                        MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &incomingMessage, &Status);
-                        if(incomingMessage)
-                        {
-                            MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status);
-                            MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD);
-                            havePrintToken = 0;
-                        } /* end if */
-                    } /* end if */
-                } /* end if */
-
-                /* check array of tasks to see which ones are free.
-                * Manager task never does work, so freeTasks[0] is really
-                * worker task 0. */
-                for(n = 1; (n < g_nTasks) && !workerFound; n++)
-                {
-                    if(workerTasks[n-1])
-                    {
-                        /* send file id's and names to first free worker */
-                        MPI_Send(&args, sizeof(args), MPI_BYTE, n, MPI_TAG_ARGS, MPI_COMM_WORLD);
-
-                        /* increment counter for total number of prints. */
-                        busyTasks++;
-
-                        /* mark worker as busy */
-                        workerTasks[n - 1] = 0;
-                        workerFound = 1;
-                    } /* end if */
-                } /* end for */
-
-                h5diffdebug2("workerfound is %d \n", workerFound);
-                if(!workerFound)
-                {
-                    /* if they were all busy, we've got to wait for one free up
-                     *  before we can move on.  If we don't have the token, some
-                     * task is currently printing so we'll wait for that task to
-                     * return it.
-                     */
-
-                    if(!havePrintToken)
-                    {
-                        while(!havePrintToken)
-                        {
-                            int incomingMessage;
-
-                            print_incoming_data();
-                            MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status);
-                            if(incomingMessage)
-                            {
-                                MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                                havePrintToken = 1;
-                                nfound += nFoundbyWorker.nfound;
-                                options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                                /* send this task the work unit. */
-                                MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_ARGS, MPI_COMM_WORLD);
-                            } /* end if */
-                        } /* end while */
-                    } /* end if */
-                    /* if we do have the token, check for task to free up, or wait for a task to request it */
-                    else
-                    {
-                        /* But first print all the data in our incoming queue */
-                        print_incoming_data();
-                        MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &Status);
-                        if(Status.MPI_TAG == MPI_TAG_DONE)
-                        {
-                            MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status);
-                            nfound += nFoundbyWorker.nfound;
-                            options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                            MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_ARGS, MPI_COMM_WORLD);
-                        } /* end if */
-                        else if(Status.MPI_TAG == MPI_TAG_TOK_REQUEST)
-                        {
-                            int incomingMessage;
-
-                            MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status);
-                            MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD);
-
-                            do
-                            {
-                                MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status);
-
-                                print_incoming_data();
-                            } while(!incomingMessage);
-
-                            MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                            nfound += nFoundbyWorker.nfound;
-                            options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                            MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_ARGS, MPI_COMM_WORLD);
-                        } /* end else-if */
-                        else
-                        {
-                            printf("ERROR: Invalid tag (%d) received \n", Status.MPI_TAG);
-                            MPI_Abort(MPI_COMM_WORLD, 0);
-                            MPI_Finalize();
-                        } /* end else */
-                    } /* end else */
-                } /* end if */
-            } /* end else */
-#endif /* H5_HAVE_PARALLEL */
-            if (obj1_fullpath)
-                HDfree (obj1_fullpath);
-            if (obj2_fullpath)                
-                HDfree (obj2_fullpath);
-        } /* end if */
-    } /* end for */
-    h5diffdebug("done with for loop\n");
-
-#ifdef H5_HAVE_PARALLEL
-    if(g_Parallel)
-    {
-        /* make sure all tasks are done */
-        while(busyTasks > 0)
-        {
-            MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &Status);
-            if(Status.MPI_TAG == MPI_TAG_DONE)
-            {
-                MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status);
-                nfound += nFoundbyWorker.nfound;
-                options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                busyTasks--;
-            } /* end if */
-            else if(Status.MPI_TAG == MPI_TAG_TOK_RETURN)
-            {
-                MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status);
-                nfound += nFoundbyWorker.nfound;
-                options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                busyTasks--;
-                havePrintToken = 1;
-            } /* end else-if */
-            else if(Status.MPI_TAG == MPI_TAG_TOK_REQUEST)
-            {
-                MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status);
-                if(havePrintToken)
-                {
-                    int incomingMessage;
-
-                    MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD);
-
-                    do {
-                        MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status);
-
-                        print_incoming_data();
-                    } while(!incomingMessage);
-
-                    MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                    nfound += nFoundbyWorker.nfound;
-                    options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                    busyTasks--;
-                } /* end if */
-                /* someone else must have it...wait for them to return it, then give it to the task that just asked for it. */
-                else
-                {
-                    int source = Status.MPI_SOURCE;
-                    int incomingMessage;
-
-                    do
-                    {
-                        MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status);
-
-                        print_incoming_data();
-                    } while(!incomingMessage);
-
-
-                    MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                    nfound += nFoundbyWorker.nfound;
-                    options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                    busyTasks--;
-                    MPI_Send(NULL, 0, MPI_BYTE, source, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD);
-                } /* end else */
-            } /* end else-if */
-            else if(Status.MPI_TAG == MPI_TAG_TOK_RETURN)
-            {
-                MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status);
-                nfound += nFoundbyWorker.nfound;
-                options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp;
-                busyTasks--;
-                havePrintToken = 1;
-            } /* end else-if */
-            else if(Status.MPI_TAG == MPI_TAG_PRINT_DATA)
-            {
-                char  data[PRINT_DATA_MAX_SIZE + 1];
-                HDmemset(data, 0, PRINT_DATA_MAX_SIZE + 1);
-
-                MPI_Recv(data, PRINT_DATA_MAX_SIZE, MPI_CHAR, Status.MPI_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &Status);
-
-                printf("%s", data);
-            } /* end else-if */
-            else
-            {
-                printf("ph5diff-manager: ERROR!! Invalid tag (%d) received \n", Status.MPI_TAG);
-                MPI_Abort(MPI_COMM_WORLD, 0);
-            } /* end else */
-        } /* end while */
-
-        for(i = 1; i < g_nTasks; i++)
-            MPI_Send(NULL, 0, MPI_BYTE, i, MPI_TAG_END, MPI_COMM_WORLD);
-
-        /* Print any final data waiting in our queue */
-        print_incoming_data();
-    } /* end if */
-    h5diffdebug("done with if block\n");
-
-    free(workerTasks);
-    }
-#endif /* H5_HAVE_PARALLEL */
-
-    /* free table */
-    if (table)
-        trav_table_free(table);
-
-    return nfound;
-}
-
-
-/*-------------------------------------------------------------------------
- * Function: diff_compare
- *
- * Purpose: get objects from list, and check for the same type
- *
- * Return: Number of differences found
- *
- * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu
- * Date: May 9, 2003
- *
- * Programmer: Jonathan Kim
- *  - add following links feature (Feb 11,2010)
- *-------------------------------------------------------------------------
- */
-
-hsize_t diff_compare(hid_t file1_id,
-                     const char *file1_name,
-                     const char *obj1_name,
-                     trav_info_t *info1,
-                     hid_t file2_id,
-                     const char *file2_name,
-                     const char *obj2_name,
-                     trav_info_t *info2,
-                     diff_opt_t *options)
-{
-    int     f1 = 0;
-    int     f2 = 0;
-    hsize_t nfound = 0;
-    ssize_t i,j;
-    int l_ret;
-    int is_dangle_link1 = 0;
-    int is_dangle_link2 = 0;
-    const char *obj1name = obj1_name;
-    const char *obj2name = obj2_name;
-    diff_args_t argdata;
-
-    /* local variables for diff() */
-    h5trav_type_t obj1type, obj2type;
-
-    /* to get link info */
-    h5tool_link_info_t linkinfo1;
-    h5tool_link_info_t linkinfo2;
-
-    /* init link info struct */
-    HDmemset(&linkinfo1, 0, sizeof(h5tool_link_info_t));
-    HDmemset(&linkinfo2, 0, sizeof(h5tool_link_info_t));
-
-    i = h5trav_getindex (info1, obj1name);
-    j = h5trav_getindex (info2, obj2name);
-
-    if (i == -1)
-    {
-        parallel_print ("Object <%s> could not be found in <%s>\n", obj1name,
-            file1_name);
-        f1 = 1;
-    }
-    if (j == -1)
-    {
-        parallel_print ("Object <%s> could not be found in <%s>\n", obj2name,
-            file2_name);
-        f2 = 1;
-    }
-    if (f1 || f2)
-    {
-        options->err_stat = 1;
-        return 0;
-    }
-    /* use the name with "/" first, as obtained by iterator function */
-    obj1name = info1->paths[i].path;
-    obj2name = info2->paths[j].path;
-
-    obj1type = info1->paths[i].type;
-    obj2type = info2->paths[j].type;
-
-    /*-----------------------------------------------------------------
-     * follow link option, compare with target object 
-    */
-    if (options->follow_links)
-    {
-        /* pass how to handle printing warning to linkinfo option */
-        if(print_warn(options))
-            linkinfo1.opt.msg_mode = linkinfo2.opt.msg_mode = 1;
-
-        /*------------------------------------------------------------
-         * Soft links
-         *------------------------------------------------------------*/
-
-        /*--------------------------
-         * if object1 soft link   */
-        if (obj1type == H5TRAV_TYPE_LINK)
-        {
-            /* get type of target object */
-            l_ret = H5tools_get_symlink_info(file1_id, obj1name, &linkinfo1, TRUE);
-            /* dangling link */
-            if (l_ret == 0)
-            {
-                if (options->no_dangle_links)
-                {
-                    /* gangling link is error */
-                    if(options->m_verbose)
-                        parallel_print("Warning: <%s> is a dangling link.\n", obj1name);
-                    options->err_stat = 1;
-                    goto out;
-                }
-                else
-                    is_dangle_link1 = 1;
-            }
-            /* fail */
-            else if(l_ret < 0)
-            {
-                options->err_stat = 1;
-                goto out;
-            }
-            else /* OK */
-            {
-                /* target type for diff() */
-                obj1type = linkinfo1.trg_type;
-            }
-        }
-        
-        /*-----------------------------
-         * if object2 is soft link   */
-        if (obj2type == H5TRAV_TYPE_LINK)
-        {
-            /* get type target object */
-            l_ret = H5tools_get_symlink_info(file2_id, obj2name, &linkinfo2, TRUE);
-            /* dangling link */
-            if (l_ret == 0)
-            {
-                if (options->no_dangle_links)
-                {
-                    /* gangling link is error */
-                    if(options->m_verbose)
-                        parallel_print("Warning: <%s> is a dangling link.\n", obj2name);
-                    options->err_stat = 1;
-                    goto out;
-                }
-                else
-                    is_dangle_link2=1;
-            }
-            /* fail */
-            else if(l_ret < 0)
-            {
-                options->err_stat = 1;
-                goto out;
-            }
-            else /* OK */
-            {
-                /* target type for diff() */
-                obj2type = linkinfo2.trg_type;
-            }
-        }
-
-        /*------------------------------------------------------------
-         * External links
-         *------------------------------------------------------------*/
-
-        /*--------------------------------
-         * if object1 is external link  */
-        if (obj1type == H5TRAV_TYPE_UDLINK)
-        {
-            /* get type and name of target object */
-            l_ret = H5tools_get_symlink_info(file1_id, obj1name, &linkinfo1, TRUE);
-            /* dangling link */
-            if (l_ret == 0)
-            {
-                if (options->no_dangle_links)
-                {
-                    /* gangling link is error */
-                    if(options->m_verbose)
-                        parallel_print("Warning: <%s> is a dangling link.\n", obj1name);
-                    options->err_stat = 1;
-                    goto out;
-                }
-                else
-                    is_dangle_link1 = 1;
-            }
-            /* fail */
-            else if(l_ret < 0)
-            {
-                options->err_stat = 1;
-                goto out;
-            }
-            else /* OK */
-            {
-                /* for external link */
-                if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL)
-                    obj1type = linkinfo1.trg_type;
-            }
-        }
-
-        /*--------------------------------
-         * if object2 is external link  */
-        if (obj2type == H5TRAV_TYPE_UDLINK)
-        {
-            /* get type and name of target object */
-            l_ret = H5tools_get_symlink_info(file2_id, obj2name, &linkinfo2, TRUE);
-            /* dangling link */
-            if (l_ret == 0)
-            {
-                if (options->no_dangle_links)
-                {
-                    /* gangling link is error */
-                    if(options->m_verbose)
-                        parallel_print("Warning: <%s> is a dangling link.\n", obj2name);
-                    options->err_stat = 1;
-                    goto out;
-                }
-                else
-                    is_dangle_link2 = 1;
-            }
-            /* fail */
-            else if(l_ret < 0)
-            {
-                options->err_stat = 1;
-                goto out;
-            }
-            else /* OK */
-            {
-                /* for external link */
-                if(linkinfo2.linfo.type == H5L_TYPE_EXTERNAL)
-                    obj2type = linkinfo2.trg_type;
-            }
-        }
-        /* found dangling link */
-        if (is_dangle_link1 || is_dangle_link2)
-            goto out;
-    } /* end of follow_links */
-    
-    /* objects are not the same type */
-    if (obj1type != obj2type)
-    {
-        if (options->m_verbose||options->m_list_not_cmp)
-        {
-            parallel_print("<%s> is of type %s and <%s> is of type %s\n",
-            obj1name, get_type(obj1type), 
-            obj2name, get_type(obj2type));
-        }
-        options->not_cmp=1;
-        goto out;
-    }
-
-    /* Set argdata to pass other args into diff() */
-    argdata.type = obj1type;
-    argdata.is_same_trgobj = 0;
-
-    nfound = diff(file1_id, obj1name,
-                  file2_id, obj2name,
-                  options, &argdata);
-
-out:
-    /*-------------------------------
-     * handle dangling link(s) */
-    /* both obj1 and obj2 are dangling links */
-    if(is_dangle_link1 && is_dangle_link2)
-    {
-        if(print_objname(options, nfound))
-        {
-            do_print_objname("dangling link", obj1name, obj2name, options);
-            print_found(nfound);
-        }
-    }
-    /* obj1 is dangling link */
-    else if (is_dangle_link1)
-    {
-        if(options->m_verbose)
-           parallel_print("obj1 <%s> is a dangling link.\n", obj1name);
-        nfound++;
-        if(print_objname(options, nfound))
-            print_found(nfound);
-    }
-    /* obj2 is dangling link */
-    else if (is_dangle_link2)
-    {
-        if(options->m_verbose)
-            parallel_print("obj2 <%s> is a dangling link.\n", obj2name);
-        nfound++;
-        if(print_objname(options, nfound))
-            print_found(nfound);
-    }
-
-    /* free link info buffer */
-    if (linkinfo1.trg_path)
-        HDfree(linkinfo1.trg_path);
-    if (linkinfo2.trg_path)
-        HDfree(linkinfo2.trg_path);
-
-    return nfound;
-}
-
-
-/*-------------------------------------------------------------------------
- * Function: diff
- *
- * Purpose: switch between types and choose the diff function
- * TYPE is either
- *  H5G_GROUP         Object is a group
- *  H5G_DATASET       Object is a dataset
- *  H5G_TYPE          Object is a named data type
- *  H5G_LINK          Object is a symbolic link
- *
- * Return: Number of differences found
- *
- * Programmer: Jonathan Kim
- *  - add following links feature (Feb 11,2010)
- *  - Change to use diff_args_t to pass the rest of args.
- *    Passing through it instead of individual args provides smoother
- *    extensibility through its members along with MPI code update for ph5diff
- *    as it doesn't require interface change.
- *    (May 6,2011)
- *
- * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu
- * Date: May 9, 2003
- *-------------------------------------------------------------------------
- */
-
-hsize_t diff(hid_t file1_id,
-              const char *path1,
-              hid_t file2_id,
-              const char *path2,
-              diff_opt_t * options,
-              diff_args_t *argdata)
-{
-    hid_t   type1_id = (-1);
-    hid_t   type2_id = (-1);
-    hid_t   grp1_id = (-1);
-    hid_t   grp2_id = (-1);
-    int     ret;
-    int     is_dangle_link1 = 0;
-    int     is_dangle_link2 = 0;
-    int     is_hard_link = 0;
-    hsize_t nfound = 0;
-
-
-    /* to get link info */
-    h5tool_link_info_t linkinfo1;
-    h5tool_link_info_t linkinfo2;
-
-    /*init link info struct */
-    HDmemset(&linkinfo1,0,sizeof(h5tool_link_info_t));
-    HDmemset(&linkinfo2,0,sizeof(h5tool_link_info_t));
-
-    /* pass how to handle printing warnings to linkinfo option */
-    if(print_warn(options))
-        linkinfo1.opt.msg_mode = linkinfo2.opt.msg_mode = 1;
-
-    /* 
-     * Get target object info for obj1 and obj2 and check dangling links.
-     * (for hard-linked-objects, because diff() only get the obj1's type, 
-     *  so obj2's type should be check here when diff() is called from 
-     *  diff_match() for same-named objects with dangling link only one side.)
-     */
-
-    /* target object1 - get type and name */
-    ret = H5tools_get_symlink_info(file1_id, path1, &linkinfo1, TRUE);
-    /* dangling link */
-    if (ret == 0)
-    {
-        if (options->no_dangle_links)
-        {
-            /* gangling link is error */
-            if(options->m_verbose)
-                parallel_print("Warning: <%s> is a dangling link.\n", path1);
-            goto out;
-        }
-        else
-            is_dangle_link1 = 1;
-    }
-    else if (ret < 0)
-        goto out;
-
-    /* target object2 - get type and name */
-    ret = H5tools_get_symlink_info(file2_id, path2, &linkinfo2, TRUE);
-    /* dangling link */
-    if (ret == 0)
-    {
-        if (options->no_dangle_links)
-        {
-            /* gangling link is error */
-            if(options->m_verbose)
-                parallel_print("Warning: <%s> is a dangling link.\n", path2);
-            goto out;
-        }
-        else
-            is_dangle_link2 = 1;
-    }
-    else if (ret < 0)
-        goto out;
-                
-    /* found dangling link */
-    if (is_dangle_link1 || is_dangle_link2)
-        goto out2;
-  
-    /* 
-     * If both points to the same target object, skip comparing details inside
-     * of the objects to improve performance.
-     * Always check for the hard links, otherwise if follow symlink option is 
-     * specified.
-     *
-     * Perform this to match the outputs as bypassing.
-     */
-     is_hard_link = (argdata->type == H5TRAV_TYPE_DATASET ||
-                     argdata->type == H5TRAV_TYPE_NAMED_DATATYPE ||
-                     argdata->type == H5TRAV_TYPE_GROUP);
-     if (options->follow_links || is_hard_link)
-     {
-        if (argdata->is_same_trgobj)
-        {
-            /* print information is only verbose option is used */
-            if(options->m_verbose || options->m_report)
-            {
-                switch(argdata->type)
-                {
-                case H5TRAV_TYPE_DATASET:
-                    do_print_objname("dataset", path1, path2, options);
-                    break; 
-                case H5TRAV_TYPE_NAMED_DATATYPE:
-                    do_print_objname("datatype", path1, path2, options);
-                    break;
-                case H5TRAV_TYPE_GROUP:
-                    do_print_objname("group", path1, path2, options);
-                    break;
-                case H5TRAV_TYPE_LINK:
-                    do_print_objname("link", path1, path2, options);
-                    break;
-                case H5TRAV_TYPE_UDLINK:
-                    if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL && linkinfo2.linfo.type == H5L_TYPE_EXTERNAL)
-                        do_print_objname("external link", path1, path2, options);
-                    else
-                        do_print_objname ("user defined link", path1, path2, options);
-                    break; 
-                default:
-                    parallel_print("Comparison not supported: <%s> and <%s> are of type %s\n",
-                        path1, path2, get_type(argdata->type) );
-                    options->not_cmp = 1;
-                    break;
-                } /* switch(type)*/
-
-                print_found(nfound);
-            } /* if(options->m_verbose || options->m_report) */
-
-            goto out2;
-        }
-    }
-
-    switch(argdata->type)
-    {
-       /*----------------------------------------------------------------------
-        * H5TRAV_TYPE_DATASET
-        *----------------------------------------------------------------------
-        */
-        case H5TRAV_TYPE_DATASET:
-			/* verbose (-v) and report (-r) mode */
-            if(options->m_verbose || options->m_report)
-            {
-                do_print_objname("dataset", path1, path2, options);
-                nfound = diff_dataset(file1_id, file2_id, path1, path2, options);
-                print_found(nfound);
-            }
-            /* quiet mode (-q), just count differences */
-            else if(options->m_quiet)
-            {
-                nfound = diff_dataset(file1_id, file2_id, path1, path2, options);
-            }
-			/* the rest (-c, none, ...) */
-            else
-            {
-                nfound = diff_dataset(file1_id, file2_id, path1, path2, options);
-                /* print info if difference found  */
-                if (nfound)
-                {
-                    do_print_objname("dataset", path1, path2, options);
-                    print_found(nfound);	
-                }
-            }
-            break;
-
-       /*----------------------------------------------------------------------
-        * H5TRAV_TYPE_NAMED_DATATYPE
-        *----------------------------------------------------------------------
-        */
-        case H5TRAV_TYPE_NAMED_DATATYPE:
-            if((type1_id = H5Topen2(file1_id, path1, H5P_DEFAULT)) < 0)
-                goto out;
-            if((type2_id = H5Topen2(file2_id, path2, H5P_DEFAULT)) < 0)
-                goto out;
-
-            if((ret = H5Tequal(type1_id, type2_id)) < 0)
-                goto out;
-
-            /* if H5Tequal is > 0 then the datatypes refer to the same datatype */
-            nfound = (ret > 0) ? 0 : 1;
-
-            if(print_objname(options,nfound))
-                do_print_objname("datatype", path1, path2, options);
-
-            /* always print the number of differences found in verbose mode */
-            if(options->m_verbose)
-                print_found(nfound);
-
-            /*-----------------------------------------------------------------
-             * compare attributes
-             * the if condition refers to cases when the dataset is a 
-             * referenced object
-             *-----------------------------------------------------------------
-             */
-            if(path1)
-                nfound += diff_attr(type1_id, type2_id, path1, path2, options);
-
-            if(H5Tclose(type1_id) < 0)
-                goto out;
-            if(H5Tclose(type2_id) < 0)
-                goto out;
-            break;
-
-       /*----------------------------------------------------------------------
-        * H5TRAV_TYPE_GROUP
-        *----------------------------------------------------------------------
-        */
-        case H5TRAV_TYPE_GROUP:
-            if(print_objname(options, nfound))
-                do_print_objname("group", path1, path2, options);
-
-            /* always print the number of differences found in verbose mode */
-            if(options->m_verbose)
-                print_found(nfound);
-
-            if((grp1_id = H5Gopen2(file1_id, path1, H5P_DEFAULT)) < 0)
-                goto out;
-            if((grp2_id = H5Gopen2(file2_id, path2, H5P_DEFAULT)) < 0)
-                goto out;
-
-            /*-----------------------------------------------------------------
-             * compare attributes
-             * the if condition refers to cases when the dataset is a 
-             * referenced object
-             *-----------------------------------------------------------------
-             */
-            if(path1)
-                nfound += diff_attr(grp1_id, grp2_id, path1, path2, options);
-
-            if(H5Gclose(grp1_id) < 0)
-                goto out;
-            if(H5Gclose(grp2_id) < 0)
-                goto out;
-            break;
-
-
-       /*----------------------------------------------------------------------
-        * H5TRAV_TYPE_LINK
-        *----------------------------------------------------------------------
-        */
-        case H5TRAV_TYPE_LINK:
-            {
-            ret = HDstrcmp(linkinfo1.trg_path, linkinfo2.trg_path);
-
-            /* if the target link name is not same then the links are "different" */
-            nfound = (ret != 0) ? 1 : 0;
-
-            if(print_objname(options, nfound))
-                do_print_objname("link", path1, path2, options);
-
-            if (options->follow_links)
-            {
-                /* objects are not the same type */
-                if (linkinfo1.trg_type != linkinfo2.trg_type)
-                {
-                    if (options->m_verbose||options->m_list_not_cmp)
-                    {
-                        parallel_print("<%s> is of type %s and <%s> is of type %s\n", path1, get_type(linkinfo1.trg_type), path2, get_type(linkinfo2.trg_type));
-                    }
-                    options->not_cmp=1;
-                    goto out;
-                }
-
-                /* Renew type in argdata to pass into diff(). 
-                 * For recursive call, argdata.is_same_trgobj is already
-                 * set from initial call, so don't reset here */
-                argdata->type = linkinfo1.trg_type;
-
-                /* call self to compare target object */
-                nfound += diff(file1_id, path1, 
-                               file2_id, path2, 
-                               options, argdata);
-            }
-
-            /* always print the number of differences found in verbose mode */
-            if(options->m_verbose)
-                print_found(nfound);
-
-            }
-            break;
-
-       /*----------------------------------------------------------------------
-        * H5TRAV_TYPE_UDLINK
-        *----------------------------------------------------------------------
-        */
-        case H5TRAV_TYPE_UDLINK:
-            {
-            /* Only external links will have a query function registered */
-            if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL && linkinfo2.linfo.type == H5L_TYPE_EXTERNAL) 
-            {
-                /* If the buffers are the same size, compare them */
-                if(linkinfo1.linfo.u.val_size == linkinfo2.linfo.u.val_size) 
-                {
-                    ret = HDmemcmp(linkinfo1.trg_path, linkinfo2.trg_path, linkinfo1.linfo.u.val_size);
-                }
-                else
-                    ret = 1;
-
-                /* if "linkinfo1.trg_path" != "linkinfo2.trg_path" then the links
-                 * are "different" extlinkinfo#.path is combination string of 
-                 * file_name and obj_name
-                 */
-                nfound = (ret != 0) ? 1 : 0;
-
-                if(print_objname(options, nfound))
-                    do_print_objname("external link", path1, path2, options);
-
-                if (options->follow_links)
-                {
-                    /* objects are not the same type */
-                    if (linkinfo1.trg_type != linkinfo2.trg_type)
-                    {
-                        if (options->m_verbose||options->m_list_not_cmp)
-                        {
-                            parallel_print("<%s> is of type %s and <%s> is of type %s\n", path1, get_type(linkinfo1.trg_type), path2, get_type(linkinfo2.trg_type));
-                        }
-                        options->not_cmp=1;
-                        goto out;
-                    }
-
-                    /* Renew type in argdata to pass into diff(). 
-                     * For recursive call, argdata.is_same_trgobj is already
-                     * set from initial call, so don't reset here */
-                    argdata->type = linkinfo1.trg_type;
-
-                    nfound = diff(file1_id, path1,  
-                                  file2_id, path2, 
-                                  options, argdata);
-                } 
-            } /* end if */
-            else 
-            {
-                /* If one or both of these links isn't an external link, we can only
-                 * compare information from H5Lget_info since we don't have a query
-                 * function registered for them.
-                 *
-                 * If the link classes or the buffer length are not the
-                 * same, the links are "different"
-                 */
-                if((linkinfo1.linfo.type != linkinfo2.linfo.type) || 
-                   (linkinfo1.linfo.u.val_size != linkinfo2.linfo.u.val_size))
-                    nfound = 1;
-                else
-                    nfound = 0;
-
-                if (print_objname (options, nfound))
-                    do_print_objname ("user defined link", path1, path2, options);
-            } /* end else */
-
-            /* always print the number of differences found in verbose mode */
-            if(options->m_verbose)
-                print_found(nfound);
-            }
-            break;
-
-        default:
-            if(options->m_verbose)
-                parallel_print("Comparison not supported: <%s> and <%s> are of type %s\n",
-                    path1, path2, get_type(argdata->type) );
-            options->not_cmp = 1;
-            break;
-     }
-
-    /* free link info buffer */
-    if (linkinfo1.trg_path)
-        HDfree(linkinfo1.trg_path);
-    if (linkinfo2.trg_path)
-        HDfree(linkinfo2.trg_path);
-
-    return nfound;
-
-out:
-    options->err_stat = 1;
-
-out2:
-    /*-----------------------------------
-     * handle dangling link(s) 
-     */
-    /* both path1 and path2 are dangling links */
-    if(is_dangle_link1 && is_dangle_link2)
-    {
-        if(print_objname(options, nfound))
-        {
-            do_print_objname("dangling link", path1, path2, options);
-            print_found(nfound);
-        }
-    }
-    /* path1 is dangling link */
-    else if (is_dangle_link1)
-    {
-        if(options->m_verbose)
-           parallel_print("obj1 <%s> is a dangling link.\n", path1);
-        nfound++;
-        if(print_objname(options, nfound))
-            print_found(nfound);
-    }
-    /* path2 is dangling link */
-    else if (is_dangle_link2)
-    {
-        if(options->m_verbose)
-            parallel_print("obj2 <%s> is a dangling link.\n", path2);
-        nfound++;
-        if(print_objname(options, nfound))
-            print_found(nfound);
-    }
-
-    /* free link info buffer */
-    if (linkinfo1.trg_path)
-        HDfree(linkinfo1.trg_path);
-    if (linkinfo2.trg_path)
-        HDfree(linkinfo2.trg_path);
-
-    /* close */
-    /* disable error reporting */
-    H5E_BEGIN_TRY {
-        H5Tclose(type1_id);
-        H5Tclose(type2_id);
-        H5Gclose(grp1_id);
-        H5Tclose(grp2_id);
-        /* enable error reporting */
-    } H5E_END_TRY;
-
-    return nfound;
-}
-
diff --git a/exsrc/src/o.txt b/exsrc/src/o.txt
deleted file mode 100644
index 47eb655e1..000000000
--- a/exsrc/src/o.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-o
-yes
-
diff --git a/exsrc/src/pbmplus/Makefile.in b/exsrc/src/pbmplus/Makefile.in
deleted file mode 100644
index da35176c8..000000000
--- a/exsrc/src/pbmplus/Makefile.in
+++ /dev/null
@@ -1,134 +0,0 @@
-# Makefile for pbmplus tools.
-#
-# Copyright (C) 1989, 1991 by Jef Poskanzer.
-#
-# Permission to use, copy, modify, and distribute this software and its
-# documentation for any purpose and without fee is hereby granted, provided
-# that the above copyright notice appear in all copies and that both that
-# copyright notice and this permission notice appear in supporting
-# documentation.  This software is provided "as is" without express or
-# implied warranty.
-
-# CONFIGURE: gcc makes things go faster on some machines, but not everyone
-# has it.  Warning: do not use gcc's -finline-functions or -fstrength-reduce
-# flags, they can produce incorrect code.  (This is with gcc versions 1.35,
-# 1.36, and 1.37, later versions may fix these bugs.)  Also, on some systems
-# gcc can't compile pnmconvol - dunno why.  And on some systems you can't
-# use the -ansi flag, it gives compilation errors in <math.h>.
-CC =		cc
-#CC =		gcc
-#CC =		gcc -fcombine-regs -fpcc-struct-return
-#CC =		gcc -ansi -pedantic -fcombine-regs -fpcc-struct-return
-
-# CONFIGURE: cc flags go here.
-CFLAGS =	-O -w
-#CFLAGS =	-g
-#CFLAGS =	-g -O
-
-# CONFIGURE: ld flags go here.  Eunice users may want to use -noshare so that
-# the binaries can run standalone.
-LDFLAGS =	-s
-#LDFLAGS =	
-#LDFLAGS =	-noshare
-
-# CONFIGURE: If you have an X11-style rgb color names file, define its
-# path here.  This is used by PPM to parse color names into rgb values.
-# If you don't have such a file, comment this out and use the alternative
-# hex and decimal forms to specify colors (see ppm/pgmtoppm.1 for details).
-RGBDEF =	-DRGB_DB=\"/usr/lib/X11/rgb\"
-
-# CONFIGURE: PBMPLUS's support for TIFF files depends on the library from
-# Sam Leffler's TIFF Software package - see the OTHER.SYSTEMS file for a
-# full description and access information.  To configure PBMPLUS to use the
-# library: first, if necessary, fetch the TIFF Software, unpack it in a
-# scratch directory somewhere, and move the libtiff subdirectory right here
-# into the PBMPLUS top-level directory.  Configure and "make" in the
-# libtiff directory.  Yes, you do have to do the TIFF make by hand, the
-# general PBMPLUS make will *not* make libtiff.  Finally, uncomment the
-# following five definitions.
-#
-# Libtiff is pretty good about portability, but there are some machines
-# it has problems on.  If you run into problems, you may wish to contact
-# Sam directly, at the address listed in the OTHER.SYSTEMS file.
-#
-# By the way, you must have at least version 2.4 of libtiff.  Earlier
-# versions will not work.
-TIFFDEF =	-DLIBTIFF
-TIFFINC =	-I@EXTERNALS@/include
-TIFFLIB =	@EXTERNALS@/lib/libtiff.a
-#TIFFBINARIES =	tifftopnm pnmtotiff
-#TIFFOBJECTS =	tifftopnm.o pnmtotiff.o
-
-# CONFIGURE: Define the directory that you want the binaries copied to.
-# If you need scripts and binaries to be in different directories, you
-# can set that up too.
-INSTALLBINARIES =	@EXTERNALS@/bin
-INSTALLSCRIPTS =	$(INSTALLBINARIES)
-
-# CONFIGURE: Define the directories that you want the manual sources copied to,
-# plus the suffix you want them to have.
-INSTALLMANUALS1 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS1 =	n
-INSTALLMANUALS3 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS3 =	n
-INSTALLMANUALS5 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS5 =	n
-
-# CONFIGURE: Normally the man pages are installed using "cp".  By changing
-# this define you can use something else, for example a script that calls
-# compress or pack.
-MANCP =			cp
-
-# CONFIGURE: Normally the Makefiles build and install separate binaries for
-# each program.  However, on some systems (especially those without shared
-# libraries) this can mean a lot of space.  In this case you might try
-# building a "merge" instead.  The idea here is to link all the binaries
-# together into one huge executable, with a tiny dispatch program as the
-# main.  Then the merged binary is installed with file-system links for
-# each program it includes.  The dispatch routine can tell which program
-# to run by looking at argv[0].  On a Sun3 under SunOS 3.5 the space for
-# executables went from 2.9 meg to .36 meg.
-#
-# Note that if you make a "merge", the executables don't get created
-# until you do the install.
-all:		binaries
-install:	install.bin install.man
-#all:		merge
-#install:	install.merge install.man
-
-# End of configurable definitions.
-
-SHELL =		/bin/sh
-MAKE =		make
-SUBDIRS =	pbm pgm ppm pnm
-
-binaries:
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' binaries ); \
-	done
-
-merge:
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' merge ); \
-	done
-
-install.bin:
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' 'INSTALLBINARIES=$(INSTALLBINARIES)' 'INSTALLSCRIPTS=$(INSTALLSCRIPTS)' install.bin ); \
-	done
-
-install.merge:
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' 'INSTALLBINARIES=$(INSTALLBINARIES)' 'INSTALLSCRIPTS=$(INSTALLSCRIPTS)' install.merge ); \
-	done
-
-install.man:
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'TIFFBINARIES=$(TIFFBINARIES)' 'INSTALLMANUALS1=$(INSTALLMANUALS1)' 'SUFFIXMANUALS1=$(SUFFIXMANUALS1)' 'INSTALLMANUALS3=$(INSTALLMANUALS3)' 'SUFFIXMANUALS3=$(SUFFIXMANUALS3)' 'INSTALLMANUALS5=$(INSTALLMANUALS5)' 'SUFFIXMANUALS5=$(SUFFIXMANUALS5)' 'MANCP=$(MANCP)' install.man ); \
-	done
-
-clean:
-	-rm -f *.shar *.shar? art.*
-	for i in $(SUBDIRS) ; do \
-	    ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) clean ); \
-	done
diff --git a/exsrc/src/pbmplus/libpbm1.c b/exsrc/src/pbmplus/libpbm1.c
deleted file mode 100644
index 00f3e31b2..000000000
--- a/exsrc/src/pbmplus/libpbm1.c
+++ /dev/null
@@ -1,674 +0,0 @@
-/* libpbm1.c - pbm utility library part 1
-**
-** Copyright (C) 1988 by Jef Poskanzer.
-**
-** Permission to use, copy, modify, and distribute this software and its
-** documentation for any purpose and without fee is hereby granted, provided
-** that the above copyright notice appear in all copies and that both that
-** copyright notice and this permission notice appear in supporting
-** documentation.  This software is provided "as is" without express or
-** implied warranty.
-*/
-
-#include "pbm.h"
-#include "version.h"
-#include "libpbm.h"
-#if __STDC__
-#include <stdarg.h>
-#else /*__STDC__*/
-#include <varargs.h>
-#endif /*__STDC__*/
-
-
-/* Forward routines. */
-
-#if defined(NEED_VFPRINTF1) || defined(NEED_VFPRINTF2)
-int vfprintf ARGS(( FILE* stream, char* format, va_list args ));
-#endif /*NEED_VFPRINTF*/
-
-
-/* Variable-sized arrays. */
-
-char*
-pm_allocrow( cols, size )
-    int cols;
-    int size;
-    {
-    register char* itrow;
-
-    itrow = (char*) malloc( cols * size );
-    if ( itrow == (char*) 0 )
-	pm_error( "out of memory allocating a row" );
-    return itrow;
-    }
-
-void
-pm_freerow( itrow )
-    char* itrow;
-    {
-    free( itrow );
-    }
-
-
-char**
-pm_allocarray( cols, rows, size )
-    int cols, rows;
-    int size;
-    {
-    char** its;
-    int i;
-
-    its = (char**) malloc( rows * sizeof(char*) );
-    if ( its == (char**) 0 )
-	pm_error( "out of memory allocating an array" );
-    its[0] = (char*) malloc( rows * cols * size );
-    if ( its[0] == (char*) 0 )
-	pm_error( "out of memory allocating an array" );
-    for ( i = 1; i < rows; ++i )
-	its[i] = &(its[0][i * cols * size]);
-    return its;
-    }
-
-void
-pm_freearray( its, rows )
-    char** its;
-    int rows;
-    {
-    free( its[0] );
-    free( its );
-    }
-
-
-/* Case-insensitive keyword matcher. */
-
-int
-pm_keymatch( str, keyword, minchars )
-    char* str;
-    char* keyword;
-    int minchars;
-    {
-    register int len;
-
-    len = strlen( str );
-    if ( len < minchars )
-	return 0;
-    while ( --len >= 0 )
-	{
-	register char c1, c2;
-
-	c1 = *str++;
-	c2 = *keyword++;
-	if ( c2 == '\0' )
-	    return 0;
-	if ( isupper( c1 ) )
-	    c1 = tolower( c1 );
-	if ( isupper( c2 ) )
-	    c1 = tolower( c2 );
-	if ( c1 != c2 )
-	    return 0;
-	}
-    return 1;
-    }
-
-
-/* Log base two hacks. */
-
-int
-pm_maxvaltobits( maxval )
-    int maxval;
-    {
-    if ( maxval <= 1 )
-	return 1;
-    else if ( maxval <= 3 )
-	return 2;
-    else if ( maxval <= 7 )
-	return 3;
-    else if ( maxval <= 15 )
-	return 4;
-    else if ( maxval <= 31 )
-	return 5;
-    else if ( maxval <= 63 )
-	return 6;
-    else if ( maxval <= 127 )
-	return 7;
-    else if ( maxval <= 255 )
-	return 8;
-    else if ( maxval <= 511 )
-	return 9;
-    else if ( maxval <= 1023 )
-	return 10;
-    else if ( maxval <= 2047 )
-	return 11;
-    else if ( maxval <= 4095 )
-	return 12;
-    else if ( maxval <= 8191 )
-	return 13;
-    else if ( maxval <= 16383 )
-	return 14;
-    else if ( maxval <= 32767 )
-	return 15;
-    else if ( (long) maxval <= 65535L )
-	return 16;
-    else
-	pm_error( "maxval of %d is too large!", maxval );
-    }
-
-int
-pm_bitstomaxval( bits )
-    int bits;
-    {
-    return ( 1 << bits ) - 1;
-    }
-
-
-/* Initialization. */
-
-static char* progname;
-static int showmessages;
-
-void
-pm_init( argcP, argv )
-    int* argcP;
-    char* argv[];
-    {
-    int argn, i;
-
-    /* Extract program name. */
-    progname = rindex( argv[0], '/');
-    if ( progname == NULL )
-	progname = argv[0];
-    else
-	++progname;
-
-    /* Check for any global args. */
-    showmessages = 1;
-    for ( argn = 1; argn < *argcP; ++argn )
-	{
-	if ( pm_keymatch( argv[argn], "-quiet", 6 ) )
-	    {
-	    showmessages = 0;
-	    }
-	else if ( pm_keymatch( argv[argn], "-version", 7 ) )
-	    {
-	    pm_message( "Version of %s", PBMPLUS_VERSION );
-#ifdef BSD
-	    pm_message( "BSD defined" );
-#endif /*BSD*/
-#ifdef SYSV
-	    pm_message( "SYSV defined" );
-#endif /*SYSV*/
-#ifdef MSDOS
-	    pm_message( "MSDOS defined" );
-#endif /*MSDOS*/
-#ifdef PBMPLUS_RAWBITS
-	    pm_message( "PBMPLUS_RAWBITS defined" );
-#endif /*PBMPLUS_RAWBITS*/
-#ifdef PBMPLUS_BROKENPUTC1
-	    pm_message( "PBMPLUS_BROKENPUTC1 defined" );
-#endif /*PBMPLUS_BROKENPUTC1*/
-#ifdef PBMPLUS_BROKENPUTC2
-	    pm_message( "PBMPLUS_BROKENPUTC2 defined" );
-#endif /*PBMPLUS_BROKENPUTC2*/
-#ifdef PGM_BIGGRAYS
-	    pm_message( "PGM_BIGGRAYS defined" );
-#endif /*PGM_BIGGRAYS*/
-#ifdef PPM_PACKCOLORS
-	    pm_message( "PPM_PACKCOLORS defined" );
-#endif /*PPM_PACKCOLORS*/
-#ifdef DEBUG
-	    pm_message( "DEBUG defined" );
-#endif /*DEBUG*/
-#ifdef NEED_VFPRINTF1
-	    pm_message( "NEED_VFPRINTF1 defined" );
-#endif /*NEED_VFPRINTF1*/
-#ifdef NEED_VFPRINTF2
-	    pm_message( "NEED_VFPRINTF2 defined" );
-#endif /*NEED_VFPRINTF2*/
-#ifdef RGB_DB
-	    pm_message( "RGB_DB=\"%s\"", RGB_DB );
-#endif /*RGB_DB*/
-#ifdef LIBTIFF
-	    pm_message( "LIBTIFF defined" );
-#endif /*LIBTIFF*/
-	    exit( 0 );
-	    }
-	else
-	    continue;
-	for ( i = argn + 1; i <= *argcP; ++i )
-	    argv[i - 1] = argv[i];
-	--(*argcP);
-	}
-    }
-
-void
-pbm_init( argcP, argv )
-    int* argcP;
-    char* argv[];
-    {
-    pm_init( argcP, argv );
-    }
-
-
-/* Error handling. */
-
-void
-pm_usage( usage )
-    char* usage;
-    {
-    fprintf( stderr, "usage:  %s %s\n", progname, usage );
-    exit( 1 );
-    }
-
-void
-pm_perror( reason )
-    char* reason;
-    {
-    extern int errno;
-    char* e;
-
-    e = sys_errlist[errno];
-
-    if ( reason != 0 && reason[0] != '\0' )
-	pm_error( "%s - %s", reason, e );
-    else
-	pm_error( "%s", e );
-    }
-
-#if __STDC__
-void
-pm_message( char* format, ... )
-    {
-    va_list args;
-
-    va_start( args, format );
-#else /*__STDC__*/
-/*VARARGS1*/
-void
-pm_message( va_alist )
-    va_dcl
-    { /*}*/
-    va_list args;
-    char* format;
-
-    va_start( args );
-    format = va_arg( args, char* );
-#endif /*__STDC__*/
-
-    if ( showmessages )
-	{
-	fprintf( stderr, "%s: ", progname );
-	(void) vfprintf( stderr, format, args );
-	fputc( '\n', stderr );
-	}
-    va_end( args );
-    }
-
-#if __STDC__
-void
-pm_error( char* format, ... )
-    {
-    va_list args;
-
-    va_start( args, format );
-#else /*__STDC__*/
-/*VARARGS1*/
-void
-pm_error( va_alist )
-    va_dcl
-    { /*}*/
-    va_list args;
-    char* format;
-
-    va_start( args );
-    format = va_arg( args, char* );
-#endif /*__STDC__*/
-
-    fprintf( stderr, "%s: ", progname );
-    (void) vfprintf( stderr, format, args );
-    fputc( '\n', stderr );
-    va_end( args );
-    exit( 1 );
-    }
-
-#ifdef NEED_VFPRINTF1
-
-/* Micro-vfprintf, for systems that don't have vfprintf but do have _doprnt.
-*/
-
-int
-vfprintf( stream, format, args )
-    FILE* stream;
-    char* format;
-    va_list args;
-    {
-    return _doprnt( format, args, stream );
-    }
-#endif /*NEED_VFPRINTF1*/
-
-#ifdef NEED_VFPRINTF2
-
-/* Portable mini-vfprintf, for systems that don't have either vfprintf or
-** _doprnt.  This depends only on fprintf.  If you don't have fprintf,
-** you might consider getting a new stdio library.
-*/
-
-int
-vfprintf( stream, format, args )
-    FILE* stream;
-    char* format;
-    va_list args;
-    {
-    int n;
-    char* ep;
-    char fchar;
-    char tformat[512];
-    int do_long;
-    int i;
-    long l;
-    unsigned u;
-    unsigned long ul;
-    char* s;
-    double d;
-
-    n = 0;
-    while ( *format != '\0' )
-	{
-	if ( *format != '%' )
-	    { /* Not special, just write out the char. */
-	    (void) putc( *format, stream );
-	    ++n;
-	    ++format;
-	    }
-	else
-	    {
-	    do_long = 0;
-	    ep = format + 1;
-
-	    /* Skip over all the field width and precision junk. */
-	    if ( *ep == '-' )
-		++ep;
-	    if ( *ep == '0' )
-		++ep;
-	    while ( isdigit( *ep ) )
-		++ep;
-	    if ( *ep == '.' )
-		{
-		++ep;
-		while ( isdigit( *ep ) )
-		    ++ep;
-		}
-	    if ( *ep == '#' )
-		++ep;
-	    if ( *ep == 'l' )
-		{
-		do_long = 1;
-		++ep;
-		}
-
-	    /* Here's the field type.  Extract it, and copy this format
-	    ** specifier to a temp string so we can add an end-of-string.
-	    */
-	    fchar = *ep;
-	    (void) strncpy( tformat, format, ep - format + 1 );
-	    tformat[ep - format + 1] = '\0';
-
-	    /* Now do a one-argument fprintf with the format string we have
-	    ** isolated.
-	    */
-	    switch ( fchar )
-		{
-		case 'd':
-		if ( do_long )
-		    {
-		    l = va_arg( args, long );
-		    n += fprintf( stream, tformat, l );
-		    }
-		else
-		    {
-		    i = va_arg( args, int );
-		    n += fprintf( stream, tformat, i );
-		    }
-		break;
-
-	        case 'o':
-	        case 'x':
-	        case 'X':
-	        case 'u':
-		if ( do_long )
-		    {
-		    ul = va_arg( args, unsigned long );
-		    n += fprintf( stream, tformat, ul );
-		    }
-		else
-		    {
-		    u = va_arg( args, unsigned );
-		    n += fprintf( stream, tformat, u );
-		    }
-		break;
-
-	        case 'c':
-		i = (char) va_arg( args, int );
-		n += fprintf( stream, tformat, i );
-		break;
-
-	        case 's':
-		s = va_arg( args, char* );
-		n += fprintf( stream, tformat, s );
-		break;
-
-	        case 'e':
-	        case 'E':
-	        case 'f':
-	        case 'g':
-	        case 'G':
-		d = va_arg( args, double );
-		n += fprintf( stream, tformat, d );
-		break;
-
-	        case '%':
-		(void) putc( '%', stream );
-		++n;
-		break;
-
-		default:
-		return -1;
-		}
-
-	    /* Resume formatting on the next character. */
-	    format = ep + 1;
-	    }
-	}
-    return nc;
-    }
-#endif /*NEED_VFPRINTF2*/
-
-
-/* File open/close that handles "-" as stdin and checks errors. */
-
-FILE*
-pm_openr( name )
-    char* name;
-    {
-    FILE* f;
-
-    if ( strcmp( name, "-" ) == 0 )
-	f = stdin;
-    else
-	{
-#ifdef MSDOS
-	f = fopen( name, "rb" );
-#else /*MSDOS*/
-	f = fopen( name, "r" );
-#endif /*MSDOS*/
-	if ( f == NULL )
-	    {
-	    pm_perror( name );
-	    exit( 1 );
-	    }
-	}
-    return f;
-    }
-
-FILE*
-pm_openw( name )
-    char* name;
-    {
-    FILE* f;
-
-#ifdef MSDOS
-    f = fopen( name, "wb" );
-#else /*MSDOS*/
-    f = fopen( name, "w" );
-#endif /*MSDOS*/
-    if ( f == NULL )
-	{
-	pm_perror( name );
-	exit( 1 );
-	}
-    return f;
-    }
-
-void
-pm_close( f )
-    FILE* f;
-    {
-    fflush( f );
-    if ( ferror( f ) )
-	pm_message( "a file read or write error occurred at some point" );
-    if ( f != stdin )
-	if ( fclose( f ) != 0 )
-	    pm_perror( "fclose" );
-    }
-
-/* Endian I/O.
-*/
-
-int
-pm_readbigshort( in, sP )
-    FILE* in;
-    short* sP;
-    {
-    int c;
-
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *sP = ( c & 0xff ) << 8;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *sP |= c & 0xff;
-    return 0;
-    }
-
-#if __STDC__
-int
-pm_writebigshort( FILE* out, short s )
-#else /*__STDC__*/
-int
-pm_writebigshort( out, s )
-    FILE* out;
-    short s;
-#endif /*__STDC__*/
-    {
-    (void) putc( ( s >> 8 ) & 0xff, out );
-    (void) putc( s & 0xff, out );
-    return 0;
-    }
-
-int
-pm_readbiglong( in, lP )
-    FILE* in;
-    long* lP;
-    {
-    int c;
-
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP = ( c & 0xff ) << 24;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= ( c & 0xff ) << 16;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= ( c & 0xff ) << 8;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= c & 0xff;
-    return 0;
-    }
-
-int
-pm_writebiglong( out, l )
-    FILE* out;
-    long l;
-    {
-    (void) putc( ( l >> 24 ) & 0xff, out );
-    (void) putc( ( l >> 16 ) & 0xff, out );
-    (void) putc( ( l >> 8 ) & 0xff, out );
-    (void) putc( l & 0xff, out );
-    return 0;
-    }
-
-int
-pm_readlittleshort( in, sP )
-    FILE* in;
-    short* sP;
-    {
-    int c;
-
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *sP = c & 0xff;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *sP |= ( c & 0xff ) << 8;
-    return 0;
-    }
-
-#if __STDC__
-int
-pm_writelittleshort( FILE* out, short s )
-#else /*__STDC__*/
-int
-pm_writelittleshort( out, s )
-    FILE* out;
-    short s;
-#endif /*__STDC__*/
-    {
-    (void) putc( s & 0xff, out );
-    (void) putc( ( s >> 8 ) & 0xff, out );
-    return 0;
-    }
-
-int
-pm_readlittlelong( in, lP )
-    FILE* in;
-    long* lP;
-    {
-    int c;
-
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP = c & 0xff;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= ( c & 0xff ) << 8;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= ( c & 0xff ) << 16;
-    if ( (c = getc( in )) == EOF )
-	return -1;
-    *lP |= ( c & 0xff ) << 24;
-    return 0;
-    }
-
-int
-pm_writelittlelong( out, l )
-    FILE* out;
-    long l;
-    {
-    (void) putc( l & 0xff, out );
-    (void) putc( ( l >> 8 ) & 0xff, out );
-    (void) putc( ( l >> 16 ) & 0xff, out );
-    (void) putc( ( l >> 24 ) & 0xff, out );
-    return 0;
-    }
diff --git a/exsrc/src/pbmplus/pbmplus.h b/exsrc/src/pbmplus/pbmplus.h
deleted file mode 100644
index 7f868c83c..000000000
--- a/exsrc/src/pbmplus/pbmplus.h
+++ /dev/null
@@ -1,192 +0,0 @@
-/* pbmplus.h - header file for PBM, PGM, PPM, and PNM
-**
-** Copyright (C) 1988, 1989, 1991 by Jef Poskanzer.
-**
-** Permission to use, copy, modify, and distribute this software and its
-** documentation for any purpose and without fee is hereby granted, provided
-** that the above copyright notice appear in all copies and that both that
-** copyright notice and this permission notice appear in supporting
-** documentation.  This software is provided "as is" without express or
-** implied warranty.
-*/
-
-#ifndef _PBMPLUS_H_
-#define _PBMPLUS_H_
-
-#include <sys/types.h>
-#include <ctype.h>
-#include <stdio.h>
-
-#if defined(USG) || defined(SVR4)
-#define SYSV
-#endif
-#if ! ( defined(BSD) || defined(SYSV) || defined(MSDOS) )
-/* CONFIGURE: If your system is >= 4.2BSD, set the BSD option; if you're a
-** System V site, set the SYSV option; and if you're IBM-compatible, set
-** MSDOS.  If your compiler is ANSI C, you're probably better off setting
-** SYSV - all it affects is string handling.
-*/
-#define BSD
-/* #define SYSV */
-/* #define MSDOS */
-#endif
-
-/* CONFIGURE: If you want to enable writing "raw" files, set this option.
-** "Raw" files are smaller, and much faster to read and write, but you
-** must have a filesystem that allows all 256 ASCII characters to be read
-** and written.  You will no longer be able to mail P?M files without 
-** using uuencode or the equivalent, or running the files through pnmnoraw.
-** Note that reading "raw" files works whether writing is enabled or not.
-*/
-#define PBMPLUS_RAWBITS
-
-/* CONFIGURE: PGM can store gray values as either bytes or shorts.  For most
-** applications, bytes will be big enough, and the memory savings can be
-** substantial.  However, if you need more than 8 bits of grayscale resolution,
-** then define this symbol.
-*/
-/* #define PGM_BIGGRAYS */
-
-/* CONFIGURE: Normally, PPM handles a pixel as a struct of three grays.
-** If grays are stored in bytes, that's 24 bits per color pixel; if
-** grays are stored as shorts, that's 48 bits per color pixel.  PPM
-** can also be configured to pack the three grays into a single longword,
-** 10 bits each, 30 bits per pixel.
-**
-** If you have configured PGM with the PGM_BIGGRAYS option, AND you don't
-** need more than 10 bits for each color component, AND you care more about
-** memory use than speed, then this option might be a win.  Under these
-** circumstances it will make some of the programs use 1.5 times less space,
-** but all of the programs will run about 1.4 times slower.
-**
-** If you are not using PGM_BIGGRAYS, then this option is useless -- it
-** doesn't save any space, but it still slows things down.
-*/
-/* #define PPM_PACKCOLORS */
-
-/* CONFIGURE: uncomment this to enable debugging checks. */
-/* #define DEBUG */
-
-#ifdef SYSV
-
-#include <string.h>
-#define index(s,c) strchr(s,c)
-#define rindex(s,c) strrchr(s,c)
-#define srandom(s) srand(s)
-#define random rand
-#define bzero(dst,len) memset(dst,0,len)
-#define bcopy(src,dst,len) memcpy(dst,src,len)
-#define bcmp memcmp
-extern void srand();
-extern int rand();
-
-#else /*SYSV*/
-
-#include <strings.h>
-extern void srandom();
-extern long random();
-
-#endif /*SYSV*/
-
-extern int atoi();
-extern void exit();
-extern long time();
-extern int write();
-
-/* CONFIGURE: On some systems, malloc.h doesn't declare these, so we have
-** to do it.  On other systems, for example HP/UX, it declares them
-** incompatibly.  And some systems, for example Dynix, don't have a
-** malloc.h at all.  A sad situation.  If you have compilation problems
-** that point here, feel free to tweak or remove these declarations.
-*/
-#include <sys/malloc.h>
-//extern char* malloc();
-//extern char* realloc();
-//extern char* calloc();
-
-/* CONFIGURE: Some systems don't have vfprintf(), which we need for the
-** error-reporting routines.  If you compile and get a link error about
-** this routine, uncomment the first define, which gives you a vfprintf
-** that uses the theoretically non-portable but fairly common routine
-** _doprnt().  If you then get a link error about _doprnt, or
-** message-printing doesn't look like it's working, try the second
-** define instead.
-*/
-/* #define NEED_VFPRINTF1 */
-/* #define NEED_VFPRINTF2 */
-
-/* End of configurable definitions. */
-
-
-#undef max
-#define max(a,b) ((a) > (b) ? (a) : (b))
-#undef min
-#define min(a,b) ((a) < (b) ? (a) : (b))
-#undef abs
-#define abs(a) ((a) >= 0 ? (a) : -(a))
-#undef odd
-#define odd(n) ((n) & 1)
-
-
-/* Definitions to make PBMPLUS work with either ANSI C or C Classic. */
-
-#if __STDC__
-#define ARGS(alist) alist
-#else /*__STDC__*/
-#define ARGS(alist) ()
-#define const
-#endif /*__STDC__*/
-
-
-/* Initialization. */
-
-void pm_init ARGS(( int* argcP, char* argv[] ));
-
-
-/* Variable-sized arrays definitions. */
-
-char** pm_allocarray ARGS(( int cols, int rows, int size ));
-char* pm_allocrow ARGS(( int cols, int size ));
-void pm_freearray ARGS(( char** its, int rows ));
-void pm_freerow ARGS(( char* itrow ));
-
-
-/* Case-insensitive keyword matcher. */
-
-int pm_keymatch ARGS(( char* str, char* keyword, int minchars ));
-
-
-/* Log base two hacks. */
-
-int pm_maxvaltobits ARGS(( int maxval ));
-int pm_bitstomaxval ARGS(( int bits ));
-
-
-/* Error handling definitions. */
-
-void pm_message ARGS(( char*, ... ));
-void pm_error ARGS(( char*, ... ));			/* doesn't return */
-void pm_perror ARGS(( char* reason ));			/* doesn't return */
-void pm_usage ARGS(( char* usage ));			/* doesn't return */
-
-
-/* File open/close that handles "-" as stdin and checks errors. */
-
-FILE* pm_openr ARGS(( char* name ));
-FILE* pm_openw ARGS(( char* name ));
-void pm_close ARGS(( FILE* f ));
-
-
-/* Endian I/O. */
-
-int pm_readbigshort ARGS(( FILE* in, short* sP ));
-int pm_writebigshort ARGS(( FILE* out, short s ));
-int pm_readbiglong ARGS(( FILE* in, long* lP ));
-int pm_writebiglong ARGS(( FILE* out, long l ));
-int pm_readlittleshort ARGS(( FILE* in, short* sP ));
-int pm_writelittleshort ARGS(( FILE* out, short s ));
-int pm_readlittlelong ARGS(( FILE* in, long* lP ));
-int pm_writelittlelong ARGS(( FILE* out, long l ));
-
-
-#endif /*_PBMPLUS_H_*/
diff --git a/exsrc/src/pbmplus/pnm/Makefile.in b/exsrc/src/pbmplus/pnm/Makefile.in
deleted file mode 100644
index e14ff6d7b..000000000
--- a/exsrc/src/pbmplus/pnm/Makefile.in
+++ /dev/null
@@ -1,188 +0,0 @@
-# Makefile for pnm tools.
-#
-# Copyright (C) 1989, 1991 by Jef Poskanzer.
-#
-# Permission to use, copy, modify, and distribute this software and its
-# documentation for any purpose and without fee is hereby granted, provided
-# that the above copyright notice appear in all copies and that both that
-# copyright notice and this permission notice appear in supporting
-# documentation.  This software is provided "as is" without express or
-# implied warranty.
-
-# Default values, usually overridden by top-level Makefile.
-#CC =		cc
-CC =		gcc -ansi -pedantic -fcombine-regs -fpcc-struct-return
-#CFLAGS =	-O
-CFLAGS =	-g -w
-#CFLAGS =	-g -O
-TIFFDEF =	-DLIBTIFF
-TIFFINC =	-I@EXTERNALS@/include
-TIFFLIB =	@EXTERNALS@/lib/libtiff.a
-TIFFBINARIES =  tifftopnm pnmtotiff
-TIFFOBJECTS =   tifftopnm.o pnmtotiff.o
-#LDFLAGS =	-s
-LDFLAGS =	
-INSTALLBINARIES =	@EXTERNALS@/bin
-INSTALLSCRIPTS =	$(INSTALLBINARIES)
-INSTALLMANUALS1 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS1 =	1
-INSTALLMANUALS3 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS3 =	3
-INSTALLMANUALS5 =	@EXTERNALS@/man/mann
-SUFFIXMANUALS5 =	5
-MANCP =			cp
-
-PPMDIR =	../ppm
-INCLUDEPPM =	-I$(PPMDIR)
-LIBPPM =	$(PPMDIR)/libppm.a
-DEFPPM =	$(PPMDIR)/ppm.h
-DEFLIBPPM =	$(PPMDIR)/libppm.h
-
-PGMDIR =	../pgm
-INCLUDEPGM =	-I$(PGMDIR)
-LIBPGM =	$(PGMDIR)/libpgm.a
-DEFPGM =	$(PGMDIR)/pgm.h
-DEFLIBPGM =	$(PGMDIR)/libpgm.h
-
-PBMDIR =	../pbm
-INCLUDEPBM =	-I$(PBMDIR)
-LIBPBM =	$(PBMDIR)/libpbm.a
-DEFPBM =	$(PBMDIR)/pbm.h ../pbmplus.h
-DEFLIBPBM =	$(PBMDIR)/libpbm.h
-
-SHELL =		/bin/sh
-INCLUDE =	-I.. $(INCLUDEPPM) $(INCLUDEPGM) $(INCLUDEPBM)
-ALLCFLAGS =	$(CFLAGS) $(INCLUDE) $(TIFFDEF) $(TIFFINC)
-LIBPNM =	libpnm.a
-
-PORTBINARIES =	pnmarith pnmcat pnmconvol pnmcrop pnmcut \
-		pnmdepth pnmenlarge pnmfile pnmflip pnminvert \
-		pnmnoraw pnmpaste pnmscale pnmtile pnmtops \
-		pnmtorast pnmtoxwd rasttopnm xwdtopnm
-MATHBINARIES =	pnmgamma pnmrotate pnmshear
-BINARIES =      $(PORTBINARIES) $(MATHBINARIES) $(TIFFBINARIES)
-SCRIPTS =	anytopnm pnmindex pnmmargin pnmsmooth
-
-PORTOBJECTS =	pnmarith.o pnmcat.o pnmconvol.o pnmcrop.o pnmcut.o \
-		pnmdepth.o pnmenlarge.o pnmfile.o pnmflip.o pnminvert.o \
-		pnmnoraw.o pnmpaste.o pnmscale.o pnmtile.o pnmtops.o \
-		pnmtorast.o pnmtoxwd.o rasttopnm.o xwdtopnm.o \
-		pnmgamma.o pnmrotate.o pnmshear.o
-OBJECTS =	$(PORTOBJECTS) $(TIFFOBJECTS)
-
-MANUALS1 =	$(BINARIES) $(SCRIPTS)
-MANUALS3 =	libpnm
-MANUALS5 =	pnm
-
-
-#all:		binaries
-all:		merge
-#install:	install.bin
-install:	install.merge
-
-
-binaries:	$(BINARIES)
-
-install.bin:	binaries $(SCRIPTS)
-	cd $(INSTALLBINARIES) ; rm -f $(BINARIES)
-	cp $(BINARIES) $(INSTALLBINARIES)
-	cd $(INSTALLSCRIPTS) ; rm -f $(SCRIPTS)
-	cp $(SCRIPTS) $(INSTALLSCRIPTS)
-	cd $(INSTALLSCRIPTS) ; chmod +x $(SCRIPTS)
-
-
-merge:		pnmmerge
-pnmmerge:	pnmmerge.c $(OBJECTS) $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM)
-	$(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c $(OBJECTS) -lm $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB)
-
-install.merge:	install.pnmmerge $(SCRIPTS)
-install.pnmmerge:	pnmmerge
-	cd $(INSTALLBINARIES) ; rm -f $(BINARIES)
-	cp pnmmerge $(INSTALLBINARIES)
-	cd $(INSTALLBINARIES) ; for i in $(BINARIES) ; do ln pnmmerge $$i ; done
-	rm $(INSTALLBINARIES)/pnmmerge
-	cd $(INSTALLSCRIPTS) ; rm -f $(SCRIPTS)
-	cp $(SCRIPTS) $(INSTALLSCRIPTS)
-	cd $(INSTALLSCRIPTS) ; chmod +x $(SCRIPTS)
-
-
-install.man:
-	for i in $(MANUALS1) ; do \
-	    rm -f $(INSTALLMANUALS1)/$$i.$(SUFFIXMANUALS1) ; \
-	    $(MANCP) $$i.1 $(INSTALLMANUALS1)/$$i.$(SUFFIXMANUALS1) ; \
-	done
-	for i in $(MANUALS3) ; do \
-	    rm -f $(INSTALLMANUALS3)/$$i.$(SUFFIXMANUALS3) ; \
-	    $(MANCP) $$i.3 $(INSTALLMANUALS3)/$$i.$(SUFFIXMANUALS3) ; \
-	done
-	for i in $(MANUALS5) ; do \
-	    rm -f $(INSTALLMANUALS5)/$$i.$(SUFFIXMANUALS5) ; \
-	    $(MANCP) $$i.5 $(INSTALLMANUALS5)/$$i.$(SUFFIXMANUALS5) ; \
-	done
-
-
-# Rules for plain programs.
-$(PORTBINARIES) $(TIFFBINARIES):	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB)
-	$(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB)
-
-# Rule for math-dependent programs.
-$(MATHBINARIES):        pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) $(LIBPNM) \
-			$(LIBPPM) $(LIBPGM) $(LIBPBM)
-	$(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c -lm $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM)
-
-# Rule for objects.
-$(OBJECTS):	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM)
-	$(CC) $(ALLCFLAGS) "-Dmain=$*_main" -c $*.c
-
-# And libraries.
-$(LIBPBM):
-	cd $(PBMDIR) ; make lib
-$(LIBPGM) FOO:
-	cd $(PGMDIR) ; make lib
-$(LIBPPM) BAR:
-	cd $(PPMDIR) ; make lib
-lib:		$(LIBPNM)
-$(LIBPNM):	libpnm1.o libpnm2.o libpnm3.o libpnm4.o
-	-rm -f $(LIBPNM)
-	ar rc $(LIBPNM) libpnm1.o libpnm2.o libpnm3.o libpnm4.o
-	-ranlib $(LIBPNM)
-
-libpnm1.o:	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm1.c
-	$(CC) $(ALLCFLAGS) -c libpnm1.c
-libpnm2.o:	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm2.c $(DEFLIBPPM) \
-		$(DEFLIBPGM) $(DEFLIBPBM)
-	$(CC) $(ALLCFLAGS) -c libpnm2.c
-libpnm3.o:	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm3.c $(DEFLIBPPM) \
-		$(DEFLIBPGM) $(DEFLIBPBM)
-	$(CC) $(ALLCFLAGS) -c libpnm3.c
-libpnm4.o:	pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) rast.h libpnm4.c
-	$(CC) $(ALLCFLAGS) -c libpnm4.c
-
-# Other dependencies.
-pnmarith pnmarith.o:		pnmarith.c
-pnmcat pnmcat.o:		pnmcat.c
-pnmconvol pnmconvol.o:		pnmconvol.c
-pnmcrop pnmcrop.o:		pnmcrop.c
-pnmcut pnmcut.o:		pnmcut.c
-pnmdepth pnmdepth.o:		pnmdepth.c
-pnmenlarge pnmenlarge.o:	pnmenlarge.c
-pnmfile pnmfile.o:		pnmfile.c
-pnmflip pnmflip.o:		pnmflip.c
-pnmgamma pnmgamma.o:		pnmgamma.c
-pnminvert pnminvert.o:		pnminvert.c
-pnmnoraw pnmnoraw.o:		pnmnoraw.c
-pnmpaste pnmpaste.o:		pnmpaste.c
-pnmrotate pnmrotate.o:		pnmrotate.c
-pnmscale pnmscale.o:		pnmscale.c
-pnmshear pnmshear.o:		pnmshear.c
-pnmtile pnmtile.o:		pnmtile.c
-pnmtops pnmtops.o:		pnmtops.c
-pnmtorast pnmtorast.o:		pnmtorast.c rast.h
-pnmtotiff pnmtotiff.o:		pnmtotiff.c
-pnmtoxwd pnmtoxwd.o:		pnmtoxwd.c x11wd.h
-rasttopnm rasttopnm.o:		rasttopnm.c rast.h
-tifftopnm tifftopnm.o:		tifftopnm.c
-xwdtopnm xwdtopnm.o:		xwdtopnm.c x10wd.h x11wd.h
-
-clean:
-	-rm -f *.o *.a *.cat core $(BINARIES) pnmmerge
diff --git a/exsrc/src/png/pngconf.h b/exsrc/src/png/pngconf.h
deleted file mode 100644
index e185438ca..000000000
--- a/exsrc/src/png/pngconf.h
+++ /dev/null
@@ -1,632 +0,0 @@
-
-/* pngconf.h - machine configurable file for libpng
- *
- * libpng version 1.5.1 - February 3, 2011
- *
- * Copyright (c) 1998-2011 Glenn Randers-Pehrson
- * (Version 0.96 Copyright (c) 1996, 1997 Andreas Dilger)
- * (Version 0.88 Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.)
- *
- * This code is released under the libpng license.
- * For conditions of distribution and use, see the disclaimer
- * and license in png.h
- *
- */
-
-/* Any machine specific code is near the front of this file, so if you
- * are configuring libpng for a machine, you may want to read the section
- * starting here down to where it starts to typedef png_color, png_text,
- * and png_info.
- */
-
-#ifndef PNGCONF_H
-#define PNGCONF_H
-
-/* PNG_NO_LIMITS_H may be used to turn off the use of the standard C
- * definition file for  machine specific limits, this may impact the
- * correctness of the definitons below (see uses of INT_MAX).
- */
-#ifndef PNG_NO_LIMITS_H
-#  include <limits.h>
-#endif
-
-/* For the memory copy APIs (i.e. the standard definitions of these),
- * because this file defines png_memcpy and so on the base APIs must
- * be defined here.
- */
-#ifdef BSD
-#  include <strings.h>
-#else
-#  include <string.h>
-#endif
-
-/* For png_FILE_p - this provides the standard definition of a
- * FILE
- */
-#ifdef PNG_STDIO_SUPPORTED
-#  include <stdio.h>
-#endif
-
-/* This controls optimization of the reading of 16 and 32 bit values
- * from PNG files.  It can be set on a per-app-file basis - it
- * just changes whether a macro is used to the function is called.
- * The library builder sets the default, if read functions are not
- * built into the library the macro implementation is forced on.
- */
-#ifndef PNG_READ_INT_FUNCTIONS_SUPPORTED
-#  define PNG_USE_READ_MACROS
-#endif
-#if !defined(PNG_NO_USE_READ_MACROS) && !defined(PNG_USE_READ_MACROS)
-#  if PNG_DEFAULT_READ_MACROS
-#    define PNG_USE_READ_MACROS
-#  endif
-#endif
-
-/* COMPILER SPECIFIC OPTIONS.
- *
- * These options are provided so that a variety of difficult compilers
- * can be used.  Some are fixed at build time (e.g. PNG_API_RULE
- * below) but still have compiler specific implementations, others
- * may be changed on a per-file basis when compiling against libpng.
- */
-
-/* The PNGARG macro protects us against machines that don't have function
- * prototypes (ie K&R style headers).  If your compiler does not handle
- * function prototypes, define this macro and use the included ansi2knr.
- * I've always been able to use _NO_PROTO as the indicator, but you may
- * need to drag the empty declaration out in front of here, or change the
- * ifdef to suit your own needs.
- */
-#ifndef PNGARG
-
-#  ifdef OF /* zlib prototype munger */
-#    define PNGARG(arglist) OF(arglist)
-#  else
-
-#    ifdef _NO_PROTO
-#      define PNGARG(arglist) ()
-#    else
-#      define PNGARG(arglist) arglist
-#    endif /* _NO_PROTO */
-
-#  endif /* OF */
-
-#endif /* PNGARG */
-
-/* Function calling conventions.
- * =============================
- * Normally it is not necessary to specify to the compiler how to call
- * a function - it just does it - however on x86 systems derived from
- * Microsoft and Borland C compilers ('IBM PC', 'DOS', 'Windows' systems
- * and some others) there are multiple ways to call a function and the
- * default can be changed on the compiler command line.  For this reason
- * libpng specifies the calling convention of every exported function and
- * every function called via a user supplied function pointer.  This is
- * done in this file by defining the following macros:
- *
- * PNGAPI    Calling convention for exported functions.
- * PNGCBAPI  Calling convention for user provided (callback) functions.
- * PNGCAPI   Calling convention used by the ANSI-C library (required
- *           for longjmp callbacks and sometimes used internally to
- *           specify the calling convention for zlib).
- *
- * These macros should never be overridden.  If it is necessary to
- * change calling convention in a private build this can be done
- * by setting PNG_API_RULE (which defaults to 0) to one of the values
- * below to select the correct 'API' variants.
- *
- * PNG_API_RULE=0 Use PNGCAPI - the 'C' calling convention - throughout.
- *                This is correct in every known environment.
- * PNG_API_RULE=1 Use the operating system convention for PNGAPI and
- *                the 'C' calling convention (from PNGCAPI) for
- *                callbacks (PNGCBAPI).  This is no longer required
- *                in any known environment - if it has to be used
- *                please post an explanation of the problem to the
- *                libpng mailing list.
- *
- * These cases only differ if the operating system does not use the C
- * calling convention, at present this just means the above cases
- * (x86 DOS/Windows sytems) and, even then, this does not apply to
- * Cygwin running on those systems.
- *
- * Note that the value must be defined in pnglibconf.h so that what
- * the application uses to call the library matches the conventions
- * set when building the library.
- */
-
-/* Symbol export
- * =============
- * When building a shared library it is almost always necessary to tell
- * the compiler which symbols to export.  The png.h macro 'PNG_EXPORT'
- * is used to mark the symbols.  On some systems these symbols can be
- * extracted at link time and need no special processing by the compiler,
- * on other systems the symbols are flagged by the compiler and just
- * the declaration requires a special tag applied (unfortunately) in a
- * compiler dependent way.  Some systems can do either.
- *
- * A small number of older systems also require a symbol from a DLL to
- * be flagged to the program that calls it.  This is a problem because
- * we do not know in the header file included by application code that
- * the symbol will come from a shared library, as opposed to a statically
- * linked one.  For this reason the application must tell us by setting
- * the magic flag PNG_USE_DLL to turn on the special processing before
- * it includes png.h.
- *
- * Four additional macros are used to make this happen:
- *
- * PNG_IMPEXP The magic (if any) to cause a symbol to be exported from
- *            the build or imported if PNG_USE_DLL is set - compiler
- *            and system specific.
- *
- * PNG_EXPORT_TYPE(type) A macro that pre or appends PNG_IMPEXP to
- *                       'type', compiler specific.
- *
- * PNG_DLL_EXPORT Set to the magic to use during a libpng build to
- *                make a symbol exported from the DLL.
- *
- * PNG_DLL_IMPORT Set to the magic to force the libpng symbols to come
- *                from a DLL - used to define PNG_IMPEXP when
- *                PNG_USE_DLL is set.
- */
-
-/* System specific discovery.
- * ==========================
- * This code is used at build time to find PNG_IMPEXP, the API settings
- * and PNG_EXPORT_TYPE(), it may also set a macro to indicate the DLL
- * import processing is possible.  On Windows/x86 systems it also sets
- * compiler-specific macros to the values required to change the calling
- * conventions of the various functions.
- */
-#if ( defined(_Windows) || defined(_WINDOWS) || defined(WIN32) ||\
-      defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__) ) &&\
-    ( defined(_X86_) || defined(_X64_) || defined(_M_IX86) ||\
-      defined(_M_X64) || defined(_M_IA64) )
-  /* Windows system (DOS doesn't support DLLs) running on x86/x64.  Includes
-   * builds under Cygwin or MinGW.  Also includes Watcom builds but these need
-   * special treatment because they are not compatible with GCC or Visual C
-   * because of different calling conventions.
-   */
-#  if PNG_API_RULE == 2
-    /* If this line results in an error, either because __watcall is not
-     * understood or because of a redefine just below you cannot use *this*
-     * build of the library with the compiler you are using.  *This* build was
-     * build using Watcom and applications must also be built using Watcom!
-     */
-#    define PNGCAPI __watcall
-#  endif
-
-#  if defined(__GNUC__) || (defined (_MSC_VER) && (_MSC_VER >= 800))
-#    define PNGCAPI __cdecl
-#    if PNG_API_RULE == 1
-#      define PNGAPI __stdcall
-#    endif
-#  else
-    /* An older compiler, or one not detected (erroneously) above,
-     * if necessary override on the command line to get the correct
-     * variants for the compiler.
-     */
-#    ifndef PNGCAPI
-#      define PNGCAPI _cdecl
-#    endif
-#    if PNG_API_RULE == 1 && !defined(PNGAPI)
-#      define PNGAPI _stdcall
-#    endif
-#  endif /* compiler/api */
-  /* NOTE: PNGCBAPI always defaults to PNGCAPI. */
-
-#  if defined(PNGAPI) && !defined(PNG_USER_PRIVATEBUILD)
-   ERROR: PNG_USER_PRIVATEBUILD must be defined if PNGAPI is changed
-#  endif
-
-#  if (defined(_MSC_VER) && _MSC_VER < 800) ||\
-      (defined(__BORLANDC__) && __BORLANDC__ < 0x500)
-    /* older Borland and MSC
-     * compilers used '__export' and required this to be after
-     * the type.
-     */
-#    ifndef PNG_EXPORT_TYPE
-#      define PNG_EXPORT_TYPE(type) type PNG_IMPEXP
-#    endif
-#    define PNG_DLL_EXPORT __export
-#  else /* newer compiler */
-#    define PNG_DLL_EXPORT __declspec(dllexport)
-#    ifndef PNG_DLL_IMPORT
-#      define PNG_DLL_IMPORT __declspec(dllimport)
-#    endif
-#  endif /* compiler */
-
-#else /* !Windows/x86 */
-#  if (defined(__IBMC__) || defined(__IBMCPP__)) && defined(__OS2__)
-#    define PNGAPI _System
-#  else /* !Windows/x86 && !OS/2 */
-    /* Use the defaults, or define PNG*API on the command line (but
-     * this will have to be done for every compile!)
-     */
-#  endif /* other system, !OS/2 */
-#endif /* !Windows/x86 */
-
-/* Now do all the defaulting . */
-#ifndef PNGCAPI
-#  define PNGCAPI
-#endif
-#ifndef PNGCBAPI
-#  define PNGCBAPI PNGCAPI
-#endif
-#ifndef PNGAPI
-#  define PNGAPI PNGCAPI
-#endif
-
-/* The default for PNG_IMPEXP depends on whether the library is
- * being built or used.
- */
-#ifndef PNG_IMPEXP
-#  ifdef PNGLIB_BUILD
-    /* Building the library */
-#    if (defined(DLL_EXPORT)/*from libtool*/ ||\
-        defined(_WINDLL) || defined(_DLL) || defined(__DLL__) ||\
-        defined(_USRDLL) ||\
-        defined(PNG_BUILD_DLL)) && defined(PNG_DLL_EXPORT)
-      /* Building a DLL. */
-#      define PNG_IMPEXP PNG_DLL_EXPORT
-#    endif /* DLL */
-#  else
-    /* Using the library */
-#    if defined(PNG_USE_DLL) && defined(PNG_DLL_IMPORT)
-      /* This forces use of a DLL, disallowing static linking */
-#      define PNG_IMPEXP PNG_DLL_IMPORT
-#    endif
-#  endif
-
-#  ifndef PNG_IMPEXP
-#    define PNG_IMPEXP
-#  endif
-#endif
-
-/* THe following complexity is concerned with getting the 'attributes' of the
- * declared function in the correct place.  This potentially requires a separate
- * PNG_EXPORT function for every compiler.
- */
-#ifndef PNG_FUNCTION
-#  if defined (__GNUC__) && !defined(__clang__)
-#     define PNG_FUNCTION(type, name, args, attributes)\
-         attributes type name args
-#  else /* !GNUC */
-#     ifdef _MSC_VER
-#        define PNG_FUNCTION(type, name, args, attributes)\
-         attributes type name args
-#     else /* !MSC */
-#        define PNG_FUNCTION(type, name, args, attributes)\
-            type name args
-#     endif
-#  endif
-#endif
-
-#ifndef PNG_EXPORT_TYPE
-#  define PNG_EXPORT_TYPE(type) PNG_IMPEXP type
-#endif
-
-   /* The ordinal value is only relevant when preprocessing png.h for symbol
-    * table entries, so we discard it here.  See the .dfn files in the
-    * scripts directory.
-    */
-#ifndef PNG_EXPORTA
-#  define PNG_EXPORTA(ordinal, type, name, args, attributes)\
-      extern PNG_FUNCTION(PNG_EXPORT_TYPE(type),(PNGAPI name),PNGARG(args),\
-         attributes)
-#endif
-
-#define PNG_EXPORT(ordinal, type, name, args)\
-   PNG_EXPORTA(ordinal, type, name, args, )
-
-/* Use PNG_REMOVED to comment out a removed interface. */
-#ifndef PNG_REMOVED
-#  define PNG_REMOVED(ordinal, type, name, args, attributes)
-#endif
-
-#ifndef PNG_CALLBACK
-#  define PNG_CALLBACK(type, name, args, attributes)\
-   type (PNGCBAPI name) PNGARG(args) attributes
-#endif
-
-/* Support for compiler specific function attributes.  These are used
- * so that where compiler support is available incorrect use of API
- * functions in png.h will generate compiler warnings.
- *
- * Added at libpng-1.2.41.
- */
-
-#ifndef PNG_NO_PEDANTIC_WARNINGS
-#  ifndef PNG_PEDANTIC_WARNINGS_SUPPORTED
-#    define PNG_PEDANTIC_WARNINGS_SUPPORTED
-#  endif
-#endif
-
-#ifdef PNG_PEDANTIC_WARNINGS_SUPPORTED
-  /* Support for compiler specific function attributes.  These are used
-   * so that where compiler support is available incorrect use of API
-   * functions in png.h will generate compiler warnings.  Added at libpng
-   * version 1.2.41.
-   */
-#  if defined (__GNUC__) && !defined(__clang__)
-#    ifndef PNG_USE_RESULT
-#      define PNG_USE_RESULT __attribute__((__warn_unused_result__))
-#    endif
-#    ifndef PNG_NORETURN
-#      define PNG_NORETURN   __attribute__((__noreturn__))
-#    endif
-#    ifndef PNG_PTR_NORETURN
-#      define PNG_PTR_NORETURN   __attribute__((__noreturn__))
-#    endif
-#    ifndef PNG_ALLOCATED
-#      define PNG_ALLOCATED  __attribute__((__malloc__))
-#    endif
-
-    /* This specifically protects structure members that should only be
-     * accessed from within the library, therefore should be empty during
-     * a library build.
-     */
-#    ifndef PNGLIB_BUILD
-#      ifndef PNG_DEPRECATED
-#        define PNG_DEPRECATED __attribute__((__deprecated__))
-#      endif
-#      ifndef PNG_DEPSTRUCT
-#        define PNG_DEPSTRUCT  __attribute__((__deprecated__))
-#      endif
-#      ifndef PNG_PRIVATE
-#        if 0 /* Doesn't work so we use deprecated instead*/
-#          define PNG_PRIVATE \
-            __attribute__((warning("This function is not exported by libpng.")))
-#        else
-#          define PNG_PRIVATE \
-            __attribute__((__deprecated__))
-#        endif
-#      endif /* PNG_PRIVATE */
-#    endif /* PNGLIB_BUILD */
-#  endif /* __GNUC__ */
-#  ifdef _MSC_VER /* may need to check value */
-#    ifndef PNG_USE_RESULT
-#      define PNG_USE_RESULT /*not supported*/
-#    endif
-#    ifndef PNG_NORETURN
-#      define PNG_NORETURN   __declspec(noreturn)
-#    endif
-#    ifndef PNG_PTR_NORETURN
-#      define PNG_PTR_NORETURN /*not supported*/
-#    endif
-#    ifndef PNG_ALLOCATED
-#      define PNG_ALLOCATED __declspec(restrict)
-#    endif
-
-    /* This specifically protects structure members that should only be
-     * accessed from within the library, therefore should be empty during
-     * a library build.
-     */
-#    ifndef PNGLIB_BUILD
-#      ifndef PNG_DEPRECATED
-#        define PNG_DEPRECATED __declspec(deprecated)
-#      endif
-#      ifndef PNG_DEPSTRUCT
-#        define PNG_DEPSTRUCT  __declspec(deprecated)
-#      endif
-#      ifndef PNG_PRIVATE
-#        define PNG_PRIVATE __declspec(deprecated)
-#      endif /* PNG_PRIVATE */
-#    endif /* PNGLIB_BUILD */
-#  endif /* __GNUC__ */
-#endif /* PNG_PEDANTIC_WARNINGS */
-
-#ifndef PNG_DEPRECATED
-#  define PNG_DEPRECATED  /* Use of this function is deprecated */
-#endif
-#ifndef PNG_USE_RESULT
-#  define PNG_USE_RESULT  /* The result of this function must be checked */
-#endif
-#ifndef PNG_NORETURN
-#  define PNG_NORETURN    /* This function does not return */
-#endif
-#ifndef PNG_ALLOCATED
-#  define PNG_ALLOCATED   /* The result of the function is new memory */
-#endif
-#ifndef PNG_DEPSTRUCT
-#  define PNG_DEPSTRUCT   /* Access to this struct member is deprecated */
-#endif
-#ifndef PNG_PRIVATE
-#  define PNG_PRIVATE     /* This is a private libpng function */
-#endif
-#ifndef PNG_FP_EXPORT     /* A floating point API. */
-#  ifdef PNG_FLOATING_POINT_SUPPORTED
-#     define PNG_FP_EXPORT(ordinal, type, name, args)\
-         PNG_EXPORT(ordinal, type, name, args)
-#  else                   /* No floating point APIs */
-#     define PNG_FP_EXPORT(ordinal, type, name, args)
-#  endif
-#endif
-#ifndef PNG_FIXED_EXPORT  /* A fixed point API. */
-#  ifdef PNG_FIXED_POINT_SUPPORTED
-#     define PNG_FIXED_EXPORT(ordinal, type, name, args)\
-         PNG_EXPORT(ordinal, type, name, args)
-#  else                   /* No fixed point APIs */
-#     define PNG_FIXED_EXPORT(ordinal, type, name, args)
-#  endif
-#endif
-
-/* The following uses const char * instead of char * for error
- * and warning message functions, so some compilers won't complain.
- * If you do not want to use const, define PNG_NO_CONST here.
- *
- * This should not change how the APIs are called, so it can be done
- * on a per-file basis in the application.
- */
-#ifndef PNG_CONST
-#  ifndef PNG_NO_CONST
-#    define PNG_CONST const
-#  else
-#    define PNG_CONST
-#  endif
-#endif
-
-/* Some typedefs to get us started.  These should be safe on most of the
- * common platforms.  The typedefs should be at least as large as the
- * numbers suggest (a png_uint_32 must be at least 32 bits long), but they
- * don't have to be exactly that size.  Some compilers dislike passing
- * unsigned shorts as function parameters, so you may be better off using
- * unsigned int for png_uint_16.
- */
-
-#if defined(INT_MAX) && (INT_MAX > 0x7ffffffeL)
-typedef unsigned int png_uint_32;
-typedef int png_int_32;
-#else
-typedef unsigned long png_uint_32;
-typedef long png_int_32;
-#endif
-typedef unsigned short png_uint_16;
-typedef short png_int_16;
-typedef unsigned char png_byte;
-
-#ifdef PNG_NO_SIZE_T
-typedef unsigned int png_size_t;
-#else
-typedef size_t png_size_t;
-#endif
-#define png_sizeof(x) (sizeof (x))
-
-/* The following is needed for medium model support.  It cannot be in the
- * pngpriv.h header.  Needs modification for other compilers besides
- * MSC.  Model independent support declares all arrays and pointers to be
- * large using the far keyword.  The zlib version used must also support
- * model independent data.  As of version zlib 1.0.4, the necessary changes
- * have been made in zlib.  The USE_FAR_KEYWORD define triggers other
- * changes that are needed. (Tim Wegner)
- */
-
-/* Separate compiler dependencies (problem here is that zlib.h always
- * defines FAR. (SJT)
- */
-#ifdef __BORLANDC__
-#  if defined(__LARGE__) || defined(__HUGE__) || defined(__COMPACT__)
-#    define LDATA 1
-#  else
-#    define LDATA 0
-#  endif
-  /* GRR:  why is Cygwin in here?  Cygwin is not Borland C... */
-#  if !defined(__WIN32__) && !defined(__FLAT__) && !defined(__CYGWIN__)
-#    define PNG_MAX_MALLOC_64K /* only used in build */
-#    if (LDATA != 1)
-#      ifndef FAR
-#        define FAR __far
-#      endif
-#      define USE_FAR_KEYWORD
-#    endif   /* LDATA != 1 */
-         /* Possibly useful for moving data out of default segment.
-          * Uncomment it if you want. Could also define FARDATA as
-          * const if your compiler supports it. (SJT)
-#        define FARDATA FAR
-          */
-#  endif  /* __WIN32__, __FLAT__, __CYGWIN__ */
-#endif   /* __BORLANDC__ */
-
-
-/* Suggest testing for specific compiler first before testing for
- * FAR.  The Watcom compiler defines both __MEDIUM__ and M_I86MM,
- * making reliance oncertain keywords suspect. (SJT)
- */
-
-/* MSC Medium model */
-#ifdef FAR
-#  ifdef M_I86MM
-#    define USE_FAR_KEYWORD
-#    define FARDATA FAR
-#    include <dos.h>
-#  endif
-#endif
-
-/* SJT: default case */
-#ifndef FAR
-#  define FAR
-#endif
-
-/* At this point FAR is always defined */
-#ifndef FARDATA
-#  define FARDATA
-#endif
-
-/* Typedef for floating-point numbers that are converted
- * to fixed-point with a multiple of 100,000, e.g., gamma
- */
-typedef png_int_32 png_fixed_point;
-
-/* Add typedefs for pointers */
-typedef void                      FAR * png_voidp;
-typedef PNG_CONST void            FAR * png_const_voidp;
-typedef png_byte                  FAR * png_bytep;
-typedef PNG_CONST png_byte        FAR * png_const_bytep;
-typedef png_uint_32               FAR * png_uint_32p;
-typedef PNG_CONST png_uint_32     FAR * png_const_uint_32p;
-typedef png_int_32                FAR * png_int_32p;
-typedef PNG_CONST png_int_32      FAR * png_const_int_32p;
-typedef png_uint_16               FAR * png_uint_16p;
-typedef PNG_CONST png_uint_16     FAR * png_const_uint_16p;
-typedef png_int_16                FAR * png_int_16p;
-typedef PNG_CONST png_int_16      FAR * png_const_int_16p;
-typedef char                      FAR * png_charp;
-typedef PNG_CONST char            FAR * png_const_charp;
-typedef png_fixed_point           FAR * png_fixed_point_p;
-typedef PNG_CONST png_fixed_point FAR * png_const_fixed_point_p;
-typedef png_size_t                FAR * png_size_tp;
-typedef PNG_CONST png_size_t      FAR * png_const_size_tp;
-
-#ifdef PNG_STDIO_SUPPORTED
-typedef FILE            * png_FILE_p;
-#endif
-
-#ifdef PNG_FLOATING_POINT_SUPPORTED
-typedef double           FAR * png_doublep;
-typedef PNG_CONST double FAR * png_const_doublep;
-#endif
-
-/* Pointers to pointers; i.e. arrays */
-typedef png_byte        FAR * FAR * png_bytepp;
-typedef png_uint_32     FAR * FAR * png_uint_32pp;
-typedef png_int_32      FAR * FAR * png_int_32pp;
-typedef png_uint_16     FAR * FAR * png_uint_16pp;
-typedef png_int_16      FAR * FAR * png_int_16pp;
-typedef PNG_CONST char  FAR * FAR * png_const_charpp;
-typedef char            FAR * FAR * png_charpp;
-typedef png_fixed_point FAR * FAR * png_fixed_point_pp;
-#ifdef PNG_FLOATING_POINT_SUPPORTED
-typedef double          FAR * FAR * png_doublepp;
-#endif
-
-/* Pointers to pointers to pointers; i.e., pointer to array */
-typedef char            FAR * FAR * FAR * png_charppp;
-
-/* png_alloc_size_t is guaranteed to be no smaller than png_size_t,
- * and no smaller than png_uint_32.  Casts from png_size_t or png_uint_32
- * to png_alloc_size_t are not necessary; in fact, it is recommended
- * not to use them at all so that the compiler can complain when something
- * turns out to be problematic.
- * Casts in the other direction (from png_alloc_size_t to png_size_t or
- * png_uint_32) should be explicitly applied; however, we do not expect
- * to encounter practical situations that require such conversions.
- */
-#if defined(__TURBOC__) && !defined(__FLAT__)
-   typedef unsigned long png_alloc_size_t;
-#else
-#  if defined(_MSC_VER) && defined(MAXSEG_64K)
-     typedef unsigned long    png_alloc_size_t;
-#  else
-     /* This is an attempt to detect an old Windows system where (int) is
-      * actually 16 bits, in that case png_malloc must have an argument with a
-      * bigger size to accomodate the requirements of the library.
-      */
-#    if (defined(_Windows) || defined(_WINDOWS) || defined(_WINDOWS_)) && \
-        (!defined(INT_MAX) || INT_MAX <= 0x7ffffffeL)
-       typedef DWORD         png_alloc_size_t;
-#    else
-       typedef png_size_t    png_alloc_size_t;
-#    endif
-#  endif
-#endif
-
-#endif /* PNGCONF_H */
diff --git a/exsrc/src/readline/shobj-conf b/exsrc/src/readline/shobj-conf
deleted file mode 100644
index 663869a81..000000000
--- a/exsrc/src/readline/shobj-conf
+++ /dev/null
@@ -1,579 +0,0 @@
-#! /bin/sh
-#
-# shobj-conf -- output a series of variable assignments to be substituted
-#		into a Makefile by configure which specify system-dependent
-#		information for creating shared objects that may be loaded
-#		into bash with `enable -f'
-#
-# usage: shobj-conf [-C compiler] -c host_cpu -o host_os -v host_vendor
-#
-# Chet Ramey
-# chet@po.cwru.edu
-
-#   Copyright (C) 1996-2009 Free Software Foundation, Inc.
-#
-#   This file is part of GNU Bash, the Bourne Again SHell.
-#
-#   This program is free software: you can redistribute it and/or modify
-#   it under the terms of the GNU General Public License as published by
-#   the Free Software Foundation, either version 3 of the License, or
-#   (at your option) any later version.
-#
-#   This program is distributed in the hope that it will be useful,
-#   but WITHOUT ANY WARRANTY; without even the implied warranty of
-#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-#   GNU General Public License for more details.
-#
-#   You should have received a copy of the GNU General Public License
-#   along with this program.  If not, see <http://www.gnu.org/licenses/>.
-#
-
-#
-# defaults
-#
-SHOBJ_STATUS=supported
-SHLIB_STATUS=supported
-
-SHOBJ_CC=cc
-SHOBJ_CFLAGS=
-SHOBJ_LD=
-SHOBJ_LDFLAGS=
-SHOBJ_XLDFLAGS=
-SHOBJ_LIBS=
-
-SHLIB_XLDFLAGS=
-SHLIB_LIBS='-ltermcap'
-
-SHLIB_DOT='.'
-SHLIB_LIBPREF='lib'
-SHLIB_LIBSUFF='so'
-
-SHLIB_LIBVERSION='$(SHLIB_LIBSUFF)'
-SHLIB_DLLVERSION='$(SHLIB_MAJOR)'
-
-PROGNAME=`basename $0`
-USAGE="$PROGNAME [-C compiler] -c host_cpu -o host_os -v host_vendor"
-
-while [ $# -gt 0 ]; do
-	case "$1" in
-	-C)	shift; SHOBJ_CC="$1"; shift ;;
-	-c)	shift; host_cpu="$1"; shift ;;
-	-o)	shift; host_os="$1"; shift ;;
-	-v)	shift; host_vendor="$1"; shift ;;
-	*)	echo "$USAGE" >&2 ; exit 2;;
-	esac
-done
-
-case "${host_os}-${SHOBJ_CC}-${host_vendor}" in
-sunos4*-*gcc*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD=/usr/bin/ld
-	SHOBJ_LDFLAGS='-assert pure-text'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-sunos4*)
-	SHOBJ_CFLAGS=-pic
-	SHOBJ_LD=/usr/bin/ld
-	SHOBJ_LDFLAGS='-assert pure-text'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-sunos5*-*gcc*|solaris2*-*gcc*)
-	SHOBJ_LD='${CC}'
-	ld_used=`gcc -print-prog-name=ld`
-	if ${ld_used} -V 2>&1 | grep GNU >/dev/null 2>&1; then
-		# This line works for the GNU ld
-		SHOBJ_LDFLAGS='-shared -Wl,-h,$@'
-		# http://sourceware.org/ml/binutils/2001-08/msg00361.html
-		SHOBJ_CFLAGS=-fPIC
-	else
-		# This line works for the Solaris linker in /usr/ccs/bin/ld
-		SHOBJ_LDFLAGS='-shared -Wl,-i -Wl,-h,$@'
-		SHOBJ_CFLAGS=-fpic
-	fi
-
-#	SHLIB_XLDFLAGS='-R $(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sunos5*|solaris2*)
-	SHOBJ_CFLAGS='-K pic'
-	SHOBJ_LD=/usr/ccs/bin/ld
-	SHOBJ_LDFLAGS='-G -dy -z text -i -h $@'
-
-#	SHLIB_XLDFLAGS='-R $(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-# All versions of Linux (including Gentoo/FreeBSD) or the semi-mythical GNU Hurd.
-linux*-*|gnu*-*|k*bsd*-gnu-*|freebsd*-gentoo)
-	SHOBJ_CFLAGS=-fPIC
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-
-	SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir) -Wl,-soname,`basename $@ $(SHLIB_MINOR)`'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-freebsd2*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-x -Bshareable'
-
-	SHLIB_XLDFLAGS='-R$(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-# FreeBSD-3.x ELF
-freebsd3*|freebsdaout*)
-	SHOBJ_CFLAGS=-fPIC
-	SHOBJ_LD='${CC}'
-
-	if [ -x /usr/bin/objformat ] && [ "`/usr/bin/objformat`" = "elf" ]; then
-		SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-
-		SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)'
-		SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	else
-		SHOBJ_LDFLAGS='-shared'
-
-		SHLIB_XLDFLAGS='-R$(libdir)'
-		SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	fi
-	;;
-
-# FreeBSD-4.x and later have only ELF
-freebsd[4-9]*|freebsdelf*|dragonfly*)
-	SHOBJ_CFLAGS=-fPIC
-	SHOBJ_LD='${CC}'
-
-	SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-	SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-# Darwin/MacOS X
-darwin[89]*|darwin10*)
-	SHOBJ_STATUS=supported
-	SHLIB_STATUS=supported
-	
-	SHOBJ_CFLAGS='-fno-common'
-
-	SHOBJ_LD='MACOSX_DEPLOYMENT_TARGET=10.3 ${CC}'
-
-	SHLIB_LIBVERSION='$(SHLIB_MAJOR)$(SHLIB_MINOR).$(SHLIB_LIBSUFF)'
-	SHLIB_LIBSUFF='dylib'
-
-	SHOBJ_LDFLAGS='-dynamiclib -dynamic -undefined dynamic_lookup -arch_only `/usr/bin/arch`'
-	SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v'
-
-	SHLIB_LIBS='-lncurses'	# see if -lcurses works on MacOS X 10.1 
-	;;
-
-darwin*|macosx*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=supported
-
-	SHOBJ_CFLAGS='-fno-common'
-
-	SHOBJ_LD='${CC}'
-
-	SHLIB_LIBVERSION='$(SHLIB_MAJOR)$(SHLIB_MINOR).$(SHLIB_LIBSUFF)'
-	SHLIB_LIBSUFF='dylib'
-
-	case "${host_os}" in
-	darwin[789]*|darwin10*)	SHOBJ_LDFLAGS=''
-			SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v'
-			;;
-	*)		SHOBJ_LDFLAGS='-dynamic'
-			SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v'
-			;;
-	esac
-
-	SHLIB_LIBS='-lncurses'	# see if -lcurses works on MacOS X 10.1 
-	;;
-
-openbsd*|netbsd*)
-	SHOBJ_CFLAGS=-fPIC
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_XLDFLAGS='-R$(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-bsdi2*)
-	SHOBJ_CC=shlicc2
-	SHOBJ_CFLAGS=
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS=-r
-	SHOBJ_LIBS=-lc_s.2.1.0
-
-	# BSD/OS 2.x and 3.x `shared libraries' are too much of a pain in
-	# the ass -- they require changing {/usr/lib,etc}/shlib.map on
-	# each system, and the library creation process is byzantine
-	SHLIB_STATUS=unsupported
-	;;
-
-bsdi3*)
-	SHOBJ_CC=shlicc2
-	SHOBJ_CFLAGS=
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS=-r
-	SHOBJ_LIBS=-lc_s.3.0.0
-
-	# BSD/OS 2.x and 3.x `shared libraries' are too much of a pain in
-	# the ass -- they require changing {/usr/lib,etc}/shlib.map on
-	# each system, and the library creation process is byzantine
-	SHLIB_STATUS=unsupported
-	;;
-
-bsdi4*)
-	# BSD/OS 4.x now supports ELF and SunOS-style dynamically-linked
-	# shared libraries.  gcc 2.x is the standard compiler, and the
-	# `normal' gcc options should work as they do in Linux.
-
-	SHOBJ_CFLAGS=-fPIC
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-
-	SHLIB_XLDFLAGS='-Wl,-soname,`basename $@ $(SHLIB_MINOR)`'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)'
-	;;
-
-osf*-*gcc*)
-	# Fix to use gcc linker driver from bfischer@TechFak.Uni-Bielefeld.DE
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-
-	SHLIB_XLDFLAGS='-rpath $(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-osf*)
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-shared -soname $@ -expect_unresolved "*"'
-
-	SHLIB_XLDFLAGS='-rpath $(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-aix4.[2-9]*-*gcc*|aix[5-9].*-*gcc*)		# lightly tested by jik@cisco.com
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD='ld'
-	SHOBJ_LDFLAGS='-bdynamic -bnoentry -bexpall'
-	SHOBJ_XLDFLAGS='-G'
-
-	SHLIB_XLDFLAGS='-bM:SRE'
-	SHLIB_LIBS='-lcurses -lc'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-aix4.[2-9]*|aix[5-9].*)
-	SHOBJ_CFLAGS=-K
-	SHOBJ_LD='ld'
-	SHOBJ_LDFLAGS='-bdynamic -bnoentry -bexpall'
-	SHOBJ_XLDFLAGS='-G'
-
-	SHLIB_XLDFLAGS='-bM:SRE'
-	SHLIB_LIBS='-lcurses -lc'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-#
-# THE FOLLOWING ARE UNTESTED -- and some may not support the dlopen interface
-#
-irix[56]*-*gcc*)
-	SHOBJ_CFLAGS='-fpic'
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared -Wl,-soname,$@'
-
-	SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-irix[56]*)
-	SHOBJ_CFLAGS='-K PIC'
-	SHOBJ_LD=ld
-#	SHOBJ_LDFLAGS='-call_shared -hidden_symbol -no_unresolved -soname $@'
-#	Change from David Kaelbling <drk@sgi.com>.  If you have problems,
-#	remove the `-no_unresolved'
-	SHOBJ_LDFLAGS='-shared -no_unresolved -soname $@'
-
-	SHLIB_XLDFLAGS='-rpath $(libdir)'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-hpux9*-*gcc*)
-	# must use gcc; the bundled cc cannot compile PIC code
-	SHOBJ_CFLAGS='-fpic'
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared -Wl,-b -Wl,+s'
-
-	SHLIB_XLDFLAGS='-Wl,+b,$(libdir)'
-	SHLIB_LIBSUFF='sl'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-hpux9*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=unsupported
-
-	# If you are using the HP ANSI C compiler, you can uncomment and use
-	# this code (I have not tested it)
-#	SHOBJ_STATUS=supported
-#	SHLIB_STATUS=supported
-#
-#	SHOBJ_CFLAGS='+z'
-#	SHOBJ_LD='ld'
-#	SHOBJ_LDFLAGS='-b +s'
-#
-#	SHLIB_XLDFLAGS='+b $(libdir)'
-#	SHLIB_LIBSUFF='sl'
-#	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'	
-
-	;;
-
-hpux10*-*gcc*)
-	# must use gcc; the bundled cc cannot compile PIC code
-	SHOBJ_CFLAGS='-fpic'
-	SHOBJ_LD='${CC}'
-	# if you have problems linking here, moving the `-Wl,+h,$@' from
-	# SHLIB_XLDFLAGS to SHOBJ_LDFLAGS has been reported to work
-	SHOBJ_LDFLAGS='-shared -fpic -Wl,-b -Wl,+s'
-
-	SHLIB_XLDFLAGS='-Wl,+h,$@ -Wl,+b,$(libdir)'
-	SHLIB_LIBSUFF='sl'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-hpux10*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=unsupported
-
-	# If you are using the HP ANSI C compiler, you can uncomment and use
-	# this code (I have not tested it)
-#	SHOBJ_STATUS=supported
-#	SHLIB_STATUS=supported
-#
-#	SHOBJ_CFLAGS='+z'
-#	SHOBJ_LD='ld'
-#	SHOBJ_LDFLAGS='-b +s +h $@'
-#
-#	SHLIB_XLDFLAGS='+b $(libdir)'
-#	SHLIB_LIBSUFF='sl'
-#	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'	
-
-	;;
-
-hpux11*-*gcc*)
-	# must use gcc; the bundled cc cannot compile PIC code
-	SHOBJ_CFLAGS='-fpic'
-	SHOBJ_LD='${CC}'
-#	SHOBJ_LDFLAGS='-shared -Wl,-b -Wl,-B,symbolic -Wl,+s -Wl,+std -Wl,+h,$@'
-	SHOBJ_LDFLAGS='-shared -fpic -Wl,-b -Wl,+s -Wl,+h,$@'
-
-	SHLIB_XLDFLAGS='-Wl,+b,$(libdir)'
-	SHLIB_LIBSUFF='sl'
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-hpux11*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=unsupported
-
-	# If you are using the HP ANSI C compiler, you can uncomment and use
-	# this code (I have not tested it)
-#	SHOBJ_STATUS=supported
-#	SHLIB_STATUS=supported
-#
-#	SHOBJ_CFLAGS='+z'
-#	SHOBJ_LD='ld'
-#	SHOBJ_LDFLAGS='-b +s +h $@'
-#
-#	SHLIB_XLDFLAGS='+b $(libdir)'
-#	SHLIB_LIBSUFF='sl'
-#	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'	
-
-	;;
-
-sysv4*-*gcc*)
-	SHOBJ_CFLAGS=-shared
-	SHOBJ_LDFLAGS='-shared -h $@'
-	SHOBJ_LD='${CC}'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv4*)
-	SHOBJ_CFLAGS='-K PIC'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-dy -z text -G -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sco3.2v5*-*gcc*)
-	SHOBJ_CFLAGS='-fpic'		# DEFAULTS TO ELF
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sco3.2v5*)
-	SHOBJ_CFLAGS='-K pic -b elf'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-G -b elf -dy -z text -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5uw7*-*gcc*)
-	SHOBJ_CFLAGS='-fpic'
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5uw7*)
-	SHOBJ_CFLAGS='-K PIC'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-G -dy -z text -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5UnixWare*-*gcc*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5UnixWare*)
-	SHOBJ_CFLAGS='-K PIC'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-G -dy -z text -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5OpenUNIX*-*gcc*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-sysv5OpenUNIX*)
-	SHOBJ_CFLAGS='-K PIC'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-G -dy -z text -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-dgux*-*gcc*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-dgux*)
-	SHOBJ_CFLAGS='-K pic'
-	SHOBJ_LD=ld
-	SHOBJ_LDFLAGS='-G -dy -h $@'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-msdos*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=unsupported
-	;;
-
-cygwin*)
-	SHOBJ_LD='$(CC)'
-	SHOBJ_LDFLAGS='-shared -Wl,--enable-auto-import -Wl,--enable-auto-image-base -Wl,--export-all -Wl,--out-implib=$(@).a'
-	SHLIB_LIBPREF='cyg'
-	SHLIB_LIBSUFF='dll'
-	SHLIB_LIBVERSION='$(SHLIB_DLLVERSION).$(SHLIB_LIBSUFF)'
-	SHLIB_LIBS='$(TERMCAP_LIB)'
-
-	SHLIB_DOT=
-	# For official cygwin releases, DLLVERSION will be defined in the
-	# environment of configure, and will be incremented any time the API
-	# changes in a non-backwards compatible manner.  Otherwise, it is just
-	# SHLIB_MAJOR.
-	if [ -n "$DLLVERSION" ] ; then
-		SHLIB_DLLVERSION="$DLLVERSION"
-	fi
-	;;
-
-mingw*)
-	SHOBJ_LD='$(CC)'
-	SHOBJ_LDFLAGS='-shared -Wl,--enable-auto-import -Wl,--enable-auto-image-base -Wl,--export-all -Wl,--out-implib=$(@).a'
-	SHLIB_LIBSUFF='dll'
-	SHLIB_LIBVERSION='$(SHLIB_DLLVERSION).$(SHLIB_LIBSUFF)'
-	SHLIB_LIBS='$(TERMCAP_LIB)'
-
-	SHLIB_DOT=
-	# For official cygwin releases, DLLVERSION will be defined in the
-	# environment of configure, and will be incremented any time the API
-	# changes in a non-backwards compatible manner.  Otherwise, it is just
-	# SHLIB_MAJOR.
-	if [ -n "$DLLVERSION" ] ; then
-		SHLIB_DLLVERSION="$DLLVERSION"
-	fi
-	;;
-
-#
-# Rely on correct gcc configuration for everything else
-#
-*-*gcc*)
-	SHOBJ_CFLAGS=-fpic
-	SHOBJ_LD='${CC}'
-	SHOBJ_LDFLAGS='-shared'
-
-	SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)'
-	;;
-
-*)
-	SHOBJ_STATUS=unsupported
-	SHLIB_STATUS=unsupported
-	;;
-
-esac
-
-echo SHOBJ_CC=\'"$SHOBJ_CC"\'
-echo SHOBJ_CFLAGS=\'"$SHOBJ_CFLAGS"\'
-echo SHOBJ_LD=\'"$SHOBJ_LD"\'
-echo SHOBJ_LDFLAGS=\'"$SHOBJ_LDFLAGS"\'
-echo SHOBJ_XLDFLAGS=\'"$SHOBJ_XLDFLAGS"\'
-echo SHOBJ_LIBS=\'"$SHOBJ_LIBS"\'
-
-echo SHLIB_XLDFLAGS=\'"$SHLIB_XLDFLAGS"\'
-echo SHLIB_LIBS=\'"$SHLIB_LIBS"\'
-
-echo SHLIB_DOT=\'"$SHLIB_DOT"\'
-
-echo SHLIB_LIBPREF=\'"$SHLIB_LIBPREF"\'
-echo SHLIB_LIBSUFF=\'"$SHLIB_LIBSUFF"\'
-
-echo SHLIB_LIBVERSION=\'"$SHLIB_LIBVERSION"\'
-echo SHLIB_DLLVERSION=\'"$SHLIB_DLLVERSION"\'
-
-echo SHOBJ_STATUS=\'"$SHOBJ_STATUS"\'
-echo SHLIB_STATUS=\'"$SHLIB_STATUS"\'
-
-exit 0
diff --git a/exsrc/src/yes.txt b/exsrc/src/yes.txt
deleted file mode 100644
index c6991e8fe..000000000
--- a/exsrc/src/yes.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-yes
-
diff --git a/exsrc/twisted.sh b/exsrc/twisted.sh
deleted file mode 100755
index fafb9ea76..000000000
--- a/exsrc/twisted.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-PACKAGE="Twisted"
-. ./prolog.sh 
-# Twisted.
-(cd Twisted-*/zope.interface*; ${prefix}/${version}/bin/python setup.py build ${D} install; cd .. ; ${prefix}/${version}/bin/python setup.py build ${D} install)
-
diff --git a/exsrc/vtk.sh b/exsrc/vtk.sh
deleted file mode 100755
index 7f15b4f50..000000000
--- a/exsrc/vtk.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh
-PACKAGE="VTK"
-. ./prolog.sh
-(  BUILD_DIR=`pwd`;\
-   cd VTK*; \
-   sed -e 's@CDAT_PREFIX@'${prefix}'/Externals@g' \
-       -e 's/PY_VERSION/2.4/g' \
-       -e 's@CDAT_BUILD_DIR@'${BUILD_DIR}'@g' \
-       -e 's/TCLTK_VERSION/8.4/g' ../../VTK_BUILD_ANSWERS.core > VTK_BUILD_ANSWERS.feed ; \
-   mkdir -p ${prefix}/Externals/VTK;\
-   cp VTK_BUILD_ANSWERS.feed ${prefix}/Externals/VTK/CMakeCache.txt ;
-   cd ${prefix}/Externals/VTK ;\
-   ${prefix}/Externals/bin/cmake CMakeCache.txt ;\
-   make; make install ; \
-   cd Wrapping/Python ; \
-   ${prefix}/${version}/bin/python setup.py install; \
-)
diff --git a/exsrc/xgks.sh b/exsrc/xgks.sh
deleted file mode 100755
index 5061fb541..000000000
--- a/exsrc/xgks.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/sh
-PACKAGE="xgks"
-OS=`uname`
-. ./prolog.sh
-# xgks
-if ( test "${OS}" = 'Darwin' ) then
-    CPP_X11="-I/usr/X11R6/include"; export CPP_X11
-fi
-LD_X11=""; export LD_X11
-FC='';export FC
-# The configure step will make a header file udposix.h that vcs needs
-cd xgks 
-./configure --prefix=${prefix}/Externals || exit 1
-echo "Installing udposix.h"
-/bin/rm -fr ${prefix}/Externals/include/udposix.h || exit 1
-/bin/cp port/misc/udposix.h ${prefix}/Externals/include/udposix.h || exit 1
-make port/all || exit 1
-make port/install || exit 1
-# added the CXX define for MacOS
-make CXX=cc fontdb/all || exit 1
-make CXX=cc fontdb/install || exit 1
diff --git a/images/2leftarrow.gif b/images/2leftarrow.gif
deleted file mode 100644
index 0f42224dad68b601ac86e3ef71cf1bb89bc7f85d..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1180
zcmZ?wbhEHblwwd|_};|;0)EW=9>T)@(h|{p649bOfvOUo#!|7G(qTr7=`73@44j1$
z;%O36rILKf$|9L+vT24gm0A)d#+qT$8gUldDYEJn(n`6CN}0B5)#{4HX6ChanqH2E
zQC=FE?uIFW21Pzrg+Xz3q5KVevaL$W-IAsqGWOF1)jQSIJ57wHE7{FAF<vO8zd*}m
znY!g-L)(o;j<d~!=I92kl5k(9>aoP!Ww(a=4)etI=80Q1Gk2JUw0XI0atdAS7Pu}r
zXt!hH#Guky5t*}+3T7te?C`E!o>I9z(dW31?@@FA%jywl^@0yuMqSknzhRnwM>^rQ
zM(Q2Yq-T<8FSPO=+El#IEBT~X@!cZoluPU-ul&<-Y1dq;js}(9@T|HUQ*}DE{AP0A
zqsWS9?xhc-YhM@jtgY+ZliRwnV#3zCj$^4k_bM9Rd$&A~X#45W^*MUd&&c*C*_|)S
zdw$l;e4aD&b@9}%X>-4puX<j+_-DnelWjBaHP3w2y5RDJr7zl7|7zRxwtv%)j%~kZ
z?E1ds(EoWS|1UrHf7|u{yB`1FdF}s^=l_rW|NkExL!$((LO}5+3nK$V7lRJS0#Ke{
z;P}TN$|>WqVZp&>4q>gB6B`yDZc9HU(DNf8VS-&68{fm`WlH@{jh8r{G<pQgYj+X&
z`cnB)hgT0@tIDs->;ltGnwS`ODNj<I=v>~!q{ZYhLDr_UN=u~Z#d%qWhBN`LmiDu=
z&AkMrG+9dd=E*ewQHcG#>?~iaGt1YV&g|!T9O_<O(Y&43+t*^l^7bY(n;)-z*^UQ=
zm!F-Qs#VU<Ciq3Q@3CmngNB2bUC&I>44QYr!b0i%Jd5}PdlZz@d&Ko)S2*nCVP$pV
za(~qDaZ(E7G=szqIh{hTox*DBLQx+UvT{$>*|s^wbJLQ}Nh%h48cPzK*#u-P#Ew)<
ze8M(K+AyKYB*Md?nMX{gN<-n}qN8Qvn#c5Y8Xg|vXIJ&oFlbbA<>X{FQE}j4%~-(1
z%wzE&o>N%iK>!<v1H(ZU9u|g1Hff)LgU+Hg6ArTQYZy7ONTw7xxU#o=Xk=tk{lLhf
jzUW6I2eT|g6T2v92}7T0*@Vaa4s4oFCRnnuFjxZsj7Lm)

diff --git a/images/2rightarrow.gif b/images/2rightarrow.gif
deleted file mode 100644
index befbdfad31ef374dcea72afb17a1442ca726ad88..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1180
zcmZ?wbhEHblw?p~_&$jN2zWg>7y=pD!==T;1o>hV_@j&^Jawcz%tS&p#iGm@vRN65
z8HAH1gz5yjQ*^}AO+?By#mi09ob6O&6qTcllrv<M3Z<1&ZFDOY<szJPd>mCG98A1j
zHPYQxE8Of;f}AtsoQq?fauQgY7$m#c<oo3%+SFyHDq8o;TFsDCYgJThv(}idtTWTd
zxKZ6;rJUwcMf2rq7HiCHcdEP2*Y%oj;j&HDWvh9}dY$0amH~UT{hOUV=Qu>ShD1;D
z@>}cbv&}tbeQ@Ajw}jr1l<A=b^Abv@RVFNRNnaSAzQ-YdOLFDn?7C$|z6Uit57<SW
z)`&Q6o_s?i{hCGLE%V$bI>}E=%AcB-eX{g9<{o;&E$4ts()pl_8-ArnJ!*~z6kl~J
zyAhppCb8sJYWWMV>Tk})Pm^1g)b?zxZG3Ond_1cCP+HsVtkzql(@xe+zgN-m#;5yb
zRQq@D?(ZQJe@AvaO6`1JKIMJR)aPZhex}a&TD|yn_0r#!EvuUQHZ@IIGjrOB-sv}5
zXFP44^RsQmtr<&RbuN8AdG)XMH6N#K_}91n@6=t7x9|PE=<wgQr~Yrf^ndr+{|D~;
zKl%Uve^AVg0*XUG@h1x-1H&W+9gs<&Ji);6k3p4F#$&^RgUuYmS}`X!C>}Ph<M?O!
zs6au=$(8M2rpls4D&{>hf`T{LgBJJjU3_y>SW|g{L#y21f)#E2@~*v;SgL|K)F-#a
ze|oa>CdWxdyJ`_jtHT$#Wv!e=HM<Ti_gmyPVTRTnBlmN`cI`q>En3_sEOK`f(NYX{
z>yuen&iC>{^KyYjlQ~N__%^fp$=H^&U9D(duD56=d#TUHrlnj{Eppy?gbB{<RdAN&
zy>%gJsn=AKkSi^c$>|fi49afpQ8d}~pj$m`iN``C)h<b6f3J*(M$66$aj_deXmFD8
znxYrAX~Km?sU0&^#01Pb91nL+mN8FzkT4<DO-#X7uSzB7f|47Lq*2tC1cSpzCy45E
zXyg<yv35!Dh3Qy4JmlKO#;6nGz@jc9(a0dQVL}4)C*3IxEJ7{<2O0QnJ{)9c3Aqr!
zB;mktka<$Zhek$;j2jmi<th>y9Aq*cFtUrbDjZ^D?loXyk}KW7=x9~-<Z-_Po92@V
JY^*E{)&RJ=S)>2}

diff --git a/images/ASD.scr b/images/ASD.scr
deleted file mode 100755
index dfc4be3e4..000000000
--- a/images/ASD.scr
+++ /dev/null
@@ -1,1268 +0,0 @@
-Tt_ASD1(3,1,1,0.2,1)
-Tt_ASD2(3,1,1,0.2,1)
-To_ASD1(0.03,0,r,c,h)
-To_ASD2(0.019,0,r,c,h)
-P_ASD(
-  File(p=1,x=0.0725,y=0.02125,Tt=default,To=default),
-  Function(p=1,x=0.0725,y=0.02125,Tt=default,To=default),
-  LogicalMask(p=1,x=0.0725,y=0.03625,Tt=default,To=default),
-  Transform(p=1,x=0.0725,y=0.05125,Tt=default,To=default),
-  source(p=1,x=0.0725,y=0.70375,Tt=default,To=default),
-  name(p=1,x=0.0725,y=0.68875,Tt=default,To=default),
-  title(p=1,x=0.1675,y=0.68875,Tt=default,To=default),
-  units(p=1,x=0.6615,y=0.68875,Tt=default,To=default),
-  crdate(p=1,x=0.7375,y=0.68875,Tt=default,To=default),
-  crtime(p=1,x=0.8325,y=0.68875,Tt=default,To=default),
-  comment#1(p=1,x=0.909091,y=0.0466611,Tt=default,To=default),
-  comment#2(p=1,x=0.12,y=0.72875,Tt=default,To=default),
-  comment#3(p=1,x=0.12,y=0.74375,Tt=default,To=default),
-  comment#4(p=1,x=0.85,y=0.070,Tt=default,To=default),
-  xname(p=1,x=0.499345,y=0.17035,Tt=default,To=defcenter),
-  yname(p=1,x=0.0169,y=0.420034,Tt=default,To=defcentup),
-  zname(p=1,x=0.025,y=0.80875,Tt=default,To=default),
-  tname(p=1,x=0.025,y=0.80875,Tt=default,To=default),
-  xunits(p=0,x=0.595,y=0.22125,Tt=default,To=default),
-  yunits(p=0,x=0.044,y=0.48875,Tt=default,To=defcentup),
-  zunits(p=1,x=0.025,y=0.80875,Tt=default,To=default),
-  tunits(p=1,x=0.025,y=0.80875,Tt=default,To=default),
-  xvalue(p=1,x=0.785,y=0.70375,Th=default,Tt=default,To=default),
-  yvalue(p=1,x=0.785,y=0.68875,Th=default,Tt=default,To=default),
-  zvalue(p=1,x=0.785,y=0.67375,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.785,y=0.65875,Th=default,Tt=default,To=default),
-  mean(p=1,x=0.0725,y=0.66875,Th=default,Tt=default,To=default),
-  max(p=1,x=0.2625,y=0.66875,Th=default,Tt=default,To=default),
-  min(p=1,x=0.4525,y=0.66875,Th=default,Tt=default,To=default),
-  xtic#1(p=1,y1=0.21125,y2=0.20175,Tl=default),
-  xtic#2(p=1,y1=0.63875,y2=0.64825,Tl=default),
-  xmintic#a(p=1,y1=0.21125,y2=0.2065,Tl=default),
-  xmintic#b(p=1,y1=0.64825,y2=0.6535,Tl=default),
-  ytic#1(p=1,x1=0.0725,x2=0.063,Tl=default),
-  ytic#2(p=1,x1=0.9275,x2=0.937,Tl=default),
-  ymintic#a(p=1,x1=0.0725,x2=0.06775,Tl=default),
-  ymintic#b(p=1,x1=0.9275,x2=0.93225,Tl=default),
-  xlabel#1(p=1,y=0.19035,Tt=default,To=defcenter),
-  xlabel#2(p=1,y=0.66152,Tt=default,To=defcenter),
-  ylabel#1(p=1,x=0.063,Tt=default,To=defright),
-  ylabel#2(p=1,x=0.937,Tt=default,To=default),
-  box#1(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875,Tl=default),
-  box#2(p=0,x1=0.025,y1=0.23975,x2=0.899,y2=0.65775,Tl=default),
-  box#3(p=0,x1=0.025,y1=0.24925,x2=0.8895,y2=0.64825,Tl=default),
-  box#4(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default),
-  line#1(p=0,x1=0.0725,y1=0.425,x2=0.9275,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.21125,x2=0.5,y2=0.63875,Tl=default),
-  line#3(p=0,x1=0.025,y1=0.78125,x2=0.88,y2=0.78125,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.0725,y1=0.11625,x2=0.9275,y2=0.13525,Tt=default,To=defcenter,Tl=default),
-  data(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875)  )
-P_ASD_dud(
-  File(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  Function(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  LogicalMask(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  Transform(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  source(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  name(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  title(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  units(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  crdate(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  crtime(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  comment#1(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  comment#2(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  comment#3(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  comment#4(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  xname(p=0,x=0.025,y=0.0112546,Tt=default,To=defcenter),
-  yname(p=0,x=0.025,y=0.01125,Tt=default,To=defcentup),
-  zname(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  tname(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  xunits(p=0,x=0.025,y=0.01125,Tt=default,To=default),
-  yunits(p=0,x=0.025,y=0.01125,Tt=default,To=defcentup),
-  zunits(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  tunits(p=0,x=0.025,y=0.0112546,Tt=default,To=default),
-  xvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  tvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  max(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  min(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.0212495,y2=0.0212495,Tl=default),
-  xtic#2(p=0,y1=0.0212495,y2=0.0212495,Tl=default),
-  xmintic#a(p=0,y1=0.0212495,y2=0.0212495,Tl=default),
-  xmintic#b(p=0,y1=0.0212495,y2=0.0212495,Tl=default),
-  ytic#1(p=0,x1=0.025,x2=0.025,Tl=default),
-  ytic#2(p=0,x1=0.025,x2=0.025,Tl=default),
-  ymintic#a(p=0,x1=0.025,x2=0.025,Tl=default),
-  ymintic#b(p=0,x1=0.025,x2=0.025,Tl=default),
-  xlabel#1(p=0,y=0.0212495,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0.02125,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.025,Tt=default,To=defright),
-  ylabel#2(p=0,x=0.025,Tt=default,To=default),
-  box#1(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default),
-  box#2(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default),
-  box#3(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default),
-  box#4(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default),
-  line#1(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default),
-  line#2(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default),
-  line#3(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default),
-  line#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  legend(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tt=default,To=defcenter,Tl=default),
-  data(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875)  )
-P_ASD1(
-  File(p=0,x=0.0669935,y=0.0152291,Tt=default,To=default),
-  Function(p=0,x=0.0669935,y=0.0152291,Tt=default,To=default),
-  LogicalMask(p=1,x=0.0780229,y=0.00653595,Tt=default,To=default),
-  Transform(p=1,x=0.0780229,y=0.0163235,Tt=default,To=default),
-  source(p=0,x=0.0669935,y=0.717229,Tt=default,To=default),
-  name(p=0,x=0.0669935,y=0.705229,Tt=default,To=default),
-  title(p=1,x=0.348809,y=0.705235,Tt=ASD1,To=ASD1),
-  units(p=0,x=0.686993,y=0.705229,Tt=default,To=default),
-  crdate(p=0,x=0.766993,y=0.705229,Tt=default,To=default),
-  crtime(p=0,x=0.866993,y=0.705229,Tt=default,To=default),
-  comment#1(p=1,x=0.2,y=0.025,Tt=ASD2,To=ASD2),
-  comment#2(p=1,x=0.1,y=0.025,Tt=ASD2,To=ASD2),
-  comment#3(p=0,x=0.139052,y=0.711242,Tt=default,To=default),
-  comment#4(p=1,x=0.0339869,y=0.360785,Tt=default,To=defcentup),
-  xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter),
-  yname(p=1,x=0.0221,y=0.327701,Tt=default,To=defcentup),
-  zname(p=1,x=0.0169935,y=0.789542,Tt=default,To=default),
-  tname(p=1,x=0.0169935,y=0.789542,Tt=default,To=default),
-  xunits(p=0,x=0.616993,y=0.215229,Tt=default,To=default),
-  yunits(p=0,x=0.0369935,y=0.505229,Tt=default,To=defcentup),
-  zunits(p=1,x=0.0169935,y=0.789542,Tt=default,To=default),
-  tunits(p=1,x=0.0169935,y=0.789542,Tt=default,To=default),
-  xvalue(p=1,x=0.993464,y=0.672091,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.816993,y=0.695229,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.816993,y=0.685229,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.993464,y=0.642729,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.0669935,y=0.685229,Th=default,Tt=default,To=default),
-  max(p=0,x=0.266993,y=0.685229,Th=default,Tt=default,To=default),
-  min(p=0,x=0.466993,y=0.685229,Th=default,Tt=default,To=default),
-  xtic#1(p=1,y1=0.0640523,y2=0.0724123,Tl=default),
-  xtic#2(p=1,y1=0.624837,y2=0.616477,Tl=default),
-  xmintic#a(p=0,y1=0.0640523, y2=0.067996695,Tl=default),
-  xmintic#b(p=0,y1=0.620657,y2=0.624837,Tl=default),
-  ytic#1(p=1,x1=0.1071242,x2=0.115306,Tl=default),
-  ytic#2(p=1,x1=0.819543,x2=0.811361,Tl=default),
-  ymintic#a(p=0,x1=0.1071242,x2=0.1112151,Tl=default),
-  ymintic#b(p=0,x1=0.819543,x2=0.815452,Tl=default),
-  xlabel#1(p=1,y=0.0522873,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0.64152,Tt=default,To=defcenter),
-  ylabel#1(p=1,x=0.0979738,Tt=default,To=defright),
-  ylabel#2(p=0,x=0.827,Tt=default,To=default),
-  box#1(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default),
-  box#2(p=0,x1=0.0169935,y1=0.235229,x2=0.936993,y2=0.675229,Tl=default),
-  box#3(p=0,x1=0.0169935,y1=0.245229,x2=0.926993,y2=0.665229,Tl=default),
-  box#4(p=0,x1=0.0169935,y1=0.00522876,x2=0.0169935,y2=0.00522876,Tl=default),
-  line#1(p=0,x1=0.0669935,y1=0.430229,x2=0.966993,y2=0.430229,Tl=default),
-  line#2(p=0,x1=0.516993,y1=0.205229,x2=0.516993,y2=0.655229,Tl=default),
-  line#3(p=0,x1=0.0169935,y1=0.405229,x2=0.916993,y2=0.405229,Tl=default),
-  line#4(p=0,x1=0.0169935,y1=0.805229,x2=0.916993,y2=0.805229,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.617701,x2=0.909091,y2=0.617701,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD2(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.3,y=0.15,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.3,y=0.15,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.599123,x2=0.909091,y2=0.599123,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD3(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=1,x=0.5,y=0.726797,Tt=ASD1,To=ASD1),
-  comment#2(p=1,x=0.5,y=0.691504,Tt=ASD2,To=ASD2),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.0104575,y=0.360785,Tt=default,To=defcentup),
-  xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.0640523,y2=0.0724123,Tl=default),
-  xtic#2(p=0,y1=0.624837,y2=0.616477,Tl=default),
-  xmintic#a(p=1,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=1,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.1071242,x2=0.085306,Tl=default),
-  ytic#2(p=0,x1=0.819543,x2=0.781361,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.0522873,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.0679738,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.580546,x2=0.909091,y2=0.580546,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD4(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=1,x=0.5,y=0.726797,Tt=ASD1,To=ASD1),
-  comment#2(p=1,x=0.5,y=0.691504,Tt=ASD1,To=ASD1),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.0104575,y=0.360785,Tt=default,To=defcentup),
-  xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.0640523,y2=0.0724123,Tl=default),
-  xtic#2(p=0,y1=0.624837,y2=0.616477,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.1071242,x2=0.085306,Tl=default),
-  ytic#2(p=0,x1=0.819543,x2=0.781361,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.0522873,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.0679738,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.557324,x2=0.909091,y2=0.557324,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD5(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.538747,x2=0.909091,y2=0.538747,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD6(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.520169,x2=0.909091,y2=0.520169,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD7(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.501592,x2=0.909091,y2=0.501592,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD8(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.483014,x2=0.909091,y2=0.483014,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD9(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.464437,x2=0.909091,y2=0.464437,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD10(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.445859,x2=0.909091,y2=0.445859,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD11(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.402615,y=0.104575,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.427282,x2=0.909091,y2=0.427282,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD12(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.408704,x2=0.909091,y2=0.408704,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD13(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.390127,x2=0.909091,y2=0.390127,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD14(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=1,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.371549,x2=0.909091,y2=0.371549,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-P_ASD15(
-  File(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default),
-  LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default),
-  Transform(p=1,x=0.05,y=0.03,Tt=default,To=default),
-  source(p=0,x=0.05,y=0.712,Tt=default,To=default),
-  name(p=0,x=0.05,y=0.7,Tt=default,To=default),
-  title(p=0,x=0.15,y=0.7,Tt=default,To=default),
-  units(p=0,x=0.67,y=0.7,Tt=default,To=default),
-  crdate(p=0,x=0.75,y=0.7,Tt=default,To=default),
-  crtime(p=0,x=0.85,y=0.7,Tt=default,To=default),
-  comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default),
-  comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default),
-  comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default),
-  comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default),
-  xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter),
-  yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup),
-  zname(p=1,x=0,y=0.82,Tt=default,To=default),
-  tname(p=1,x=0,y=0.82,Tt=default,To=default),
-  xunits(p=0,x=0.6,y=0.21,Tt=default,To=default),
-  yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup),
-  zunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  tunits(p=1,x=0,y=0.82,Tt=default,To=default),
-  xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default),
-  yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default),
-  zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default),
-  tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default),
-  mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default),
-  max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default),
-  min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default),
-  xtic#1(p=0,y1=0.2,y2=0.19,Tl=default),
-  xtic#2(p=0,y1=0.65,y2=0.66,Tl=default),
-  xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default),
-  xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default),
-  ytic#1(p=0,x1=0.05,x2=0.04,Tl=default),
-  ytic#2(p=0,x1=0.55,x2=0.56,Tl=default),
-  ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default),
-  ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default),
-  xlabel#1(p=0,y=0.18,Tt=default,To=defcenter),
-  xlabel#2(p=0,y=0,Tt=default,To=defcenter),
-  ylabel#1(p=0,x=0.035,Tt=default,To=defright),
-  ylabel#2(p=0,x=0,Tt=default,To=default),
-  box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default),
-  box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default),
-  box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default),
-  box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default),
-  line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default),
-  line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default),
-  line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default),
-  line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default),
-  legend(p=1,x1=0.863636,y1=0.352972,x2=0.909091,y2=0.352972,Tt=std,To=left,Tl=default),
-  data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837)  )
-Tt_mwbotcenter(1,1,1,0.2,1)
-To_mwbotcenter(0.01,0,r,c,b)
-Tt_std(1,1,1,0.2,241)
-To_left(0.01,0,r,l,h)
-Gfb_ASD(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   level_1=1e+20,level_2=1e+20,color_1=16,color_2=239,legend_type=0,
-   legend=(),
-   ext_1=n,ext_2=n,missing=241)
-Gcon_ASD(
-   projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,yticlabels#1=lat20,
-   yticlabels#2=lat20,
-   datawc(-180,-90,180,90),
-   Tl=ASDCont,
-   Type=1,
-  )
-Tl_ASDCont(1,2.8,241)
-Gfi_ASD(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,xmtics#1=*,xmtics#2=*,
-   yticlabels#1=*,yticlabels#2=*,ymtics#1=*,ymtics#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   missing=1e+20,
-   range
-   (id=0,level1=1e+20,level2=1e+20,Tf=default)  )
-Gi_ASD(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   make_labels=n,
-   lines
-   (id=0,priority=1,level=0,increment=1e+20,hilite_ci=0,
-    label=*,Tl=default,Tt=default,To=default)
-  )
-Gfo_ASD(
-   projection=linear,xticlabels#1=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   Tf=default,
-   outline(1, 2, 3, 4, 5, 6, 7))
-Go_ASD(
-   projection=linear,xticlabels#1=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   Tl=default,
-   outline(1, 2, 3, 4, 5, 6, 7))
-GXy_ASD1(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD1,Tm=None)
-Tl_ASD1(1,4.9,241)
-Tm_ASD1(1,4.9,241)
-GXy_ASD2(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD2,Tm=None)
-Tl_ASD2(1,4.9,242)
-Tm_ASD2(2,4.9,242)
-GXy_ASD3(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD3,Tm=None)
-Tl_ASD3(1,4.9,243)
-Tm_ASD3(3,4.9,243)
-GXy_ASD4(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD4,Tm=None)
-Tl_ASD4(1,4.9,244)
-Tm_ASD4(4,4.9,244)
-GXy_ASD5(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD5,Tm=None)
-Tl_ASD5(1,4.9,245)
-Tm_ASD5(5,4.9,245)
-GXy_ASD6(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD6,Tm=None)
-Tl_ASD6(1,4.9,246)
-Tm_ASD6(6,4.9,246)
-GXy_ASD7(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD7,Tm=None)
-Tl_ASD7(1,4.9,247)
-Tm_ASD7(7,4.9,247)
-GXy_ASD8(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD8,Tm=None)
-Tl_ASD8(1,4.9,248)
-Tm_ASD8(8,4.9,248)
-GXy_ASD9(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD9,Tm=None)
-Tl_ASD9(1,4.9,249)
-Tm_ASD9(9,4.9,249)
-GXy_ASD10(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD10,Tm=None)
-Tl_ASD10(1,4.9,250)
-Tm_ASD10(10,4.9,250)
-GXy_ASD11(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD11,Tm=None)
-Tl_ASD11(1,4.9,251)
-Tm_ASD11(11,4.9,251)
-GXy_ASD12(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD12,Tm=None)
-Tl_ASD12(1,4.9,252)
-Tm_ASD12(12,4.9,252)
-GXy_ASD13(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD13,Tm=None)
-Tl_ASD13(1,4.9,253)
-Tm_ASD13(13,4.9,253)
-GXy_ASD14(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD14,Tm=None)
-Tl_ASD14(1,4.9,254)
-Tm_ASD14(14,4.9,254)
-GXy_ASD15(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   Tl=ASD15,Tm=None)
-Tl_ASD15(1,4.9,255)
-Tm_ASD15(15,4.9,255)
-GYx_ASD1(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD1,Tm=none)
-GYx_ASD2(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,40,120,100),
-   xaxisconvert=linear,
-   Tl=ASD2,Tm=none)
-GYx_ASD3(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,40,120,100),
-   xaxisconvert=linear,
-   Tl=ASD3,Tm=none)
-GYx_ASD4(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD4,Tm=none)
-GYx_ASD5(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD5,Tm=none)
-GYx_ASD6(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD6,Tm=none)
-GYx_ASD7(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD7,Tm=none)
-GYx_ASD8(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD8,Tm=none)
-GYx_ASD9(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD9,Tm=none)
-GYx_ASD10(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD10,Tm=none)
-GYx_ASD11(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD11,Tm=none)
-GYx_ASD12(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD12,Tm=none)
-GYx_ASD13(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD13,Tm=none)
-GYx_ASD14(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD14,Tm=none)
-GYx_ASD15(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1,1e+20,120,1e+20),
-   xaxisconvert=linear,
-   Tl=ASD15,Tm=none)
-GSp_ASD(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   Tm=default)
-Gv_ASD(
-   projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*,
-   yticlabels#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   Tl=default,vector_scale=1,vector_align=c,vector_type=2,ref_vector=1e+20)
-C_ASD(
-   100,100,100,   0,0,0,   84.7059,84.7059,84.7059,   31.7647,31.7647,31.7647,   100,100,100,   100,100,0,
-   0,2.7451,100,   0,5.4902,100,   0,9.01961,100,   0,11.3725,100,   0,14.902,100,   0,17.6471,100,
-   0,21.1765,100,   0,23.9216,100,   0,26.6667,100,   0,30.1961,100,   0,32.9412,100,   0,35.6863,100,
-   0,39.2157,100,   0,41.9608,100,   0,44.7059,100,   0,48.2353,100,   0,50.9804,100,   0,54.1176,100,
-   0,56.8627,100,   0,60.3922,100,   0,63.1373,100,   0,66.6667,100,   0,69.4118,100,   0,72.1569,100,
-   0,75.6863,100,   0,78.4314,100,   0,81.1765,100,   0,84.7059,100,   0,87.451,100,   0,90.1961,100,
-   0,93.7255,100,   0,96.4706,100,   0,100,100,   0,100,96.4706,   0,100,93.7255,   0,100,90.1961,
-   0,100,87.451,   0,100,84.7059,   0,100,81.1765,   0,100,78.4314,   0,100,75.6863,   0,100,72.1569,
-   0,100,69.4118,   0,100,66.6667,   0,100,63.1373,   0,100,60.3922,   0,100,56.8627,   0,100,54.1176,
-   0,100,50.9804,   0,100,48.2353,   0,100,44.7059,   0,100,41.9608,   0,100,39.2157,   0,100,35.6863,
-   0,100,32.9412,   0,100,30.1961,   0,100,26.6667,   0,100,23.9216,   0,100,21.1765,   0,100,17.6471,
-   0,100,14.902,   0,100,11.3725,   0,100,9.01961,   0,100,5.4902,   0,100,2.7451,   0,100,0,
-   2.7451,100,0,   5.4902,100,0,   9.01961,100,0,   11.3725,100,0,   14.902,100,0,   17.6471,100,0,
-   21.1765,100,0,   23.9216,100,0,   26.6667,100,0,   30.1961,100,0,   32.9412,100,0,   35.6863,100,0,
-   39.2157,100,0,   41.9608,100,0,   44.7059,100,0,   48.2353,100,0,   50.9804,100,0,   54.1176,100,0,
-   56.8627,100,0,   60.3922,100,0,   63.1373,100,0,   66.6667,100,0,   69.4118,100,0,   72.1569,100,0,
-   75.6863,100,0,   78.4314,100,0,   81.1765,100,0,   84.7059,100,0,   87.451,100,0,   90.1961,100,0,
-   93.7255,100,0,   96.4706,100,0,   100,100,0,   100,97.6471,0,   100,95.6863,0,   100,93.7255,0,
-   100,91.3726,0,   100,89.4118,0,   100,87.451,0,   100,85.4902,0,   100,83.1373,0,   100,81.1765,0,
-   100,79.2157,0,   100,77.6471,0,   100,75.6863,0,   100,73.7255,0,   100,71.3726,0,   100,69.4118,0,
-   100,67.451,0,   100,65.4902,0,   100,63.1373,0,   100,61.1765,0,   100,59.2157,0,   100,56.8627,0,
-   100,54.902,0,   100,52.9412,0,   100,50.9804,0,   100,49.4118,0,   100,47.451,0,   100,44.7059,0,
-   100,43.1373,0,   100,41.1765,0,   100,39.2157,0,   100,36.8627,0,   100,34.902,0,   100,32.9412,0,
-   100,32.1569,0,   100,30.9804,0,   100,30.1961,0,   100,28.6275,0,   100,28.2353,0,   100,26.6667,0,
-   100,25.8824,0,   100,24.7059,0,   100,23.9216,0,   100,23.1373,0,   100,21.9608,0,   100,21.1765,0,
-   100,20,0,   100,18.4314,0,   100,17.6471,0,   100,16.4706,0,   100,15.6863,0,   100,14.902,0,
-   100,13.7255,0,   100,12.9412,0,   100,11.3725,0,   100,10.9804,0,   100,9.41177,0,   100,9.01961,0,
-   100,7.84314,0,   100,6.66667,0,   100,5.4902,0,   100,4.31373,0,   100,3.92157,0,   100,2.7451,0,
-   100,1.56863,0,   100,0.784314,0,   100,0,0,   97.6471,0,0,   95.6863,0,0,   93.7255,0,0,
-   92.1569,0,0,   90.1961,0,0,   88.2353,0,0,   86.6667,0,0,   84.7059,0,0,   82.7451,0,0,
-   80.3922,0,0,   79.2157,0,0,   76.8627,0,0,   74.902,0,0,   72.9412,0,0,   71.3726,0,0,
-   69.4118,0,0,   67.451,0,0,   65.8824,0,0,   63.9216,0,0,   61.9608,0,0,   60,0,0,
-   58.4314,0,0,   56.4706,0,0,   54.1176,0,0,   52.1569,0,0,   50.1961,0,0,   48.6275,0,0,
-   46.6667,0,0,   44.7059,0,0,   43.1373,0,0,   41.1765,0,0,   39.2157,0,0,   37.6471,0,0,
-   38.4314,0,1.56863,   39.2157,0,3.92157,   40.3922,0,5.4902,   41.1765,0,7.84314,   41.9608,0,10.1961,   43.1373,0,12.1569,
-   43.9216,0,13.7255,   44.7059,0,15.6863,   45.8824,0,18.4314,   46.6667,0,20.3922,   48.2353,0,21.9608,   48.6275,0,23.9216,
-   50.1961,0,25.8824,   50.9804,0,28.6275,   52.1569,0,30.1961,   52.9412,0,32.1569,   53.7255,0,34.1176,   54.902,0,36.4706,
-   55.6863,0,38.4314,   56.4706,0,40.3922,   57.6471,0,42.7451,   58.4314,0,44.7059,   59.2157,0,46.6667,   60.3922,0,48.6275,
-   61.1765,0,50.9804,   62.7451,0,52.9412,   63.1373,0,54.902,   64.7059,0,56.8627,   65.4902,0,59.2157,   66.6667,0,61.1765,
-   67.451,0,63.1373,   68.2353,0,65.4902,   69.4118,0,67.451,   70.1961,0,69.4118,   71.3726,0,71.3726,   72.1569,0,73.7255)
-
-Gtd_ASD(
-detail = 50;
-max = None;
-quadrans = 1;
-skillValues = [0.10000000000000001, 0.20000000000000001, 0.29999999999999999, 0.40000000000000002, 0.5, 0.59999999999999998, 0.69999999999999996, 0.80000000000000004, 0.90000000000000002, 0.94999999999999996];
-referencevalue = 1.0;
-arrowlength = 0.05;
-arrowangle = 20.0;
-arrowbase = 0.75;
-Marker;
-    status = [];
-    line = [];
-    id = [];
-    id_size = [];
-    id_color = [];
-    id_font = [];
-    symbol = [];
-    color = [];
-    size = [];
-    xoffset = [];
-    yoffset = [];
-    line_color = [];
-    line_size = [];
-    line_type = [];
-)
-Gfm_ASD(
-   projection=linear,xticlabels#1=*,
-   xticlabels#2=*,
-   xmtics#1=*,
-   xmtics#2=*,
-   yticlabels#1=*,
-   yticlabels#2=*,
-   ymtics#1=*,
-   ymtics#2=*,
-   datawc(1e+20,1e+20,1e+20,1e+20),
-   xaxisconvert=linear,
-   yaxisconvert=linear,
-   missing=241,
-   mesh=0,
-   wrap
-   (0, 0),
-   range
-   (id=0,level1=1e+20,level2=1e+20,Tf=default)  )
-
diff --git a/images/HARD_COPY b/images/HARD_COPY
deleted file mode 100755
index 048305957..000000000
--- a/images/HARD_COPY
+++ /dev/null
@@ -1,76 +0,0 @@
-#######################################################################
-########################## H A R D   C O P Y ##########################
-#######################################################################
-#                                                                     #
-# This file contains the user specified printer names located on      #
-# their network!  See the "/etc/printcap" file for a list of          #
-# active printers. It is important to read this entire file for       #
-# instructions!!!!                                                    #
-#                                                                     #
-# The '#' at the start of a line indicates a comment or statement by  #
-# the user.                                                           #
-#                                                                     #
-# I M P O R T A N T    N O T I C E ! ! ! !                            #
-# - - - - - - - - -    - - - - - - - - - -                            #
-# VCS has no way of knowing which print manager your system is using. #
-# That is, 'lpr' (the BSD print spooler) or 'lp'. If the set          #
-# environment variable 'PRINTER' is unset, then VCS will use 'lp'.    #
-# If the set environment variable 'PRINTER' is set to 'printer',      #
-# then VCS will use 'lpr'.                                            #
-#                                                                     #
-#                                                                     #
-# If sending a CGM file to the printer from VCS results in an error   #
-# message (e.g., 'Error - In sending CGM file to printer.'), then     #
-# set or unset the 'PRINTER' environment variable.                    #
-#                                                                     #
-#######################################################################
-#######################################################################
-#######################################################################
-
-#######################################################################
-#######################################################################
-#######################################################################
-#             I M P O R T A N T    N O T I C E ! ! ! !                #
-#             - - - - - - - - -    - - - - - - - - - -                #
-# The lines below are used for GPLOT.  GPLOT is a graphics utility    #
-# program designed for the processing of CGM metafiles.  We use       #
-# GPLOT to convert the cgm file(s) to postscript output and send it   #
-# directly to a postscript printer. The absolute gplot path must be   #
-# set properly (below). That is:                                      #
-# landscape = /absolute_path/gplot -dPSC -r90 ...                     #
-# portrait  = /absolute_path/gplot -dPSC -D ...                       #
-#                                                                     #
-#######################################################################
-#######################################################################
-#######################################################################
-
-############################################################################
-# PRINTER ORIENTATION: Landscape                                           #
-# OUTPUT TYPE: Postscript       COLOR: YES                                 #
-# NOTE: THIS IS FOR SENDING TO THE PRINTER                                 #
-#                                                                          #
-# .cshrc file:                                                             #
-# In your .cshrc file you can set up an alias for converting your          #
-# landscape .cgm files. That is,                                           #
-# alias landscape '/absolute_path/gplot -dPSC -r90 -x-1.75 -D -X12.5 -Y10' #
-#                                                                          #
-############################################################################
-#landscape = /usr/local/bin/gplot -dPSC -r90 -x-1.75 -D -X12.5 -Y10
-
-#######################################################################
-# PRINTER ORIENTATION: Portrait                                       #
-# OUTPUT TYPE: Postscript       COLOR: YES                            #
-# NOTE: THIS IS FOR SENDING TO THE PRINTER                            #
-#                                                                     #
-# .cshrc file:                                                        #
-# In your .cshrc file you can set up an alias for converting your     #
-# portscript .cgm files. That is,                                     #
-# alias portrait '/absolute_path/gplot -dPSC -D -X10 -Y12.5'          #
-#                                                                     #
-#######################################################################
-#portrait = /usr/local/bin/gplot -dPSC -D -X10 -Y12.5
-
-#######################################################################
-###################  P R I N T E R   N A M E S  #######################
-#######################################################################
-
diff --git a/images/PCM_isofill.scr b/images/PCM_isofill.scr
deleted file mode 100644
index c42b94f24..000000000
--- a/images/PCM_isofill.scr
+++ /dev/null
@@ -1,976 +0,0 @@
-L_PCM_p_levels(1000,"1000",900,"900",800,"800",700,"700",600,"600",
-   500,"500",400,"400",300,"300",200,"200",100,"100",50,"50",
-   10,"10")
-
-L_PCM_height(1000,"0",795,"2",616.6,"4",472.2,"6",356.5,"8",
-   265,"10",121.1,"15",55.3,"20",12,"30")
-
-Tf_PCM16(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(16),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM17(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(17),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM18(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(18),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM19(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(19),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM20(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(20),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM21(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(21),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM22(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(22),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM23(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(23),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM24(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(24),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM25(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(25),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM26(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(26),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM27(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(27),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM28(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(28),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM29(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(29),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM30(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(30),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM31(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(31),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM32(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(32),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM33(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(33),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM34(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(34),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM35(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(35),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM36(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(36),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-Tf_PCM241(
-   1, fais(1),
-   1, fasi(1),
-   1, faci(241),
-   0,0,0.1,0.1,1,
-   vp(0,1,0,1),
-   wc(0,1,0,1)
-  )
-
-
-Gfi_PCM_clt(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0.,level2=.10,Tf=PCM16)
-(id=2,level1=.10,level2=.20,Tf=PCM22)
-(id=3,level1=.20,level2=.30,Tf=PCM23)
-(id=4,level1=.30,level2=.40,Tf=PCM32)
-(id=5,level1=.40,level2=.50,Tf=PCM33)
-(id=6,level1=.50,level2=.60,Tf=PCM34)
-(id=7,level1=.60,level2=.70,Tf=PCM27)
-(id=8,level1=.70,level2=.80,Tf=PCM28)
-(id=9,level1=.80,level2=.90,Tf=PCM29)
-(id=10,level1=.90,level2=1.00,Tf=PCM30) )
-
-Gfi_PCM_hfls(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=0,Tf=PCM16)
-(id=2,level1=0,level2=25,Tf=PCM18)
-(id=3,level1=25,level2=50,Tf=PCM19)
-(id=4,level1=50,level2=75,Tf=PCM20)
-(id=5,level1=75,level2=100,Tf=PCM21)
-(id=6,level1=100,level2=125,Tf=PCM22)
-(id=7,level1=125,level2=150,Tf=PCM23)
-(id=8,level1=150,level2=175,Tf=PCM24)
-(id=9,level1=175,level2=200,Tf=PCM25)
-(id=10,level1=200,level2=225,Tf=PCM26)
-(id=11,level1=225,level2=250,Tf=PCM27)
-(id=12,level1=250,level2=275,Tf=PCM28)
-(id=13,level1=275,level2=300,Tf=PCM29)
-(id=14,level1=300,level2=1e+20,Tf=PCM30) )
-
-Gfi_PCM_hfss(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-100,Tf=PCM16)
-(id=2,level1=-100,level2=-50,Tf=PCM18)
-(id=3,level1=-50,level2=-25,Tf=PCM19)
-(id=4,level1=-25,level2=-15,Tf=PCM20)
-(id=5,level1=-15,level2=-10,Tf=PCM21)
-(id=6,level1=-10,level2=-5,Tf=PCM22)
-(id=7,level1=-5,level2=0,Tf=PCM23)
-(id=8,level1=0,level2=5,Tf=PCM24)
-(id=9,level1=5,level2=10,Tf=PCM25)
-(id=10,level1=10,level2=15,Tf=PCM26)
-(id=11,level1=15,level2=25,Tf=PCM27)
-(id=12,level1=25,level2=50,Tf=PCM28)
-(id=13,level1=50,level2=100,Tf=PCM29)
-(id=14,level1=100,level2=1e+20,Tf=PCM30) )
-
-Gfi_PCM_hus(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,10),
-missing=1e+20,
-range
-(id=1,level1=0,level2=0.0005,Tf=PCM20)
-(id=2,level1=0.0005,level2=0.001,Tf=PCM21)
-(id=3,level1=0.001,level2=0.002,Tf=PCM22)
-(id=4,level1=0.002,level2=0.004,Tf=PCM23)
-(id=5,level1=0.004,level2=0.006,Tf=PCM24)
-(id=6,level1=0.006,level2=0.008,Tf=PCM25)
-(id=7,level1=0.008,level2=0.01,Tf=PCM26)
-(id=8,level1=0.01,level2=0.012,Tf=PCM27)
-(id=9,level1=0.012,level2=0.014,Tf=PCM28)
-(id=10,level1=0.014,level2=0.016,Tf=PCM29)
-(id=11,level1=0.016,level2=0.018,Tf=PCM30)
-(id=12,level1=0.018,level2=0.02,Tf=PCM31) )
-
-
-Gfi_PCM_hur(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,10),
-missing=1e+20,
-range
-(id=1,level1=0,level2=10,Tf=PCM20)
-(id=2,level1=10,level2=20,Tf=PCM21)
-(id=3,level1=20,level2=30,Tf=PCM22)
-(id=4,level1=30,level2=40,Tf=PCM23)
-(id=5,level1=40,level2=50,Tf=PCM24)
-(id=6,level1=50,level2=60,Tf=PCM25)
-(id=7,level1=60,level2=70,Tf=PCM26)
-(id=8,level1=70,level2=80,Tf=PCM27)
-(id=9,level1=80,level2=90,Tf=PCM28)
-(id=10,level1=90,level2=100,Tf=PCM29)
-(id=11,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-Gfi_PCM_pr(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=1e-09,Tf=PCM16)
-(id=2,level1=1e-09,level2=2e-09,Tf=PCM18)
-(id=3,level1=2e-09,level2=5e-09,Tf=PCM19)
-(id=4,level1=5e-09,level2=1e-08,Tf=PCM20)
-(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21)
-(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22)
-(id=7,level1=2e-08,level2=3e-08,Tf=PCM23)
-(id=8,level1=3e-08,level2=5e-08,Tf=PCM24)
-(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25)
-(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26)
-(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27)
-(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28)
-(id=13,level1=2e-07,level2=3e-07,Tf=PCM29)
-(id=14,level1=3e-07,level2=1e+20,Tf=PCM30)
-(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-
-Gfi_PCM_prc(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=1e-09,Tf=PCM16)
-(id=2,level1=1e-09,level2=2e-09,Tf=PCM18)
-(id=3,level1=2e-09,level2=5e-09,Tf=PCM19)
-(id=4,level1=5e-09,level2=1e-08,Tf=PCM20)
-(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21)
-(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22)
-(id=7,level1=2e-08,level2=3e-08,Tf=PCM23)
-(id=8,level1=3e-08,level2=5e-08,Tf=PCM24)
-(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25)
-(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26)
-(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27)
-(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28)
-(id=13,level1=2e-07,level2=3e-07,Tf=PCM29)
-(id=14,level1=3e-07,level2=1e+20,Tf=PCM30)
-(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-
-Gfi_PCM_prsnc(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=1e-09,Tf=PCM16)
-(id=2,level1=1e-09,level2=2e-09,Tf=PCM18)
-(id=3,level1=2e-09,level2=5e-09,Tf=PCM19)
-(id=4,level1=5e-09,level2=1e-08,Tf=PCM20)
-(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21)
-(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22)
-(id=7,level1=2e-08,level2=3e-08,Tf=PCM23)
-(id=8,level1=3e-08,level2=5e-08,Tf=PCM24)
-(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25)
-(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26)
-(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27)
-(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28)
-(id=13,level1=2e-07,level2=3e-07,Tf=PCM29)
-(id=14,level1=3e-07,level2=1e+20,Tf=PCM30)
-(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-
-Gfi_PCM_prsnl(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=1e-09,Tf=PCM16)
-(id=2,level1=1e-09,level2=2e-09,Tf=PCM18)
-(id=3,level1=2e-09,level2=5e-09,Tf=PCM19)
-(id=4,level1=5e-09,level2=1e-08,Tf=PCM20)
-(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21)
-(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22)
-(id=7,level1=2e-08,level2=3e-08,Tf=PCM23)
-(id=8,level1=3e-08,level2=5e-08,Tf=PCM24)
-(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25)
-(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26)
-(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27)
-(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28)
-(id=13,level1=2e-07,level2=3e-07,Tf=PCM29)
-(id=14,level1=3e-07,level2=1e+20,Tf=PCM30)
-(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-
-Gfi_PCM_ps(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=97000,Tf=PCM30)
-(id=2,level1=97000,level2=97500,Tf=PCM29)
-(id=3,level1=97500,level2=98000,Tf=PCM28)
-(id=4,level1=98000,level2=98500,Tf=PCM27)
-(id=5,level1=98500,level2=99000,Tf=PCM26)
-(id=6,level1=99000,level2=99500,Tf=PCM25)
-(id=7,level1=99500,level2=100000,Tf=PCM24)
-(id=8,level1=100000,level2=100500,Tf=PCM23)
-(id=9,level1=100500,level2=101000,Tf=PCM22)
-(id=10,level1=101000,level2=101500,Tf=PCM21)
-(id=11,level1=101500,level2=102000,Tf=PCM20)
-(id=12,level1=102000,level2=102500,Tf=PCM19)
-(id=13,level1=102500,level2=103000,Tf=PCM18)
-(id=14,level1=103000,level2=103500,Tf=PCM17)
-(id=15,level1=103500,level2=104000,Tf=PCM35)
-(id=16,level1=104000,level2=1e+20,Tf=PCM36) )
-
-
-Gfi_PCM_psl(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=97000,Tf=PCM30)
-(id=2,level1=97000,level2=97500,Tf=PCM29)
-(id=3,level1=97500,level2=98000,Tf=PCM28)
-(id=4,level1=98000,level2=98500,Tf=PCM27)
-(id=5,level1=98500,level2=99000,Tf=PCM26)
-(id=6,level1=99000,level2=99500,Tf=PCM25)
-(id=7,level1=99500,level2=100000,Tf=PCM24)
-(id=8,level1=100000,level2=100500,Tf=PCM23)
-(id=9,level1=100500,level2=101000,Tf=PCM22)
-(id=10,level1=101000,level2=101500,Tf=PCM21)
-(id=11,level1=101500,level2=102000,Tf=PCM20)
-(id=12,level1=102000,level2=102500,Tf=PCM19)
-(id=13,level1=102500,level2=103000,Tf=PCM18)
-(id=14,level1=103000,level2=103500,Tf=PCM17)
-(id=15,level1=103500,level2=104000,Tf=PCM35)
-(id=16,level1=104000,level2=1e+20,Tf=PCM36) )
-
-Gfi_PCM_rlut(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=100,Tf=PCM16)
-(id=2,level1=100,level2=120,Tf=PCM30)
-(id=3,level1=120,level2=140,Tf=PCM29)
-(id=4,level1=140,level2=160,Tf=PCM28)
-(id=5,level1=160,level2=180,Tf=PCM27)
-(id=6,level1=180,level2=200,Tf=PCM26)
-(id=7,level1=200,level2=220,Tf=PCM25)
-(id=8,level1=220,level2=240,Tf=PCM24)
-(id=9,level1=240,level2=260,Tf=PCM23)
-(id=10,level1=260,level2=280,Tf=PCM22)
-(id=11,level1=280,level2=300,Tf=PCM21)
-(id=12,level1=300,level2=320,Tf=PCM20)
-(id=13,level1=320,level2=340,Tf=PCM19)
-(id=14,level1=340,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_rlutcs(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=100,Tf=PCM16)
-(id=2,level1=100,level2=120,Tf=PCM30)
-(id=3,level1=120,level2=140,Tf=PCM29)
-(id=4,level1=140,level2=160,Tf=PCM28)
-(id=5,level1=160,level2=180,Tf=PCM27)
-(id=6,level1=180,level2=200,Tf=PCM26)
-(id=7,level1=200,level2=220,Tf=PCM25)
-(id=8,level1=220,level2=240,Tf=PCM24)
-(id=9,level1=240,level2=260,Tf=PCM23)
-(id=10,level1=260,level2=280,Tf=PCM22)
-(id=11,level1=280,level2=300,Tf=PCM21)
-(id=12,level1=300,level2=320,Tf=PCM20)
-(id=13,level1=320,level2=340,Tf=PCM19)
-(id=14,level1=340,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_rsds(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=25,Tf=PCM16)
-(id=2,level1=25,level2=50,Tf=PCM30)
-(id=3,level1=50,level2=75,Tf=PCM29)
-(id=4,level1=75,level2=100,Tf=PCM28)
-(id=5,level1=100,level2=125,Tf=PCM27)
-(id=6,level1=125,level2=150,Tf=PCM34)
-(id=7,level1=150,level2=175,Tf=PCM33)
-(id=8,level1=175,level2=200,Tf=PCM32)
-(id=9,level1=200,level2=225,Tf=PCM23)
-(id=10,level1=225,level2=250,Tf=PCM22)
-(id=11,level1=250,level2=275,Tf=PCM21)
-(id=12,level1=275,level2=300,Tf=PCM20)
-(id=13,level1=300,level2=325,Tf=PCM19)
-(id=14,level1=325,level2=350,Tf=PCM18)
-(id=15,level1=350,level2=1e+20,Tf=PCM17) )
-
-Gfi_PCM_rsdscs(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=25,Tf=PCM16)
-(id=2,level1=25,level2=50,Tf=PCM30)
-(id=3,level1=50,level2=75,Tf=PCM29)
-(id=4,level1=75,level2=100,Tf=PCM28)
-(id=5,level1=100,level2=125,Tf=PCM27)
-(id=6,level1=125,level2=150,Tf=PCM34)
-(id=7,level1=150,level2=175,Tf=PCM33)
-(id=8,level1=175,level2=200,Tf=PCM32)
-(id=9,level1=200,level2=225,Tf=PCM23)
-(id=10,level1=225,level2=250,Tf=PCM22)
-(id=11,level1=250,level2=275,Tf=PCM21)
-(id=12,level1=275,level2=300,Tf=PCM20)
-(id=13,level1=300,level2=325,Tf=PCM19)
-(id=14,level1=325,level2=350,Tf=PCM18)
-(id=15,level1=350,level2=1e+20,Tf=PCM17) )
-
-Gfi_PCM_rsus(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=-25,Tf=PCM16)
-(id=2,level1=-25,level2=-50,Tf=PCM30)
-(id=3,level1=-50,level2=-75,Tf=PCM29)
-(id=4,level1=-75,level2=-100,Tf=PCM28)
-(id=5,level1=-100,level2=-125,Tf=PCM27)
-(id=6,level1=-125,level2=-150,Tf=PCM34)
-(id=7,level1=-150,level2=-175,Tf=PCM33)
-(id=8,level1=-175,level2=-200,Tf=PCM32)
-(id=9,level1=-200,level2=-225,Tf=PCM23)
-(id=10,level1=-225,level2=-250,Tf=PCM22)
-(id=11,level1=-250,level2=-275,Tf=PCM21)
-(id=12,level1=-275,level2=-300,Tf=PCM20)
-(id=13,level1=-300,level2=-325,Tf=PCM19)
-(id=14,level1=-325,level2=-350,Tf=PCM18)
-(id=15,level1=-350,level2=-400,Tf=PCM17)
-(id=16,level1=-400,level2=-1e+20,Tf=PCM35) )
-
-Gfi_PCM_rsut(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=-25,Tf=PCM16)
-(id=2,level1=-25,level2=-50,Tf=PCM30)
-(id=3,level1=-50,level2=-75,Tf=PCM29)
-(id=4,level1=-75,level2=-100,Tf=PCM28)
-(id=5,level1=-100,level2=-125,Tf=PCM27)
-(id=6,level1=-125,level2=-150,Tf=PCM34)
-(id=7,level1=-150,level2=-175,Tf=PCM33)
-(id=8,level1=-175,level2=-200,Tf=PCM32)
-(id=9,level1=-200,level2=-225,Tf=PCM23)
-(id=10,level1=-225,level2=-250,Tf=PCM22)
-(id=11,level1=-250,level2=-275,Tf=PCM21)
-(id=12,level1=-275,level2=-300,Tf=PCM20)
-(id=13,level1=-300,level2=-325,Tf=PCM19)
-(id=14,level1=-325,level2=-350,Tf=PCM18)
-(id=15,level1=-350,level2=-400,Tf=PCM17)
-(id=16,level1=-400,level2=-1e+20,Tf=PCM35) )
-
-
-Gfi_PCM_rsutcs(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=0,level2=-25,Tf=PCM16)
-(id=2,level1=-25,level2=-50,Tf=PCM30)
-(id=3,level1=-50,level2=-75,Tf=PCM29)
-(id=4,level1=-75,level2=-100,Tf=PCM28)
-(id=5,level1=-100,level2=-125,Tf=PCM27)
-(id=6,level1=-125,level2=-150,Tf=PCM34)
-(id=7,level1=-150,level2=-175,Tf=PCM33)
-(id=8,level1=-175,level2=-200,Tf=PCM32)
-(id=9,level1=-200,level2=-225,Tf=PCM23)
-(id=10,level1=-225,level2=-250,Tf=PCM22)
-(id=11,level1=-250,level2=-275,Tf=PCM21)
-(id=12,level1=-275,level2=-300,Tf=PCM20)
-(id=13,level1=-300,level2=-325,Tf=PCM19)
-(id=14,level1=-325,level2=-350,Tf=PCM18)
-(id=15,level1=-350,level2=-400,Tf=PCM17)
-(id=16,level1=-400,level2=-1e+20,Tf=PCM35) )
-
-
-Gfi_PCM_ta(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,1),
-yaxisconvert=linear,
-missing=1e+20,
-range(id=1,level1=-1e+20,level2=203,Tf=PCM16)
-(id=2,level1=203,level2=213,Tf=PCM30)
-(id=3,level1=213,level2=223,Tf=PCM29)
-(id=4,level1=223,level2=233,Tf=PCM28)
-(id=5,level1=233,level2=243,Tf=PCM27)
-(id=6,level1=243,level2=253,Tf=PCM34)
-(id=7,level1=253,level2=263,Tf=PCM33)
-(id=8,level1=263,level2=273,Tf=PCM32)
-(id=9,level1=273,level2=278,Tf=PCM23)
-(id=10,level1=278,level2=283,Tf=PCM22)
-(id=11,level1=283,level2=288,Tf=PCM21)
-(id=12,level1=288,level2=293,Tf=PCM20)
-(id=13,level1=293,level2=298,Tf=PCM19)
-(id=14,level1=298,level2=303,Tf=PCM18)
-(id=15,level1=303,level2=308,Tf=PCM17)
-(id=16,level1=308,level2=1e+20,Tf=PCM35) )
-
-Gfi_PCM_tas(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=238,Tf=PCM16)
-(id=2,level1=238,level2=243,Tf=PCM30)
-(id=3,level1=243,level2=248,Tf=PCM29)
-(id=4,level1=248,level2=253,Tf=PCM28)
-(id=5,level1=253,level2=258,Tf=PCM27)
-(id=6,level1=258,level2=263,Tf=PCM34)
-(id=7,level1=263,level2=268,Tf=PCM33)
-(id=8,level1=268,level2=273,Tf=PCM32)
-(id=9,level1=273,level2=278,Tf=PCM23)
-(id=10,level1=278,level2=283,Tf=PCM22)
-(id=11,level1=283,level2=288,Tf=PCM21)
-(id=12,level1=288,level2=293,Tf=PCM20)
-(id=13,level1=293,level2=298,Tf=PCM19)
-(id=14,level1=298,level2=303,Tf=PCM18)
-(id=15,level1=303,level2=308,Tf=PCM17)
-(id=16,level1=308,level2=1e+20,Tf=PCM35) )
-
-Gfi_PCM_tasmax(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=238,Tf=PCM16)
-(id=2,level1=238,level2=243,Tf=PCM30)
-(id=3,level1=243,level2=248,Tf=PCM29)
-(id=4,level1=248,level2=253,Tf=PCM28)
-(id=5,level1=253,level2=258,Tf=PCM27)
-(id=6,level1=258,level2=263,Tf=PCM34)
-(id=7,level1=263,level2=268,Tf=PCM33)
-(id=8,level1=268,level2=273,Tf=PCM32)
-(id=9,level1=273,level2=278,Tf=PCM23)
-(id=10,level1=278,level2=283,Tf=PCM22)
-(id=11,level1=283,level2=288,Tf=PCM21)
-(id=12,level1=288,level2=293,Tf=PCM20)
-(id=13,level1=293,level2=298,Tf=PCM19)
-(id=14,level1=298,level2=303,Tf=PCM18)
-(id=15,level1=303,level2=308,Tf=PCM17)
-(id=16,level1=308,level2=1e+20,Tf=PCM35) )
-
-Gfi_PCM_tasmin(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=238,Tf=PCM16)
-(id=2,level1=238,level2=243,Tf=PCM30)
-(id=3,level1=243,level2=248,Tf=PCM29)
-(id=4,level1=248,level2=253,Tf=PCM28)
-(id=5,level1=253,level2=258,Tf=PCM27)
-(id=6,level1=258,level2=263,Tf=PCM34)
-(id=7,level1=263,level2=268,Tf=PCM33)
-(id=8,level1=268,level2=273,Tf=PCM32)
-(id=9,level1=273,level2=278,Tf=PCM23)
-(id=10,level1=278,level2=283,Tf=PCM22)
-(id=11,level1=283,level2=288,Tf=PCM21)
-(id=12,level1=288,level2=293,Tf=PCM20)
-(id=13,level1=293,level2=298,Tf=PCM19)
-(id=14,level1=298,level2=303,Tf=PCM18)
-(id=15,level1=303,level2=308,Tf=PCM17)
-(id=16,level1=308,level2=1e+20,Tf=PCM35) )
-
-Gfi_PCM_ts(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=238,Tf=PCM16)
-(id=2,level1=238,level2=243,Tf=PCM30)
-(id=3,level1=243,level2=248,Tf=PCM29)
-(id=4,level1=248,level2=253,Tf=PCM28)
-(id=5,level1=253,level2=258,Tf=PCM27)
-(id=6,level1=258,level2=263,Tf=PCM34)
-(id=7,level1=263,level2=268,Tf=PCM33)
-(id=8,level1=268,level2=273,Tf=PCM32)
-(id=9,level1=273,level2=278,Tf=PCM23)
-(id=10,level1=278,level2=283,Tf=PCM22)
-(id=11,level1=283,level2=288,Tf=PCM21)
-(id=12,level1=288,level2=293,Tf=PCM20)
-(id=13,level1=293,level2=298,Tf=PCM19)
-(id=14,level1=298,level2=303,Tf=PCM18)
-(id=15,level1=303,level2=308,Tf=PCM17)
-(id=16,level1=308,level2=1e+20,Tf=PCM35) )
-
-Gfi_PCM_tauu(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-1,Tf=PCM29)
-(id=2,level1=-1,level2=-0.5,Tf=PCM28)
-(id=3,level1=-0.5,level2=-0.1,Tf=PCM27)
-(id=4,level1=-0.1,level2=-0.05,Tf=PCM26)
-(id=5,level1=-0.05,level2=-0.01,Tf=PCM25)
-(id=6,level1=-0.01,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=0.01,Tf=PCM23)
-(id=8,level1=0.01,level2=0.05,Tf=PCM22)
-(id=9,level1=0.05,level2=0.1,Tf=PCM21)
-(id=10,level1=0.1,level2=0.5,Tf=PCM20)
-(id=11,level1=0.5,level2=1,Tf=PCM19)
-(id=12,level1=1,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_tauugwd(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-1,Tf=PCM29)
-(id=2,level1=-1,level2=-0.5,Tf=PCM28)
-(id=3,level1=-0.5,level2=-0.1,Tf=PCM27)
-(id=4,level1=-0.1,level2=-0.05,Tf=PCM26)
-(id=5,level1=-0.05,level2=-0.01,Tf=PCM25)
-(id=6,level1=-0.01,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=0.01,Tf=PCM23)
-(id=8,level1=0.01,level2=0.05,Tf=PCM22)
-(id=9,level1=0.05,level2=0.1,Tf=PCM21)
-(id=10,level1=0.1,level2=0.5,Tf=PCM20)
-(id=11,level1=0.5,level2=1,Tf=PCM19)
-(id=12,level1=1,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_tauv(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-1,Tf=PCM29)
-(id=2,level1=-1,level2=-0.5,Tf=PCM28)
-(id=3,level1=-0.5,level2=-0.1,Tf=PCM27)
-(id=4,level1=-0.1,level2=-0.05,Tf=PCM26)
-(id=5,level1=-0.05,level2=-0.01,Tf=PCM25)
-(id=6,level1=-0.01,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=0.01,Tf=PCM23)
-(id=8,level1=0.01,level2=0.05,Tf=PCM22)
-(id=9,level1=0.05,level2=0.1,Tf=PCM21)
-(id=10,level1=0.1,level2=0.5,Tf=PCM20)
-(id=11,level1=0.5,level2=1,Tf=PCM19)
-(id=12,level1=1,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_tauvgwd(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-1,Tf=PCM29)
-(id=2,level1=-1,level2=-0.5,Tf=PCM28)
-(id=3,level1=-0.5,level2=-0.1,Tf=PCM27)
-(id=4,level1=-0.1,level2=-0.05,Tf=PCM26)
-(id=5,level1=-0.05,level2=-0.01,Tf=PCM25)
-(id=6,level1=-0.01,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=0.01,Tf=PCM23)
-(id=8,level1=0.01,level2=0.05,Tf=PCM22)
-(id=9,level1=0.05,level2=0.1,Tf=PCM21)
-(id=10,level1=0.1,level2=0.5,Tf=PCM20)
-(id=11,level1=0.5,level2=1,Tf=PCM19)
-(id=12,level1=1,level2=1e+20,Tf=PCM18) )
-
-Gfi_PCM_ua(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,1),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-25,Tf=PCM29)
-(id=2,level1=-25,level2=-20,Tf=PCM28)
-(id=3,level1=-20,level2=-15,Tf=PCM27)
-(id=4,level1=-15,level2=-10,Tf=PCM26)
-(id=5,level1=-10,level2=-5,Tf=PCM25)
-(id=6,level1=-5,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=5,Tf=PCM23)
-(id=8,level1=5,level2=10,Tf=PCM22)
-(id=9,level1=10,level2=15,Tf=PCM21)
-(id=10,level1=15,level2=20,Tf=PCM20)
-(id=11,level1=20,level2=25,Tf=PCM19)
-(id=12,level1=25,level2=30,Tf=PCM17)
-(id=13,level1=30,level2=35,Tf=PCM35)
-(id=14,level1=35,level2=1e+20,Tf=PCM36) )
-
-
-Gfi_PCM_uas(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-21,Tf=PCM16)
-(id=2,level1=-21,level2=-18,Tf=PCM30)
-(id=3,level1=-18,level2=-15,Tf=PCM29)
-(id=4,level1=-15,level2=-12,Tf=PCM28)
-(id=5,level1=-12,level2=-9,Tf=PCM27)
-(id=6,level1=-9,level2=-6,Tf=PCM34)
-(id=7,level1=-6,level2=-3,Tf=PCM33)
-(id=8,level1=-3,level2=0,Tf=PCM32)
-(id=9,level1=0,level2=3,Tf=PCM23)
-(id=10,level1=3,level2=6,Tf=PCM22)
-(id=11,level1=6,level2=9,Tf=PCM21)
-(id=12,level1=9,level2=12,Tf=PCM20)
-(id=13,level1=12,level2=15,Tf=PCM19)
-(id=14,level1=15,level2=18,Tf=PCM18)
-(id=15,level1=18,level2=21,Tf=PCM17)
-(id=16,level1=21,level2=1e+20,Tf=PCM35)
-(id=17,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-Gfi_PCM_vas(
-projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5,
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5,
-ymtics#2=lat5,
-datawc(-180,-90,180,90),
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-10,Tf=PCM29)
-(id=2,level1=-10,level2=-8,Tf=PCM28)
-(id=3,level1=-8,level2=-6,Tf=PCM27)
-(id=4,level1=-6,level2=-4,Tf=PCM26)
-(id=5,level1=-4,level2=-2,Tf=PCM25)
-(id=6,level1=-2,level2=0,Tf=PCM24)
-(id=7,level1=0,level2=2,Tf=PCM23)
-(id=8,level1=2,level2=4,Tf=PCM22)
-(id=9,level1=4,level2=6,Tf=PCM21)
-(id=10,level1=6,level2=8,Tf=PCM20)
-(id=11,level1=8,level2=10,Tf=PCM19)
-(id=12,level1=10,level2=12,Tf=PCM18)
-(id=13,level1=12,level2=14,Tf=PCM17)
-(id=14,level1=14,level2=1e+20,Tf=PCM35)
-(id=17,level1=1e+20,level2=1e+20,Tf=PCM241) )
-
-Gfi_PCM_va(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,1),
-xaxisconvert=linear,
-yaxisconvert=linear,
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-3,Tf=PCM29)
-(id=2,level1=-3,level2=-2.5,Tf=PCM28)
-(id=3,level1=-2.5,level2=-2,Tf=PCM27)
-(id=4,level1=-2,level2=-1.5,Tf=PCM26)
-(id=5,level1=-1.5,level2=-1,Tf=PCM25)
-(id=6,level1=-1,level2=-0.5,Tf=PCM24)
-(id=7,level1=-0.5,level2=0,Tf=PCM23)
-(id=8,level1=0,level2=0.5,Tf=PCM22)
-(id=9,level1=0.5,level2=1,Tf=PCM21)
-(id=10,level1=1,level2=1.5,Tf=PCM20)
-(id=11,level1=1.5,level2=2,Tf=PCM19)
-(id=12,level1=2,level2=2.5,Tf=PCM17)
-(id=13,level1=2.5,level2=3,Tf=PCM35)
-(id=14,level1=3,level2=1e+20,Tf=PCM36) )
-
-Gfi_PCM_wap(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,1),
-xaxisconvert=linear,
-yaxisconvert=linear,
-missing=1e+20,
-range
-(id=1,level1=-1e+20,level2=-0.03,Tf=PCM29)
-(id=2,level1=-0.03,level2=-0.025,Tf=PCM28)
-(id=3,level1=-0.025,level2=-0.02,Tf=PCM27)
-(id=4,level1=-0.02,level2=-0.015,Tf=PCM26)
-(id=5,level1=-0.015,level2=-0.01,Tf=PCM25)
-(id=6,level1=-0.01,level2=-0.005,Tf=PCM24)
-(id=7,level1=-0.005,level2=0,Tf=PCM23)
-(id=8,level1=0,level2=0.005,Tf=PCM22)
-(id=9,level1=0.005,level2=0.01,Tf=PCM21)
-(id=10,level1=0.01,level2=0.015,Tf=PCM20)
-(id=11,level1=0.015,level2=0.02,Tf=PCM19)
-(id=12,level1=0.02,level2=0.025,Tf=PCM17)
-(id=13,level1=0.025,level2=0.03,Tf=PCM35)
-(id=14,level1=0.03,level2=1e+20,Tf=PCM36) )
-
-
-
-Gfi_PCM_zg(
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels,
-yticlabels#2=PCM_height,
-datawc(90,1000,-90,1),
-xaxisconvert=linear,
-yaxisconvert=linear,
-missing=1e+20,
-range
-(id=0,level1=-1e+20,level2=0,Tf=PCM241)
-(id=1,level1=0,level2=3000,Tf=PCM29)
-(id=2,level1=3000,level2=6000,Tf=PCM28)
-(id=3,level1=6000,level2=9000,Tf=PCM27)
-(id=4,level1=9000,level2=12000,Tf=PCM26)
-(id=5,level1=12000,level2=15000,Tf=PCM25)
-(id=6,level1=15000,level2=18000,Tf=PCM24)
-(id=7,level1=18000,level2=21000,Tf=PCM23)
-(id=8,level1=21000,level2=24000,Tf=PCM22)
-(id=9,level1=24000,level2=27000,Tf=PCM21)
-(id=10,level1=27000,level2=30000,Tf=PCM20)
-(id=11,level1=30000,level2=33000,Tf=PCM19)
-(id=12,level1=33000,level2=36000,Tf=PCM17)
-(id=13,level1=36000,level2=39000,Tf=PCM35)
-(id=14,level1=39000,level2=1e+20,Tf=PCM36) )
-
-C_PCM(
-   100,100,100,   0,0,0,   44.7,62.4,100,   29.8,44.3,62,   76.9,84.3,100,   100,100,0,
-   100,55.6863,16.4706,   0,0,0,   100,100,100,   0,0,0,   100,0,0,   0,100,0,
-   0,0,100,   100,100,0,   0,100,100,   100,0,100,   98.4314,98.4314,100,   78.4314,12.549,3.1373,
-   88.6274,20,5.4902,   94.5098,33.3333,12.549,   100,55.6863,16.4706,   99.6078,80,24.7059,   97.6471,87.8431,24.7059,   95.6863,100,24.3137,
-   79.2157,100,83.5294,   52.549,100,94.5098,   36.4706,100,94.5098,   7.0588,78.4314,100,   23.5294,52.9412,100,   57.6471,20.7843,99.6078,
-   84.7059,6.6667,99.6078,   100,0,100,   80.7843,100,15.6863,   44.3137,100,14.1176,   23.1373,85.098,56.8627,   65.8824,0,0,
-   47.451,8.2353,11.3725,   0,50,100,   0,40,100,   0,30,100,   0,20,100,   0,10,100,
-   0,0,100,   10,0,100,   20,0,100,   30,0,100,   40,0,100,   50,0,100,
-   60,0,100,   70,0,100,   80,0,100,   90,0,100,   100,0,100,   100,0,90,
-   100,0,80,   100,0,70,   100,0,60,   100,0,50,   100,0,40,   100,0,30,
-   100,0,20,   100,0,10,   100,0,0,   95,10,10,   90,20,20,   85,30,30,
-   80,40,40,   75,50,50,   70,60,60,   65,70,70,   60,80,80,   55,90,90,
-   50,100,100,   45,100,90,   40,100,80,   35,100,70,   30,100,60,   25,100,50,
-   20,100,40,   15,100,30,   10,100,20,   5,100,10,   0,100,0,   10,95,10,
-   20,90,20,   30,85,30,   40,80,40,   50,75,50,   60,70,60,   70,65,70,
-   80,60,80,   90,55,90,   100,50,100,   90,45,100,   80,40,100,   70,35,100,
-   60,30,100,   50,25,100,   40,20,100,   30,15,100,   20,10,100,   10,5,100,
-   0,0,100,   10,10,95,   20,20,90,   30,30,85,   40,40,80,   50,50,75,
-   60,60,70,   70,70,65,   80,80,60,   90,90,55,   100,100,50,   100,90,45,
-   100,80,40,   100,70,35,   100,60,30,   100,50,25,   100,40,20,   100,30,15,
-   100,20,10,   100,10,5,   100,0,0,   95,0,0,   90,0,0,   85,0,0,
-   80,0,0,   75,0,0,   70,0,0,   65,0,0,   60,0,0,   55,0,0,
-   50,0,0,   45,0,0,   40,0,0,   35,0,0,   30,0,0,   25,0,0,
-   20,0,0,   15,0,0,   10,0,0,   5,0,0,   0,0,0,   0,5,0,
-   0,10,0,   0,15,0,   0,20,0,   0,25,0,   0,30,0,   0,35,0,
-   0,40,0,   0,45,0,   0,50,0,   0,55,0,   0,60,0,   0,65,0,
-   0,70,0,   0,75,0,   0,80,0,   0,85,0,   0,90,0,   0,95,0,
-   0,100,0,   0,95,5,   0,90,10,   0,85,15,   0,80,20,   0,75,25,
-   0,70,30,   0,65,35,   0,60,40,   0,55,45,   0,50,50,   0,45,55,
-   0,40,60,   0,35,65,   0,30,70,   0,25,75,   0,20,80,   0,15,85,
-   0,10,90,   0,5,95,   0,0,100,   0,0,95,   0,0,90,   0,0,85,
-   0,0,80,   0,0,75,   0,0,70,   0,0,65,   0,0,60,   0,0,55,
-   0,0,50,   0,0,45,   0,0,40,   0,0,35,   0,0,30,   0,0,25,
-   0,0,20,   0,0,15,   0,0,10,   0,0,5,   0,0,0,   5,5,5,
-   10,10,10,   15,15,15,   20,20,20,   25,25,25,   30,30,30,   35,35,35,
-   40,40,40,   45,45,45,   50,50,50,   55,55,55,   60,60,60,   65,65,65,
-   70,70,70,   75,75,75,   80,80,80,   85,85,85,   90,90,90,   95,95,95,
-   100,100,100,   100,95,95,   100,90,90,   100,85,85,   100,80,80,   100,75,75,
-   100,70,70,   100,65,65,   100,60,60,   100,55,55,   100,50,50,   100,45,45,
-   100,40,40,   100,35,35,   100,30,30,   100,25,25,   100,20,20,   100,15,15)
diff --git a/images/UV-CDAT_logo.png b/images/UV-CDAT_logo.png
deleted file mode 100644
index 17f40d09e09f3347e2a80a16800fb7a6621ac509..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 31325
zcmY(q1yEhV5-oag_u%gCF2UV{ySux~!Gmi!xC9Hq-QnOG+$|8?-R*PleXr`j>Z)D4
zW>@W+?&<F7S-mD&RapiFkpK|@0HDapN~!?>kk+5?dGN5GPw(2Qqt5{uTL}qOISC0e
zRX1lVTL()3zz3o#mEC4u6>s>%`|H|IHM@mLv0^$&et!xlqwL0e2mVBR`6_*uQNy2d
zEL3e3%&65YsHmR5)ER%Ox>TaZ6e+-AG=70|MSlp>FE0M;&(04W8%w&o2MT__@V^K&
zhTL`geXybnNFaP(I7#iRDp~6TlRUG>5wYwv{)X9<Ea8wzm-zXDkAHtW2a|rX*P#7x
zXmWqOh*A9PWvIU_N>P57JRb*|g94$9fE^cq$xI86HOfeqB(=`!x4>1P*^KsY%qygS
zh0+r>pC8}BtySF1>xTb2EA&~Ch^e}N6NX3|P&MC)x_@sO@t$|hO6y77<1h0S{&V&s
z9gw~dasFl8cPmTro%XBa5p?YWjP#QK>N0`LHREOJ+?+s1r&i`nZ-Eis!4b!Ba|osV
zw|QM?*h#1t<DZ91USG~c%9<X44~A8HEo~s4j1`FYJEDC4Nz{O1PSs12x7?j}7u;OO
z26!f6fR&ud8DY^|d?gh2><^0i2ywSTZYm^vs&Q^2&5UH26+|I@!|m%2LZS|-w}0&7
zJFYES-Et3{9&fjOF)1$7dHM3KPD&umMa@j&?=RX)<vA`J`PDQ5GF{)~mR)f7vF;t-
z-#(1&IqnF1HI8+;v5$>&{yJ;oVs@|PD803lm-)M&alDlIC^IsZyhK4Gu^{uQlAFMC
z#B<m}2rU<XGu?4cvnmS4^I@_GA-&inP<*5m-yyl^n!nWws4$J}gMKTFq)Rh6`$-pf
zz`ExOh$m>iYM%gL0{~<IIY}`MABfXHI1~Nh<a+>F@1{R3^|xZlJ8?~g`UR$k)*Sow
zl-S%|Ou^9-fnb({9M(3zL9Z-zqG8uWW*IMs6^3SN-5)7*@;?#fqLnD<W4clS5buI7
zwP2`AfDiyFDa9*gxW(Dq%j+)?M|g5#(q;%ggsc$a|B(=cLWr~Q<%s3*Y2g1OfCV7H
z`X8>Cjp+YROdxH^EFjOM9Hc%6!9PPt`tk7o0K$sQ*-VI_d{4z*YFD1)-Y^7EB*Sfh
zFmxuydf}1sF{lfo$ZgOFDDeLk0|A2R?->mW2mP3WU9Viz{2Z|G1Cj9XCzNEBAOjjC
z%zhWu&?!3d%RVQ5KtE4HLr}Pjn{b-MFuV#8giYaqavni-grpmF6w<UbHe=BLeyS*n
z0`I~DA@ZfJWLzDp2wKjNh(jH)s07dzfiR2Hsc3FSLPW%*`_;#eSk3f34w8MA{7#Nj
zH#>xjbNdGr7ZGnNsk}QzC3U1NoymCY%r`KN9JWXVZ1{g=NF53?C%)`{a23=AJ58|2
zVFL?oewe300j&)62J7>T)=}W^0&S%tANAy3PtMJasHXO_YLPW(ywWxSfSivnnmY7-
zA^w(KSO&<$W>TOZ9XBtbZjRg7d@CA!f$o|NS(%#;L%3feif4<2Nr3)ef$xTe=0ob9
z9E<_A!QzogfcD^TzP0-t1VN4&1r!nK3BJ&?x%ZYX=Mvz;EE#d4#FUlkiDfMOhz28n
zL>0t0JK{9npctxpK0^`-Yt_)<3Yt^&o9G!KuD3F&0JuSVm{8*Xm5`awWe%CCNb-xY
z1sJB=klWMXLRk=?xjy3R!ELG35QigeKzXgM))u43AgTZIZ%72Tw70Xson-E=<`|_M
z?^aXsDMDK8vV?GCXQ%2-!S`PTJs{j-Am!?b7kDk?7_<g&SkR5aYYQq}{$CGx`~?9q
z8K9AqLBj<k0Jqk;zggK(($eAM%>l#58WFcD|1HBT*Y%+4Y3PmZqvIZUp2U!SW!A(0
zp8S=sU4WiWA{IX(S%D;K2(a4wcBksqy2zL$E!o=`%C)STh4fH4f$5~8@%?|E9s;lt
z1vUWaG{1Mgas$~d?gFjc`ci-#aBMWg7Ra?{{4OM843oX1H^jp9aAK={Z7Xc<(pgxt
zMPzZ0tR;d_7CcB<L<Mq-T-mWVnbysry*Af=Dw(5owQkC&0Vj*%5VM_&c^PC}|B)@D
zPd{l}O2oq^sGS3Z*26-M`7swDSD7Qnv6Btu{e3(@;PdasI%c_F9=$x+CVXHmIalUK
zHy8VW?^E3EeU4k??3C@n6zzX(9E+i8onA%y^Cx$DcJt(J(JXrWW|dlfORxnYX}{@u
z0Q!GHj7xyNH4$MZ*)k0@fHI_N?FngR%`Axv;ov7TKTA&!@_F0Xap{Zr!6c*shg50>
zj;tsXRcs{bz8Oz6)hW3<zPB&!5B1eWm)a+XHz$kN9Y7B+%vKL#143{`55Fe?c8bg`
zt`i)~)=9o3ok(sf$Ha#FFM+^s72zG~)wnY_|8><%sZX_^esZ%P#LR%N+J$}g&#<Ni
z9l}}Hzml4n-_EN=ipJ@V)OI<_5Id~;63~<lN^nNgp15JSkct6UcrWy!)yt^#JRiq~
zuxhJ1C!bHtlk+j*9yWyGW@U*!jZe<j5XyxHh!0^GbdlaK>b!=!t342HnmV7nSm!0x
zQi;8}B``DiKUoSK@fkPng?KZ}T$gVmq!DM>@oYj2gBFE@C%hu7CNS2&$R||p6N7WZ
zH)to-Bb4Oh!v-7b3%l&m<x8m~?1#zDWXM42Ul1ZvvB<M@x^{2x;WRJL^Xz;<zrBQ9
z>EDN;V(CAfBNo7@4fumN^xXpSa0G2A*4zz=h$W$I>i(iUWWU)dUv#z4cIC3G!x}u#
z<=U>k5JjKyzg%MRxnhc?$TMFEL{UzKR#87*Rxxb6WL??63L<nrjbQpQV2Q0LI1`8U
zTeJSsBGCJj_j;|Fl!;-^TUK6z)_t>%+9BM8mbl&xB*7FdD1n~dWw&ty4k^3L3iE*0
z@zL~q8oEd~H$3nJ{Hc;gpBL8IVTP-;ar>95S<$0l=jjwL|CcazxX8enkDBNNNw!=5
z1A6zpg#x;X^9EWeDBkOAx$WF?3zR<3Pk}7vV>vYA0`E7~5w#{>?Uge1UT?<{VJ??(
zeg(tOij=@Bk?z(Ey<Q;*E^9KY3JNlBXNvqBSU8l+efyUHNE<q9H7!h`w-IrW7MJ_5
z9NtMKoVzz=+#Of{vWrN@s@)OJkTfiKeum+1!f-Q>0BmC&eHts3HBQ&bHBJ}}J87fb
z+}BB5<p1TWovP2gG4z6kexd@8QqXitZ^mrzTARMTzptVkRU_00Z0GbG5NX?+6Yz0-
zLCC+4Uu^hjbtGZRp@iUy3=8^@hdHejI%t}(-^T_M^MYjCCPp0iMa@l+t1J$ii>JaN
zF7ECOfd%&R?By&b`29ir`2n0GFJlTFU6Zjx76sU6SDtm3Z5dtA0e8KJZRRHDj!^Ew
zq~-`?hDA$ZEc}Gx)&@w0NTeuZS!Njji%)0nPb;?&V7-EJVe|aUV2$1%8@Zbmk6<`=
zinid4jbPWj7GA!P(tt8$fn)i>oe5s#Wa7`G&JVqH?q{=MWBKp|q`oF0$Odw*7IZ0X
zeDq|LQ}ELnp2!ubueU2_9@vTzu6NfI8SJ#<h^KV24W|m-*ul&;SF2)=8^yt*-D34I
zqn3#>gv<^YEniXEPfK>{I(5|l8!F(E{{%`o0lN#e%@&f%MHI)6b4h3$iO6ptTK0wG
zlU`41mzz~NF1GwXN9Ua2!lJ6nmj?zN(3)VwzNw4IKRVc7Hca^b&Fjf<%x3y6n*A(l
z*d~(Xj>GNPb0)q20^|}LNNfdW{0YYkOE(3ziws718@zfQ1rUhFUWO}(>#lFm+h5mq
znmTo~|2H2wD01bmKP7Co5T`}$m*4~5tvkMKWK<jdy#fLmg%|Uc!0Yvyx|jRb_q6QV
ztdp?BqdmQG>Q=Mz-xnktH0ZLAuY|4j1P<Xr2#Fz>Vm|%r_8K(8sm<x@N1V3yvPDTb
zBvm3y2;54OnuyKk0x(Djd_^2C-BjtX9;Mme^>NS;sc2+g(s0kTl^6?zSV;2}{qQ}C
z&g}z^Q)Wf6aHPye)Wirus97$UD(PBg8)?;-0`_a_VS~-&`Q@e}mNE~=Bil-Bt2z*-
zC;8~?ws~oEL;tBCLVrS-@*~A$4N_pqLE#zS=LfGIEXE&Q1hF@tf{gVc_%qpEc7nUD
zX<4Fx%{A|9M0A;1k}vc?iYn)~0NVLql{79>S(rM(o*@gGa#iM%=woKITr+y5yPNAx
z9VDB7T~n%=Kd$;OoN9HqMgX4C^?ye3iC6VEMPPquDmC(oa3XNjW19T!<wP}@Or-aN
zdUYMMPjcc@*#Uddr-~Bhi}h;PhCK|!%Dw8(iMiT<pUg48{wLaY3O?se2v-tIJ*)zi
zEo>iIw#i4NY7*%TN<~L`(q+G#Tx?Kg^-mxYc)Y-U=nocY#IyQB{Y^ANl#ZW89GmZD
zKNrcAg#Rj<MNy~#2tZacv^f(uZCs>f|54b4tdgF!XFgEe9N;S~86MU5-hamzR-OMN
zQ}jR`zZmVpr_e^F@jX(Pje|qpFjsDroy}S-kHKZpYP!FQmNZ<yIuBK()BL!#bZuI)
z6v(T^ry_u{^S>PN{<+<A1~<~~8>tcaq(i@dSjIY+F?9vb{OgoN%vod_4PmC*5r*X8
zM@eq2!R68H<NT{{3nLyYKWl0n|KdSlliZ)0XM_E@{R6lV!G{gEVZw>qbPzVsrtA-;
zF#X@&eQz2}SlZCPP_!0laqQAkxCq~G8z4@*{Dc$NrlqM?l1&I5k$s`+ASvxDRZ;0{
zOE-GlS;CED6uoIwVQ2Yooq*1SvenZc8V<|ij{=R9K`j;0o0ZA$a)Bbn6;$yg-h;(v
zJ~pWoTLM6>GWMx5PDVPs_K?vXponrceI#Y6x{yOp+u%Hs5v7iob409{rG?Tbm<97-
z?hSUF;%BGJoDlmGBa91m-OrLNcT_}{?v0Eo)j~GtJ7vIS2um;%3kQ;r`1p~=$qwSp
zD`N%x&u*aqq=OD75v?mVxBpCPqq7!Z^pSsexe;}MiCaRFpzYbG&Ptl*3==c=&&0`z
z<1Ch<S_*aoUD4G4sgg-Bs00o+?^48QwHe6#tSEqPBPTT9EWH+oySJUO(GZuPkZO&1
ze%T9SsuwS@*YsJ`C>Un2zFNXnjgBtU;+FIVqA}4r=}%qs@8|ro#|FgHORWDVe5!3w
zT-@zRUV;<d)h&9v9wZd6gq*JD;g1CkQ9Wzk4N)re^M1vu#eM6WH<%t?0Ufyt&ipaC
z#SPxK@_3Ds7JethJ5hFDlU=SHMA!2}!u9Qiv}#?sKPN}5MSp$WZ<1ejD~5n2p4G~6
zIty73r_+(Cv_C3dEKMgXXZKM0M<w1;od<n!E_lMFLI8tDQ!a!aK;9~@P1VfK7N<-r
z!ZsP#lnnPZ#YYDWyS&^mcxH=V*2(wxTG6TT&we4YObktWov=cPH<a&`ig$kKTv_&3
zx*(1k5<xpJJBg#bDS^YLn}%k*GVHX`^L+~A<RCxKEP^@ly_5G%M1O)EDd-@{_xvL;
z*!BWxdi`XRnlvundZ<*wOT5{$ymL1oBSp<3uses4J3jDo|6ZWS*mdUii%th$KP$f=
zPj2{F*W^`m5JJ}g?UarHzShRiZgB)&7q9yT!*vZ%y@lL#uWqm-%B4@#PX`pLK}fvp
ztlkkhN{o|p7{hZ#<=MyBqrj&c6Q`EM@@wlK)1hX;rTPcL4Rx?a%XS}@*2Ku7=4jEl
z615Sc3J4i>)Px<dTgim-TfZY_MS$iOsPRt}@N~&K>)io6OpkklGI$*uZ$i<A5>ILq
z_z<?xTyhW}#aLn_+ceu&m}HcW;U;+gk;^e5+#HpUitSaeBRdJ`J{vb+4nF+_MC?5j
z)xS(72cilD>3;1udDL3Cw0bjIXev=?Eu`MGvjSdiP>prndTt!IfH==oe{ZNbl;$ws
z+PUX<th#N497o|hgfwqiS})pv@8;rns<QHaCCL?RzL<(Ctf32Yiz)+E&z)s|{HOY}
z;p_tQrf3!o5AO(b$l2CzLfUiU9X1%Zg;};v<y9-QyichXY|wMAijy}k#CE@8Vdcc<
zt&d`7*iwj-IoyQ>Rn!FfF#Q6@IcBs$8GqDx)>=;SKd#-Y$7r!N%Cg(zL((E<Fp-@-
zWAnT08}N4etA3hzcd%`qBGS>V5SbzBmQi=Ac1l3mXNA*twbBlU<WwT1CdB;*d<d@5
z^+2Nz74b&S&A}J&EOvy49-iyAp9K8$`^g`>$Ca<@U70+lzJJbSzyVgA8Chs5K4d(m
zKMse7p5tH<gfIb5rn%4rX2Q?Z6H4?z>{R*fkP_odiRXcgXV*br7D9S^o2Ud5D5_+U
zFEH`>u&uh2Ll@XIi(|Qgw~R?ArP9j8QL@#TPY8sYFXSSol4{FB`k~*9VY~^4w(6`O
z=8jHqaK%^Zqz{{0{|5Xx;&Zk&6{uwX6H)fzx{TQE#7ynmj!uNJpXT#5;Qe{@3*Xy)
zzx>JV8UorZcPs~8Yu{g-{k#FeNnR|3#0^2LpOm+k$JGpDtREp@)MT8^I$L{vgW_c=
z*bK%L-Kk}voY0%YUI0ht)l5*VB9ZqgNQAN`$IP5^H*r29x^0p{B`55pCf}a7^nN=m
zmzF%oV)gAR7sMjUnHxkbt<p^m$XrGH`aJ-C*^66y=41uRx$ikN;xfYW!8pZ9)x&lX
zBUXsj&)J$%$Ybt{(Z6saQZ8Vy2hzV^5Lj2dp0{paWF_$v=7DVo6vl-)#V8<<dkR(b
z3~1s?Ausvx<6t1yx3U@gpRD41eaTbMsCrf$EyKqPV@a(UIsJEKgQR>);7J=RYm{?9
z5KTP)%c%*{DT95;>dl+W2TH1Ge<jg(Dtfjgo>acZ2@6bn=~W*9pck*hJo_u9%X43q
z)FtSn6Lx=J77qRw;Ce^HlNQEmMB!ZpVb3)CYXgtJG>rqNaDdf-5Ag@P$=jECb}QV6
zoOQ`Tj5s-e>p3wS$FqpOS1u_M?_5F=egxKwY`mVA(i{I?jfRyCuPSr+G3m`VY~-kS
zI$)+IBUVd@=-Juv*V^qtnXQ-XJYYi5#qoN=L-twqWmitt?54MNLhx(BY<G3B@Mon1
zE+d9tD!rPvVmTMitFgBeLM@f1U;GYQul?cuM14$sE%nZt@<tM}a<DiR{<o(V)Uj|{
zc%o2=Q%|V9CtnanmMPK|Nbuf0w>f*)zjn~4LXxZUc+fxmqQCVP7=PK<tPb5M7yP5%
zhb@j=Li_|DQL-Q$r!n-@=JAOa)Mp*VA}ZxcD2e%CqAbl-KkJ!!hg_wnrXR!JpgTvU
zOP8+Oy-eic_)p;a6dRMQVX%B#;2o0$htJ>}T+3#CQp|NT%K>K%32@>On@XB)@^6*T
zS5gJxOyxjP1)<EB7qa7!gN-rKhrPpY<IwVhPJLl^K38ranWMsnn2*CuE-0=el{W*k
z`p?zJ+m|gSKt2RQ+qQDQWd{urU%w7tUU9y@>@jWWGt;%n8Xb|)B67Lwx68wRh3)=C
zWifj+5p|J$wwGh(hV9=l#Y2=UCA&Cjq8NI0Jfy$9S6@Cl1EHIVgy3L~>Mp~gta?|e
z(<K1Vc;qG=MIXrpRLG|?g><-B;GaTN&>G7|%8LdZBDum1#dS0k5W{qAsmmpS$<3UH
zDFEE2&}}`A`cUpucf5X<bwndX3BJl0^nyYd$Nfek%~v;?oR?HwH1H|ylDHe+In||w
z(4$b?Vt}+V`5G#FxP>yL;k_CtHk;uCIWk7uFd;TXK?cf3?j?VNwZD3m<MD}EMNF-+
z>&}m&3t=dVJma5IU(=|5(dMZ8t%VP-z6+xr*o4YeNRB<myxWW5x6l&9t!8u04I54Z
zZ3L^w+u(t1Zo4oi7fOcd!WjK!NfZ@7oXS;$5gFUdSxZeR83F_bB<B%J8#M+;BU&7t
z(+$~E)Qv?Tt*3F2TLDba%@BPBqxzV8Wc`Sg6{YG=w>qVK3=KKQds0SyT;JqN<pa6I
z#e9dnh(D`|C?|MWX1olk<y<QvxTIIA;7D#x2z&c>3TR#8v)zF>w0W`je@|naFHl*<
zI2-BxggwxKDy+c5s-6}otJb|~{3V8SB6Sly5nlrUr_`CjYA#-?*NHJHPy)pg>s|_7
z5+>9~e+RN|cwCp}<>B2eXxP=4*O5Kn1Kr^!<2R&=wDOleJifSy9l7pyt`6y;f_jCq
z4{YM9s9(QSfAP%@INIZx-_I9~ld)?#!*J8BBf8X{K@4=h?=tv79+!Rm0^)jU^twCC
z15kSLl`Vm>kX%ok30T_>IaLw|Ntr$%tOWJgyd8k)PxIaLa}mGx?mmMjg6h>dYwLbT
zZ;+n(_$}4sB>SYp>aAi5%RtKxbJ+;Or15*j<ifS^oq4bO&Uu;X)>9MP<!q4&Ul-{R
zGAym`>LDJy;o~oL5g04|9@L^g6pp}DH<r|d{cVWHSwWGUS^%>uSxg%bYii<Eia(0?
zhqAF7D`zr~5KnaQG14Y2<=YJI5crBJ*dQxvRR#hLl1&`FJ|BAXhOE-^sbRP}w6oF%
z(FvW3oW0f}FumRIU2>^kLSzHM5$9y}Jyi|Bdj11~i`W-z?S~=Pnr6w*fV+m=IfFWm
z){Z-yepByClX#YTH-4d5Ivc=GI;JKut}S22Sf(rwSJ{@C%(R!@D+tL)Sy%8RPuQZ}
zH(cfykn5Do>-Xx3=XYjw_;P25xBA49A+Qdo5%s5MPyob@$A)!fsYnUB_}q5Ad!C5F
z79Cje%3R(s4hkXlV=xZ=pkxyacbo#WIighL!ZtYrsQoL8;xC9K1xo(pIObdPNi;EE
z=4g5>S7kBlzWfugqvzU?WX4<NqW=-eV^3~Fp)SPYS~&;(mC7@cDB$(vE!8Diy>jJv
zr7cO6$;z#MC`(rWACPjx*Q^~lOfsR>?A6cXCJNwm<z%B7OepkspCBksG>6aAVG~J>
zy@*|v?A+yx6k1NaS??ulXTkXQtS%;(!nCWYC-blNBQt{t<?r(%to^~yLG>*IbJ~fl
zY^dC`x5eL(TCl7HhXij=j8TQR!`c#xbloL>;w^hNgKH*yO$svt`|>H$HBm{vXsakl
zCRSw$jQbEf)AIFJeW9JR2v?JYExceLY4E9!A420;bQ(ck<ChFqh)KvIZ3gauO(8s+
z^&v+XA^d1sDq8U-VDdFq%teHWBFF~TkO@L@S50C6oWZQOR^;fryQiFO0YYqOQjTsx
z^f<I*0Lvj_O_AA5uI_@wdL~Eq3>Z-UB<p5>MWek)4I4Vib?@J`8<Og{sJQ+Gh=JfX
zE!F!&G2)tCTQqt!S;>7BM1(AWGA3}j(HSOEKUz<tpb=}RL$zUSN23WJbUP;ZcqftP
z{;zP&t?|{_<k$M~r>1tsg{k}eZ-pjQ)rzL<?j?2O9i)ai!R>d!G|-}JzD%|BtkX@=
z{PT$JPeZE4n?2+Ko60~=VLy#D%x(`TWQTunlP(w%gJtOKlO;KEmf0o_HuLwB0~9zZ
z;?>A0ujCWNOtnhi*38aJXZH10(k{MDFS6Z`11Hvg?M0&m-OkwJZ9jr6xa%JIFhi;A
zW>UyO7AYP)F|DFqPmK4eV>FL$D(rs-&JfwQF>fi5b;8u6i(R9qU}8N9t{D{S>Zf5L
zjmi&G1ABT@woNxR5nY-#tNWWSul_lJj<}~C`SsrD<H_BrB^-N#7IwTUho*wafSI|{
z)s=7_u0baoACcZ9LAL8EuIUS~!f<5i5q?bGt3U3@Re&vnB8@OGvQXr6+MzuBF%`Gl
zVg7AnH&V-&-Diuo?epyL`MijQPc-M?iT3Z}{cJ+en)ofcGMBhdT+urM0neRHi84jc
zW@w!acT6640&vkq0f@ih|H*wv344{icyAwgY!GqKV8Khg2ooK{vek5zQ`^OnR&j&k
z7Iyw7?CHDd)-MERjNC1{iOBGGm)vg|%(0-n2*!HOH-rtlcuPZKLt@TASS>zUWS))t
zK-u~DTi_}2lbwYCECKnKeAI3@m5r${TLd<6gR!PwwYxgFWD;wO37jABxUOrU&6Zca
z!!U(w+AotZ^T-~UjBuM8t^Z9v=pvR*Kn4pFF+*bP%8j<R{<cH8{{?kX@b2pHaXcN%
zWhqf4v_&{Y|KgO|`yoN}s}<>eO0DWu?8Qo%9KJJp3G5fYr!3{uiG?<kO{y~XCgQ`*
z*SX;Xh$;)8)%uU84wKzOFR(@3C-8r1R6)n2<5c<Bai_~w?SNvE75_KjdA*f>d*^z;
zpfcXFaJq?}u9X1Y=?~yU(Cmzut<{mVMRPir@A~TR5HmS5v&ZHJnkh96{m9=~Ud{(Q
zuq4Y30Ku1*zIO<giz<i(Atks+^T!wD!^lFXUw8=-H_1r0V8LuYHXGm#ZpVtb=y~v}
z8Un5$oNXq}Fig;sUddGO;QMiyJQ%!|pPZt4wR<T>wjQ1E;`vZHbav#_&1*i$$!R(j
zVy727=Sq@y&creq`gzWEBdQnfdEa(zedy7WxJ^^v^*%PQ1|!c|*FH)cOt&6bv+)R6
z80vF{!x~lkkn&9Eh_Kx8jZZ>vmW&FUZm(Ko!EYMsOhQ(kh_>>e{D^|=v|sv%g5A*t
zX$3!mY%O7OXaXOX10OQuF2LrU;zI*Dk#sp7uGe>ZirY{fp^l-3PxjPVrT;jTHEKD&
zmW{gDN2ziKR8Ap)LNM=qe}C6q_8sf#F=M~HpQIIOW{JoYn+H-gFtEs-`h*FejE@o|
zC;MfHl!-2y+eaQrKx`D;SnomYE2>jeZal@Jb%^!s%Y6@WK$EO|L2LpuT=}ixHs9VX
z(jQK8HEHYTOG(%OxPe#JDxD7&(?7mxt(Yo}W5o?d*mL6PH5*F+IsF5ncjb(^A}S1y
zwRRc4b+Pr&JZS1p3`ujbxv4=~l*U4Tk7p1Z|5S<BBdvJSrC7{OydlTEGINLE`e3f=
z>t$>YX`v_EM^h0-P(vFgEI*Mr7_SlZZh7%f5-82eoBTf83jJm8MZ>NFB7l!IL)7Ug
zMtpD#QoH7dS(jE-F0Aq6KL*6vv`t%?6<r&e1@a`eZw|W>*)}MOfmdeKUYdbifzr`m
z=49ur;Jr&3x(uTg_lT7)rzoA_?iT*8Xp+v~h1+xfIxdYxaKVAjjRI9~wIgaFfzt?j
zX(|y5qOT1qX6MTzw#^5`7mJWbmb<TNq4jVUFoPhBiBh`5SBM6fh8Z3;p-kMqZmjuH
zJ!tuQcF=0o3>mNKN>sT%8a2YVJMajb_Ymet$COakx~b$@)UMDCr!ozlTzu8IATpg<
zFnuwUKq3s~5|RW`wG<cLooZ*Fs(Tdt;C^iu5xXsN7JdAw59m$j{lfDgVE>Ff7u|#8
zUCKMw0qL9nbUIkjeJ2;8z8kU$Xy<`}QTc3M!H6xHR**s~h2MY@MHFsW1gfoA`$>h)
z*dO!T*(<&yjYX6wb|<t%Q73rG9|@wfu@!_~qY^tN#iHmDKFmz7j(dJ>%zJyKCcLyF
z`-4GM_uvgP|0_Z0H14x+CB%2y+8jgwTSuAXD;l9OYuI0*7HqrYT+Un4WoPHQ^YHEe
z%LR~<Ec8h-k^Y7g|E*uJ=o6#j+c<O39866=ypDUiCqKR(=%=-#vp6?QL#eVVTF{?Y
z#1PW1g}uDBs{IXT;v%e9ANpBp;~T`^NFn!byihmkEq7w#ZMvte<wc>90!RE?qt&TL
zR6UU!osiMZcRO66Lo((2`Z4Vgs;sIcidO;bL!)P|3uP?gg7h)O4tA%f-uAuM)*{(*
zN9E3&BB`Mi{A^}Gh0UlF)dK1Bgg3sSN{f-5n2e@BD6yIejQHKR!L1vx;y?CVlIAkx
z|AXI7&&<G?Qdtu~Hje$Kp<|Md+9~d~bM3soAf+kd=1#{#%J?Q%Y)DEYY<0v*6SJU*
zg(-%~cCdWrPVrSyevGu`3^^_#$znc$dAYlD!WgX!3XSP4lvTYei_-{)X@6BCHHn9s
zk%WV`#OKavlcOcIUNh442+hm+K;T+tSVq*rtkIlz$$CDTmkON8yLuglYnUz6o0%Ly
zUYPxj5Oh3T-I;CIzKpo)u;ebS|D9YbVi1|y=VaDGi*tCv`=ZI)nm+;V7~gziJp|u*
zDV_@AVrgnT4C{`#wt#QL4nK{_!hF%126t(wACTam1jak^CL(*CP_w$gv%)M9gt0?(
zKWlpw+O}2H229K$GS=jw_DjIZ%EpA|<Vb#EvSekJAwzkV(kp_{=ou*QZzK!Ykm5H7
zb9-U7+;fALVLbK_HkhSJ0U@phat!1Tk}7r?Y&#-dd*uiXxa%W@ksOEkr=j(c5-z1(
z7zvFFGh1(>PM_EaB+B&ATxiMt{C;jR3A<59vGc~G5Z6+(tFGC7J;h^U{@)QP99DA3
zoU7SlIR3-caO005D3Eu*PC(Asol};|dPX;tx(OMTi&QDk3#*iN!16`BLPMZWoZcUz
zJ#HQku~ej!7)ocdy!>Z({v3)ZsMnITUu=(yjt%7*EwY=O1JWO;gSQEq5SRF@usInD
zsmfq>j_;ry+W!6RjpRymzHcFw96zG4t$hAQ08Zjt?mKdQnt%Uj=OE5Rk7MuC%Zo!F
zu$Eq#=*S4NwApFVEL)zImI`}c&Hv`QpA`?0$c>a+kK;k@9Y$k?=?~+2PynIz;&#_~
zr)7KS6>tyEhJdlnRtdyZJz4Q-<I2Ruz!UZY{lWp?VvKY=JI{%p|3gf!%GV#cQ(C|w
zJ*cBI9Oo|{W*|_pPCywaPFN%#lzvR%j(L;MrTyL^ul$8CDFKQ39g4Eez<EtSG5^LZ
zhWO*Qk<Z6P6QlFB-`-^5Co&>92?FBpLH7ejzz(iIm8vuVvtJcigPio99YiBcS?6k9
zU<KhjvL%ZAy^fnGvWkQF<t<PesF{(!ucau}<|afwR6&o?-u)6ct3ZGzj22cCY{<<f
z;cGg6-V#Gx^*+72zcDRMBfi|!6PYxq0&g;;-^Q|AC}qFa(*w)M5o_S`_&1)Gcg7Bv
zZq;y`C>#G*Q`#=aQt52Q;ZDV8$Mo@duB!1eLSZApU-0nvV{41s{N;|G&ok!ESQ>ol
z*l%hfA`CshSOzbqDfK1mFakHgj6^7ym0BDLAF}?3I_FmOCl2#=61z1c7(>n?ts=Hq
z4okS~Knn*oMz~hBg~V+F^NAV3Tl^G_do9=+<4|UH#N<hSPG0Kk_mFz3OY|H$Uye%o
zF=XMeo;9wm+$89GMAzEg21OKthods=!`=9s{+sU$9bWSXd}Q-G$q=%dumH92u&|;)
zY&l=j#Gm{S-|4pM-#bLcc{35^J@2doO(IL6BeMij0@)blNZj)mN(Z`(ov1Gl>bxYr
zOJBoCi!v(xI&C}6Hiuri!n(Qv)?)KM%koFics0~@dH}0aG_Gxe%4z=6(M!;!OSs<e
z<8RVonBP?yEnlC|(+W@A{ERS>kqNh#E=a;!yynIITS6eop;etPJcV-~6B8%(#L}ZL
zxBS&jqzT+A@L`<mmn4O~>YQw=ztWlCxJ)E32k?`5?Or|L8>VoU?X_ahIm+pWD(=A@
zoBkD(Q;{#@?~f-|y{rii+lI<sAJZV12URwEWlzEm*+t?sOnXils$E%Vl^96JADsKp
zm;gS}rCy-T?=Y(FA&Z3_r5*emp3MuAiMzLtQj~os)~hBqpZpGccW0As$=%;1tM}=5
zGuf>iCsa|9yuN>b`<)tQ(ZxVS&spmcQK?J}Kede}W#b_IeeuWdG}J=6+b!NE?P){I
zd%Ne*t40u8N~p&flxz;B@%$<xIBy1{=T-r7*RS_sXC$11Z3rI~!+|j>me?HLXK!;5
z-rNhg4kB@!Fpi9ZGWbR?LyQT@=}S*5q&PjlY?+I*R*x&syMb`!Xk8V!$t4a7`ZMNa
zCt&7y;}Bl`7z~+lrSXfj9=!m2kotvO%$Gq61zu8Z^}&mWffaApXnPeop$~=+DIu?_
z;?m0z6$H|^g!|2|X33~8VVF(TJ(0|1n(%dO>Tp)W|NAG7u!SH*z1mO&%l33wB1#P-
zcR9nZAF#^nl#BATGBbZ2vB=t~j0K{}TMpP=#lI}M<Fv`{(pPvXSl}LA&z!J}B#YJU
zNy>+0?9^XPOBCAT#Dp?B{VaCw$CtndqL?J8E>Z_f2)v0T-U%WVjfb9-qh%(4&WFkH
z9jZ;WuWL!M=EfM|P4uzoiQjk^x=5440MGmnh}xwcXqH4nK>=spkoCu9)?V%)K@5Ye
z(~@xeR9gxc@6Qu*nZDHMGru<D{mZJY-6_0H6QEKOXg@NlT9Ya=+e{O;ONImr-V>~w
zMs+sG;;uHxvdA>2buf|fEs37V&PLzctv2Xp>qiWjw!0s2Mgw!l)^CmTPKK^6sf4c8
z<U*?mcS1WE=!n)-fOb`b1ifi8qeRE8&fJ=_l1(AUii751p<I^}cOT;d<MOXejM?DP
z;07eyWgc@8pHB5oj!pJxy?cwXj@L7E=*f=%21#ShMJhJEwfr)^YrW$h)=Sh|>m<Ol
zF3rBW=o~=c&*1`=36SPBrKfUbXB08Ns=q2igJ}FE{#(?q8v+}^M_S+NS-fcu-BP~{
z2b^(f1RS4eB;KSkV3=&XRYGscwnI-DA8odBO`$OiXBYaPsMTe0rzmbFc=CR{-rLfN
z$cb{ZD|KCM>n<AxLfn&fz@OBlh~hyuAE{$)&vaPzfzERf*k^XZfIga8m7U3v4Z8&j
z$LruDHyd>lq5)T;27}oKDzuj*2Qn4Ep?p57d~-qsMMl=)mBDb!EUQ0%LMHEn%A{(d
z`CwSfgi+*(KQ$-MOeK4+APxQO_%kqC>~8?00JZtlxq}euH>PDxuV?U?AK!Z@23`TS
zTMFZ(AsMRr&)6j6CX<|bWjwr2-pvFz%Vjkj;r4Udf6DV|!I^%Bt~+rC-6`2jU$_EX
zShaoKOm(6~qthy_+cjQeyj2R3idm%}3;^e!5ZRdhSEc;voto3%q0T}*gy%kHNS$w)
zx42%Gj)E{o@ZIPd+qq4|A^7+|!J~Gc4L3xwG$Io@Ase`c1F1n1(rkr%Ik+9`Ddg8}
z_u9kX_A$9;Q|ZiEh(0!3TS05qB%ky?QmS{Xpp<KY0h@)k=9&0tn3>FzlrHtdU8w?N
z7h3=D&#u~e8JFj%x>&aF4^Nbt3>^Y6R*tu#{%D~3i`~Y-<&9Nu?(on@TIo(ug#Fw%
z%7A<+Oug=B4QghGC#C6!Rl(mr3vBo*OXa3FS#XT#Ce)DB1ccMTt4<v8hAVp`3ddc%
zavjn7l@2<egedZP4KDCN;f8f0goV(6r2YjSAgEmDPi3~(%putkN0QsWqAij0OiJDI
z5l$#+%&nB)P;88Np>z&KizUM2eEu}vLlRX+kBS19;(ghM?I_oZAz1<IeKJmIq!+zp
zOin>lP~)fRtJm-Bdj4~w3>;)Xj_tPvM?b>j&^_xcvwq=y?|et7Xqr&w^PM)n2^0id
zIjOyy54h&>PPGSQzszU*hVuFlh@PzImwCA<sp`*ogsvf_Eg0?6#Dd6@lhQ#nKlb3q
zGwY2YSI5_yqHS5&!bF@2R-PjDlZy^Kl+XW6J6LK{A-Z|vK|-vIy!;LCF2e<b6%s?M
zb)O2LkO1BtbqY2vpPjSUxHSG~^0vv{MGh=6b)Ro!RuJ0X^YPeH0U!(fqOF)XIq*2-
z>PYIHE|0XO&MoC=Ed<+roIP2c^%4D}2zw&_;vY$qp-Rgh<&SJTlY1k3_S5XUV3rX4
z?&KosKkNJ5n`JF};H-@He;r-9G>>7G5RG}vQ(l2|gP=2$Q96=vmOl{Vq_jf2S1IHM
z*_~YFOfo=D%P#0n-cLBM+8&Pq&GkoA-+ZeAinXU#v~5$}fM2@Jlb82$vUwiwC(|p7
zsqtgQ@=H=Y(T0-}fnTpw=wzh(PX590c9Vt(O)^!Zo1c#%Lcm9|8sY?<4TT6ET@m;@
z8>fp^SE?`(XDTUQ)YXjOQ9&U51e2o`w)xbA9nBR6e>zGb*J706b#nyuA+MJ_mihSR
z$Sb!2X%$PC)ZbquK>>c)owIyU!mCFcy~%aw($yFEDLpwALgY*?1g(CEf2R=D?Q(1@
zuKoMrDrnDpeEXV?9)?fUJ}Iy)npn#rmnDR)ENlVctq}aF1m5)N4jdXQ{=;`q1AaK6
z#uq#(9b7VrJ*Z)<n~-l)f2C3*vR+;s;?lJw_Wvjcg6b7Gvs+p6pB5RulR=!z!cRCE
z31)_}LMtZ}#OckzyNRm~0kI+J5sFuU_^b4;IoHWNV}(=cK|Hu3D4ldV?nLJHWKap)
zmPbXzJGYdMl?ZtXfio4$^(w2hz1oRJXtXaWriz4!VKkH(c03h9Q0j?px^xSWM0nh`
z4a9x}JCY0FgZ<6PHxuyH&AYzY_~fO#?WAn%!S5hfyVMGSv+$MRidB^8C{a5yi)Z<i
z8#=^^Gbn>}G5(BuNW)Sg_>JO91|Y33p1Bp(5w|!-8b)6uK_R~t+v19vQ-K1OJ#oTm
z;EH*bgt?gja+E;6O;vf8k62xvN7&BN#7IcZSGK77%~l@{#j%7ishe5)B(oa~baJqG
zJtPCP>~1C`gF-g8xXfjwxt7sxzp!o(F?vxari-}{Pa97KK9nz=zYxMry5Z6bd3aO>
z(xWp$Ldby~uGj|fc8`-N8G2*J*5s7EA1u-h*IsNK7WD48c&#XZVdbQ4ovEI6@>@mQ
zC9HycMg)SJjkg5g#r@)ZW>(|Xz96D&`Ga`cDq6{s<E^bmKM4iOKOV>;M>1fL9nr=v
zG=PB5f$|xziuk=wkyE*Yo^mCPxZNyUj^FNPaG2woSAufIXa8tDl(PI7P+8~}vnOWI
z9VL43VRuh9#|<RGcTG;#O7=JNV982BHSP*d82kt0au6?7>*CZtF-x60#)VNFUVzYx
z(Sc630{D`kz;2TOj=Rz~%)R!NgdZz(znWxORPm$J>4$jA*wZ~m*_=9Uu5#JfU*K}q
zZ5<1(V&$3)yU5p(2*KAIZ^uF`pyWYPTt&CxJT!e+a8@_5;_Jkijz|cmqMOmlzuv~B
zHsTKSkyoECDGA}%JysYm_if-jdv6>4L~UxU@E3%5t0p3j+M<6yEwCXjl@c)fPMN>(
z+yzx<Y?r}OEN*U>LBAYT&l)Yem-};Q$lW0=xGES7aS8|mad<4h^c^IG23^x-z~$U%
zw54pM8T{s(7aJonv}-gILmjo224uvJ!&c^q;fJ!>5{YG=Y0&(mks+gnv-;AhyHCNI
zt8g(ZB(Ovg2FdBr;Hr?l9xCq|w%~G?&{5zqjD;>dU14_?wxN5ue1%e5;)#iz?Fn40
zUn~~Kn_m0C?&+{f)0_B7vH_d?@P<E8wZL}ysz;l|v$*$m(b*Air~U++(V?#pcQpe}
zk&AtTua`X{{q}K@H`^y<m{sCCn)IbCN?&`Ik$KNO-^KB{QlJGZLVvEx4^nXCKUtcU
z?JXk91N(dv87cSW*ok}5UW`x*6(*^XK(sT9fH>&PUyKTW_bJUY2J4wM*-h#C59c)H
zg}VbM3Ogfyf;p+Xsb24wGjnqu(Guv1!+=;K8W05gv*Iw*xnf@&nW`Sv5_Omu1(SyG
z%`PihnP0~-_Dsan*pseHD`VIsdsMiDw*wDygfzq0uO1VHj*wBSey2z5tr_IT0u<+0
zOazz=nclT!K+iJC`ga=Bn14Z%2LlTLE#xh^J>qVCb9;i9)^;w?lx$gzmH;J!+Q>GQ
z(Yf08>h2Svc`7xNHIHGOf`akD1F6dn`Cac+wUN(qqS-M4>2@~@JqpEQAk1;kp}^F9
zNycAfF*NCaZCy!G2FPS`S{$Ql`=}Xm<vb2oW)o(g+?-J}IOy6$qur>x1jqY{q0z5+
z;RVP$V~c?<4cg$iHa2>;IU7?l0b;$B(spR8M;0X(rs_{VY{cc-yQH!AVUvHaN3WJd
zdK4-byROgxJigeXC|OM5%Qa%>M#T6*v2F`HrG6_<)mTOW3Tcl#?cth1=vtqfbie=O
z!gaq+UF`zea|wcRY`OjSL8eNJG@bEz*2EI`zI3dUh>~Ju?eP*^?fu8*%w3E9-Ad@y
zua<6*F!I-C+VBax;W!*ri&IS6T1dE1s_7ljolJFR>Av#dIK9nXciA<s_Bq&Nyrd%0
z_)F-l7=m}iRF%%(tAWuky*ZW_Es7yd=6oA@<V++2=A5<$nPqQgN|VAnc>K|{A730A
z(2FC|)?uMO$-6*J&O=NJF>`)R`<?6V63=ARA%8hDX*O<jY_t69p}$Qc!c8z*D*6{q
zczNf2kZL>HB#k|8S^@!j3;*u&43y9jYz=LP9)dnEM=TM8GU1E4rar{(MrBoE6VhHl
zKlI9{+se3I*Y!;z)9Wu6{?>+aG!Je4;t;?i;$llj2+SAZC+@zVhMXR80l2G-@t!}X
zCjgOs>2*>z>JlM1`3vgXnX7jAa8sA%fr%k3g!xF`G%V<%*Dz_O1}kfS<|@nBWF(3T
zL<AP-t0(4~A4o2McQ-lq-u1p2aatfu7#g*H_r?-jkE`v1_;{0kqrXkb0nAuN&Z3H!
zx=`-*rC}oHviZ2_N|GR&8mu35UUxYTnI6Ek#ogYy>;zW@Cq{#8uj9RIGB7?a_cJY(
zgAoGn34Uo0ID<XT`X(ZP|Ee58+nIeXJY4j#zJuK3sU^2LO3v2Yj<JAyho`JA-Q7?*
z5UY-ooF!DRjEm!TkHHJji%B^&DM}#B=@Kb~oK`2yGYJ4;SI+p}gOP`mrj%iiia3(;
z4VU2oPmp8n9y6C&+XBnFl(q;erl{zuixQ#lfrUdXp7i`4#R;_NK&xW{3T~cw^b~ni
zTpLSS{k?x#vP+_>CGfponKoMiOTIOK!&?+?+Dq=Yka;yyG69w-ubAuq@<_*X_wPDl
z5XJ~x?z##mnEjxBLU=e{Qw5t1eUhdg{1~4{X%?y{>Sn$H*<R*6TFKI#2_8@0&=+fn
zL<QSMa9FqRV8i&x1vwK!84$lS0Y)@C@poK3Wh%qXnVRFSbzdA@?UoZeeaLIp&4%`6
zcTyPBt~iCC<B)R@#PCZ8>F8g=>~Lv9>s$JqfyNQSsJK5%#dR`BBEM@QshB<^PFyxA
z=eT}Qhr>+aA7K{#(sy^TNSjYQqtRp>xu`5)l!h9#l`{C-)VB2U8aHew*m?#L_OZvJ
zM3X^`ZZh@0>DzAlwlTiPW(;gVxTJq3q}+c*>>i5SmQ(3ZCXjJ;%4d(lPj|SM+`g%y
zj25K^ayQ;Rc-bO<Vw&CWo_g)&wB5@kzv6avK;~GcWFpk4>1kH#qEccQ-(oui?c(E$
zq~b!UW}^z^Aaz;PY|u$6U{DUe@xzZ<xZ2?O@=0e&Hh*O^xBtdJ0#1H~u^bkaUnax=
z$%nqh7I4x?XgXweXyXmxdP#WkPkt{^l);o#r$7|vEo)QB-HT@?pS59AGVps?b~z9s
z346jJ{t8=-Q#DIp9<w%_V=WZj#^XEInPQ31vJd@qTi2P~0v=gdREj4S)<nA0I5e>R
z>K!qOFOt_cP+<S{2s{rHIrm~Ku1B<FHBk@AIdpMMLaby4TwT}$yHQi3zYVU)xomJt
z0ee5-lel(+We-s#E_V`kGsI$`8~*f`I|z&wipH+nigZw-;in~j+Zucv<%6>FAPH<9
zO{r0d44@GT%v^e{UJ7})p<g3uN5muRM;R!y7PXQ?a0sY?j8xq0`0~T(-};ksi4!Q0
zAP`<o4og{W4q2T<y)5mUAtBbjTLK<B!B%0&Cx~*5ZvJDNp$30nsNYTbN7aUwQ7|EE
zr!k7i%UD40;P)rV&%TFJk}(vM^8ixLHX}ZRa0EQ70zba8hEEBK5HzqPYaIj1DlV#&
zB?*EnPOPX}d1!0jh=UTz?pS}rC2k7c&SaKqtz40DM6HgP-^V<waZ?UT3>Wj-wC7HF
z?Y>|_z$#Nlj39IH`6|<hgTMF)mz<8V+Ly$wLL?l$$nPXYP!$IUA4W%}DS{IHVUWF?
z`kV}QQdJJJ;OVY{f~H~~d@g^nQBW-@ldL|Ru{43roR>F*opYs_bz(QAE#Ko}ubiYJ
z;IL<C;LQ=>QnPg3`Sqy$W@{4J*dx}ASpwQ-J61&I;Q)T(`RBFax&P0hT+Q4IV4&qe
z6&MZzy9*D`&7D>PleKM*=PATQ-px_ok@53(AX0u&Qyc`F0IsE)Q8wco1JnU0Kiz~(
zU);L_Y7k2Gng=s;Q!!4)sQFk%6Ed^qoOVq}XhaDRjBM-k{o=^L*lpKT0A)ChptM>H
zBW!=?Vxok@Q5n^EJh6p3#|QQZq^#y;TR$a!Q~)5n*-MCatRwkF_Q6fYWc_}U6bn6Z
zJT=#3D`a@E(EZ69=u|oQ<ZrAQVv)c+bE`0%L#S9Lqy-o7En@lFE>a3D%|am!)ia|i
z{hOt7KF|}qubHegUycPwZ<b`E$m-^A=#RU!LwB<o#W&?LTR9NN>MXQ`?zfQlJUhzT
z2v+!^a^xZW388FXi8)t*9qGh8q;6o{ieuUKQA?$poacupKf)4xpHaAj_sWD#`T3OB
zcvl)eH*|(lpZoZCAi(Mz9u3h1l`GFyu$JVPpnYz~+jsWtPY!v*k=eSwkYRR)n)&Wd
zPr0(JV5hU-(gT|D74T<gT`v!w-w8XxYs~=AseW5^$;zjMAFEIWQgU)j{%n&eV*)=1
z1Hy({LL9xU5%qqCeU}`~jU-2|$_mk44#dzmW#RT#k)(Z7eXI^rWDBj&C1)%QV#@c_
zH6IIw7^{qRbJ(I+79u12H^s-G`TZxsvTt0C0@h8YF9lQhBvuZ>QYmeG1tIR1Vs2h&
zUrD|@x9$P8lRZ8IS~;AxOoFREF3dIrfSTax5`oRLHGxpSZijri=~zk%=+!)SCsfiD
zx&lHcl@mj+jwV0F%&9*E3)5Fs<u~oVR{k*njZnfF+^)<Hi)M12BTwkGBM(2(^<$NI
zfqHkdK;$Jy-?gtS=vtT@XozN+Lvc8t#y76TnWl|V=#Ub@JCp(CzF_3cZ${E?CWC%3
z99S(C0Bgh;48tJXg35H`K<x?h(#T6N7*1xY+*EV0-yplsuT<WUCG$U*c(S(84rKv7
zEWFV<jk`Myq%=T<KZP#FNV$peTe!J5AP^6Y)HmOvVdFAdZfrQYGIAk;pB?%(U4w5G
zK`^*GPdMaWO)QD;&}!FkRa&wH{Fn{!Ali~a7FaM-zt#z!FlW5pz&9h<Y8RrlJkiWA
z?le^rPsF&AVu%CEhWnb%Kl39XW5=sa<4I7Dt^36c={+(>aiLa<waUVF*M(&BB*#2-
z|BV_TW|%-_{B>|h3naMDCM0aX9gkb-N%kRdMk!Em2Y=VacuHCovg$-C+&TXQl0K$X
zft(+nBbws7*FXLPBN<p^t-)!O%(H2n-rIh~vZBp671AW!K}IMSyeTpk;2igR?n~WH
zEq>W{$2A5H3U(uCx|2r!?u@D;3aKsCiFlcjjgJlecT~>xopjKKr~p*)$T?gS$d4z%
ze{H?!1#KCJR)>4Nl?%b?W$m84gl*a47?JZF{A5AhPxdh-j@$40hX{`ZsjPsBiirr5
zlat)>SOb2>KR@W-Wv_s?K8jvXoA>gWp80@ZDFQHJ>g=c4W&Hv`1w_Lznk{6d1D|VM
zrL?_z`>~|)fAm_u&Tb9iT*pf{kLk27UHgi9zIo)ezPI%YR(h=wJ;e--b4a>4NA=qG
zaFU3#RV2h3F*@9wIC?tenX=V+w}z`BU_cZ}LoqlsW^7Cem@&e3$=|BqO`dR;U7c^~
zZi|^;N`zb(JGbz+559PpDt~yWHqjRM<ua3x^@ywd&$C0p#E^9l?vvi~5Ef`fiS+4<
zLgWp5-l^yz1)n3!kjy$B8Qpk}9<<!kQ{(s&>5%K>n27ZMSJOL&S<*CJqix%^ZQHhO
zOxw0?+qP{@Thq2NZBFa#`+2{!f9~t5%F2k$sEidWR?%Ud0>2J4^!MjvayGF$Ru;ux
zK`fi3@k7VQsCj3_%scgecCj&HGbLo{9O^^qcXF)$+GQ^QvaEMwWgy+w&xt@pay<Q~
z?o01FM}~9NRw&E1mF6&<$-<XZC7-RZ(HIwHxswDSnuW6ugs+syJ2us0_!B;pcb1ic
zSmb)*qc9Le_)Ysu9xXgLCl@u#bd9F)z6arKH9F%QA4_x3Il_aPIy?KOCm8DnYquLf
zEF1^MJ|IhgncM0i4Ax?>q)NV*_Q+S)jcuoAx#{)^I7O)}xOvGLg7vu3*4E2jC&bDI
z#vYPH%AcrO&{A_lT;}j9J#{8ljl3hmO~hFZi{ST+KAIN><qP+_`LgPTdQGZ_T~Hz|
z+!c4f2y}i(@_I%+O2-2-%u<dkGT^z|>P)V<JFiEhLMPk|cx>pzp=P<vYSAbr?K(rT
zTFY=&Vk8Z-f>W2LN!&kbIP=pj`u{%{VEsi5dCebIM5^h$7CQ<lB^$;q0%2J9S+&UN
zThVxI<7_|M?gt_w{Tt2MnS~OLOP7t8P$mOSDR8XT^kJPv^G2?V0Sn6152_o0bjw{D
z7skXHGX}H#lk+!9xo_-pU3dW0cuP6|hQ@lo5@`!a?rJ#eoA7Dd%FxW7&hzHic47OS
z9*LIQCr`)bPP$5FBRyR3ly5k43dcgjcP;{fVCRPlB~Y$EnmYg>w3t7(F8>Ss!>z*^
z_JKk2zJu8PsQ?amMg@nTw8m8>l?x@Xm8GbV;KoZBhqD=LD8m<ymDJ*|k*Eaxigo@7
zGlk&9EDurF95%u_Msk}!!y8|aMte~VR(bzOwOD(R(;Yr&N@w2reDQSPAmW2s+;i>^
zi#&CB_`wEiO|{ymf-@>pnK|}_%kkuxGsqQ;jbhp^@;;5ySzcz|j-4H2fmsX{bPs*W
z_}skoc(>AFLV5w*NY#f&;w-)X(G=<AkRMiw+jr0*H0+0JpnCU<i_%jqkG`6!VneGz
zY0wPPMC5w;iiC|C1mL+|6-~!X(eef`dg|U-;4eO_PTLGw$MG7o)Qe&W5e>Sq#qvJa
zX)MEVCySyO0x*|=f~_^Efs25=@q5_;t1*~PC4KNV^@@YGQrv@Tb6O*VF~<F@Ytp-r
zaNI$(d?x#QOK@%JM0t~aB&wzcUv9P$(42UqsS9#?d}je(M@ugwKR}*iNv<>wn#ip^
zMF%TEStjIcz(8hZ-s6pK|CN<R_L6A&PO+^eNA*#f+fgM=Q`9xB6-8l*po^$8+uBq*
zSA&bg3fGN|E{duRO3A1O?&j00pDn!88!-kbQG^6<!@)Y9N^l=>qX}f7f$g;?Ok?P!
znGch-@lk)eC=RcWi86IwNV2^V9?i5WN7-ix)nLcStD4AB4xKcO&T1R_gSD&wDl%U*
zB=Y-lIAFX82pq7v4S9ulALXL1`&;iY+l}JQqiV3N_r1#K(33JUo(hz`Z5QPmnS@o)
zI}4iFq{SjhyWv?j9Qh37pSkO~Bf0yN-{xMfn+4`}rBb}p962Pb3;IQR^OO1X@;7=p
zdRt*bbBeJeYb$Kr_|CZ%3PLsx!03@(HmFB!dO@(?hpPcMVqk5ZoPwtR)e9x7xY*g=
zc?Brwz#4ll9~ss1B8)_E_UMZY>??E}abaN>04+CkL1fb;h>q==F6{JRi11JjAubsN
z1;1U~*}g&~umCX5S;Y{rR$G)JIJ&8!OuWV_eE{Bb+<{eX>meJ_R>0;7#G)6Ire6IA
zP1c%;F2*P@z}dcjuY$n|-mucsg2K1^Bb8AV{ful}oaQBG?l$b;^=I17#Oz%Rf6B6A
z(dXN8dBuRffMPQv5klwRX6FqD$2NBxfC!R6J2y?pSkn#&FyBtz-<snd8z$w1IQuu7
z+aJVYyn(Em80?1aab*=)#<|*~{0$;h(mcFxL-W!M&=?H!T(%yu?53&6RbfZ^^<FI&
zGohi^w(GCIKa}>WgKo!tVe7*X4;rioA`IPkK`3Lh<WlE7<W`+N@XHbbykP)M*wHb7
zUTw{wF~%u|SOXuT9E&RDEYCb}c1xb=F6=QA?hhuf68wc-4{@C}A{pnq<}C0|dc>29
zv_Z4>Qj~0WA>akmHANvZygn(#e=dFCWY83*?ER6FmgzCy`O3-UYm{=CAf17AqOKm;
zl%%i_+SP$qW1z<bArD&D=R1_u6Sv0p;zXZ!Y+rYegIX`Y$0kTTv|;6t;NakrHM@i<
zA9V0iqPP32vls7p4y(1aNIVWRtgd%j8V#gDY6J+*|0#DF%e_!`SyzNsXIx!V^hmH{
z*k~1cJ+%Y49g%IM1BG)azsUMd<aV`M{4gT`MVcsv%>*xOE?+p3VjnID8)bKGj|tX-
z?Q{^55U**xL%qN{NZqDFnE)amOKL>HD~H9nK$H^H&=&7j4#O@5lCB_iqZeK!2f;J1
z&X56fi_YiANyT?tM`fy+C|y08vXBm|$t?cE`E>?1^8g1YYd8+FAmRW&;6Bqsv7gf+
z2zw6yiVHY<rsmj0>;q*%2;S1M-FwqJ!=caX_QZzWl#yO9M-Fv&hmwrzipL4Xxz{IQ
zI+?fE6&&vN0CcXA4DFO{gzuM>=+^FGdCNjET}BlTo3%<kYuT8Hv&Dv64Yzy*>hQ2=
z5i12~`0x=?#}%F))}P^l1YhWzNPejKvVN$5Yn|K0XpWF;?4XSQy84;q7$k>}J6`!4
z@dSJU*~IQJS*LuQe&AQE+1{5s5$uFMK0@NWu)vIP`r-?<yjRkK`_dK35tXO{U3{&C
zrkO07&D0Ju20Ey9X{m~y#ds*cQ&hba*bHAK882FFPeFLnNDd!?Qj>0iuO~6JvVb}W
z))l#7?>DZ?-*!?0keZ<QZ})Rno*!JKO7+)*+|K7<uf7(*ZYZCEhMx2FNsD5qr5g(H
zkXXNj6n^|9?9}dLs?ZJv&o$nWH$W2zNRm|s^uQO!#Vdb`;NjkM<ec5G<>fj#E90DZ
z07#`IMV#FD<CoP$T|L0N?6cv0qT+Waq4bX9i8o8p=}CB3U^7Yr-Ai{WB-itkGK~}x
zI|CIE_S5w0CT^E5V8+f6t?^~8e<}Yo^$C!NrA-gF^9qg2(8z;zlpXb_<$pj!y<|AM
z<pC4~Kpka0xZ(ajDddN1X)wPL-u5qOdDJm3kLEh92#s}(=xG*Yq@Ji4NG1T&==SKx
z8!CVTUb9~n)Pq^#mi(0QJ`MbwtSA|^ZmiY*RR4#gowCJrUR<pNkJHLS_Cu%VI!hYM
z3L=W?2HcLC@c97A0cuu7to30FVF^q3p-k|uag+GI3d>d#GuGUWZ(97WPjG=jT8kp`
z5K`CNkY6K7cYez|O4SWv1r!4aKiB$T<`6!w7j0|v8W@*F9&^EbfR-uAqe3R1uML`u
zhTYIosv6pr5GWlMXwwN4MuPjQ__AbQ#XvaJv2wKI<q&k4N6KFX4feC1e~I+amBDRF
zXpH<l^ParpH~jf~TR9l;T<?EJK=H}7A>>RE!UZ$X!>6+29d;j_GkGyZ^_>Ioj7<-{
zamD|v5HSs(8zvSpY&Ed>fiIVc9d6)5I|sIbap!7>qbO^rrsa@=dEuN=Td$(Yg6xTn
z;NZ%GT*0UfpWP{kwei6c2Mm+PhNx1pji`?ds~^?;GM`Nw#QFkmzj9H;?!+<*=OOw_
z6F)anhPd$B{!Qe%f{C>knB#>p*uLe-eA?u9?XVihf_HjG8XWsG3Kj3#I<y-+X4RG5
zcdf4tUbY^di#kOIK81Ou!W%yCxLHEdB*#pRyPhGGdAGPqvKB0mFziMd%;fq<QOt%e
z4YXZ&PE?w$__1{4qPHcvsg3oph!i)A8zsvIo~DPB$#R+nh5b*Kdw-y{jC1{T#*V(%
z(8>A<2a#URZb0$&pthPY$`jt%<21)QPVX|yZVg>0>p~m!z&Ct`_&pHNDF-gaGl!Ql
zAcLwF6W{&8dHS&r(MM{(pAm@kcExSWHcsRSZcSb`44P7IT1Z!iq0iDB`LE3JfNX@G
zoQeTQ#a!Nh=ZUo<De4<Q8;bZkyf>pMtYO8-htz`_pKv@vdAtxPsK!D%-A$2>rPE=L
zQ5chQasq>fHNtPY8IwZhU;CR4+d`PUmh&bkD;y^intcHu1#}{%A?Rip-bk+CcAi!t
zvI{D7bu^clNEg)&t)3*1j&$@jMC{yK686?=6<2R>Z~GJ5YODyTo(I0Pk<<=DmbOA%
z?}SS<!$Mn>w?dr(o*A$zcFsoObG<rheQt&{`ok=D8Ytr032QV)%A+CcBKe?QBJY16
zCv?p=?ou#5950z&8F|N5M*JX%I`CVx6;8pHmbk$;wENAESiK7pS9tUGorVcW2X5+}
z%jLnsXK^kcp2?j)q^Z^kMLi$vl)?wauhT^`A&8hbu)2m#47ZfSudKeO#CjOpI<h>8
zDWoYX?~HqPV6$tt_FKBTg3ZL$1-Ze#h}wF<li9uqMjG}b%XujSDvvmBUMnmdd>9H5
z@)V@Lt_<KBW*5XDWU#8AR)l(P2l_zHZI-jaF7LR743F+zowD`#7}cNi%k0P%MiyFq
zdx3E1!$R0DH3@F1po%dWT}~N5mKl={y8vqFO$4xog)C$vo_gbvQH<%D`41gn$@q=_
z{`u!c8N%BUg=W{63jKekaRRl>RR}ozsB*IY{T!GEv=BUwvU0dSnO<R~z&6gQ;mr8i
z)DC8G$~HVBP^H9)aW!58_6_uPkL}28gWOtyv_{!f&3u)mqEK73AcYk@AJ$ofAa!XM
zOsc%$IL8H<|LY%A^8qPWPCx_xx~Y$&+r<7?XugbArYh*-qMHhk-~dV`Tjl@-8=`_u
z%ziG%e0qo+24fg`s!4K6AtOC}yhnc667rdr^h`+jT5F}C60Diqfv7-;vSv4wg`m!a
zML7H5^_}-&^KnY1Mur$d+RVT#&ei0uByPb3dTv%W;z;PHWO`p?r!ITrJ=FVAIO$mE
zrLj!3vs+-^#G7Xpmp$ixG2rOSi|+E_LF)BK&E9_YdvjlR&a=y`Pj!IDK3nu!k+I4Y
zfkaa)>Gcn#SMX+0g1>YPRA?d^WViL&GK&H=vJRH*091iKR2YBvb7HC+DcvGr(Ba$;
z(y~j?xUIN{L$Gusw9Emt79J~fJNfZQS0l+=C#f#8wY4HxR17R`SZ0xei8f9kYM5b#
zH7&*308EH-mI4L^n6bAhCs_x6Xk-v9k|;<|NzeOFSxnF;)c7S75^uw7#zR8W{5-Kg
z9S8du10puX(ask&u;o77TxR55kU<T2Gp#ub8)j_?00r5&3+=P)uq*3UM63$W+OYJC
zCv@=@jx#{ST8n%(xJPS3u|E8Zm_T*!xok;rO~&q=kf_t;(w9ZyIUx+{gG<|4&%;?Z
zSk~LT8N)Eaa0Yy^srGZ>3~G6qp*FGM(T!&keO6`qk^;MSD1D!zPgw9`j!zdey4-d#
zO^|wO6h&<ajk+SCXh3Riz4uBH`_~tHAsbm_8|XD%F6ehV6W1y-xE(%)Ct~zPTEPl@
zS$7Fj*DIA+vL{RFOuvpNQF(f+KoFq?kXl^n2(~Fi9Agc8srF-DQB4ak4LZbR)!gm~
zp@`l!6SxI6=F8CWazn_tElHHjEHJRKYsoQWH?@rjOrdZJq}`^gI8%5V9qAEjl^Yze
zmR4yhS;mx+$2BK~To}u;ocI;?k?Ct_d&}t3D<l=qgXSfLfbN@DCsqRkI#ddBBrix)
z<<MiY?)t&tDA`FZg@Ij(gBoJ^^+uv<H^lKOl^*mWN(~*}zmgi`f`7A?9DrOZ<usTR
zT0eVs1XK?*H$}ae%Hik*0*k4;KD3zYg5?6OUjCVojMFh)DR@f4qsnjt)H1+Meqm7C
z1AOmjnOOO^VI_gV#$P7IweqbnHH2T%cG43%c{CL|8w=6phL2?b<<j8Rq>>yXlZ$^x
zX~HpqWEY$mSQ5<cI7KZ3`1BT%Wm$1eNFj*Sw_vfuElj{$$Z6<ddVoCj3H?U9=L)qE
z$C>d*FTzU&MxB8vTBGw2r9-+CeXT<II)N@!{>rr1kFiidnriLi(-KvdwNj@5P+O9Y
zKdh44#N)`9Bods=-I2KEyN77gXIv2zF55=zqi`#b+c;4^b^s2CKK{GXY6h747xmpx
zz?a}^Lm_dxmYo?%;aCi2+@D(jpSiabnmcG=aBte+h{1h|>;&;k#?_kT1ZIt5IrNN|
z(-8v&_sol(k!$niZ%FLb&67#^ZLL;PAf>cWT7_7$EZj7yJkgp3poMGOKnY<<LeVy~
zNc0xSb<>2!8A7|i(nX*SyXskhKBNN{AAcr%Hxv#8Z~3~;#PlVZX;z$z8#~g;bT^pU
zM_TB#LRfZ&&4hX;h+5*-Eu?-kxmj>Qt3Um7>=<b%jzMvPr@rnKTe1F>eL!|iY2=m`
zF_;<57_x_T-!=fm6}Kz|m5Py>e$}RFGu4EN^ZMC4zXsa0MAV4nkGB5(PQ8}c57#fC
z8W3NtiM6t*dkffyRQlz#vv=ShC$*e;S=hC5o>f33MoIi9=vGs`f9U1CwCr5>p*Qi@
zGrNXu4!80U=X`S%$d_(}4SA9<qrB#eRLZo1qrm%yk%|5ZZU&DJ&U!?}fLZe~ctcQx
z&g3joycT+?u$mr0f&HS$TB$iT93{gHPWm1>BM-`3_Iqiv6AUULidcvSoCFm`ubbM>
zFXDJxZ+)XDIj}|37hU+QNffiC_!|)i#v%Msd+ncJq!CxDR=rfLuI4tJR_{?xpaw62
zYa=i092}X*ky0%xmg&PpYY`I)1}v)v)kHe8Kv@sQ(IQtctDfo8u`Wn1HMI}O)0fFd
zL)Wc|7l(IpZdGkF=z{lXOtI8+bnqya!6DkoUA+n5Y!m&<{qh)43}TrVYsD&buyO$Q
za}HlF_i{Jt#l8iF3Tl2cR%q&9i&VGHjf#+6UZypZV+hKtFg2!}2??EKm}KS@+SLLz
z%VHEtZ<<|x3XF@0!>&xD(OrhornYXRK`yK(#hH5*M*^!WBQL|j^&#?;VL_XhiAd5@
zIS2|&6;f957r<*^2xkywBg_0$ERBtTil#9r$1Fdtr0-p}wArjQ2HU*4Dk}FwLm2hL
zyt#Z2(Ja944xh(SH4bp0X~iWe%nEEuH^w9e#i7R%Eg39K2nJ5RLP$X&t<z0CYrAv<
z=L|d@yal2+=mqeuN>1j5CVMBmqR^bVtbj~sck6dYgp>OsYbxkEZX5ZERwF5Q`g~#d
z+%p2YaQXk9!T$8**eGxSz~oXV>s(r{qpV1b4v&8uKOx$JUJ^SwZ<DJ}jQvKGe5)ob
z_fCR0<p66%0nK@Kv+4KUerks~{?p@(g?m~|MbS?LnqYI0`bJ<0(R<JKhFXIYy~L8{
zuptPC1KCWRIc3iL#iO_-BdFj?nXD(Vgy|vEiwqHzyAO&w`YVDcN$((Axy)L@?*Jn)
z98{HAZiZP6ZY))o!8rvrKUa+e6&RyWAQvMSg%hMMq)Klv%hymMIeL^3W-&tepPC%G
z5;(oO-Cai_<UYl051UiH9z)E$4RHinH=EN!xpc?ac^Ggj9J`1^1ARTxS+?k~u^OfV
zkTzFrV(CcvB$igY6%8K$E|7$5(-WBrm)}sT*1Jme7EiLSLu$+OjHffIm5XNd2ynGv
z*dXioTS<Y7A((!4K~7DsX=2Cfd3V{y_UkQ`<9n=Y{XX0YVE>h*=M{7Xv*us0Io816
z-h>=ZfY|WYY;}%k)A~P+%Do7Ti~n`zQEBxh>Ew83#i-nAp7$NgnqvCOwlOku1p3F=
zF6^)+etvryyqIK6)8bD@DD_OjC9mL|%p-wfHG&N0Py}SRO24qNNt!pz7MIK`EPGs9
zJ|0UxUMke6lzhw<-5GsiX8$*kYZhoU0-K227K~!-NmwTu+ai>0dgJrg(Ui1i==;=o
z3KA9mGUm`wrj2lc3XGEzw*T3MNFas;!W0`pU0^`$4s?OIK_D7Pxexoj$_DiWrhDY_
zw4n}2gg_l|3W9#xA`>2pFe-eDHFJ`&<KwdnF)b(X;UTHf@<(JeFicCN_Oc{Qs10g|
z$@Vkqg5eM5Lg;%xC!wLVoXf)C+MYLFwfCFl1!B4^D>)4uj0@=1l2{sjK?=mczrrP9
zpKtyLh^p_u7AAA2Fms*->!Uh(X+IyNnXfg1kYYX+1>eMQhOIv_+k0WjXH*k7Z?qY+
zgbr)v6eM878iJGUsEs~Yz}ni%Iw)J{;KuLDfj7KlU^b**Uke`hBv|3$lk9Ne2gI~i
zC344-zgOTQrDGx<Cvc}Qxk)%of{B|Gtv2C6*5SY!!&-5P+KI4dN+h77B13NwNZAsB
z9{(xR$HGS>rsDHRMNtC98+B``!11bjAV-f^7Y?of{mll8#K9%g{uj~bytXJW*GLHU
zTL6|){LnbbwK~;Ps*RF%NJ=jy`FVN(p3*F-l?bdbkDfLJBl)H@EZ3mT$gF0+u@Y+`
zEL6yqBx;#7f`a7msJ22AaplqqUX`|`ENCj?*2jq3sEe``HBCMpIWQ-~>vqX3WwEV_
z+*;Bze^YFOYQFRN3aR^2dN8FZcw7<`YQ~cg#K^(<F&4ho+G9`CYw>1dFJg&0h!t$|
zH5{msh#2Ow`>hNI08n>@r)}vCX>VrZ0{h?fKFA2XaM<#|IjII9q!50^C!nNOxZP|)
zuroTS5<Hb~s+-qI8)hubYa=klJ6Re(%J}YduRS^=<-%(@D`;7m`ZM<4l@_Bg-;IyE
zTAvktCeEsu|2YSug`BkZ4^nIWboGMIwGsd;`bP|fjbH9HFJu~c2bQceWEOlgr*KXI
z)WBg3Zv+#otpq%FO0!<23q8R=AvB$ZF$qN&IHd}mNoGGcdVpc_mo+$D>jP$(H!xIO
zSgjVrc@bT9yBIZy>2>pELERB9a(dmQX&ALEYjkrAh`GVfBs<Z%Mnl-fjUDdgMcB+X
z)>SGvkVBDL4CX<l=uxY0S;InMx(S38&(<oLj<{(w+kGS$jndzN$Mv0dP);wYcWT9j
zV202)jcnwLh6oA`jSn8c2M(Ut^CBU_VD*iexueqJx9dBE_0v1x*%omMj+hHmP2qRE
z!!_Wi#QlD-Mi&ecTcXvS7rKD+nFn73XK*ss8^VUTl~k13Col@QK66lFe+03Dnt9=S
z5EYunQyd0Gj*|WJfObI!>k?xu)Zdl2XUMDo0BfKa^$#pkZ<T~&5nc>O_vxQU2XS+h
zyE)+J+Hwl}H=e22Mvi3(8D+>a@$Q-vIk+(_>J5QHc6N+daEdxkKwNWefR-1CawD~Y
zR?AJ2Ew6!Q6nTJnKcs9Wd?-RwYJcp=I&jXTrdNZBX)}ewe1h9y568W)CE!L%khqti
zvz+onmNEcsp-_xN1Q^PKNZn8hIa37mp_H<!l#0zYVuRzJJIo05Hsf*)V|6;baie+g
z8>=efHQE;}x}OH`LEbiDk!dcz9obE@7cdSWv8b?M2gaZy`G$e@5AX}<$(LB*S79}*
zNYU++pJz!O2g|HIpceF|x&fX;b%3SZV10e`%WGIQyQA*@$79<*#h!qvJ&u#BN2P|W
zQ@LdaNz*YrImJSrWdFKxhlGy+sj$trolvIWD{r2X>;8$7fQ9Shl7T?iIXQ`mb-IcX
z2_Gv_u+z?vNUSFfn=XT53#^+1)}&Qw&C-G2^yt?uu6;hJLw~#CBK@hfgscxEQK3;f
zkL&yiy_V2UO&ofuim)w1>zN@U{ug~C$B%mF>o5XWek;yzx9Q4h^b^`H{gW&AEL0#w
zzR4+}!y^bE6e58?Uxw+POe7}PtnX?af14Y#9QAm0qo12A*#-DPlOG;xZ_hObxe)3%
zm_T_9u1X3>0UCS`+E$kAylMur9+-gM3*C%&LaI3XiNxNrZA6Gk^+*jiR1Gqs3<fV6
zBRfSp;<Bh&3j2AwgMX>Cvf{Wd>FT+>aD5|wj@@lAbFG}iN$os0;InD%!H~fAnQkph
zNVdDvYriy-t$%LA4vX=4K@T=ZX`r>}i8F~(N@PG7qItPwsr8}Gqv62X{?<g$uWf@+
z$5afiL=dRimHpcIq}W*YS!n-(^RUVK<Y%qdp$mZ1!gYy_gXDrOmrbE=VA43_<QnlT
z<-OHDWEC>?N!nW!(d=}keloBP8#kS(jH!1%eI@qR5JX$!Lxj!wj=-=I1Zc(R)4<Cy
z25E3{8-M1K#^FdBcQzoxg^&XJg2$(+mxqU5(;&_IKux{5tmpz0J}d_V8IVbmQ=s=(
zgaPG9v-DKQ`92EWY?~Cnc=QM<owQFgZy=$5A;LxUr3UBS&<*{*(q}5amov%9Tt*1A
z+^JWKUk#@^uKEmHd+wutyxY;8k6}JVy(1X6XX)=O_`Rl^GE&hR?(7fl)`!r)jh3Ib
z#p?eEZVPJ20+e~*Q;reloNxZ2sD0kT0)q;lN84`rN9*m8d#(CIW%iS@&<E+g+>>XH
z{t-+5x}P&Hua&6cJbRh%!9C^RFsPUJo5HidE!0q=h~kZb0auAp_zPz)@M#io9z$z<
zTaQy&ZU6E++Wq0&D^oys10@85y%(u;6`u!=_!!Jeaq0+3lS@u`lYQ$uZl#$n>~c85
zN=35^J^pX1_Q^VH4=203u>2~xR+|WS0<y(^LEBW@$O`pjfmvE4ag*~@mscW(8%-Nj
za~ne%uYbI(CK`|3tF1ci-|1DiWKdi;CIBz5a5nR#d9W)52ory(F~3Vo$JI~-N2q2a
zBVDk#^9Z2+=Aq=W{9A9V4P?XXD%$sL|D3yC79i+l<K_%$*@>ppjwA%0jZ|setkVkw
zGLYZ%Dy^M-+tRa|r>ALpbg)@UUaDtD75F9YM69HMlJ!9JX);j*5F}<`a}yO82Q9o}
zNje_Aq)f*Q5fMI2@W?Oqwf$o9qoOm=u(Iq*3TgEQS$aYSoF-qWz^!RJ1ms}jeN;)H
zRYf~Xrk)#U5L$?Se#tkr<y#g8f1-0TA>`H7Sy6*_vNp|h|63S$5M3E?0IB6WKm=d*
zKAjgB+rD28<gMB8;l=H;hf`Fb7?Z9@>4JG7f(v<N_Jf><fdl}DIv2pemgml5(|^*$
zyf_VTSJuY#Oov?UF3&KQP*jBdq^C{9op`u*xnW9QxH)CW%+v3Zbw?Lep6AL#6oBA?
zaY4Aa<9vUG`VeSQ+!otOCWhx>#tcAJjrK*|%z63bhh8WTlz~q)wI=c6;IS!9V+Xh5
zZz&Abc5%DT+bU>$x&JBkfO`-u&Y{oi9VKx7hZ6GHG>UqAv%b6t1cX`l-!}qMGTdPm
z10(+NOtIr7b3(R;Obl)BNj4nFCy0*n|0dG-+rH?H!DCL3HE*lRD}JXh3~12G-Wzy<
z&T#1ML5YL>taSZ_BWP?OGyH_#TW2}kR`p42mDclhHHI4F`Rh@JxE3bxX4^;wP&5Gs
zeF_mjfbDBgp0siBWGc)AY865aCV~T$WV0znXmfZ)5;a;)z|GJwg{LoQxZW;^-?$zO
z)_Y&*VP+@_JkjkpTk$Xmz63_$HE5$nFbVQi#AxW66PTs{GJw+Q#k&G<a7M9hNX82S
zfFS94DOzT)74y!>3BnB0D2eB|D9E(HE0UOCJ0yw2KLKvK9;{fm3FD`?D>cRM&xP49
z7AvU-U11T>cpT5q>Il4^T0rwBoy~sExx2n~y1IWOZq1pwv!O~SN18egl4H26e79fU
zLv)7m?Fys=PDTfeMd0^;E#W4XWDt;%Btj!2ppTZwob2z-9^!Ow+pAnxmPGUpb9hz`
zwxRTlw&9BZFoqFD3xX0*Zh}+GS+u@}`u^9hQ21`gF?$jJAtFmyGR4j~a=+g7GtkEx
z6kRj(@K#V<y$%{097;Zk0%8nH9Ah?Qg)(D8Q63lXWR1Vo-ZR0ie6~Kv_1E<^i*<7+
z%cVQ-f;A`Jy+9z|J)uNFLb4>vLfMp!dGs1qI*{xdi3l8ATzIwj;~WwkO=)@y=Eg%I
z@Ab0p)^G32zhNQAXTXQiAN?KEbJ_7$soXCbppwv{0jE*eRUyjj>&TR_^DxZaIfX*O
zu%YEjINZ@6n>p|a_60!5%x$2dz|Eg3dd;>o19^jVRh%d2SH-@L-K~>#m8Z9j(9NfY
z3HI-Qdd}y@w_K*$F6@3TmK2#!Vx5zzwJlW`86*EtX@0XCu9a%cG1Kd3qo7jPg+J*}
zm6$bcevkh3JU(!y(`8vZay4tZZ%-;-!B3B`zQI~C><ka@BRm*D6hAzv)w~lvP#iVJ
zAlMFyBditvmv6bC=OxuE;dA8w@jHll`I_+!eD}IXw3b%}+9`#lCDhmUDyhrNdO^%E
zUOSZ#^Bg~y&qrOYG{Ez9eGq!nK|&_GhLY(dR;(nnX&mi2#N=~uIH6J~Fc|3Dcp4UU
z&h^@4EA-W`Pdk%ic}>#nc7RRx`#0F}<}pWdpviA+dDXWSQBupC;5`}tX8jiRs*_E)
z9K+HLoE=IAS(c&FbH+b@?L#ij8p=RJ;ceokbk^{iTN46IbLi11JFoBBt~sU)_$fi%
zZ+NcH2>UgAeQ`hDEf@oe@4TOuFR*XFvqQ%4Kkh?h75B^6zR>{k&NvD-v{XaYJ~n;6
z*sW87eOACQe>5Va^r=x?yW3nSug3U;Z!}mH;ozX(^BF#TXFCrmuXmraHdlQ2Cp!Ec
zwpXy1qEFm6>SzeM`h>G3XG>66CTFGP31nu=&VMeg2sLbzUIZ!1_LV~|9>#sraCU2)
z<h`;Co?%N>*JbRs+e^$fY;HHsy)Jmc5p4B3j7IAe1$iT;YAsz0=nNy`_3*sg>sDbT
zE3xC4t&5ZY+`vtIYwnHp1aO68D$<YYXA}%N@Ys*=x$x)9zY%^CpNMZOcYs>c_e6|G
zD$0s3{=FKF`(9QCs7xyzZZMfr<@b2huF74Ko+WMPoVE$3xJVJuLP7ghbef2GP|!!A
z)Fw|W%(-7Bn{h`x+R%|dBsbZ(T-PY6ahF`AUwax%N?PFXx+EGFu`9QM)a5MUK8r4+
zB4yVjPGdKf_gVhZoiFvs&Etg)>l(>%YVg<`9c^FoQ<PLpgWi^vPD=j6L5d!i&i4ex
zz?D*hJKpYn-OYx(H76L3PoJZF8#&P&)X?Pm<5Ikp5)73*8`N{bH-#&=HkmEu^K;Y9
zj@WXv+Jb{a2WxEb`DG+B*8E$_NYr8~bTMm5&AI0?eb;P}EG)d0;FWZW#3LmqWBZ|s
zKmYKzwwwX=1L0ovOE+49!<g)RP4=2?FF19dvY^U=$jwomP4AT%zpj}wCD&jI+!Y5p
zDW$1r2|u;fZNENuIqhHc5{cJ7YfuQ%`U$cU0VjcOFmpR0rOo1Q9H-XtSj&04!J}0~
zlY~CBgB?z$v*8I7q@lk)93_R9Yey13-mDEeeo}MEtc-yuXTMkD3u?uSk+eUa^tqkk
zFP^!Iyk3X&9s<|Cx{g+e5^-&Y6gfZZG-Hys)HQS2ov1k*LX)dfP|#tHPsx{6*l{J&
zex?`0-_w5|Rg;yghnU?P+C6r=rY=2TnE+J8Uq0g1saett^s;Yarek2o-sgZ@x5C5>
za9U?|WyQm4tIMTx^Wq(TzL`+ym@Dh;2vjDw)3^*ruVbZv{7CRP#5_8NZEuBYkm7y_
z?Og7+B0=aBe~o^%AefdX%8<V1tc@ya>akq1{5jOQ^lm~4VtlMx%s4KtT&Z2=T})IC
zX9eE^O-`W!Nj&^0`N#-FbEns_IxuB&V|V#xrQf$<qNKJM@Y&y^+_97>#ur$n=#fo{
zPPi&l4hse}?a633k*$@Z*sE%Tt6MXT(OWL@{5J5%pR!rqz$BTi8cjlDtt)1o&`nUO
zm+$Lh+gTehNpG)rR#|A@Q4KenhEt|<Rwv+BW-ZNJ6qojL$=J$xBv2*OIb2%I%w+85
z(<d%E?|9x{&ern%nUwf!wWnp6H=wyTghh49Pb!vxS;$6wwp8xfM31p3D&%ZC&Sn(n
zc6dKrME*GUHGY&gr;l#7IOta&v9~bH&!}l=y5@Fw`VFo+aisJc&H<fOXPmd-&tx}_
zOp)-|#Pd?62yhQanjda8wPkCpk><ivWhSy_nO=^!i}jLDnFTZ=usXs*v+@mdHA<to
z>P4j0Tx@En)3U=GX+zKSql7IFp2(<W<Z@8UGqLAOpR4cLxy5y7H-@Ag_TgRAAK@4r
z>R-qI$UI2M>sGt*&+)ODt*dy6Z%Rn9U2Fo5+C0tJZLDqv=LzYb|6!B!Ml;Nz3m#J`
zkQ3W*z3e<{?5{!^KKwSJi1K<fX>p~QPGWhys-r%zFkd^p)72QO<L28b41PIzC4l0=
ztS~@lHIc2CZ+3>)OL7oIjI&|TmWJGES24cbVBZoN6ki0@Cjy62%E`ltaIwTuYJQ_o
zs&BnxO34%;UAW&V=m?((5-1N67+v2)+T8-~83`(ha~if}J{*=&;;4bfwa6E4u2OBS
z6!*AB+llH^W*sHDu8WFM%5(1cXpAmU&F@GtHa~B>mFToF^XIB|RRV~t2&tp~a=95T
zi-laUdFUMoFE$k1@WDZJpu&;is_DElkQ3rwfJ^t~Lih#&t;6i~4DLD413F<YamC)#
zIp5lY>0&#n6_n0Rotz=03UF@eZ@%>{9EtVJ*UuT@QfteVUQPzL`z~-u?Z5mdrJQB{
z^SRdTVjdq((UCT76cx-IuzGIo`keQIQ-|bSa8V}By(q<8qT%flu31+2nN(2BR|W*q
zT>cz37B%9!9`|-9LzDkm4x*U98wwf@C`-e28<ZJ=$hgavl}y%2Vbt9JmV75&6PqWq
zFx!vxGUGvc{U#&qvTz9aQCQqOldezj$U`AvEot&o4gV<V=BmKSJ$+`32?Ei(l*Vir
zbs(P9X+LY?^wpTL8s3Sx`pjQj4oOWa_|K(DIcd?2(>q+hToNlO058_O*O^DH=YI(L
zYx|{q7FRDfD|{JQoPz#coHr4SbT1Y<vmd2m*h**(bv`a6P+gBI6y5aG{LQ7(KFd@~
z%hqt16LyVZM~3|}BlCKS`*D>%jx_efc;iE<s1gzQ#~NfYX~*BCX!vR(GINnKui!dT
z+_3tW{!>TXQxZX-WNXc&TM?;}GRIqFovenwz7NcN2(?S%FlbO8f3BEL9E>$9WM)pX
z!<Wpiy^qO1^(H?HknHYl)<-ZQK*K)0*GELbssGif_LYl$6M+MSNWP)eJZ!puUMhvW
z$0pFZ?vE;zJ_aHd_D&oJ>NdF3%!)4|eqpjAnhmAK$SJH@h>h_FMV4H)#Gu4k-MAH3
zN#>l-Mq$XVf-B_p)0`e<R~F#CS~+B+T}a!{Qw^P9Q+EnLhKglcCdIUXb_L!e5KHQi
zribSV=l?}3{>wR9HPt3XD6`yz#JiB68TV33(n`TF)Hb?xf`ph#x0aTg5aZ^;wC1#0
z!%XcNC_sO|@pf&?{e61iaP>QndzXYtyRPYpIuwHs%<@?htJ0egG;eMEYHp34Oz39X
zElc+9JUW7jQ~3=Sx=>xeskpn3HxPMTpjvD*x~b*OBTPT%+v)L?T0V_cdhl@INAj3|
zBhcliRaBKcE|rDGq`t#SE^66K(%DXD&(pHmK4ljXBIER2yjFqvsO}dp-rS-`>kk1w
zc6qe(X}^lIkGmN85Jl;Fr^_=+tJ>PWge<G=UrxtGjJVCoG$zS<31>6XQ`~B$V?M|j
zyEntnd-@Ea0=nx_6+TFr)x~@@A}X60NB;WtTQ)2wYHF@Oor5+fayqIeALPAUA1H8;
zd*LI`cA4p)LfvIoq}0}EOhSZ~SIk@uc!4c9HX18z{x;MH%36uJAzP>QPt$JP`EP{2
z!~-A-*0fKXlWo#TrOvlaq-&?Hid99hC$avX7P2g4<bN-98x9_H35%|pU)x|o-|v$@
zSh{-YtkPJl{~kj4W2d$lIa$kGm6QGz?4jQX-EsTZ*xx;kfsYT|il9@u-B9Bsz-Yrk
za&(ByIuRN|X?t#UE@GyLID9Q1iPAp)3}e3*zV3W2&J9<OJL#M|@);>X=a?s%xhz0d
z++!Zc@R9=-#>ybyTJJV~Qq0SuFO|mJ)x#{n4zFGj!w&%gC(khy)iNEOzw-n0f!Q#m
z*9cL5vL!q=8&aff4*X9Dip6cCVPw@EXKuX2(QRdL)$&LPB+P(>+N7oAP$@Y(p;x<$
zN`@9@U4$dQvcQ+L!CAtmo60UMA6GoNKVX|zwzK2PXpI8t5GA`~aOK95Uo)!vbyxU`
z({jPTS%IU2((WOFQ3f5oxm0&8xEf0??UbvRb-yh1lmU}e2$Vo71Xw|jP{`b70D?6^
z@Psj-%jPo&W{(**CkoZ1WMN;JJ5{g1*qt_3WTSGCp0*t`=FGi>shdFi1t$|SJgsff
zuk6;$_$p3j6rQ+3TT!jKu(fOmCyjY{NyFok%)bAVBz;p+CgI%=jycCTKLA#4x*xVH
zI_HBvRt}NpN-)Km>^+>hG*yvneXT8G(OUJ4a2XZm(7nZMj%469t_%KX`68bY*Y{FT
z`z$uqO8<oOk<3BYl8|QpcMf&nDRo`}Q;~^|m(U$V>lR<s@nakJ28J0X`TJ*XGUB-o
zH*TM!%Aalq&7zaUM!~usT>Omi5|i$lEeR@z8}PNGW>w?ZXc1pwfkPP#MQU?fHqvhd
zJS)e-&1%G-)2ySl#;c>jG^DaUo5~i$^?*OrYNY4kU5bqlT@deK;h{uzRQLx?$Wk0^
zB(vHfwrlFgPd*j3UDNd1Te+YNPrK(LzXp1{3TM2`na~}CtrQ%^f_qvHTqp*?{!8!*
zRpz$A&s->&kc0SxeOl^ueZb>>^K@bI6i8X{@02Vl8~f=8e=J8sSu!@KpT#0G?k>c0
z&*{v#vlGM1PqrFjFH6qChM!}F(r<z+|F&GoO+PaMkqd;LR1o6ynKLuQM&zo4-e%bf
zMQnJs)(B7yc3^=#E5J&NFGdV41o=ho(u{21T5{#hYnvBxQ{!&Qw~3s9N*th9mpBws
zYZEmmF&gGdqS{zb&J{)gsvmGo^b@q5U0$owo;j6}E@=iLCnO4D%e{;wpjbx-`~k$K
zxeyFEFKpy^{=0xD?Ox~EbSmk>=FiJJZ*m{nwZnG`c*=-P#0iafB9U%vK6x#WtR`j3
zu&^4Y%fz$`YcjG~00WD)7~abQLNx+8LWY_P_><6q(?18c4W$Hj_K&Tl97g9mn9bLD
zAMRf$01w)9W`USi?V46ZE_`xw;LOf3x;jTfZ|~3FC5FX)u5vPm`dANinH2T<(Y#6U
z@)Iy4?|Gj`%5LAjI_o3_3?WCf4%O(4Ve1fk@*EXNfYvHbp2g*QT&(DPRrWZuE>*ju
zbe_hgCYT~{6GvzJ<>%K3qtPxv9449X)}VywYGx<@`kg-)N3G3-UT;a8MaktG2h9{j
zEgiU%t@_aRP%MGEusK#`&J(vt=llLWIdW2%UF6&b$f|)?7jkRm$I!4->auQGfdQ+5
z3}D9;sXHebz$L-ywpj<vYnk~)DZHu*V{)}y+)*YJ+Ec-0Ac6UNgwx|-nL>9cM$_7N
z{k-Yhh#+RdAI;i;o(DJ0&HK4IQsI_YI#P0A-P;_#qXEW-{r0b|(PX8P0w-u{CX!Iw
zpYgD*aygQXMVsSWz*1oXHiT+CnZeZA<~)Z^1h-35I-RmTpLkbz=D4URpEwHo$S~c@
zXBAF*y>5^8)!uW{nZ99;)_%MeA!GQ5I-0&sd8NmP^?$~M1{m|&A@SrjX0TaXVl*Ob
zzf$Q^y3XX{n1sw`Me@f3q+$+Hv*XSN7)Ah%6zx~Wa-MYYT$Pt%Y9~J%j$tUHq};|%
zrIrfCA2D@DYCg9A-IeOUyQVh8g{6lG8R${b9Y9>$oQrluKQFD>(S+~pCRArraHkW0
zU_G7!K8a@0SppyLk7*F+iKGd)vEJ;(u3;W0H$r?B`u`qs68bK`g(x71&VL`wig2?c
z?lQxt`m-A1LqHJs4aAJVc#Gehh_g1g?q!{MT?glG$48Ttl38P}$cyg3O*@!MYLw|r
zdkD@9ObERo&>F3V`a3z6sp<_=+1mv|3b|O%#M}CKDOH#G>bAvHA^_6=NeZ0S!rN<+
z4iYe(Zi>NlR{a423<Wz7jR@ERSdG?l0^+>BXuF^M7B25%i7{+`MfJ=a80Vhft5O^E
z+16tBa>Age0GI!_Ck&8{PG)H3wynF7xz?Bqe>DQ#_w8<mks2CE_?hz7du*}^_nWIy
zH@iq$yuWHDY9kYCQKJ<CfWEZFFZ`$i)Bj?F93x0=u6)DmY|V2}pa$TyDyFg`Am}K(
zS_mw|)8l1Rg&4}`5yt`?Lr-BgUU^RXLI-1L{@;aQ<iHwp!4GXs3%^h*Z`}{yp8Z$#
z^IFFN?Rr%zTzNED+dp^FfsM(qtc)CJVZpd5FZWLW8<7e42e8df2|4|L;h+$a|1DI~
z7n*Iw4c)M~8ky)(ptcY%`2~b~Lnmxjx^defqtV<<hDvMhk9rLGzd$eorp1kGF=Qz>
zNDI<R_}9#5;8JE2vq0luAt}aS>Rg_(IlXScR9FbwQkt5!4a>_gi*v9G@xQe*C<~aX
z7j$$FbI>AKtE0znb^X94k8PhHmVsLH<PYck#L*1SkA!daJCFZ-E3^;RDI&BF=M`!#
zLqz^NFqMjx>Pw<b3F0z)|3$4E4Xv!JvTqZnW|jcyS@-uN;EY|ltzO?)LB~pwx|0FC
zcoy-$=S4*|I<Yko@XdghlL@SqRFlgq=B=p%J(Qf{vyPE%0jZ`F@|o$C&k)vVrW3}V
zMowjs<*FFXe3}@<|DTgtkc>LY;gGzbD44Q=w3n;v6jt02sZdVdZ?@9>!J#~l=E%b6
z<QrkiB~#L3myKinJ3jLO+nAz|4d9<tikTxtACP46x~hq&Q-ccIAh7-$!IJBrlN$aW
zR%&D0WF@d~iXoYCjArYf6muYeU#;W<(^%sSffNP5pc2*V6xIv=*n1R}%ws2-&u$%F
z+>FgoOWWpd1ZJHgkx4G~bGOIK4;T_RWCF(I2TWCN-XX#~hXYK?xaFm5!N^5qVNI+o
z4kJKcX`cd?rPH+^y5O3hpG1mi1}$2<Nf`<ndT29;wW;41!2-(X2nFm4i!w5gLy*>h
z3g9wfnZUFbB0nd(mnSlz3tql9tRa{`mM%oZe}2T8^T$tY{ui+*3IzaDU9p^FW%)05
v{?BT1MtUH<8)kZJz9Ih^<Ny78iTV~WwVt3*O2DxR1o%mdD~Q#L7zO`-hPt$*

diff --git a/images/UV-CDAT_logo_sites.png b/images/UV-CDAT_logo_sites.png
deleted file mode 100644
index 6d568b8256795e70a9f578a17d96bc36d256f09f..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 116663
zcmb@tWmKC{w=Rknw^E8jkV1<)1a~L6I~4aK#kE**DDLj=?(XhVEV#Sgbnm;zIs3bR
z?~f!SZ^l?t*PPFkgerg}(2xm`p`f79q$EX^prBwTprBwy5I;bkwAUwRKpwDKh=?di
ziHHCce%P5>SermWxk8u6Rol%9;`Qs#OsV;nC}q_KD!?lVW*Zt%*sRSce9B={{g!IQ
z6WZ-Uw<=}P-?iDL5L81(7IYQ}-8a<Z8=X~xmxSq&M_0shKEb8<h5R%B;e>!d=kwWF
zD{J(yd4^dbG?(pxy4^VK{lYn|aWGy#B}-G}bqa~AWBdrQR*>j9iqiHhn3jAIbC$`i
z2mDKrMy9t}d<?#uka-c#E=W&OUb$*x4)x0T$3mrQ*sl6+sQ6eVyj|amkiCRmNHxN4
znw17aGzs-4)0iMN?ob2P0lHgMM_lBr(Hm9_IQHz3i03NZQ*ka(e!vvHsd(nyI;%Vr
z`p&)U=1eek8G6DScEbrl>t1kyhMkXO?z)ym_h^pa-iZ2li&fW>$<~3@r`MIJoZQ5;
zXH3`4*;Q`|_vWrSi7ox}%Z#0qF6uKsvdM#UqD8Tmrbb+NgHxm`*}uS5)J4vE6qTYy
zWJ&O~RG5_{3Yyp`JF%Rm63#p_ul8!})%!6~>-cpnbN^dit769}7``D>Poww?k(1iz
zj}_Ws`M=QQa0p}HPED)R?AP_ns9pqHb)}Y^)YU)US-ZWzkFjswr*;c()~=UbzHGmE
zx3;8Q^e+6^v`ez|v8mAg{QhUmA~65$N%0pvE)|ivLKR$myEMHoLMj0Y;ih8bzyalj
ziWIECMmu|;J)Q7%4DYki=;~FuIIGJ3LvV^_{_c@_?TGc67J4Ui3VMa7m@mC5#EX#Z
zB{dzPpir>?y`Z5|((s_508mn*LMpD%Cte6?p9bUKDCjMQ4TtG~X3!r2zhFkxdK!LN
zXVPol4gYAE<OgTHKN2~`PF`s)q<<N$Df^C(5C2IB;D-qWH}#qL>1r&&b5uQ7F0$&&
z%gM>hPg={bslS-E9?g4H>Ur^vc`HA+lt{JHP>Nw<!ofic3I3lC9W3*s720%vUjP6R
z5$6AV7?1(_G&d0c&x8N|83FDWmj{%T%KttD4w`J@isV0!<^=%ymSH7x$p6>DkaHXo
z?*EhA7Bu7t62Xr+|2J>{h=kO(A^ayX49F38P_ae-H^2Yb6HeRX``=Ok5M2c!M<7It
z{(mIjraS*%ihvO5f`uHx=SxZRpHgF*xncY#F%nG7?GtE9MXdi&=r68OkN-=*|Hb=-
zuP>$<1|anJe<=B%iedkg*#93YX2V7Nf0f39;FrRkGpl+OtaudBkZ4+%AXgkn7v<_F
z?eyQQ{erM2&lDQ_U#x9leXXpZrs~oGrD4Quv9gl^?^Wj(lyCd(rHMAkOE}@k>`sJk
z5g7bhHmaV4VIC|t>X4j=tc<kT)#oGKpk2mS__Pq7u2R^uC+J1o8~py;Pt5xe*^1Tk
zu_pBk82<T!pbnz%r%c7&rk08f1|7;<BViqR+3@l08zjPF9I0qI;@K!r*1FSiZ;|r@
zS4;YteqyPGqJ&tMHk*a8&8Jb@(gf`*ZMJItQx)C+)@TGg#Csz4K%>0y5G%66faj$z
z+{8lU^@?(}`V}T=ggy0QjH!lALmTe0u(g*CcIOhZKenZCbBCJX)asKC%BfRNwP37M
zrKAnB{#L91=H{!wnGLf1lBz7?US}%}p(Dwz5LXIO(JnH?MI84)BfJpp0c_y}qh)Y}
zOn;2^HIX-2mSKy|I@Ix3Qfg5bUD!OaIj=oAUyZ#v>8?kCGU)l_6ib6*9CguQ%13*S
zx`P;wH)yJq(ks<UJsPltBcAEiA-+EZf?O~&uvc6WL=pf9E(y;urC;-Qd(DVy=;))G
zQ!M^MHbIc3Mx$~jb|?oD0}t~cn^*sA<@3+$Tx|38PFrlbvv^)@N0gkrXCtgiX~#FG
zdo{V@a_QTHk_ug%?u7rkjDiIefIi^td2}B&#2yY5U9`ipe}Lcz6;)rRRFG4w`CYcn
zNB*Hlg7FP0+gS7Tt)4<)!r@@~=E>~qv9}YSz))?Z$c+OG>)hZ#+HEPQfQOcRU8Wq=
zXkB0>srxUu_}%=Y@Om#u{_o2N@-E6`B$i65Or=U1y08zg1LQV%delDAhl}RkX(vP`
z=;%(dqsGZJ1*Gkp<?{E(1-KY~3$F*5B9Wy)jG@~lXPw;n)dql-YK!2W>}ZJ-+aJ+?
zo>VL<7~)7Xolp%iMi(xe;Gfa5`R}(-q)D1-8||3DCVuZW{{=f<X~IU*{5u=%>B93g
zNt@Muv2aAM_FhF()Cz3f2KE*TwWeF|^-cyeftF2k_seKAT8_8u-`H-Zz=3IFhuCUc
z-az&IR@#r_qABWmU)m{1Zba1~GQRwaETmt5V?fL%NduGS-c395LZccSfTvCBL7+_?
zZTo?HIbZ`p=6+TH*G^=%rNMGK+_uw{O)g_?(2{4_kFICRx|UasAox)I{%^f}rU%=z
zHQ8iWY^7}8a~&Gy;l>e7tLwqRe<?K9&leN?J&T4P;ucxmh!@>tX)X?0?GtV~N*tki
zi2x_1!@SO)o8R2G1LK`K1`HXwO}gOR%{MEYcItbJJ?>vZ;*IW#<5a^9N4CkPVj3<8
zfC-VI{Oaq$6+5C?xyL~R{YQ>TwpBc9K7vJ~!g>U1qow89SGk3U_KM%q`5gUzM`mXK
zKGI5IY2uM1S&84*ne8-$4XKA*2{w0#%W^(7`a%H^zkh&f)4xKhr(X+2d12BwDjm@?
z-43ksGf-1gna!1_D~!F8W~{I{>4G6x>O^rqn+p<;Vz>~^an2bIpbQW^b{tqMtFF^L
z<Oc0(E@CXA0WVW|?N(irxScFz+Mk_~(TwIa%qBB-RDQg$r32MXZh|;hOBjK<?#?;*
zxgJyW>NmLz#N^60J@k2B{wvaOL9T1>Wpo0B&pb?MeuwkEM|bW5wZ!%c8V{sN%G_#e
zC-S0W{>L$`m8SC~221<)ED>$X_`=9d<$RAtm$?#?EE4+tk`gT%pj!fZ#@8skR{ciw
zIsQ(Z`tzxj4>&H>Ps{R)wvTh=+N)LZwqB9Tf*Danykd-Pbv+r5V04=znL?Mj4lgSi
zwGt+v`!<`Z#G{O_&4E(#OlRp+&1|4;+Cp&9jTJcOo9=%l0ys!op*x5B=TqNc+I(0l
zX|9Vsy<_)Wss4&8ZMu5k(reZ-T1t!azRi}eR?Eq_Z=ocvVlW<EM+6HFYPs#Z^<?q~
zk<w0%3M|QymT$zdd_^dVKP?@+$NHGg(q{q?0`OpAZl|T+%jJBAt3ie?qay{XOQ?2J
zKPx$`1%me1pq^JZIcI*`1h1=XpBZ1dbBiAdu7Qm4E|72jNWEHY=M5ozcBT__S=Qw(
zooq>u6VOfg>c+sMDa6x&Q}~B1wJMU~Kyp!UMq&miYmi3|u>vI|%fUf}i~a;Bm=IOU
z7A_ryasTB@^jDD36^#nX_5IuN`d6*9?P7}OrQ*-&JU?jsMr=rw4G8_r)zs8x-(Cxg
zPUe6#d|nero%Y!Y=$|ulj0@ZM<a@ux88f`P|Ey7Fc0Ji>=TUg?5oBQ6A8-%pF1f}i
z&thEPeJo$(nD!H)Hs1egm0egia!8bh?KMdspv+N#N(o^tISg{G#@abQCIh0K5iel9
z)BGKj@{Rr!+wA1TQx$92qNAh#IQTSLK>y|DsOMCLh}z9W-?4%~s@4=8k)i;v*J9t4
zh`V_&81o)C-M`t{X1xF&4R30><hX@yiWI;$I5RkX+BWo;wneuH;Y~OeQ)<yfPc0+!
zbu`%NW*6V8BKawRlrD{>ipT3|rJK#f36ROFY#iV8FS)ouP!5QsVOtZ^oDI=_<0|`i
z*PTPXP{ADLkF{TItX6y&i?my9BQHIpsTS)SeWWi|<sm8px74rGA}9y3q3z!U1Q`8A
z1451*{OpAb;$#z=ZzYa`D4#T1qN2r{&dslw1iAseuYq)A7*fK<2Q?bIZpQ-@b@syx
zANURsc$FPa7aG2-bLU6%d-VdAlPPd~NAdgxKR|FHFu*qG7dhg0Y?!tdow6SZQ6=i^
zNs8e}eIg;oO}|?S;XG#dKf3>o81Dd+-XAO_xI5WO@S&_sPm!&gPx)TZ&p8Q?bO$&Y
zk#kS8JySdxe1Al`ZHd5|L5?`3G+t~WJvg|8ky!{Na-aV08ush3x`yT?;OF#G+oi%l
zfi)a?#EaWrRmM%V#jE*UJ|6{-*uR_x42D=Ttu=o*eJaT(J#!7Z&trf>q3DM-b=0Yx
zdI?*g`4>I^wYg^)$2u|_Aa-5*^N9KSzLEAoyz(BnSaR=Eru1J}j~8D9A+To(&>1R1
z@tNCkTIyS)5O4E@7<Gc0HnzDcLc2mGf6$pg9u|K^9Pd~#j^-A^%a;IN?Q2zm23s!K
z)kfm{oZS4CAtf9*2=E$z_t4&lqln4-1c4{t0(qVhu(#7*@0nuo?X`MByw?x*kAsw>
z{2Q<sjIc}uLLK_Hg0}h}wGZ2!pV2IyQeOg%x05!UhJOia1%qUP+?YzDN1~?0&wx75
zUu913E$7kV4@saaCQ`#4I0ElzvqA?lS{_9ynaxe9)-dV1>3kZ*JW>ed!%l?5-;hMv
z5<NNN5XfbA7$L)Ct;d|s-$6Q_!tcAX-&ob&gtg}?RYMv!Zl*eMwQcfjk*<}`o6;5F
zeb=X?g3nonu<rJZN6V;1UC8Un7JIyakP~{5%g2kj*~0%l!G}vhedzWf?Icz%1BedV
zadmq;DjW7&F!*B`Ne*!r3A?gj3#9*o?7tx4ji`q_(W;G`Tbb|0>F`gNcNZGjh&Ser
zhQ|9zOuwd3(yiMC&0C>e^47ON5$duN*t~@jE0A-$#xAgj!<^`hVwVVRrC309S5rWW
z=~MC?oZs9(tZ|!UHWK1jtH=6<AXEgBo@ah{^$GDEl;Fc!Y;|=7*noa5+lVdGO4$xY
z5^@WYR`AN6)2*gE=Ou&f^@wp^3*D^DkM6qkro7xsypo=s^UmNO7e3)rV1D01p0T`J
z{$XOV82@p|QTYqPWrQUmQPd#vBVs=kDd_cuAKER*$E-ct&MNi@;l@5NtunfLa^tm!
zv|;Wx@4o?W2}JBPnmiLCA%F<hR{Z3pi2KZmW<dGNp6d_`HeqAWuffS04?Fz!n*p^I
zpSCsmMvL3}kJO(lFn)EHzW$uVx5F~uD#SKVpR0kthc-P!DMcV=^80XULikzY8p-F0
zYbp?fr1vB$k@p&x>7)+Y=T23Xses{lxt<Tl?7@37yz<jOaH$K))Znvrxx=yDNwsf*
ztBKvNJk7oF&I<`Yx|OSXU&OUFP04pu1h3MboHW@ybO*~=4fR@!lM-zBC2{AZZ5R@B
zu=j3OsS~Gh!JvHTZu(PSdP(}pP09*7*x~RZS_Hu4+F<Eqf&47#8TP0qu4n_<Zt~#1
zKbP|r(E-x!p>d8~U&w|^8(S@gv{0E|Wn22Xx&VLdzx+2ZdZQHXoA<Q<?*f$+6yg}P
z2|$v^ydZ?;>^Y&<{BU_i;IjqYNxst7`U|A0bK#X~Qt9yBpQO;?W%HW$FmgH9;%JvY
zW0sD~C3+^rJ({<cXm@c2DR!HwHB0tB8(VXUXSu1I_n6-R<tn^kHt8Uht3Fz*!5T!X
zex6@*)iu3WmN{;6Ibo#r5@tkDjy~IbKVVox>cp`r8``c2DP)@!*G!Z10D?|J^t8C?
zs)f(0^Ks2RC!$HoWl4dd@^#xYCaI#e`VHGmeD8Prf6AyDm5RUVuz2mI3}2_n1=DY%
z=8QPNia6c^OLUfhsl}~dX9y`UqpKy8ftdj!vIny^PFG_;7^Y^u;M_mOsHdkjKobwq
z^L**kaU;ncxX+H+fx#$0TgUH^Th9(KQE|VbBj$79K6cq!Qu>k7aXTkX@EU|^{`w}P
zi#f#huZCD5j&X&9&cy0MzRn&A`<=wZj)W7(6ny{TMsP@2O{n?X)XKCq8&2!f%c!=6
zA@aR$hx5LD%Sr6U&6ohkba5@Xm@YuQ(Zj{ctWb(cac}#UOke>XtKSPPPZv&0a-He5
zd!R-g4bx=EdHXn!_xPy*qt1;kj`!GWZ4#d4fryRHzDPDJQPS@Evx_zdaZW%mXeRj+
ziF<&?$Mz$*r|xGeDP)Np!QbhCXi5mX)JPf*w1|0x(3qzDpHtP7oM+O$8k%lztF&R7
zP-{=<?zio!eA<X}-~aXIbQ%v^_Lrmy{PEcR&YJ!8$wwe>8+Gm7ZK$h4EgbNB4N7yb
zg#n@1DZUg$HK~*G)tK{Yj2k+KSisFxn?b{EVF&f*m(k)*!?Q|10ik+69E{yD*U;3U
z`bHoukApP?ZSw1M&>kb+5O->b#=@3wsurWb97;d}=+n8MC;;){x35%^Gq@U&6`QD*
zD})Y*T6amjR><eF8rl7EoVd>AoH-=+$LVr4z1Pc1%v`RA_R%s$)p#mUq1DCB={20}
zX6~49tfH<yc{fTOBO-!OISXrDkV_VDJ8!mzkjGl}8L?7T9j$|uZs((#`oQ#1ZN;du
zZ&Qz6qTd+LZSpQYcfxy?{!rD?nAg{XURU$n&Cz&e9^_!BLQehVXy-v!3Ir|0x<np&
zZhyh6e^}2m+yqVtV1h`bTsr!!O5#{XWJ*QqZm4AR86hTJ|5!|Y5qg?u<kw?4ah<9B
z4<yzoui-6{oA^n1wfI|&=oBdY$?Geb=c&KBdpEYo%Hm8Nv$u89^jt36bnTZD4BM~i
z%}&a@oxv+R_;9f%fHIxMJznb6uchK}#puWP!Mu*BI!rFEyq2i97%T@GPg<TttPqiU
z-F0L=>R{W?1Xk;YbAgV9Q>E7nCVN;LvRUe{`&{e1)dF4?6ZS_lWRqS!GqjKlF9m4^
zIVu_86#5HVVsW`;XWXT?OU@5~Y%VL9ST&hZ;;{c&BmOp3+PphfcaEwOD2Kn9Xi6je
z`)VQf^`@t%+??vx{rD!^X)r?MR8`4~DcV(c;Jf=7ajyAoWG;Sf>R@DVejj6dxVnV1
z&znA#CS2^EhSxwMbh_kG->ekL1}XXEN#ZMT<!i1v?W7@PhB2#WQGx1~Iw`4FQ_AkL
z6fVm{fKAyky(87k6^6^KMCpB>(s<LG^ljHc814P^@iF?;8h*gx@z02%CcG~YD8Bg5
zJjXvZ_7i6j(<&+oVJ!n6W#x$J|G|8%_9*TK8?pDh{=+#2fjaVjN)bj`jvFI38>9V#
z0Ddajw)nNMN}F60Z441v?N5it)8%Mrglo%3neT7tt&GX>&5PkV$CX3|XoeHmJN_@(
zXe?f@b=}`6?(ujsj)S}gi}LdPk;7-<u>=qqpZjQPo3F;aJUi))2-~z+;6E4I*h3>N
zNEO@U(olI%1*GrEzfKLaDshx`WMRxwuasd&@EX%CNY%-v0{d9T#jnQvSpB?>E<x@l
zs#sp{aXTg`t)iQVM%0LuzEJ#j*i1Os|4JHMn6~=0$XQ$qM@@QqZFO0Jms?@2D$mCH
z{uSoi=+)l$d$LnSj16Y2zqrV8=-%0VM`nD=!6q-=ev91oBQJ3hn-~=1Wxe@1H3KdD
zxA$`2mfuco#Zv5=e)tmCDxVNx+IP((E4?;(FF>iV-aSPvHi)h%!<G7E?eHJt2@X8b
zf&EKW7e8&uU~4`>w+phbaR36+%gbmkx_6i9qJKHbMkw&4Fj$MER9ner1b+%TAHiCs
zsbvGHsA+01jlT|mT=BZFHif62h5*jrzGYnCgNS&GJqDOIT-NhH4x)P~0<Saf+j0A2
zJv^c>Z_V*+Ua|r$*A0ps4t^wvA?qA);<HMQ<f7Fxj7u1urm{yetLPbkjZ`i*{PjYV
zTN-n_(o>*{c;S44g9(H7Z)Y`(*-J}iwG^8jgMJHsjZl+#(U3%-c*#my$7Q;Hn^^CY
zOYU?pN|Dcdq}my)n%^UTbH43`PWEV1U+opDR|f41*Dz*)GO1dI;beybMAYUZRK8ba
z-K8V1>Z7!Vm~!Cc)0!&^&xYSlfrHl}PbLAy<0Lni5-yylIHb8T>HC{U2%w1wh-$8D
z`@Nx67rYU62_PiBsFpk|Q*~?2Y_UL%AYG-fZjTBL(v*$C)-)w-@KQVNe3@8zEkGH(
z%X5Ig>D;jeO!&Fb!XH65ZSbW>k!&;<{%JVjl>#%ssZ}q_zv~g3_avRs(C6o;YL{YI
zAS$qdN8n<WK{eSIxR{{TEnMk$TjGOQ=JvziqX>`!ajg(rI-^$Xk7_?H1b037VXQYY
z1ax4#iDyb>q@QG4#hM+ElN@|;H@RxtT|cdJd(JN7Ve3!3_KyfQA8yFKsa%ZA^039X
zc9GGBz(mh~T^tO+ZxkUwGNQfKz{IQ{?|zgTals<)d6qImX{9M$n@BIIo|cGwc>D61
z(M&dOt_2ojd**SK;g82vYW10oofz8n0<<`i4MseZ?Ix0<AYBmx!|kepi*DTz{c2=v
zk;0roKksx|M^wFSUz-A5jG|6nBG<~aiS){B!U|%`O_5zb3Cz8%@0t^;VG0P9yp0;q
zys&1N8mqx229T60lK7e{Xu6V%#@8Wis;~Nm?#)2E@lq+1-2+Z8nLmW13@9k}9HpH9
zBoHSTTyWsH`vR8uY@PjcfvCXF`v<BVKJoK=xGkeI<O145(pW7IKO~;!;Xs2%x3J$M
z<5gy4Ds2l+2N+QF0(M@*o6U(hrW(A1ypw3h3f@*CzT6c0R9yf0(XA&VJ-Iv)7L}gg
z*_kc*o_y_exWvz9`<?S>p2E}lv1n$w=@c%V3#ZCzwFMsSU|lGmye?v@o|q7aN_84}
zh~!C*WgJ80PECV&xT{wI)3NNhfp73|;8>XF_tLH^MSM6`RS2NM&?|eLW9$Y)opePb
z1otWX5=MJ2TfZ~N3C6)_UUQN8xWK1YKrOYgbf?XXm&^4`hn?QbF8GOdFh-*Qe|hqd
zF!TLYsm1X~aJhql;yfBIz8q3UPb~0IJAm0>+O!8MuEJFE)l#dqKNRXwYPmZj5v`+o
zP~p3WiYNU%`&LVGVf>1*Nc`L5eNT=jQZb6b<{+T{Z-WPJ!fY+dE#h|RPbJ`ZfQNGq
zp(%Qfp$In*f$_ZZIU5-53Xzy<2G--}h~@TLv96UBHz0FO65g$WSp{bRXUEw3<{HOK
zPl-b?L?~mrp|WJ#sq*&nV=-$?$Ju^0o4d8XJ2+iVMUCX?V^T<`)AGfg^SKR>ywxu6
z_`^`s|J1fD+1BE|@~^Aqh}Xj<+$*i;pZK0f#J6=>2$Zzy&BYt_1=fCae-~k#Mr0*b
zxe}Nxvt{zzr9W(F!|TAU=J(EV2B~V6qbvShbdLKl)#>T!^oy3t3?xRaS%8e)ky=3@
zD^qWCo96kwCS3i}O^@4c&sE4kNK5krUugw_p#_F?2;*nH84jn;lPHw$cT3VAG815q
zi+n)Axm};#pKu&j)miT#o+(u~G3c`CxQV0rib>|OnRKP6{xqIgIMf>u+JWB(!c;&^
z>g0q>uOR6F8|LL=vjn^TvxT?C8;ASD;T{^*IB&e2rzOqpQ86fOcEA7q^7j2jR_URC
zZ;#VuIv|SChRFimRpxCnKNVfc!6A^HbPPqkaGF_N$C)@NE6SZKMxJ==&i^^$dE~mA
zZ_#9G7L0;lu@+qS`N4h;gW$krDuDRu1}`h1v^apHlG}sc_O1PC>h^)G$9l8FcHS?}
zX%=I%pipnTqgVgNgUne7kdC-jOP9_(wB#?CEAC7BwyNMB(~w!A+6p4dB`xAh#;x2u
zN1^&z>=hAQ0xPNTAkg-fnK2v)VoGA`y!y*92^_mfoj3-@_b8R2!#mI<ReH1E`a)N%
zO@0F`Yi%ezyS*Q9qF7`;g~7WPct<(BJ156w(9ilZ4cDH*T1GG56$uLIk|7^{pftBC
zalzpLOq#DQ8o=AEP7O*kM4R#Qe)SM|ei*=1k3fc&nm1Qz)TT<cKqM;J3VuH-da=fN
zb?kO|GG8^+7bco&hh2A`{Yt>{RWL;;VM~YlD}dl-i!^TEnvR!rtoNHYA759W|Lx@#
z;o|b5eLSbvb9na6q{(t9!g-#OBL0$Al*8Z0>$d2E9=xX0257!jjsp?h6Qd_z23iBU
zp{XE9`$m@Y?GM^I#v*ehu%dV|+J)it^(A?eZg@Di^;o)5(MwZt$)<vKS^5??4I{Le
zRzTd@EHe1;I6|WJI1}KPaTIv$6kp|D`<56~pL&-eE4Si=5+_CB@ee1p--E~}jo@IY
z--{s~*<<rfZ=WB@_>qxE+0Bzh2q{$IXz3vZ*kV+l-KSF-+iT^{M@m{cJc5<Co_L!#
zU)YO`;Xb}zrUp&NIwNeaJWX1hI1QuYCeAJ~``!Ct$H0ZcFYj`t-&iLfVPIZ#fe4@4
zwqRS_Nsp$g2hpBHI=urKUXoAEF_4e=V>QjLc+VeIRh;JDUtYft*gOqPuhTZfPrZA3
zXo>IhA5&^jcj88!=RcKnU6VSSffRHDS-)|B(<D8VN+nBCXzwME$uxna^&wVqZM>EX
z^-*8!RyugtPcbD?V3$9~5<_baAVDg)HToFeAH;sfM{}KCmZ{AvE$&Z)w5y&nqe=WF
z80(MKF@Gu>RCaRWgs4a$BENv|IX71&BnTfcbIt_jZ$<je%H{{mcZ}e1UY}kx3$QJS
z8yOzUYZACJ1@x`34Ks43gf&g$H0F`Q({3~pWr`RDGYsu+;k|x2!SGKv5W)F$p?i@T
z5otItIOopi#(<+4F<+<Favr5cy?e`d4u`1|zQ1OF<NdkO66u)P>A0$q;wbknx+OWZ
zzx{Vl6fBR8=<phd+~R7KxmS8;zLgTagUV4-SwZDt`$U9RvtA|x&2F)rkm5`zDW|oE
zjeSg&j#6xq7=zE-I)Ige2_A6BG)chsg*s2F-nkK@QRXqd=os^H_pWEec)y%8l_wH(
zJjXK`SEBZ&o04M1#%Twb-InhyzgA7f(gzo9=G-}5&%^dgGVPZbL*a*zH_GV<=0#=v
zm`D$blI;Kyt<EB2?EWDv3ec$;qpB{!PD*k&ujNPX{P_Tdza&zj&y0RDz2K}Mp<kxU
zv4!Gt*mC9Ktgg=-yP?wr)Fpf1W~k(UKgqwn87L9yIBhfhxU(XXT9#z9eQ=EWV|2NH
zrt4cNhZ;e(snU4q{CEpSP60eckwvA0aFMd>b0AkCrZs-aCAobG0Rq{V|Jem7(c^ph
zIjv=)cQL`B)#jdAiLr%7x;77^PCYIws!&(*1;%iHfc3gTA$cTp{O)S;XQLKE;o43W
zJMkpicf9Ml%jzd~_yH?Ftw1mWMs2nI$V8T3df8Mm>hX7MXG~Q!n8$gOB+l;*=|@m1
zRBjy3e*-ET+Dq(Z?~+mkbT^A_pXBdLu7@1F4tfH$wA!p&m76rb?WA+^UtWu4+<Sch
zcDNb@j*y|3;ZgHAlhgX$GMiJ)zmzOb>aW09kV!)S#W^Z~7b#xuN)|5g>eKo8rB72r
z<jCK567hoO`mNS-5g|UKTSE1HFVMM;qI>pzO_)c~WrCc1%Fy30M|J+Fc4NC7>@ve+
zbuYqxTxnOaEMM9{nm_4dO{dGk?<v!Kk&IA)3^PPeOhDyA*nptsS%dXwIWf=$o_V-{
zUHvuy9Lb-sEUE^LBh-L~Z(M8)Hm!{6c6YBHR1xOO5D_c1SiLjSuCMaJC1A;ET?gfD
zf0iJBGhlVFBy&DkO4)GrFb3t>T(ni4U#G8rn29<gmbnG1pkKz!yGX-V5}^PGcmS*w
zlF&$H3fG59QCM+@`|ZJs*WNqD%OR=1B%hs1Q;G!YlQVtzn?G|lhjb<RIC~!$UcYZ7
zgw5JarO{Zzl#YQdk1ehfKDu8<jQ6z?Q<mN+Nr*L0Sb=hbKTVw^2hQ;)9fw~!&9r&%
zFuR*vGOT#C5v>D%zMh;YI$g8F_U`Z9S(tfaZd~th*4XwZ%d51VW5=08x|_B%o>hp*
z`y{cyh8Umb?s;1UKX}z?b|=8sUt0Brwc8Mub$z&kJu+MdgUiXU^JMvl%tn4evt5sn
zxkw{f&+Wjd39WXd{BVNXxx4O>aO!+mlhE|&0aP6<-4Klr>r26p%PV8t5=<X`t<T=|
zvNhCcrXuxg`<tk=QD%qeXA79C*<KTb>es=v7xryT=E>5-#aT@P@4)(kjD>=NpOaAS
zHgtsro}c#A5<y9B`4{gQy7HvoFNa-|?8=Uxh`(U<C94zPLLymv*rRGC=&_DEtV81h
zx6sbZfx8~X+t7$5)YCA7RZZu1-e_(*7{`Fx>%g8=dWiqh(<~fGzp}HL*`Mv&RW?sR
zqdvIdoL5?1kJ&#6HL4vZBIz%oP2V{<-ZA66?U_aEVgK`r?w@v_axUG)0sp;%ms{^1
ziVhYizawJWBKJYvKDd@Ww9aVytB-BeFUep2ZH;&e`zo_?#OtcGn(EZunZ8vD&|TXs
zhPw){6(Vlt%9kU_j2m2a<ugZ{W`L*O(nB2$#&PennPPfSSUOnLyjmL<#qf2_W?pQ>
zQf@n0F4~%z+{FJj|3H$0v61=wiThQ_x1-fd5arIcC0h}eeN~)Y{!b=rYwyb1`@Q<&
z%&a_pVJ`wm(<{6Ag9?^ik|8pIY=-0C=s(nxI#QdwcR+6KMHcv?Va#H2UnHk~QroPR
z@ySB>gETyhOm!eLe)2CsGze~@WG;OEXy`=Qaxq*kRptQP=@IoCl*fo;G@fC+z~LxM
zE=Vb8Iuoi)GjLWKvb)}hS_>|Z!jF(-<#4TjqhCZVAUgsiPtd%Q#i0lwy1o}uDL=dT
z*&sSp0{1bqQKc?PeBsm=BuKrPv&Ox?Mt7sD-yAO@u4l1WH@)|TO0Q&J4NsP;1P6#g
zlb1b*s0oz4*}dNO<J4`%cY)9#lF{8TYZx`;KHiE<>^{~E2fW(HbC8ynAk64WbV^w<
zqDpF`ve;i?ZlxVKR=&kDj6LVX_hz)k{FtoGv)tX_i^tE0hW382GkX*}PP`_xTtwx+
z<H8<sL}{GK@+P)O;&vgVq{7~oc0pB_|5g_aWmp4Wl2h2?Z|@-R!vw#SpYQrmpp$QQ
zqENT}FCO3h0ta`j(6cUvO5a=Qy=?vc*?tr7^%poCc=a+?hn7K$d+?Va`hz!%+y09&
zbta@a;-xxL-B47jLQP=1X8(u7hDSKq^TDXu0gPq&sD{$A|Ad7|OHnavik|K7f2a96
z1t_%fJbmE1dAtid+=vWgg;7W9(T8)recTRhEuJ42C^Ypdcs%BnXdmwN1G*c>vD3S8
zF%fBk#+ULDuP;%YRg(L3S$iZRD(A)dRGio6glNud{@fJ@pw~Euu~*iTlnm`wcCxa{
z>Q0)VA1k0?$||FOL<2rARkwzFU+o5C=HGJH$m2fUy(TXA>1879myR-#@-iV3XTaT@
zSXK4(;ln=_M6L(rw4`95^xW0MubQ7Oh^XvcbNJBDd%VQ`lr89Z<yqGE9&5r3pqCH}
z<H4VA=aDggJD!d*0}Uh%UjV<@k@79Qx>+u&kbIK;&~bP-%wu1d;!Ikf<muBQGW>9P
ziQ62Gp#t*>js?x{+Ojd+IUiC5Y;>M~dOZu>lUlZ{$i6nsd6_h6H4j_)z_=*IYV5pj
z0!t3qQB{7;r@gTJrq`0DZh1Rz-ayJ+R~z4veVkwFeP3=|mJMWJ=$6Sk1N7=dU!(Gj
z0v=%br@%KTJ#*EoEfl%vKEY9GSLtlm3bK9ro^I^K$oss6y@mBaa}DTq`*FMx<y}3-
zwG6ffCMzXwN@tNyk7jl7T*UP`uefTZ5A8h__&S_Ywbx9rUw21vtM8F}C0i|pu|&00
zwG?AkU}Jgv*pDg#wtq<T2fu>|hH)}4D7^$noC0=dZuz(b`eS)i<h*3G;90LfB_eg@
zQU*|4u9fwBA=)mRW0|b18#<A|wo7a^1MW$~-xBN_3&^;W7tTB74sVke51wPawIMfv
zKhLZy)4Lo!7R)n0;&|?zZcQi54A10`W+cIrtj_;6-(K?PO7-K{bf9BK%TGHAI*~*D
zrT7hvs*DLrhgRwE`RLL-TevCOic>+aXXjAg8n?}AtePNX29iygTh(|&+t}t{m-&G}
z6%SXA`nSIG@2f3Lb0TO`^N5CI>==lWB_F0cSe>1!SnPAw+SD6nVsFJrqOox};gXo%
zJD&>`H}y{G>O~|bXF6Zs0-8F*Zxk`$`R{T|=}7^-(*yINY))y`f})z>`JeaGvq!&r
zWWWB@W&K@|&fY~eLWI4&Xv|?^IuaB|d}27tExGDD1~FS15gq6oA;KK2Z)(Hm*-LD*
z!#wVpwe$AzL9}Bz&b5yKCR5pIBZ;=dwk!TAyN(*jvPwb$E9#^%Sor7zXmv-5(_(pn
zyJw}O(XtS<NoZosd>k|T%jO9%mRF0yaOMqbh?TiD1YgMYW1YVtm5>C*c=qYZPWhVI
zkxz?_gk96YmiJ!EbVU%3E7Przn`3pvObvsp{o%@VrRrP*w*5Dd;v!vM*C@`^meudA
zN6z?Q`jN2duo?A~kn14;%~ed15g-?RSULlkZb7^GyKnCR|LQf2o>chSBC%JdF9UlL
zuZkYB18%AwDM3KOugqd~8z`xbLwEYiC@bN7WGpz?pg003Yau>*Md48txZPID&OmY9
z2gV!y-J!V|o~c#)EoTt<w;G_<7b~T{F=9Kh_U-2GIMqDNeTzARC9ldCGUy^{)a5l!
zYLQ%btvU{4E1vhR<vBey)kUUbS*!E<X}OY(z&<@>Kf4jPv(o$Elz!c<Z;|<($=6iG
zwoYXFxPeKd`|`l~i#AR*zl<jgWu@u$<+^Ei1tC30C#c-^`Ml`@GV0RO4sonlS4@^y
z!rb!QA1`E3%3t{r`4b-U#TdV?Ral8CoxJc~kLR~IM%YU&_yqrTn`$ChW<nzmEBhEO
zQYubQFkEwAJV!H1nT^EVQgw}X>&ex7F)Bj^P7$~lZxF3U0=tXJQdiD@8^FvwTpJf@
z5mmhurD<Td`W2pJP<EN=_G;dc{@%xsoA3TbYo$ftDzOn5nRpiyXGT7UiOW4D&Kqsk
zbK8<Ss8me;-j9B5|GUyLm}6mMdTp`B1VyX0cu0!=Ta8nJtuG!0UykOxOgXRxYcS6@
zS`1We^??c5+5kUEQ1_6dgpsyJt?YOts+eLvN}G0`{h3p_eC4`%?-yZle_bWApPD_A
z!$b~e`>zxdI9H$jTWYIv&;8dUuC2jL{bWTr8or_j)}uNyzJog}gg~GO$RTOU*f?=S
z;O%ra8+-Ds-T0F;;+fqTLk^Y&^~dr`;m(_V2JSzG`hV<|C=uz0ve0q9gW#qkAOp=N
zbPo@@Jq>+lj@$K>i~K>C`!%UL^J&{B`*r?J5|X}`HKXCVj#Yl<<P0zO<ia#?0=;nO
zfY~^SkVI618`Zk^cqWGIvS)VZd8;kkIr|hS+Z@v4WItVs+SaANvx;m!D}+>KqDSyb
z5sz0i&bpUWCA7<8xcj9$EvvW+*O#PrbJ~l6Z-dZZnvo&<!x3H6BVBJ}bQ(2I0`1LC
zh0&&j=|Sopp7#$`>9hoAF$eAA!PcA}=!w&?nRxQKK_ArWlFJU^MN@`}kR|InkomHH
zBIK}74TT@)3Gtl>S__v_`o(WRRhz)4XNgm^O0I?g>V<qx2I;U`ECK62=4(qHJhkVg
zl_bK(12;KfJuTwv8$s63?dx*oT}S<qB1q2G)^1F;>s0H`pw*_!0;HR(m-#*nzLt0f
zU6@IDLGV-Rz_hYFqZq5nqNp-w%6_Tm_T74jdSncUY?sTVk$zvWO*$gp@+)idew$_7
z_gsO+O2`K2EBffmM{$G6<S?hKoQWsLhtwm5I8K3)a1ptW845x8=*rLZKdnihjLER^
zJ4d^f<8Q7D)u!anoBs^J%?e%E6Gi1mS;U%SnlD%_7QpMX<mj+8A$Yw6{hh0o=)j30
zp+;Fl%$9OlS2M8@7%2pC3C)yu^Sy<{*Z4I-g1XGTmn{L`)zbDa^$diY^E2j>F}0m^
zp|d8NAF@|t7R2s{YwVd^V7H$!0>z9i=Nit9a}iAbL^^!7Wqq`tBa=7mJ|4<OEuKI8
z;(CrKdKg85Ye>+|Gd@zlrb3eEuT9_>5%KNVR1XVpcGc}@Sju1YSahp5x}STYzZvS(
zH^e_I27oA6A_P51*?}pWZH_E*c-jU$QR3z{Go>_7G*vc5G*i%S9O~)Lx^qJIkd9|i
zWk>@HT*d{~@k)aYiPv_)<xS(Gm2t87&HdR*hADzP=B|T?BTB5#msw`kok;s6Q|LJG
zp>pudw#jO)cx>h-caGi6F0=*N1R`89Aw2y^ab5xJrR7bdErHzq8VerH4;;9f&(Lo#
zasPIKgy3Ak;qil3^fP1B&~JZ-afDNpFU|~l@;e*OX!0fWuFmV~(`oJ>aic1cwGH<z
zn5%nGX1}up;*N4L@O$&Do9q|yvpJ-#5c|TiAd7+^`$z#eD=jDkz9?~K$Ci!drf{~<
z#n<u=je{s9WVU^H*@Y26&+pJ)uN|}pH4cGuBV1g4ZYt!U>89(7Z__QcYW$EQQx|9~
zI`{B=gLFy>`-N(4-;ee{W<GxS$|@e4Zf4Awzs3N?>og|5m_Tm;3obSYGF+lY({f0&
zL1ePf%O!y+Q{gdZvhY?pO;c1N>A3W482=WxL%h*hD<++8<Z1ONv?&kN>3GI_4JMxy
zXb}*txS3aomMBy;-yoTc>eWo!KN4GzI(3AA+o_HkNiTznP#_c5-n@wiyT)-Z-c&Hd
z%E8YT;lYj)=_Rgij;C}_O7i1^84lYv2TEOLD<)zU4*|!&6!qGrIEUDSv<E|fXH`5;
z!lgAW<O70S3OQ)fKWH43o_OltmLFnLGf7f5sH`%%+0K^$>D_&_H6~c&cK*^Zt`$2S
zPI2sEAaVC+kA6O<YZ$uD-ApM7%V$9Rjsx>>m8mKuQ|0fwiFh%AKPqiWH>0DZM3%Q~
z=S%#8b~HOp?)F0Vy`#fp5^kHA^MwI0c`-_f%s4cLW1dhuQMKXM9-N&xVwL!!HV3j<
zA8^PrfLBk<#vna8=sjY1dPK%|y6ErLabB>=nU~YYI?W<8YS=+RkHjNCQIXKT&00gO
zhvAcLU->#Smal#9Nx+$nq5HEJk$@+=Q7k1uq|b|aFmhGsH|j%P_v9w$D0fb4p|sRr
zT(IDEz^`SMB;L)d_MgRdZQQ9n2^#RwgwUSe%P_@=FcE0bRO43j(yWzS=c_+|zda9|
z)Xb#qatdkJD{-h2M-Z1Y`8_SeDj%rDmmdB!gp?SxJapTLIRnN_elVfi4N_T^DQk)S
z7T?-i>capnM2I0@w->+xg5&&k)0O(UF=k6kiw0lTI|XH9?FNE5_VXv%36EuC3&)9s
zfBLwz+;0B~x2AiZ7iqnJx$+{LTz0*tG_}f>Fu;T(poa;bcX0%aw@F-Q>P7RVC|{a3
zxqAv6tqGN9(U|Vs2&1;qq=zfHbqu(KU6L8pKrV=27GwHSoNdQ<s5vEfn{}G1QiQ>g
zFRn;)0m4IBJEE4;W=A<Tz?%oy4kq-K3!G{6C-~)iYg{Kpuc*JklI7~<YB-t(A{+@T
ztfx?@q3aU*Wrx0|Oh;V(<p^KWOfks2=%zWA!AzSjarQXvxOT%IL<`U~k70{+_ru7#
zF_DDvMouTkIq<M(Q6W2n3PxY{M)1x$xx`n7+3#&;jhRiwCO2`y9}g>$1m*iwHv;c&
z3&YV=AiA1aZpD5Nlbm`V{%;-tQa!9+c6tHREPq*Zr{fCj4@BSL>qQcMkZ=ZZ7nb4x
zx$oqBIGO?sHe9S9lqBo>$g3c+_0{gE)viI@cPN{O?=vsT@&4U`)tvfj5*LH}=_Yla
zgNnMlGU>)mhNfRn{RQA|F@&jfs1CQe6sgd|GK^r-zw3<SxuNz2s_GOv7~;;LnP;DV
zht;IJLQOki_6PkXe0AYDz`Go%5g|frHaeNY_4nl*BHUiZSZ&nb6rJRIB|BX|JntCw
zbh<}d43~O7BdBI7QCSs3c8d&*y)pX;^RPCOmqhtl2vCf8B0#8Nu`iUw_U?GukQYfk
zF%PRpQ<!`m541~03d6P#l7_43^l;$2WK<Q9No=4O>C=UWHX1a(2qx28+vjg5b<k(d
z$M{)TS7$>kIdA{e^&tU}o9%Lg$;_yXYZE<oK%z7o;EKt(Z#N?6Pb%*O`L9FxphQlK
zY(nD+uhFSD^ZTvyO`$-CMCF9G#If4v>1IbXA6ImYPmv@GJkyxUM%25&FHEBVelo!7
z^dnqe(=a+5M{$6h^hD_zs%iqV1!zU?c(s~dm%o7%?Qk{bse~{3`5t`}0r;8uc|Pv=
z(D#Y8VzH;MO}@~R+q0x6-nwzz$~Ml$TKB_tpYbJ!sgB^(L%P5B&Fy)-ye<y3fTpI&
zVZrub7e8@X6OcC&&4G-nuWPqi4=EY0hGB4myFpF?-Rs|6{E83RdZ)m%#K%<N2GBT&
z7-@00dTM|gGG8}qEL~?CWzy#l{BpEt*D8WA+M|;JR2MUw3-$3lA|-WKS}H}+<@Q2k
z-z5|D&WcvgM8No#b-%GH%_~mzIT|c+o++*@{&Tnw^9z+wmoQ`u+lEDe1|ypim0ntl
z2vx~8nHX=ndCrHuZ@0lNL~KXw{Z)i6XG|1fufOqbx-}Ie`|yyv&WzgqL3@^NT_ST`
z_*(tkv2KT=Nj|f<A@XS;NvOj5kQHecMi^OXkfr^+CZ_$Me*RfAEy2G@Pzz}7MdGe7
zRc+(n65YCd<gKJC!E5SiZcKs}I!Mynl|#lKXQ=H|_vg1BO}>;dip9doL2#BUzDKk)
z^U-o6_P-fx{m5xQ)jYRshQRxC{*MuZ62HXDZDC)5fIP@DM3isz{_ds`DB$5*z7m~c
zTsA3Pk`){GHSpkfq{`IhXE6aiJokNeePVRKt9to}f@Nc2;8^rdi_K3>4Ox%_lhh6_
z?pySnz*kLv$l~3cR{4*@5;&1+_0d}gtip2aPg&*1DYuYv@PoC(`Irjg&A$;CSN|1Z
zjmUU7oeN<?b~Q|_0#5^ma_^1B-ShfJ``~Kzl@%wBrUg-2&vdl^Jb2YMBWb#M(sN`>
zx$?|nNTpv5Q<4DYAx&llYFh(Sk`snd29|c5D7cJ``rLLbnYr%=dJXDKzc3Z$)2I^a
z+m^uc&MjEIp}9coz{TC`u)_G}8Qies-SPg2<n*WjhE;JPrfZa4-~MG6&18EF*AllP
zZP&1(#261>?Yq|O<;t3&lZB$aB$zSsus%?;gZv@r0kd)0<=W&fng8ZKzc_V-1I#iQ
z$I;!A*3s=Yh4y8t?P)By*q%nR;4|4|vbz~2gQp(SM}I*Oc~{=@goPf}A^vdN@aake
zqW17!b<&0Jje&LacO6YVH9fH?{?_D#Z!l@*kKhuon^4j=F&TASZOGakqkO)1`igi{
ztGnfSNRnVqcG=)qAIfgn^JO;hzQAyfDzJ1SB%(3|qZ+_J<g8n3y9&o6t6hthbGTSi
zi!V=pYVH^^h+9=tS=6^fDYRag;kr3u4C*6-bKkfwCAfhitK^?d<_TnRyVV?WBtOTj
zmg0bUc#*o}+SbRE1i~yxJ6;HNVf<pFCVua?f^i!j6S(Nf0t{;)1-_iyo#ivWf<LBQ
z$2Eq8+KHeDRNcpQme+XNt|>})zV7ciJ^%4*(Dp8>WO~RVH=28l1!YZR7-LG?UL%+d
z-np@LR1vnsxvzg*o79TxCc^Ye!l**8mkYb8J9YK%r2xIKuz<1T|0oXgeYo91EeYuj
zkXoZz)Z<YjhMJ?Y<yrT=&eZojqoCo@%nsYBFuqNWx|rze*>&6KflbCs>6??p>M~QM
zNbiv4QOAO^*w^=$P0^1*fR6JnT_om}(tq1~?sFT_$%!$cDop)U{CYC^d4_azMVLn#
zrWo7%!w^5In<1a`=~8UR<sq}v`)w}$JvM^N&+xEK>KEZ|8rsHNekx^v%FEx!Wo_{u
z%0u<O;LB;3XH0}|fehA$#v{4)eiAB11ZiHse7LNkxG#{@&5hA7n}RULs7bU1oZS+`
zJ<eMF8B%$hgqjCT_P__r-~>)C=K%H0N4#@dvv6|-UT>qkXLs{ADj=m8g6NTj7$9ac
zqwWJ;018YsCX|IKAK7^I)Kz09(y{vcv)4)<I^#47Hcp$K{*UO6w?UIW9v#Zss&@=E
zc#@73j~&3%=+DZWRy)L9Lt~w2kJgCaE#!J3f-OyGR|BPsvvYnl#SD0)%zp#c{uFXo
znD3B*SUaML>di@VMh%fRr`ahytXGFB{zw);fJ$bIM@qewxSqd-z`P|j@`RVI`39@J
z>-L;*%Vmqhw`33A0E1F)pdk~<oG7Oz;?^~X=39RyAX*8|C0;+z=wz0TcKlmENx&sz
zGpkvt&319+8MC71C)q<^Z<b24$;Bnw0obQH3U5Em;QoA##13v0;r{BtLyK<zqSqT4
zWpj<#sJ|P;Yv<}wGALilem7;6PKM!3M`652>vhT1aoAt991(r~GZ;jy%!J7Lnb9_<
zA3VH(w-_cT;>`VXys;r=C-^q#6O}hjzW#2qGN<cvt1PW>2J2}g+lK1}aNZRwIvI=#
z6HIetp2>|9?Q?&+sSmlm7Hzft)RW7(*RNe+fyxpEwt+5xkEXn*+))wHDG6+%Wo?h2
zS~3{SK8{po3fLz*bCbVN?jZM!?l>~9Cf^xGx{h&;yBKh6c&9iH;^~!*J6wu&jeq8_
z<@UaHXzBa6h1l5X72vGmLJl01%}8t`e0Pc5CwjU=*$iGkkyqCqg8U;O(ZLyR`*Cgh
z6tbxP{Szb|HD;g_O^)R~hT^K5sCY)uCmEZ7dqSx9Cq;M9Qa?<G_@iTfxgBv^<ShXC
zCVBo1>~{W$L+b_0aRQrPyOMDRzSj#f9jOd~Cp}1F+<g$nv_ufDyA8mSSb^s@Ku9B~
zDY5d0-3-BEPIk9FjmF@l2lFp+-xo-XNSHWIFrT|(G5YQEWxAEN27TiH`b8OK==v<c
zjFnw9ly(qHa+j56kSylVgzz7qX(jbNA1o@A4q=pLf_*{af;vJYTqOdp=AU1m(~z8A
zmpa&3#3%_I4vOs4LR9rtO^MS5Q{6A1wQZw_H<CN-*S|VFy4%dw1nyzr8R^t{5tl(v
zj5S@LP{nW=8b959%#b>1Uxuq=s=!VCer&%2hkf9Buf0#v>NeUc`g3h-{#=$pHly=(
z37Lkza$=Y1O9PP@RHhY>n=e_df<i%0*Dl7iOpRW<BkC8M*?_JYj8QBml@LIfG<818
z`zA2-rZ38D`s%$oKI8Gew79dkV-zhtf}iSHSjXKuB=k8Qk!`eDP=3)b{8_-^H7k#o
zZmBSfxU~^^d!Wgbko2g4=v_jVM|-o>hYRs&(9y3LUt7e}wVHiUE+GQ9GxQ)lb4kFK
zb;Dt&xG-^MwbIUV%6rlQBaJ-Z_2}=WhxcPgH2BkB`rcodU1p||<m^HK8B7T1zH&9l
z$0tk7Z1*kkI$bKr*G-hR#7tr<E9JwC``iYAyWFK6$?AB#A=Pi2TNDwy`GeV)g=P_F
z9tr&#A`yj_hwWYvd3J4vAuELpS+nkbIBZKPZRzUj$k`^7EP?^MdNT`g?o!<OPK~q4
z=8A1Z&xO+SL`}uX(}j90M%-y$G!O4S(qb@1eMlXG_sawLPNwB?;X?L(YarpNLqp#V
zLs3yt4gn;3hQtegVy4_(4jNX@FJB=D`}zAI2OTxE@xsDAH`v*MGDlWLwx);jLuL1G
zULT{CE!W`i5`h{YTn~6nQrw+xOxmoqh36Yrp`$EV?FSQ{Ki}ib`ofsUIrx|4fcpk3
z%TL`%qAP}`m-;rF-9pR_xbkSBy_9#hJ7iQk>rS)NgT>gPw(5hIMx!m@jJN2!GrCK*
zzy?jhG&gc>Y0&H<iRPgND2Ed9!4Ham_T}OWKb+qmfP%6_U3Fc8rGFm&_pSE+R*Hd4
zmK=--5?w*rjQl=vg8?1XzwZCD3ou1;v-`6mN1e1K^z9*l_Tad2u(8mJQ>@PCHE^^j
z%v}Rrj5;%#S*ZKpkWukz=KrDTEW_I9+OCZiXmKdT-3k<UhvH6)7k77eD{jRlxVyW%
zySuvwf(83>f6wv$Oa3LZXZFmx*16Wugen^l(h|rmWWQ}Kg5qQzpT<z~FR`rzscyEk
zLQk)3Fi~=eVPjG9Ob218TpCAffA_pZ+x)ulMO;`^yp6g%VSltJHJr+Q{)y?w-NTE4
zRuh26l;1y;V9P(IO9;-HFBClh0)cG|f*v79-Ca{-e!B0yw##2*=57zJ>WS?xY{8Lk
z-{v2Qwa$ykeFFXM{)_0N$T%c^flFogQ~51wW_4a^6w7kNcYT#tNDwj8P*wRg7rcT(
zQ2|b6`7}WJ2a-pJ2o~)^yqjF&9h4Nst5XjIWa=6;KNDVg+}T0=0m_LBEGDtu0ZBg*
z-;d!AA!GFwo^0;Ppf_*ukqBtZZ(EN3-JZVl;rx8DcF5x)m7g<0+4^LDG}QuY?lXw{
z{+xAdf3+I+_s%kPr3myv7=ev<3&1hqDSvxum~y|R4`HaOy2SnFlj^{6?|_=fMN?0q
z;m}#0<%f`eG{Bbg-r@ZYLUe1;5=x?GMIdIHbjW1NoAeHef)>iX>S&F_Ib<=yj_;^t
zi@zHjW5Q^SSu)I3OrKUySLM2?i&b&P5^W^h-igv@H*euWjDX4oSVVtzr*TSVC$rG%
zHC}@u`hku)ekDGAIL`|-LCqmbIHr@GF%qfQHcLyz^wO}7lsfrLwH9L&hrea>xTZfy
zbFC{LPn$e148Rp%8|vxK5WoigbdW8{;{|RH`$lYhS2@6F0;^GRfRS6^Wh>2QVlmS0
zHIK!oWtG}6fB))8>pIemxJ`#~A&sz`-_K=W>@N6jwN-170;y@HBN)MPcWH!>6&Ko@
z)5JauGo2J&SpRMG53i#F1CN!-Y8@nrGtFaWcSiNbE}z#FA%^2BJNe#g0Bh#nAI<J`
zqAqtTy&Q8&z=EVO-eNQ+3_kDH6ZiAF!M7^Zq#=_t6gFN-mBtFg$^at#dk@U0h8zTv
z*viMWdxUH~*=h#ClM|%<Go$lbhY8b-8#DrCp@nII-MfaDK<o#Hw?m)P)ekqK&ElqV
z*}uZs>Tt*Q4#CkhdyD(^dx(Fr=H?A+z?ZI|mLR$;Cg1@kKw<J}(zlq?Is+`H$zS^^
zmrduu8!&noIqmbc$%V)+h|5#sx+3F6^o}cza56Kgnb9iH_JK{cj{47M_DjT9;5|_c
z3;#{+Zj591UxY>7Nv|omcju9A?lfyvm8F@;YFqma-xw13j?1E7oB{fPeJX6w8Uo+j
z7h0Ovn|{AopX)DHjHIaG>@ks<s?>!9%S8kKpvA251O&I@>GH)Fy9pxM`5|nj;s*wX
zy;3avC&S?%%P&t`4&7Dx=OW$ek4q{uZ77-LC58ECisQD0MO9hJwtN!f%gIVFY7CY6
zjV$nbKtx*a{uE-^$5l@yC3OMUlh9Lm><sV@yr2Zim0YtPN9rGCCtmjp<jlzZ>0KN;
z#<>NDTvhjh2$npZ&J!}hEZFmNr|_ecLxnDz^&B#9zSH#oKEwg!rMPwm<#G0V;BB;B
zv`$EEPYPbz{H~?x_%bs(_U}|V8VvSDcs0F~!QWhSztLS<-ps(@3pUUs5%S1mAD7Ju
z_)PELnf2z~OP)NO!j{OFn`T{Ze9iQ|rh0M9(&C>>(S=Jdc5}zmK@W@W3BPk`_*H0r
z!^948rU7;~#kb!{PPnvL_%BlY3fCO=M~{~Wh%4eOykBp7%51}a<b3-R9DIP}^tGrL
z-WVBYgk#x-$62%MQ7Kram`5(@*s4X^Myokz_l;Mhd?6$BLdCuDrl}mPRjq7RcG3#b
z)qByQrBXH`*p=3*iYtEC)#WD<*aD}y)rz%?sTEGW+w|xJLV6LYY5WF^XKvM02)Twn
z);oRR854odR3GzmUc&K=b9l|_;jC@B>dLKZH_6IuIw8CJUYcRc!P2T!gwP-j8v&|)
z4}jx)ph~Q?W>)@(Khr#dVVEu6*{;XA!(Y$5<-OVfTW7mm;t2}MaNYime5-<@VxN~K
zX_7EB1zMl0ADa-Ac1a_%{YVR;=0^JOMhZhPn$)g9RyOz@+W?KuO!gJi%=EtmWc{^b
ze(Y7lTDgbKsW@N*>R-wcWYoEimXGIpSv|)>t%xdsI8yLDOOvcD{Y`+1m@}7>!@co2
zVHqu|Cpi>0z<AET&p0TOF(`|XNx9g=W=qrZ^ftDUPjOC`{&Cf&o>llza{9L<vYwX(
z3H}_D^Gs;faQT|v`vKzlASL*XWBBB9G1UA%AfYW?$InllMF6xs3EG^7cU6WKGIksk
zr<pmK|BQ)9c2tJ-yD7A)vbcvux@5B$B=7amdnoG`ojXng%F`?7f18teRPD?S&V-c^
z{20&{rrsyZK@9FbSSa_qfo@%2BwUuq@u?A6Km9_)R4a`0I5tXRtotg$g||%B?}<)Q
zoO#iQhI-o{(D@P^v)N@bC3cQ`ZB0JiF?=_jt*sdAYh_tShV>!+Gv|HdbJ-?uY}xWu
ze#h+c??}9D={YH2EA}2CiEduzPdImkXq1l2j6Z7odiL5857~KsWeGKTar)Ggk3Q55
za~hhO^4&OdgizB@;ftnWae2Lzfk@2FXQ-?vr)_eKS)L{|Z$_%lc@tpIT|N7kl#7C-
z_1IZGKhMI&!j~nN)~&*zvC6Kt?)yItd8ywjxTVS;h+?I@G-|H=<uf1@1L-I)&%$Sn
za+cA<yad`2mrZ{)DKz?)8F3*!vLJz>W6o=jw->+mc;CBYj)q;OC!ByOy2BE?6jyd9
zLy|U23DW6|V<^8c3|}&?b<ZEV=O{pv^|D@nvkC_TEZNu>79qv9FAUW8x-$GM?!e>X
z7zN`u(D{MY<CKxXK$=S`f3=pTSY(Qg8|$bPo}Ka|iQkr(P@9{z#1M@L4~4zMBY{f2
zg`n!Sh5;@6=y?7Sme6rU27D>{F}pvbsHdpE;wOGBtaqgANw+4R_#@N^eS6QL16i;}
zw~EK#eTaoHO(#HsKl<=Z+}08}y9BnZmrlReWRu>-?9gyQ*V2LS<Y>owNnpH=snxiQ
zakwuOsATDQ)z>)@dRrs*e1F??=5KN2Gtqa$b|U?WPBSoapseS!bZ8ymMifRn48wDN
zc98UvV-{~VqB&gYY@IFH{WQeR>)!BF0M0#`F>_@;2HLoLD!IkO@oGN2jhJuGrO7dw
zcXC^kjWQTz5Aj}X=_j{j)gEAtY_msz<>j8!aK>cz?1rFHX!%fMHH*VbY(<4efuW%B
z{9!ijJFTO^7~Y)wueV04zcNSw2nmZEF4brMGpEX#V6n<CpsblOuPvh9fX;)$^mhX1
zW}oq?H6-3o17>wqf#WO#Mb(w@0^sWRMzNA&Xkc&Y?m8`#c)Bas>QnsoOZ!rU-d#Rr
zPUWGw)8Bl3-T4|_z0E@=MeA|Th?vgFNqO=Upt>P-2evV(;u%>(k6FwX81^_xnc;Bw
zXXZK}_E=9sVBNW7%}{t-zewlDo4B?kmLnqN!iwM+{9LC;b6b8MVC+PwEiADbY8%UG
z&BD00v0bg+bwB4+F}tm@$wEDC-cPc}c1Rex45N1wu+p)zzF+=y^NA{%XehjJ_|0K3
z4-^-IHQCIwZ0`9ZmPUa~$QP89DR-ItS1#+3gj<1)iA14e99~A`F~*rX@-~ylcFQ8)
zF4jApmA7V?|A>4UMDDK=mCj)I>vHzQuh80I)#nst0aqui9y*MDK2~^km6tci6@5qV
zk9HtVBsT(OZg#?AF14GlcSdDU7S4O^FDbSjMBQiHOGQ0_zCRY6Z52h*VFBG$R%iCv
zRgVW&C@*Y`LJN19#p-i3Zpa0(dK{#dQ}x6N*g@1S#wrI)lKcaq+S=pPPeX<<l?iN_
zKLU({M-2Rt`*rm?gJZtQER+dKs1(Ax>LYuhj*%qDjC8hy+Gul*NevI$WVpW$V+Y|c
zy#Cz(CRrgkfG5k8_Es!IpK!W0%XQ3mCmp1oMJ&>>Hc@ER?5cU6LpnT>P5%MpaxvU{
zJTwp)qS*XYip%k`^vkF5C2N-fJ}S3JN1}-DpavMs9okD4?M%^w25T#%b50wjWg4wg
zP6kWvETqwvt!B$dNM<(?xN1pq#N_Lp=n_x7-y`Oaqz1@DbP%Q}I4|fo4ne}2ZnSLl
znC!hF)G22RAn{Z8(>=tw#umu2;-kBm)y;feiKZ@7{+xf=*J~V&bTs7hk7j^aV-&?)
z-{tGyANU@oLfXPntN0M$=pL3;OOod>^b^HF2*Z#V@+@eU#AXrCVZdonLxb$mkf84K
zzkPS>*STmK_?;t8&ez?o)-yVjj_D0h{H(e!*qe33^LEr@l8CpdNi7TM%bHi+C_9yJ
z@?N{2hPdLpzC=;*()T?F*+F|)QoGyYjkftYngJe$n(-V3#G8Kqaur=Ry-gX}Mn~{7
zU-67Tbgt0gIapThY7r3KHf4dQ*J?vJ*-{`W7V$}{9!pq`y9JJ^$wnCe=`#_vpZ1oP
z6QvRmc>Fo3dZJp@@)4POPP^#gTE-9j9u+1uN<q8?-74}*+Q!Sns-C?HFP7}%?_7&E
zPt1B!<6DszJJ|+Y@w`XL4j))XCvdV=0FrYS3o3_b$h9i3M^^b(XL=h4SwE%ff_BGr
zEcm{79YzRzb7-35)SZSOAU8Ug#_-T!#!NAmu~ZXMXlzgUJ^4QAfHu$GO~gUqXtd!v
z$_dFgSpOaDHiJ_mtj@i|5#=*a4WW>5DP|!+G9NO5&5u7~(2}7Pr$8huHsycVq%W=a
zyqTzEKK*aE{HtJ>ub0B|c@Ji8;yz-Bcv4%@RI}9gx$?KigmP3O^4rU|z4^}#3cfso
z6@p%x9<JnO{{qnSP%y)jv_}HMSK&2d&m{32FHVNb*j_za4dtQ^2jSal?hW^YbJl#5
zj5e~(y8;Xve05O0ozpZQ3-Y6GbXhLvS8eC1W2?FhDQ#Bl{Ua2!q&>d#Qy**IZB1qJ
zHIOOnj|}ypAt?~8Mw<>l6N+<Aap<t<@poics=Wty6d1M)t~jolK^A!lvdA_w<2}4E
zTH{X!Jty5&PdLOW(;EV`N)At>yjGlqjwcowd3Jt|@$%^+zCp}%nBPj$9Jva9HbFB`
z3xB`35|*~~O}p!wRt`dYJ{dp)GM7qZv2AfxggyZo>pxeWg@^L@qdK{@2R;E;_-*-{
zY<*i}k3&2L&(|BoGi;6ehU5`^98T#dHn3Iz%kCqb&hjLl1dpOAX)a8>Me=p)6pOyc
zuX~P=s2TR9VAgN)P9rTAc>vE}&-;_a#(}ij7vT0%NkjGdf&Ewb{^Gt;T%2a6oU+C{
z3fDm?W5lh%s=<R7EEf5;X!PR((I2n_Km8TqH!5T1kMx?pmspWIi)*`N%D-T)e_`me
zNJgsq#WR_ag<;3yz!lB`cIj4N?cA;ge@)6`F>*$0JB;vclJ&XJL71A$_|P`KXK8&O
ztIWTwJkvm<1Z*>rT9Vd*WoVr<5pk_WU7_`ASY9rLqrZUkLu+RwgoaoqbO1$Y>L&SE
z`}lh`)XvNb8pX5zRb;N#p1@=C6{y@4md+D)(!YP@JwD-!@v73iV3Y8e4Dz{4LEEgJ
z`dO;==?_?ppdtAl?jC|(gX;GhzPexqgcKO2c7a^SKc}VS1wfDz*w3v+pP;AAbUpgk
z`~6M*Uu89!GK$E8&B6bOL7QHp>yzT~+fa^iw@{uv*l!A{th4)l1$}L6=dA%v+d_Kp
zyNfJtD^J-T>lX&kA~|*+CR74)%9TQ+j`#?Mi|Q7uPzMXq{KKMIxR^elR>mzEhj-O>
zup9;ws-Ov;><JKjYM^sNH%`w6nVff)N70OD#Me*HQeqOWtrH8+jJwOu@R5k8oUS_9
zz2-UGF(LCB*)FF*J6DCllFb{mvaNTK2^|(p!oYOJ7W!ZF1?#(ydUFWz&~VT+aLFkP
zN|nhOW8LUR*adj0o+ib0Y{yd>lA>!)W~hbPg3N_vjO5l1im4#`-ca~8XbVR_3BCw|
z{NysDF7@(x*vZ@S6wVpx#n9AN5(gvQAN?tC)YIQ*f*96j5Le-T%%FMQ;g6&4r@GqO
zM19`2Z9D1bt_j|_JR20cvA+uv9Pn?NUmQ=>J=&2TriBtcq9X36ER?677X2dn!a!qm
z!+7>pHqk)TD>uDSeag*OJyVd5iW8>e*m1HhwB#1Js`3naQ)zTCn3Hnax{+nGMdBqQ
z({xyx{pTrzvD0z%J85~MTdecdG4g)gbvs}#aVT3lsPPvXjG+6q3A<_PodU&EVJs67
zDG!;4KBu|lxm!Bh2z8YL?`ZPd>*&2o?)$6dx2?GS^4Jog(Rc6Ud%=D;cHeFfO%Kku
zG@GH((;Cbnj8oDg`HChy08a>@vyvbq8^8!gFHcEw7=YI*F7I-@UZ@`De^!?1Xw25?
zCERfN*?KVx^4DwdP4)_3ofJ&-S>9kym&xWM;P_}UTYg~o_4cz|Eepnmw{vKAxC<x&
zdsgI!-WQ$Qq7vz3R<6&TI_<R`@Bza9Hd}ECDRKbx6wTG6>=n1P7>azS+8y!PAyFJ?
zXT0A;p_RAkZe<;&Ou>G?2I;II(b=K&?TQ3b{ZAvQh)<d~hg9qe`cTBhej<EpW$&+P
zXSa=#YIA#;v+B{@)c=TLgotmg*K#N8U5g!|zX}fs%NUrA3xGenBfW|t>&|g76I(kA
z&7ReSFHJit^2*S{Z@UnX=L$02f$|b&cJTBEJf@qsIK#6%JuSr8J<i7OH?uQ|$(RTi
z7Juof|F~GBieh)tTMQOn3#m$pb{Tbx>98u58Vn=kAhp=CBIP(|WBB7HF#_rK8L%<e
z7O~g8`_ib#hJ#)kLVk1f!iRrGZgpwbX|1%7WZEpv^1Rz8*4GBl_-E~>67j`w=a<ud
z(HZ7J9EItY<CJTxV>T~ZQn}iZ`qS|SQGgjq@;IS2;6ZoorgZpdFvdmyT%3f(YxX{G
z-^7~9$a7X2j)2L@eh!Eko1jBI;@-|(JXfwINmJX{nPC)4-Qf&Iuf#67TE$Ak$IeZp
zf*EyF%Qm6I&UZcMFVp3VWlS!*_Q?H~Up@Wg2H78@y2V>@b}a9i^(#XUdEXDQ%WI~V
zDx=-5k1Wnadi+{#S}5c+Y~W)|(?jxq!TgMJsyVQRSqcXXh^F(z#y#46nw^)%H71qK
zUEEpmTXTyLEE8J9pg)9qmm~Stt~2?x>3shzIWLNw<(!nD@llAO*8tRrVIe5nSD&zT
zFm@!VW>hCAl@;)EllgAERp08?Ewv6S`N~7Zo|_Y|Kkz)YtZ*u?9H^y3Z4egaJX(s|
z*#1rVJ_g$&HWrP4bb;Z-+5YKHJc9HeE1YXam((Dge1GK))3p0oUSa$GuW7tZU(4}E
z5<glHDXD!VmLL+^ia%UFO7>HZFv0Ugn_%@CW%-35uEJRN<I10Dh#5W4^wAaI9-NMJ
z<j<q|b6?V27n)LzpaF$VQC^<_pQ1^VzwPFTN`i^^=J+szd(*iy>T}e_Ye629@?Mx*
zm5~Gp(t7wOB<-H*0)J_VK&~M_$-xLL>3q4Vi77Hckyxj6L*l1p=BHq6@RTKbB#=_B
z`|(Zn)s{EY>plffB%SA}<CnTumMAZ_$skYW30GY$z2%vOdTVpXk;cpp&KhtZ+j&RK
z<Hvo*MYpR=E(a4AjJX2g0Q{2&8B0;DbA#V~b2R7x%~V75?hFWt-|u}1BEfv!?;rx@
zd%>b{0Xuj;l56*?9n9GnYt<C)19GQZI6-701OBNXhY3%q=J1GZ0mA`v@XWgJ>4{*1
z&Lh34{^_@B93rk4mB-0+fxxH1U};7nxRVCUs21?!l}vji`3L{>ffvL$EIE8ln(rd^
zbY*)x3phcrF`-9Hi;uuS5jyWlu+WQM%KYU>HihTH=g5~DtD;-KH-$nc#pR#yjMS8~
zc2d><CDZAf3u&shJ&Oa9)TSS+<_?TuOw+ujlQ5fykb0?6)9~xLH@?nHT*O$PdUj__
zoBk#=crIfXqc}LAB#<!RU+{=MsR`_x?(qD*wceO?$0>eWwz&&at;c*gQ8taxnnl#C
zA^g=WSTn|Jw=6%=P~h(gm1#3eU2E*4c8sb!eLDpE`RZEoIpK)FE`7)RPkf&8Pa*`x
zx_{k?dYyUdQ-q^F|5M<;LltzA?LG|f3I)_QZQ|!s-&-NqQ5#V!!Nj-5du<Pw&Nnm1
z;xu@VbY=Cw6?Jv#@2?YO676df{!zqBR4_P*HFZ%<>)0**@HSik=X(xHqQA}MUiWW7
z&XAtG${Q>yWm4}NvyP`1(veym>;HyT{JxMg?deW-m;n_8E!i;da?g6~JeVBe;8D)w
z)6Ub(;&#P*wv>7Av)Uz2RBX8LKlyum_;jwaG1H;BEj7s86-73{1C8rZ{*Bi!{fsG5
zS!dLBT=ai&QN7e=aM@T(mcpTp#>CwJhttWNogL=`F%}9`5o9YZu17V2D3^(lIajD`
z1pLQdlxk|l$sw8|WAXmsmaFp2rDG#1vVgD!2@slqDOH$j&5{k=`6N?3RmU)Fy*J@t
zzhO&D0Q_e#fdq%#MdsEHc>tt|)7W@@UOeVhZ0>R&8g})y<<w<%u78|MiD2m%h*QX{
zOTf;0@KG>}LLmY)m%Ln&sI8tFpxmc?@hUJ}u=a?wj16&$Q;FLC$CCQOe#u%##`xQk
zet_r8#%pwCS49{jv4)7t;c&^-ghQMAt^J(B*e)~S_65cV^bwx~^4w;{y0Cb~U8!`a
z?m~l+kmXz}^QE2CmU#bkpBE>Rg_`8h)sMnj#_}oZBaL62hYB{-{Mr*tKcFoRru`Us
z(P<}yKNox71clTpkh6;p=Ms|!YCv%Cj5Xjth9|dM*fbnJU8$xN9`|qrhgO}=feX`X
zb&{)mfDqG4>*}%RcF{dhK<{6r`;FNkWbki3#qq}G`9*55WVM;G^p!G=v>^jHXx#R=
z*^F-Xi8kS&EuR2eTGyr$pAS=GJNI#!%y4KCG_~HrZnG`l2}8f4(Q3<iAfddt{6MWR
zt3<$MRLH7(Olse@*_1g9GR?2N{=s`0&h0g72VZI$xZfz8ay|PHw0h@nIy)P|yoc;~
z5-gRz(zArb0!MaN=Xpl;Ey`x7p!w~;Ta2JBxeF7nC-B^4O7}G^h7INg?O^JCa*7RE
znk2nD+*JMw57StRfk2rhPh@z5a+RNQZ6$n+fXd-W$U=LgeP*q_=HX_kViZ~iL|w0=
zGq~Ere~E2fq3stdT;|c(W-*+#(4hbIIS#&$7N2iCznb%oX<uxt*QgkaWt<1FZ*oj{
z{1xpp{IiR*F(U8~;XV2)!y?c<U?kwiGfkhrP50GfHvAY)gy|h~5n4BV&8t@SQ@|r8
z!WM<Dvy*f&OM%77BN8n^+mFoq^u<`^%d0|Kfp^~3I~$%$wo&3fw~y!$?mMoANC;Rz
zd4-m#Ozs`Z=|vc*0w`bX5Rj{{@{yuhAOd9N&%4<0%iAVR)8g}=EHPMG6*{SA4Nbcg
zjv>u<Ck~|ZeeA`m-V6>!UM*m3o?M?n1z3NlZ_boD3HN}6@sTWUf0F0!OEh&x9pW5?
z4Kw*E_;y6aMer0GA{RfKwObE2ok*^my=>$SPSX;c{&<yBogC{{;w&~4>yPD;xclXl
z=VDgDUmRYa^rQQVNu~2JvxRtWe5E~Uujj11ypjozH&=Hiw4|j8mZ<Vlj)3Xw+<Q8)
zO5U1j?QixnRmc@v1S(a57$-v8TNeq<7j8}?E0|+Q3t7$`-#_j4za1(+7cpMR(9gfh
z4vp&^+MHf>m<k8Yb97Yrg2iW^k_XmCXRB*Bm_eAK>p0L{+dxpEVHI#-Uc<S(9P^`Y
zON05=C<_83|KSrV{vgldGBj1Qw%6a9&!C)=N}m2pK`Gzkl)G8)!2`7VUH<mqJoUQg
zu!r!~!=<fk+IVgDzw)HG2rM_R-RpDXMori|Z<n1E+>!F7B&EgAPV*QE<ctb!-QN0N
zHlKIM_xdGRTW^}2g**%y1U_~uN(_cXCpu18by>oz5i(1ly)JIPmjAwOTW%OOpjhY_
zz%WQO3`p@x-upKvxRH1obTW~47ZX}oAkadHNQ$G(vSBM5Iez7IKDE-7K|b|bRIbaX
zsFpGha@|j|@XgHeCeL<D_Hf&^U2`d5muDNFFhI+!D#)Y2X3vT3<CB2ib@}ad?lc!m
zeZEE(YpYs}Cs^xl#-0=vcjr5!*x5#7ZN*!>Bb@cSHsXxEi}nr^!I0DD&DsnWcz=rn
zEyQAh+_ly)N+I~;aHG|dZp|m-;etm28~eEQ=^$;0`K@3gPMO7$>ousX+3#OtyCEfv
z97IRVckQc_JJic7pqo)osE%iko(fq|dIl-ms4ur7RN_604r7ms8I`Kg;*(TN#=NeI
z9jBIT;wRl*N@R3il<b;V^W&ZrjT>7Qgw7*xdKc7Pr%|<^Cewq(GbmHdk#!NvY4*wq
z2YEj;&EgsyQ>mR4ou{L)6d?heW5?k<4_XA5#u>g*uS^SeMoo=4(HbyI<J10%tZfc9
zh?hs<|45K$S+8BP_06L5<;tAuzDtGVQIbGvoN{lq*U<J65=r2@wnQnotv{M#2n%IS
zbzc&8HN-#AdA#|(Pz!ex<3qDh4kx_Q%2SxoeJgi{`*gsc0^}t9KR>9dM~{J+mA{cV
zCErc^L`=OuSn__nvqe8t?@zXUHQxN@3<glrt7kkb5~yCkOSlftTpPnL$PRBxP!Sam
zL=C{=)cbh1RD`+8MubF%YVJBmGrcZ*vFF;UpDpKSE?WE$GWb6hK%m7=@@%t{A(~`s
zRYlR$>n-a-rdYEEi0<>gGgE``seV`Sb+raRQb#KOy(j^i6+b|v|4Yen#6vV^W)KOe
z{P`qYA|+z?p=bh-=)-4mayQDmr`>r&<7`0nK)O^+e(y}KRL#k<&HUuME$V*UKQWzh
z?=$zATifr15;#s*4x0i&2IuU$J$PKMX69y%`1SM&iMSCJGX$?mC#`3jNwh@JgzPPF
zJrrCAbWug5U6KET{jg#BLm!LwOrmMf@Q3xr+0^5%ng5%G7=N_>JdheqIPo))-})}-
zex!OdHf)Au@c=z;+_J)X|Dv*(7g=4^Y{$n)i;p4<+9{O*Uv6`<_XM?<rmZRcsUpUu
z<2$!agxEXS3Lm!ss-^xwJlw3miDGc^z(bbsUDL|*aAhRgYVrDt=xy2?i1~hHF$o#U
z9Qm;Pi^%;g#gv@PoyKXYIsSQ6ppqMV$8<2l38IiIymM5b$c+YKd)k%~N?X>g^+*s8
zzCT={Rpktk#B0D|Xf&PUc0L{H{>Q#;v+N?QIN!c*3RRGAoiPv~4Zn>9GN_$ozpU(7
zX9NypK*7{DA<Fd>rizK^pY#wnm>kD_??l0Awwq@yL?e5Dj?P>Xk@@cy+x|PN@z$O|
zOYWr%p>jfP)O>Hk@2Z9QgMi1SqhNhK0P6_QU~N@?lQkm$XK>4Mf-~&*|JA14FR817
zZ*5xgx~PaU@09`U0Ao7q8E%5Z!_`N#5ANbr%=_W(HN(y9cI4ZMg5GNE=IS?Bs=}AQ
z3_?A|x7t;%4$o&g6tDI;i;{JZ(lNX>&WJD;KlMQ?UD6a&0_{!3?LvH+5Vj>3LNTXf
z!fYNb@bbQ&`28-gAD;Vb{(v&BHN#;s&+r1nMLwmmH?h4p)eUmd8_8rjZeSOQ06Y>|
zMQ>Mp6fQMFp4jm2B=&-gK#stkFYTIsxq*yXE0Xk&pO(z+pGM<-^$MtozE}$|VORgX
ztXZ>Cj}St9{S|i%LC;PA*~I&?qLbqt;t1xWQzJrJmUNn`K8~XbYg*nRzAw?Gcm!^o
z6^Tp3{I^s_yGcFq{=$$<Y=v|58Re@goltipHj7Z$N|_6B=gsdEH+9S+!&Yrofq(Dr
zXRhF@e<snU4%N9DO{w74khT%Tmnh1qjA`$y=W3Hf)18c`tqLm=!@!LOeK?NOh3xeE
zv}6n}J~i~okiW;oX*NwCYuzEl6Bm_ThkmAO&f97X_fOp2&kYxErJ{2?U?KI(eAwZs
z8O1K<v^M97yMyU*J#=C(BLI!wYQcaWob5+%JBVo+xp2Me%r5jQ7<tJAx_mMa-DaXN
zo(~apwiFi;1Ma><CFu+7gY%hN`3Z?9BIXt7d^j}FL}$HcJHNOj)j3HZCsrl|*Z)Nt
zOqDqz@{hMENDFto+v+dz=mst{*n2MhJJ7I0W_nZCxx54VujR-lUoQRmeUB92f}l8u
z<9ko+vmfKY?j)l)ZJWl-duh<2BtV*QDrxPNRgkMuK#cEw11UMVd6jpbA9(-I%zgi3
zT(T<9WW0b>s_?K&<JyC#3k)<1Q55N8L&s&QQ~w=S&w~7)HhX~hJ`fCbf^mD@PYDUZ
zsj4;L&(<sAp^i(S<?>r_+0s;sw;AkdhwK$Q<?8xGi4I3yFgus3R&^tx8%pEb#s0z-
zpH<2I=JbEQtc6N}$BIA9p54Rf_s9j{8iHaXO@HH)3L^9D+b&!A2?3gqRPllWuI>n-
z|Gdlil*;Tm36YrklrH^ZcJfD;Bgxwnu+btrKgO>?|F;NteZa=DIIPrx1;nK!ApDK1
zXQ`LhyXCn~xrYFsp!0MGa$~Y?4%vOxIf8lC`}nuEaqF|Eszxg|k(M5t24GV%Ms}ET
zo|~W_Cmnyy4(!%3mHiU3eZ7=ysg-+fM|jz+HNJg|7HK{!qiLh?r6pv((enPe9L-NZ
z&g?28)V^}-k4*N*A_7m+x^)lMthduV*_spcZK4Dd<s4Fzt}}UHgz^rp8k3Aej8$2&
zKBCh|%X(=Gi336sEPV4s+6kY>)m)(t94iC~m@gLO>z7%VdQfwpW?C2RAS%{P{od4S
zCV0600i%gFNQ*kFAv-SJe7LXxc#}(Vo(^-H=Kh#dU(DuYwF`27A6mJsYh3_ieMnA*
z>PpJ5Kgx4yTM&K3R$u3Y=fBMV!w|UGFtwHrA;Jf_mJpwyG>Y=^3cKElQ3>4LrZ-Gg
z*P{4>f1>y_bfs2hEQJ1Pw*2O*NO5f@+(%y>+tHM^(o~z2Fw8X5nDWq6L}VkbOruRL
zvhd=s)1YdsgX5xY5J^AY8TH9WJO3Hdu+%s-^Xb>w?9qM!@$^%TTRJ@$WJTU(u#K+u
z^v}u}E+qOwx-z{nhy4B!0tZL~@42E1m(JYn>Kl}Nf`i&-Q(1gU#S=`Ot8QU3t8YJ?
z$|X8a6-%Z=tf(JZMQe!P?F4U)61jF=E&uq1oQ=gAAKqzUVBLM+5g%4-e#hakz*u9M
z$hR0OGZ~0*S_BA9v~04aGB>{+R9DzVt@@5dXI~5I+H_qw`aQsLh7Zpbdc7g5dV7fZ
zJ9%`!>XDRq_uOD(ZIERCdt1L&n<KD1Y^+UQe2JB;_`zea8|>~e2h2P8DhyMnf_!L#
z{PvdI!_J>pV2G?3z-~w-`1KG{dO`4481lk^lbCgCtZK&gB+dvGMTpCA)q4G<?eZvm
zV14;n=bJUZSUZ}85LCoG_idHOjLg2d&n0X$b#(q<PdHw^Q>xx0|K&iM;#%oe+-s##
z2TT!`X|WDf2Y6Jq6E6j|Pz`na;w|;McaK$QBP<pUojYgbPVs}wUCr(5i=I$t>90al
z?p}L^Lp~-A{I^I^{_;wNcdWQY52-rfPYo<QNV(e}?h|WqEFh*0-Of+Rj1^!`%(}km
z@o3^8WYw>=1k9S|(q^OHt<9f&r%`nGLJh>(=Z+4_PS|6Q(Ps3h?}~Gpm!)@T`pv%O
z$xR`AC`4n=D%718%*?kPU(a#RemeKFeg0Z6k>>XeTmw;u3OS_m!;DQ4O(tXgCEin5
z;ma)2b*STwNKVJA9#rXr<5M_JU|8+uRk$9n?w&(rzic)RvcNNNJT9eIthg5ubiU3(
zMq-2QaFlc)h9p!n!ZQ=x*+zLr-e()h>(z7m{R(j~$3Hv4lH>TJWRnZ$S9xhO#@#<{
zy8pSb@y#!4Z8b9V4po}(FHs}+y8yt)A(zux>9lc2u3BbE^m=8A-o^!J065m-6B+P<
z=-t9-|K-~i0pk2RJ?e{)MzZHY@yQ3+^P+)V9~Uh)D4Kv>_cfrP{kgZ(Wga4D0RrHT
z0pv|%T_+QgD`Ue%%WdyGee%MFj@@pyRCh>_q7c&8>iO#Z)ve=`1>q8-4qbE*n%sDB
zJlC^wD|2)hPeC;Iou$(v_$J|uJWsaHzkAc63d4>j1!-8A8mns0$;G)~ZoGQ!DSt<A
z3JK>jY+LDvx1|0u*|7~2{q)O+aP)O=@yxE*d0w#<<xkAc6?d|J+P^)-W6|2}_p7M<
zDjgMbUcEE@N>y~FxcwadAJbcao>2#t$9=^$FPF-OKhotdHCI{^@eBl-oL&Ejlne<W
z@G@Dd(BD)77#2UogKH9@pQQSj`%uuxwUdXWJxx&z-datR=Ur5?e~9Qc|5P^cK5Qku
zzj+?rcT|;Lm0q1@oo1CTNv^9_Z>d(BxpgVuovA{jf*GtLXwW%{1+k>Wm`&i6xr`UN
zm}A$*4_y{D7osf)-vq%Gt;2Lv7K9psRZ88R(**WI<hv<t7l60{9Hmqr?~+9~K`77t
z>_3Uk8CNdGDjuS4TBjDQWfm9Q?tQ<yiAQYU*ttgtuQO?;`tGc-hNF}_RD98->T&S)
zs5G0~zt}N)Y<L&e=Jar!CTyx&mkfv(Y0cL~_DnIncTcW0(*RL4U5P0RSX}nY29@Vj
z7Ey_z+%|kaxkJzRX*+St$Ew2wD#-2kIJ!E>?U!gcrYzGAhX~pA;3+PRE(hxcf!c>h
zOc?*|lk~^zvltaVi+OREPoBJF-&;DZ#LlGn!Vv5Vc<9-ROPn10{r-?i7H%9Hqa6h7
zRkIpk>vCjG-29S`?F$i4FJ6CT^-9nUnU>Ecj#PkqxaNV56?~<u_|rMrtn6O#Fw}_B
zbto=U0V1<i+gSdmvCtbGR<`4@8IX`?x$QH7!CCHrds^^>!ovs_f9tcmL-P2=%G860
z#@FiJ+iESF%Jc<pa*@PcrP$OR@iuwhvUSpT`{03OvMaeb^weC91Y>12hdwLJVBMsG
z_hW}E{3`_#E=?MucG;`KKi&A;uJug!pt6$Oz2YyifN7D~(x?1^{2nnNswe7TQFUeI
zI9!_$xYw!XHayz?Rpr5qi~nPcQ5c<g!R0wH+@dVY(=kE1-_ezB2Tzdz1<a~Q#XiS<
z1I76xDblMx&r@|mk63jX23XFH&On?5y7FDi7h}G_XZMPwsN&4Nkm{`8AwVS3*wfK!
z6BF&nJ3-uym20Dt2Vz2d@40V$7O-npAz<=EihgXi+WITbcCNqWe4qIYTBi)OX<l*P
zr4pdiR}2mXm2QkGa_M3NzEW@rwBklakhG>+;c_Zuw?Wx)H1)($6z8@+dy@)l9v#av
zUBmxi!x?}?sw88x;Qv@iWFA96-^FV#0KsSA_iIUF9oST(%xa{0?y{&4>`A`dmqrrN
z<*Jn1bTHjeuJbz)d1(&VL(}1kr;g)Tgq;g!>s_F8?t<mEtb9_tGKFgh76D|*7OSJO
zj#4}yn;uhE-3mfF=k?o_tnLlSFSVOFxmV1I>?x2*hks@~BW+vPX*eD#Po$1hDH%97
z{lO#g-da1?QGfV(%#2G2l<EX$|LF-hyws~m;=3w0xYAY3!aftFn}OPrNK1?)Xm$2|
z9gZM|0|3AP)?o8Hi#NsngZmbJr*0T~pn9D;^vmJgZK6#7m-K!QN_4PBV2n^g2h{PF
z@;eu-{c&eQxw?1f4oOjpd&v#8BjC`^&9O|U{L6HRP3Yy%wv=VW2K-@jEn%k9&w+Sw
zVc>X4D5E<+za|l~a&^X76__YGf5nBLb>uor>d%>pqdc=J*5xZIyFe@gnr^mP)z{I2
ze2rlYb|ZXvm`QFuu1#J9#%#7N&QEJmVM)RpJ$EMiAu9)cS4#Mji?=UOGBSOf$7S8t
zQ(u;WKXE5`?7Pgp8~>eK;<r#VDvjs;eI^IhuU!mg3-BqX>IHbj&35cc?A(N3IHC66
zGpDcBjtvD+95X=4>HZ=48IPC%l_pLjf#wENu6qlR9-2h13`>Zu>aYzOWx)n~vE;8U
z`R|t+tFWz;-)uPXS-)|+ebeC>ZK~Vwrn{fS3_L#MnoLr#o58(bU?_zl8MwMf6FIN$
zMw|h3i=*;;wBpvNO$FQ)&Z7wGV!n&pBhXz*Ev7Uhwp4OD$;;T)VHQ0sF}m-qUTP=V
zN|M5WO6s6pF;XRTwYfd{lcs(;fc6og`Q~;HH8naYD|Y-5mKxCDS?_gkcP*+|1;i+X
z6@6B9Oi<~3f4v%Wh869x0l)2c=4;uoc@P=~Arxo}+65k-ohC0_b7Qhp|JnGVA>>S|
z5?cc8jW)Ts_%_y@^OlC-WHL3Yuv!5i8vbHIdfdaa+{@P8Hqtl?Q!}g%2=dI|vD2RB
zuh**P96oT#gOM?0O;XzQe(CcB_+tG1MHsH&1Ae=`<CLu{7(enC4ZZkR0Sl0Ba^Fmi
zH)3b{q(<6l+d=2_J1q!u!=!*5Q@^hZU-`#dezlHXPpXmv?Z49f3I)K`vLbd%#Nb-H
z#MupAB2B)>ByUu_y7IqcnEL^7lF^<mL8WDkCz#IsRXhk&ouRtN527|W<IC_O2*AW&
z;`?G=;B_%xDwEZ50CkbM^;+f&|6^iv+u=88&#BimwUk3HyQ#@-?HG9yLBuUV6S$2g
zbMg+G3#C*8kX^a}{5!DeIho+A5P3;ODrWV3rZXm@iu63G4y7n>*p({{9v3mb@~kXB
zoo9FoX#7e9IO1&e#}%UmoDLEdtD8^tJ3VZk;8Nza=V%p$nlc)qyiIecC%xUJ<Ir`V
zyLz@wQZMU%-aQ?^jO`+6b3H~?ZuRgi?L=AgZG&MZYM!a?NX#EsRidwYm#UJ(PRt}S
zd-Yok@uRMZN?X5=_|+$?HyZ`@mR5Wmw$oY-6dp!7Mbl_*S*ZdBz4Quo*#-QvWdt5c
z0dvu?7iff~eDI6iF?22x2rGMjXFrJxP+q&@xU>N0a09&RWSv(t-A7`gLd^$ceVW@N
z1i3tfjOaL3-CtK2Rq!f!Sv+3twRoCarm^qRKCb%5e<)2H2hpIasU(ZMrS@9v6DZ#I
zaNL_@2>jy=JZj4BM)L#&mFs0z8u^Tk<?e+reHF(D3H&tW*|@r04ek3=&^>Ew=#cj&
zno^m;j3%qIU7;tgXb)EZR?5;ck;1?HLg?xBcL`pqh6p+OQ@EcGwkxraks3aWhoBFN
z94>><WF!>r<UATWwI)!(Kj!r|Pb(4IWgJXNL4akE<t}a@o&8Q;dP1cejvpz;QjZuP
z@+DSGE+GnWr;M#SC?hh5omdbS(Z0XUoJzB3yEKHoD}@o)^D}&+$sdJyi=_D*sMe-o
z!MHneBuBgWg)S<kw=RT;Ble?=V#wK+*xlPA_?0B+(9WwGjg}!W`=2UP64$pl$!gAW
z2`(Y;&}h#|IE9l{j%MfHT=I(j4)6CC_L<j_gqPju{^E*TZ;gm6ukH;?!5<Mg&>Aq4
zV#PFc51&uM9hyOupDg$R>?n2OT`im8>Y!!$kZU9Ry?PKG<n<}Mr+dy3XvN_+RK&le
z9t9+!)H4Tv+h!%Xf8}=IPe)>1S42oT->AQ=Jqa+~=ehTK^bv<m2b!cg?QFO`$;Eb~
zZmj}q66RLjJaK+FhAZqpzLdmf_I$baqsHai(9a+(R0mMJ(TS2itth|P>^$rFb&bMA
z#JLKR9y1H<lWg%I3BCjj#5KL%zr7AoSg3L03xh$LFCn??)Iq|+sY5Elgh11_1lze$
zpD9Y&{r|f@>aNFRCv4?LS<otE?(0>dIs>)R2Swp(5(#X|?nsXSF5^w2T_*is`R%)K
zZaO*Ek?74D-gXIQA*?60Dyubn;)J_m(|lx-w@8s?qP#YGc_=!zS#$PPfbAdDFTJHY
z?J=P_s^!SMFlZkhIE3c>NOwiZ!9pF5IVFyOkB9Wqo7(Oy)0Z7#pt!yJ?N0<3o5#?P
z76ie2u6hQ0Ir>64sLvX|vwLXY+J0pZWj<pOl6Ozy4Wh1+>!}X>W~JNsOdS6!PHC5l
zI3fU`OO;gs6XDSW-Z+S~KvpKOuis$uO8`t-xqFflckLx|##OKEd!1#~cQTuWeV{C9
z@KDeJpa!|sv>j5U=$?G1)H7a?@kl(CWQ(;4+M<85*Q`Gsy0)oo`JY6GZmo#vL=R7e
zQEtZS!`UxBE3U`%aFJ}QsN~?$8@B|QdaD%w;VV?Ua63m$rN8Gs`4lFT^hvB>TNCW_
zRirHfFP=(AhE<v@-X(#I82gQm-#8NVTWH;D23&Vf!+4wO<bBvT?s#++a~aK3e>gak
zRewhNPn*dW#@gp3WF*j(pC%u-dvp3Z!ijN;nP~`N)Nl2*!Hs}}MpX>}L=kfVz;?}=
ztcjfrt25+Npo_tsfmM5I-c^}hpP(RBpH<!GYqc@-lje5wjE@w5NjyFIUi&mE^_X)}
zEfG}vZ`LeZ-p6JwJm%n_#7Sj$<X)lD_Z01wgrbF{N!wCz<B(6Ow^f`AUa1(;3P%)7
z6(|P9gK0<e`($;?o^ty3dpq-t2YE*bI7$98?!$VlEayrXeFXesoU)<NTM--qHM_#@
zcwIE$->aH!z^%ML9q;Z1FIWV#Zax>N{-EtzG>vkb3aGbO;7+bRUI|dj;!>NRNqI!v
zQJ2<b^cEFSin9Be=HKuo7H2(OU9!gx4*kcr!=!P`d9pO8a7eWojP#RdVWcqGgGI)W
z@uM0nVMoU!gVjklGV|M2FxC#qR?0|96TIUFsj#$D4v(iMk>V(2Mse4e$_V50F14tq
zs<$k#%|Z?=JCUZDbX$bIV4iFYccYZ3t+Fa?-*vV4R&x<@8ZbbtEX2##1}!^f;zh*n
zn5i1T23Z|H@Os?1*%1r#WBg=CbV5W?=A8;kl=XH%|4$@)=}h=3-JnRxpsZ8SP1W9_
zaC~?E_|r-y6Hx%axPD;m={1gCm%^9yQL1+>eb`C({#U%lfuqy+mwSZVTownFYDCgf
zMb}a4dy5P@3#IHKhIh(<6Y^}D(1eLRMr@)yow;1f@;<RH52Xteqa1`r;*P}%HnZq4
zO>|Cc8>k=hI0KCj_gn^xXErqsHlj;sH%cu0+CSzO8^QE(8*tyjI)Xm5pwcR=QSsSn
zBzs0Yf~}*@E{+EyyU0CSQ=U<u<Bm$s@d=Z6C;fv^d;k}ykm6q~;tvjmmzQ1~OpY{*
zMJy3wXaP`ocL;;<*vSar6x6nF6^$;xdHtcJ<P2*=p6p$bhLU&3${mb?bhUZPNgdy_
z1S>0X9tPSdx{fQ2uICoyOX&%Pu1~Wh6xYRJ9sj1XB7TzpU##~FVw0;(K^N$lY+f@~
zL5Li2+BYzzbwh3LA3UM#iD{(YOcFD0xEMo|97xz`k4sUBjt`fOP7}l(vv&G*#rSpj
zuHftVfidsRU+sRb7&)HtC-}cKle+|%io4w2-&3)nZpF7<;5Nvao!n8PSR7MQCd)%b
zVCkY3_sBS1Q3DbA$zTzbGGbntDuqx(ryABkC9c%bRka$QQ{&N$L#>swhw61~T<gqu
z3TDuK@&unNa{6SVb3HubKBq57qOq%Eld#7+!7=+7ZUcsQwE^BimGuRW7!9>&kAU4K
zJ{%7m1(}Z9Jzc<$DnT=?TG(x?YUVEcYV_ZkEZMF5;nfmnl)~!=!H`B?`iQ;1!Uk(<
zTo66Tg}7%sp%lMW{@YGz>4!@{3yenK)OoNSCE?fh(S3riftNuD3az!LEJV=L7%g6_
zDlPj(;mnjCb=f8UP!z@c28>#d?ER@=Xl4sJzf(H4Tc;W^(sRG8)qYhE%g1U3!gg7~
zI0f6`gg*bYQIhfQ0^d9@7D=bNs|6*cpA`-GSJd%)F8g({aZ#C;*BG(ct?L}8S-1h`
zRe;a**(=}UMNhd1tsZo;P?ZG+GOV?IQ1do2HPRVTpC|TU=>YXVDX~?BAb*2Go6~~D
z*;bRty+&4o>9e*O89l#!uU|TRS8a(c`o#qZbq0y&%F}jLe5dPiBGipQmHA0LCw6`c
z7a_rVYZEzI-loRngLZwWlWCKa<h^f(tSTMT9+EG~@&!HFuK19*CyW}u$5kJ|u2Q!f
zRB~c5Q+XYPX7d$V{F|oZ9bp_*+Lljtml}#r8R~eF086WF2gO2Ftt@p5&d3BHZvA|~
zU+!p_u7Dw*2SH>*ovzE!oU_V2-d);$e7XqmdnpkrRYHn#^Y?2H*eR<0Z!mwMG9qtj
z!5QqnHh)bV1KVz*C0(d^zXSd#0>z;e@7bWM#}~Hwy`kUF-T3tTd#O)LLsQGW7XvXk
zXC?x$5X=x_vO+&}?usHg>_iIy7vVXpfTo?+QKnhNT1DS-cVQBIDM=01ylWX(DXC!1
zJ7c1NL3QoqdR4RPQfxnWZ=*FCwru7if6|&EOfO$1?`Z)CFeS1#cz(vm9-znJkwD+w
z!A);*<A038ZB%(Eh{4p{2df%($>n4HWq?bt+hZyiS5%;QLjp?VZhM1w)D(m^O%l=>
zI^7o|8qPZ|KXmH5CqjjoSUxUmaFRop%4YF>qm{o<bU>bPV|V3R&zxCMAN*kopZRv0
zz&8PkrAMiWNim<Nr1tywrd`QdeBr><(I|~DIo_r$Dne8{XW0@MHu$q=gS>@ZOixFn
z@>q=3w!>470p^#s%TV-RdcS&I3o1gZ-_u^}+}n+Lp^X(mb;Vt9o0Z|wp_acQ;c=34
z=(V0^&AAgKnQq}T9Hum;D5IRC`G?pzi>BT50wwa1Gq;hp4^-4sfbzw8<kd_Z-SZ2M
z7~wri`R}*O*Sd&z#mj$Av6w3OP`7=(W$~rH>Abf<H*MQ3*bK>PQ&H7~7?E@5)ow})
z$ME}BQ!qeK!q|!+H)|r;>`t$)!jM-ot%D%MbcO(au94})Z86{#?&M-Dd^{PE?NWHC
zv9?D-5as?*`S7cxK131icKf~lR6rFUJ)jCa(h~sV@5F7=sdF~w6%&I$%WZ<NzF(sH
z1r>~GcigH>FkO8`iBQ*_l{@WtG~ba^Bp-HLl)+Ei=;Ojo;y26O&!CO>+@OVN`SZGS
zoZR<M<dAw?w0jkW$yy-2wolwe^K*yUsL9b8E3`meuSg<d{1fgNkzi&?uUy^g#)mbb
ziEeY<m#aT4lQ2MIMNrT>8F4xS5L4%3UeYj5p}aLOV=$XNopHDmxYXoK6CH1Z&9M3;
z@;oWys*=jCsE^T;@b+#w`15Hy>0A_cdSBACsyzk(RqLF&EJL#HN(6Kskuh;&o7jGx
zR@%Pt)MuFp&8wq&JnFYTus$k`VH1bux;4o2__sWGlqph1uc6Z-LKZcCD(~>vwQrC*
z&x^zlgXsG)4>cKtG80u@$;4mtjo8;A7Sr7Kgl)M;a*J&;0)1@Wj~tV^zl%GBw6S-@
zZi-Vr$a?lIh3Pt2sM9;O&BD#05$~-}7H;yh<-0E?b0Kbg_EIMP7e-8aXfK%cFL|Y`
zZqlZ=khX@4Uviu7_Zr+7xE!v`Ke{b!xyMi{v8eG~wzV0xEgoRge^IRt3nBj>3sA*t
z6pWH&rL?ZA0q#RELQAiO)2StPCl@^KfH8BXJ`uZ<EhZS?IqO1)=BH%_$Oq8b$$>)t
z+9^Q*38bl`9M@uI`Ar6YMDJT2r;4Pb>JR(;O1hAB&{dZ&uN<L34@^H?q7uAbHz?cr
z#=-k??)-5CxT6Lb{kFBe633kkxR|?6L`-juTKQ&Ds+g1DjF~BHQI<Vp4x~XT7^i}l
ziZP@B=g-j)<)L6}?qRr~Z(e56<8hIIG(6#$w|Odpex;{xIE8U_JB-}PIDwHjXt846
zzEyb~iOzIWy(3AQL|(gQTVq5Y5pf&k@la<VxagdXR2i!%`HE>OqSJiZdp(&Pv8g)O
zv-5Wu!YBdHVcg&c;PoT)GK^y$?rrfq5@&6BifIm(<StrNE5cd9{Xd$nGOEq4*^-b_
zC=R8#6n6~{!QGwWF2$wAwZ+{nxJ&Wk5(>rL9g16Vio?zOt#yCq-#I5U&&=%EduCA<
zyDRkY%P4>=mTGdfMaBp|tgBWm@D)yt`NyUGyVIz2yh;D7RBj9FDe-hf+QIBTXRCdZ
z-$-teOM!kKlt742MnE?CfswIq&Y3#{l!h<J=*=DH1A+rMD8qhF7_D}17JVo#{>(-=
zO{|+MOUPeSg{ib(vaDXxq`w~VQe{jSaBMT`Z2az-PQVpQdr*?1so@{rL4sdRYvg<@
z!VsZBuAys$(it436vXLwCrvftw%*i=o#Qv;N%r6Mu!Ag7`*k;6cdyWgiP$^M=VvAT
z3(-K>SljRJ85w_!Zh1D;n%cZ9=AWws-13xhU>N@KHuBnL@pQJ_i;1*IvS<z1`pk9V
zydi7Uq2DnCm{xGl)TqccX$<^PXkZq&Gl&l?k*lk6EXwkw&h4OAIy|RL*`)b&mZ(P=
zj%?C(b#iEKA^}5XPFJpT8&ON=@Nmz}N-xQ8BV<?NlY?nsU4SJoe$7VzuJLD0tQ>8T
z{p7cgIO#EHao~NG?Oa}|;BLIk7O>(|d~HUv=l7csF%D&4?1#W@&Z=?Mgu3l|yS{h+
z%7kE2;y(~>CwAv~<ZIHrQ2I>O8-gH#!bqf)Xh8=Ov&-ue3Hb&vFOL5M5dmDQY!ogh
zcjAikgB_PZk_YHywQ}cDUrd^{7s51xocWexGMeZ7RVytn@GRg(FWsz4ywbot6tl4o
z9?*EwPbPhBFw8`DjO{WMs==2s*BU47jMzxOTa(a(l$P=SUhcEvv4>M{gR2jFB;mDl
zZ%ZsmLe0Cg_0%;ujWm+@-EX0N`ckXS3Kw{nlv5{`L#Ne#3s@Y#%`l-0(KRkdmIU%`
zOQ2PZ$~kFBeE!;&@*#){@PlWD_+4j0L};eU&tG2I3(TQXw|G?NL*1En!5@6H{r-j>
z7l)+}YIfeXF~EmN$NsTPdZqIRj?NAU0<C~!)ZecCQz8oM>-?TO>S%5Hd5o?F106ul
zE$^N)+F8RGLu#@sV=kkq*$~GL5RE{m;cz=vab60T){W$ylE1-y0tra9X>455++R?Q
zDNmyptaPd$e}d3tWBI8~iC6yFd3&guh<8#kXp?gS2UW%ywb(oK+MK&27+tT+NK_Rv
z48!e=RY;=0YjIT@1UR{CaP1jk&XF6~uB?`3HW;{gxscbe_&g;bj34N#R*vJdLBkXr
zl<HeQ2>+Pj>;0IuJmv^^zfr{F#COtPJ>U)^p$hcsRluLm+P-}r&|CD>&9P?I2&|Fl
zgHY;H)+%mdTB0!Hn()}(QF(nl;}z)yt#>*D4Y<8PzE6JTB<$XuR7`Ju%w_(C1-$lg
zpL_6*fA=yEl^45?KYEug>{VtPBI-6><VCeHq~jQMWxBGZgn`92CH3NaGekbs{XSdo
zh)mA~f3LdwBc90Fe5dYI2P1YFseow67;an=yiKOQLG7hZ&rUA#1tW8zD$;%q?TNPg
zsJ}ZbQoozZ6Ju|ftD3gmklwg$(~&o|fFw#XmP<NEJNw(<m(#Uwy?~z-Ui{=jxod~k
zI4QA3EYjHueuv0+5wJ(6^v}RwY{Yr@yPc=RzLL)(k2A_G+^zYlB!!T-!?F^S-w!D^
z?#_5ORj<5S9!m(FuT}=Ie57IaM<#=I(SV$bSK~D>!%s7NTM1asf%BvySY0A7BjOVU
ze@q$|ibE#TA<ZYcNrp?c+$R^kF`6#$GnnG8c$!9Sn&F?8J62rTw>^pNhn!dBXhK2i
zb0O<lhW5{PZ|Spf=#zFE1s}SRJjnbo9&RJngJ^Y+nlI;TS{;vi=XU<#2lSL$*qIB2
zv5HxvMl3~)Msj)M!pU-H`8S7LbMF`3cyu=voutid;K>2l**vt%9rPr*Dbs&3>*4q1
zr}boXQ>LCEOYN|O;15<@0-v3T&b{HU4If|szOI2<mM{lwNWNSw2m(|R#_v&}E<}gU
zh+}0*tXp)Xv!yQnrFHL}3Q~^TPM?0V1SGOzwUnk$3q5%rfKECO>YoZj@lD5w%aLc}
z2$^r-IaW$Ajpd+d)!_=#2EhZts-T4ywHVjDLG=r)usV;hRG0Jl!$L+X+hg*vXB~$0
zJHBnYtYM~xjn1Xc0Eaq=%7E84_KLMYuNNJ~kL%Fpue|^nk?X)%(Gp}5$jrODjkA!j
zROdWJ7~0JryOftCe^ASWW0Mo)saK(VfxWBx@MjDs=)899uneDVHVxgT$s*~UoRhfE
z7Twe!{^hp=sE)lp=MX?m`$YeRT0}a0+al>8^<^iJkM&`@)M*K8Z&NC5SLcI3$Kp)K
zDK;!~Tr9OXOUR$k<kWp~q2Lnnd?eu&L!r{+%?|-AKdpt;H!01}s<y=~PJhwpv*P8&
zwlOG32X0cxf@A^K?lmVz6A~pIS=Y9U4C(t-Dp-vJ)MZpnfAwa31fy}>y){g{4Zt)W
z*jSh>Vu6yct@t44T>pF7<-@?NSnA6P6tn`8U~be}!gsdO$F%VgPqL*^o?Jh`NkSJ%
zLhBLI-b47~5)Kcwq8-!<oRV}ez8fsviLnU*JI#wa7dpxLkl(seVS7Ukp#B*7ofm66
zJiy)Df})G2+xev207r9>%6wID0Qm(M!6dJDIbnF#!}DJW+ECZ5Z+zMU_kSsePSV(X
zyOXOU5E2l_?^G*NH_~nz4%coh<^|r#iRs>tUH41ZEq9a$Wj<6>#it=xfwiNuVxYTp
z#&C#FsGhXT>1kjVwF8;i?fm+I=f2$NdRbak28IF8NbI-cc7A`sk?sAsb&HrOhk2L>
zS=xE99Yq1V5q^&H{tX!$Vt7Mo$(5h5MSUc9Oh-y#JVegsG1YWXh;P`(bVS|OwNDF#
z103)ACbgnUVe$FT#cj3Up$kOgTqChPDxo6C0mA3p<>S7%E?-(UgqgTXtOlQwNyWn`
zIxH?-3EN8uvBO9AL|?$?K`Ba}pf*g$Z3~sZ)qf{EUv_qfM5EJt6=(9;H;KpX*iOGm
zo2S*m9qN)0Q1#2@Q75;lwbu0+aBy3RqwBJ3>3nOf%sp_<?Xzcq-_fSh1-Rf1SjKE7
zBz`)eJP-2&>Vl;alHy!mC`7rIezoI@?h2W$B+6mL2Y~U9qFh@~qh7-OGG%!DKM92B
zS`=!GSdjx17h0Mu1%zHBU%}y@G-P^$e{Yj!=i|7>qWwZ}MCdcvD>F*Z4BD8rcKbsM
zt6r<oXJkxWwfy;oN*I`6;3Cb-<k-emDWT?_r6PJZ#eK-dDlX4D4PMr3H+kOsCPiQ<
zyxSs~H@8^(FsR_ODPjD`)d4jKGHSp7UicWGw_+0eF>fa?Q4}A8(u8(0+MQ_oNi4`}
zS_zi0!cHpnC`SorP0&b0vw9>djWLORGz2@YC83W<*R#dyKNmS%Y9+h5D0vOm)7*)>
zscRPzx{(1uoyh*r2I&jFGe6u&q<D$CXLQAtcZu{}iO2~4&A>6us?0xh+!8z<Ml*y8
zlYbZ+yP#B9LwMHi_({_)g?akr@AqwKq}vu=Mi+GOGp_p`o+Jf3xkI-3yYe0Vx8tje
zu$4W3&IeLC7Zu}my$lNI);oeoYz5BBo;B^ls7$0kV*Elb`<RVTGGs1x_&)n85rb7?
zo0I67cBCIUe0XOH-#?*aP{D7crs%bA;>%8f(o4Lb-HY6|<OHn}RWVh5@<VHSZqvs&
zgy4-4TFkcwq}p%VZY19RYIk&lpku}@t=C=zuI7n8NBL)MZhy;-v|#0Gi`3#`T=(jp
zMiMpsJncAiK(n{bxk~<F*3?~H;*EGTvvdF5)&gd_Yf$BYfAuLMasDqpwBqz*tWd85
zlmJ?ve^vqC85T-c1c0V8CNUC`NK3zyfESeK((b^Nt#srTo8pX_R@P}M;<PDba3c9?
z7jh;)=1_vew36R=R=E=Ayzo-3J@w;O%bW@kMF7{3<)w@z)L#FBmR$2Yx5_$Ype^y;
z>vcJ5t<S#8!O@A_mE~eBfZa?p#vH<hk{7$5yJkz4qy%0f9T|Pzj^B`JG7I&}8yd?+
z--W)bI5ZG=#rR|cj*+66h7msZ0oQ+EK0YpQs_WoipLlkZb$>;+)1dG{&j?t|oM>`M
zbUa4x696hiCjmvi4%idh&~8fMQi+E80SgytVa(I~B8OY9VAeux*p7h4)9kOQJ}CiN
z*6eS58eO<?yIHl%irelgwu4FvP}DIs0|(ky8zJ6xTP9N}1m1g=PXbFx-k@RhN0pu)
zM7lq^8d1~D`3qWqgV|<ZoGf%QIJgUs{+)$rQ)1W&q~$K+XZovPvTuzhydA&86W`s!
z+q{+R8Rzb%|Gep2l(O#l%ZH|GZV@NP^~Wa}iZE=LdHtE^zGk}c&w1b&h<XSeXr-Nt
z?TVf`^tL037(`&%eGjI+RL?ygnWZ(f2zw}E`jM+y*0#)iG_(^p+)_MqNi$V@FsrIo
z;zfxMP#n;`c^sQ`uKKjpTvo%mEs2GgV*L7Lm;c_h?^;*!by8;!S(1!z`~CTwndd#b
zE_EJu>r(3#^L*c*3H%-R+btGFWt&smTBR`_!4E6qUv>r)X7Y~~hQEMZv4hHck$RqU
z^~^Hf0oe;Vf0-GO2gyU=k4n#dpBO1?9GrT99ditG>FDD)qLcnBG9)vgwMs3x^yg)X
zJ7fme&*PeFNcIU_<J>W=SBMidSJ~tP98)v4gsgKR^C&%`t-3rL?@{;thRz?+!0Hgj
z`93Nx+4!8`1|H%Y)l~EPYrTgN_HdSzT%Y(XG4AScuszjth9!kbdHS(t8p}{HViO3q
zko)eM>tvn|ihcmFTTo2OcKLj={IDwrUo8a2kcUI|INmtI$_n?%Hd6#cGoIq>;}TaN
zdg#+!J}3E4<;<zPdoXm^s}t3-n1BZZ_Ys-Tp4jsp2@weM*6Ry-P2`Lim7^*~>6n;J
ztDr)kx8KGEVn;_t-KlwCONvY9pDYXa7Xb%rx0+%fu2X|7UvLBkj;z+vJ7NN=#(zqk
zy)TB*Rp5QF0I_p#XO779#yFANF|mXt;khIu&qEO!=oJEIi@1tscf2F|gDA;mNIkI;
zXkI{LB=U2f%I*##5xMT_8~xPo;R9`S#5?dX6?`PxUJx)_dZ)3`ra`63)>!W=wb{D=
zl|Cc(f${1-$Mx=CPw~0^Z*?~a3!UC5V)o~^ZvuQ<)L~!^J&+k9bjq!&e#*_W;b&$h
z4>7*@K9k7zBOpHt9j<N(yZa3>vUO#494Sqg{b?32*|4<1$Pw}m4N$8;p(D4|4`5K2
zx8A6-`A(2Bhtfng))i)d$px5iOsh0Utg_YTrx5_C)2%V)W%%w2$aeFQ@8sV{RgR^i
z>7?f?JxG1J%-T!qPkUdxlZYNAV#`aDW|HTR)O_s>kU==+vett|^fF$#mD2v2z!(dO
zjiOQIX+&*Pz%jWGxf%G=bUc7}jvKu&YxV}p(U`*2g)01duMAf)wH&Q>uJa8DmxOd`
z^>7B<XA}Gk_mHslS^AD0iKu5g1@Dr^@cJm?w_PpUey}M)z%glfpzI;o%=-kghh_)T
zA8q+UWu)oi52$4}aR&}5g?W~CC}LdNQw{3Hx+3#3?(VX$6IhOtUK%0z<EB|pa$_u}
z&;wilX;BQn*^=NWSgYIL>fx>0;inm@t3N3k=2c$T$2LK!{=K6wQTiLxu%$@F7veg^
z@$?R(&aLCghhsMHSR)c9`_2CToX&}r*j~--yYjXXtxtXzrkVd`*q4Msg6ohsdV*~!
zL0r|~4Yv{dmOn>zB`cgAl6jGPzk4{R`&_EO1riOp8jaHx;u7FF#FM-oW6=ARsKjFn
zwZ~#xPykPD6qR{VksasQJL^L?ekgS%RUKq$Y;378s$9@O<!*4%W<9Q?;_y^J=hxj6
zb1!79H9`bSay3qbZ$v()W3sIMx2281dEH;r3D*3EZX$aTU>&U!k>IM<diQ05=BW0U
z^ukficcXcl73)ZukX!o~7Wcp2Ls!EB&3+xHdT<AL-h-e+5iu*Y%SZrK!7dAtw&#;<
z%?v{nHZQ|#Ixt1;&#@rOz!6jvX<?G@u6`h)gn<ZUgoLZbU3KBlE{61BX~(**@t!z1
z5xM`t9Fy5s>RBhaDtaPA=_uf&uG-VofK-u)P^nUBT@0Jfd>PTEEnk_}h0W8<eM<c_
zLbr_HT2^FbUBaZQu61v{A5+CO?C7di*3qJ@=3GCljcb6+P@o*zx&`ndLjDu?<Jnm`
z!Eh*1`V`y~1?OvdEF?{~n$l3fCjNac&jOKJ{&`6}{7ke2p<y}W1)6_;=y|o%>=CV%
z4;)j%e>>K<Po0KtBE4q7HejKAZ7;?(l*E!j3U5O_$cY+*#`tKBsGXMN87ck1ti$Ek
za@w<&@LzHlG<+J*pl^$%_vLqaUR$I=dtBwWqZyV1o3DgVrMlIeQ7ZWX`=9vqQ#{E7
z93JJ+p}!G!%gOaOqc48>?yr;oI0u32poaSuk9n6(<a-P|UmMaRr#k<lOpqg-=uP|3
zNf#eYmBd-8s4Yb&bmILGlc5j>A_sS0=S#8Mh_?Rxl{s!%YNC7<&Cs7u`+fYRw`tXq
z2**oG4LjY%KViW(8uoFSnv7MU^5>rCG{;si>yTRpbrv4+xQh8sI~q8N6>eVfEYly`
z;kA$0Wv;~#eiYkMUYDXZ7I^y1LIjyuPv(Wvd6^HsHuH6DK7hryZ14VwKe5NPM9WI8
zqliw+-{Tl?BVi~h+(vE>GDC-QY_uLVIeAr|t^R!cLO){eC17B9x^%5@uzX>ylD9EZ
z=v2}E3$@*P9i14{Gj$3m09=v|)Q**jBQXK=R_?`pr(5Tv*NS-}H-ZpXtL%Z@Xk9D`
z2at7LAV5P&s!2INwa}|84rMxu2mz=kvb!t)xPpMiw&eDcB-JYlct%LkIFWQ~_p(n0
zoG@^NTNo#J_+ek#{gQu9g#i1pod3aDJgScs7&VLu!w-GII8vC9p7$s<uqyBMd-&BF
zRQSi&GKRZjdy%m62l0K3XjKqb=y=sx$4T$x%jmZp%B7dt=gHTk5#b?=k?5aA?eiiy
zcSrRH3%~2%@qNNIl8Wi?wdPEH!S<W>BMq{rVlQd~LT+E0i+_-8%U|Lp$-s^u*XuJx
zmp3-}7j!eK*$+AMq7gxD_P=EDkwN|8%)>7J%^bjxvaqYAU0h+o80Hb3Oga3tk0=EZ
zZkrG+%r&_WXFCm9=;Lt?z!&BE0@fq_r-EOB5B2wNiu5XnTtJNNS*9<00ioSQ7v8Zf
zm04G9!>bzen97($<|zeK3P0951tv0r6ZU;hg1GuI6-Et(>hJ}ctF$q!t9jmKTJG??
zFxReHrl8~ya2tn=@lNE2YIIrt%!>`oMGmLXomCSo8<x&AvnP2Ph+_uZa>5iT#%L|v
zE%M|+Sp`+}y#x0I0P%W;ZK~P`A%oPbM6#t8;+TnE+uc-OuD^6CpZ>L6Qd8YaNz4oL
zZ_<u9wEooK!CBUL{+_&8>ZR9(+?B|ZSlHDyzy##F6-G$<`a~q(Ug(2>KX3W6J4I-V
z$5oWTqn)82bLplT9P0@$5i}O3ImpPmYYB2bEtFkB8Q8r#;H7f^o!06D>4AeDK)2@8
z&5>?9j(o|QeYIZYaM`7)fb^}M$aouPXoc{&hqrDE>|RsjGZj*EB|M`RlQC(mLpe7;
z%XrIGg2ACRB3f&MJHvJA_X;FlLZ`-t)uRxQ0!<|Gzl6IwZi6V)@b|X`ID@RRFkB-K
z8;<4KorxmC^D+)R>HIGfR;VHWnc%mPwJjx{KFAX8;SN`MiObQk&xNFk?ubBo>%Aa*
zi*0}fokPyc?H<memw#De9KHh6AI-75MlGxN{*|Tuqbps0n?#HL_8$!FUxcoIWak&@
zaE0IoLHfy$gYSvISGGHd1dw0zRHn%>!F!HyuC=|%)NggyHM@Yl%2QWW<)`~p8F68b
z^HLFiY-g_jg43^Pglf*23Kv?z{c(?;YQG4*(x|<-!9GWTWP%z9zGdkp<o&BjTMRB1
zwIDR@_)kFS-|O$x3sl4tl7(K_2r2hs0(S&UG?~!@1vGSWZWq*9nr1_~Z8+#tP9s+J
zV$ZWea;#qXcQz0?u7?lbkWKXZPqJT;%>PvcG(h8Qpa1lM@BU65?i~g&{#%LZ)GK{U
zt<0Ws@ha7=bpCsJV<~i)b~N-d<cwG0Y8lC$jt-lielr?dVsrW5BAw8jNVP!+<?-@(
zdUy1Ix6h{wjBOMT3^IU<(dz)aroKS;^It7(eO~w_!(j&7U|7w%k0nI~iY7k;RqgKq
zOPKsD)n)iL2&l%kk9Uu3lj$o*$Jec-;<~z{#JKe!8#`m;PkGn4lypGT&Y4Xab24Qh
ztG#EPX|JAcq27k09N}LYR|-`bOR-40EsCelN}T;cFJp3)p9`B+5{YovlQthf@Z5x%
z@hxU9Pws!vFtBvw;6Z3g(`v8Fm=n(-c@8KRDyE_nhqE!weJJbQ)j23WC{`f-CgMfa
z-nd6-ggmKHT6SbH5N}tX-yE_q24>JMY4S#8=C8V#=l%EB#li$@ayExR07|@%Kpjdp
zKpVJC6zg7c1%x0MedT5Z%owyTO|tl`LALKQp9WvyVN(r)Xk=>ez$>W|e$@@ywD%PJ
zZEj?r;YbnR%@EnpeQW7nX8gFtOdL8((h73lM;1h?%Z)I6Sd`K>P~UP-&9PM%J=g!(
zv8C(vgRWlZ%-1|r*4Y~pAFOlDok$uFTZA!0e6#jVZ775G+A;a1CVX_@D`4_0CuR6;
zp(Hx{OveICJ9+H<n(P|v%yabLFdYw4wGPuPb{?{UmHo-%-hQe&Z>3-m104-$r_Tjw
z2xYyD6r#M_exh{h66Ly=e3>NOmoxVzlca*wIStc%lXRo>VsKJVD4R&EbEhE!-ANgm
z<>oXL^OnDL8dPvUYA{`kHUVuVrUudMz3dN?@U2g&Fho{VjyeuQIA17Mxh&Jbr0-o6
zfA2Eox%aFM>-LYOk|oLHnFMhu<Hs%Z&sof&UWHby--a{!XaczSpSzoDO0DQU(4i72
zmD<CU=OKb0eu;lnX?4ndj~G<+UaC_nP=U6m2QOyiP6`6_Q|5%`!e}m+52;y<EqU%U
z5bX2<(`M+UK|-xU`-MDXNfRM9lyT)uhmN0_;XAVue_qgLXmFy?q@o=*-i33$If@W}
zD9_Dw5=WVmf*jE)9qNOqu)-SAR-bN7dc_x=`}ui4p*fToSS4(fZ%iycJv{_Gy-CMw
z|0;yBwvwx?fc;m)2m|jbP3AJ$qSWhmgpqSOk<gtd^X=~jCpnP}7szK1@>Qhb8ei1m
z>KCDeDnad`gQuD2`~{OHv<I8Un4E11`KSYANjhL*G$RqEjr18ZLAiIuyLe=i2ye?v
zxXD2h@hl}@^2E5B2OQL@f<a?onT{lu;~aO%=&%^xA!y<sqO}#UidyPJ`91A5mSM76
zbAikiX;4d%rRUnyzGm8zQ%7CSRafu6>E)sT7k4l7024_?C>`~7f$4rpDLC*Su?>D}
zFw!`mlaSuymdf17?9c8y)C0j6#!5+2|B7SpQ{tS@lI?whv-!=TW9V8Uj3TalYONIR
z63e1$(s^wivwC9a0tlco3vU#Q2y;$CY>LSGf@oY5X?=s#<6Qwh%qdIoY#K13FQY!8
zfc<<Vq72p|%S(=9>&yHx<sqa0_?My^ZECW8aN<$v^`k=qYxkudD-?P1Bkc|PlyD!&
zI<|P148p16bw$oMYl%S_sehZp6L^K7fR|d@8zTM`w-M>#WQel%mM2pR2OQY+>%DH_
zW)PQj#ZjlorDjR>^TkBg{kU*{4-IL}^RPxpEG#D)Q|j%p)4z|qN=5g{)qhS{%raLL
zK+>4R2O);T3KWV0gW<}e!*V&IWaU`9Qv4X{YN2s%pQ;xkWI}S#ZD`2{*)oI+rq-Yu
z>M*7CNS%5y9%e!_)s`gY`}j;Z@$>^q@*6xwn^Y2MllAdO)01_rgzD5kH#;I|#*>w!
zTU4LAZ{M$eedbHB|EEC9#*0d-Nf6~PEX^ph5IQi1(@7+%(7PL@`ySRBr$$%zfEdU4
z&00w7{MW5hsDkiXS8yg~zCwRVV8fh-IImw-IgLB<vgn2M=HQSUPavTtH79@$Xiy*w
z^nkJ+;aF+`xQ=!nTFD$GLUMrd435(Y1w}By7#DKBm^}q>H5GSi-X;Ge!J3rU(F|Yr
z3_)>v(E|!=-Gt>AK_l#R=aXi3zbnkV`Wu5H(p&dS^6@@hONe!Hx}b5ncH;Qi4rXVH
z7XZ}-v&3aHK)@KNlg|w|sGDR1H3VxYYgc%M<11O?%MPME)1V7njnMrM8_ubJn6uo`
zIaH9Lh{UEaDc;ILLr#REArHGp39KS;9qkFU#JCIZ$h|vo^%_?RTq3b1=O6sAU(6Ge
zk9OZ<3x8T%o^!f!8ZmJj|JMKK0?^#U5rFy*ifjW&PtOT03d1^x_a?&lYiVneKRo9S
z_shUg8gYPss0L2>S$dJpb)?XtWPCW@kD-{AO}XbyfgzUO7b3<Xdk91tLf6G)LFc)(
zZEX2&W@K3Np8vc><qu-tm~8GGSORRO7~vHsagez+>mq^`);d<8>-6uQ+(9Zt=K=^Y
zfjSGKOcf#6JmA&Jf+qlwnPse=rnN!w=@@f}Cx=RZ<&SAB>MxU`D#v^Src6=6(EFOS
z#c4Fnd{hV-%YJaD<M)fx{@*d_Shm$SIQax^z>TS;uCZ>|&tF!uOsA^!R7&hCCx;Ad
zK$C`oFIC5)KH(N>@KO_%G+teHlTcxTgrse>*Zg*1d47IZMOEPtnO-_?>`od{46727
zrZ&xFL%F$Eq>Hp;!4nJ|J$TzZ<iJ0L!k1Y;#BD@pG(9lDfCQ!8Zm=0Lx!{iy2<IL8
zydEz7RW|fWRUh<Pz<klVHWW+#iMP{xGyt>}KM>t;rN@G38O+EL*4AMphylGogSMg%
zhp_T8y1IA&Ge7YBGq5Mdn6mJUm^|K<)Ns^<6;yyY&KH0yRC6cL`0@}X&bKwk9hFU7
z`o<9IPYmTxeJ-g`Y!LC6%q2=^koFczN5(oI(#|4QrCI|um8p2pk@I;<8YQUA(mlZL
z`PoXdya|K24VkRNU*bucC0?8}qbCaT)&C?OQhb=U8`Ahj5G|OX-huM&hOlyun($##
z)DLbDsv9`7F&&Z!#~dol7qUB7=@(c@8)RA4Ml8MIR*PA$nq-SPh1Ob&oR^!t7kJJ7
zZ-;B_g2%OP#a76>0L4yE?-;9J!fR%dxrmq%LJL6ScdOo*Qtj6<D|V=upBaqr{Ewyl
zs_-`Ye{BqUZb>%sy0-jQjQdRo1P%YX)O`buXp*Xp-li1Ay@|YX-rhGiz%0czQCw7x
z;k&?nOHP-|r_^sfrAtAN#3&$}6H$6uo+NpSxZle&go}B@fHaH0>zQl8uhiTd2RkUS
zHo1(=g>9C=Dc~c&#NjE?U2dV95UE*W<yZjYjQSEqSwWa5dko^pZ7QiDV)n#KWoxZ2
z&mZivmP3fB^N$)$P$UMQx#8YF<F%x()5?t0Q}*zzeW~w_YS)Q|E!)<%!?pH@>a)^J
z&a)l#1;ihnlT?8N4sXAdBhc|8NQ)*sQf<T4l-)4}X!|;(RIu<aRrzmAR`1H>JY1kp
zt_``9aUvR<bM|y0vSCB<Ph$UCjELxnhOpUUwzKynI{GC1#+jO=jOy4fkNrKd31*w{
zKU6)+?*O>=I9?wXnLX=lN!$&X)i7jbv{)Li$#at6w^H_6gWn3*Yt(5+#AY_g=rum0
zYwCHDRF&h_+g$2F;ep`H0xRkrpSj$Js;TJRy)D;rl5DU1uyxj7*>fSNfBz6oE=NIt
zxC3Z)yN^hu@rvCHoVC%Aa92)Mnaz0Laq8ZIR*15UA?M^t--IPU=6$~{r%}Na>0b>+
zS()Ha5e=7wLmHwq1=;*@$q^JVF8TTpek9_}4?*shc*lKaAac6($sI)U837j!d5=TH
z<%!eguv#+~v&0fgtcv;9a&DLtK3Wz^K`>q-$!xA8aK*#rSLinK`C$15N7Lxu=x}Jb
zHPcD+<UP7`iHeHndrK15VdH*$R{xN#uRq-n;gs9EgSrFBytXRP=LfA{b*b~<Qd+{w
zG4d7%ZK5@%o)IU+An~ctBn+T0)EE>BFBP|W#s`q;3s`{ZR#_Z%C2!cJ$S&g^#az{i
z7-fy#lTQa(z^QV5cCj>qecL?!`+fI{!g9M7-^|4P5;l_Ma4efFpE6SoLAC1rjeeL?
zF9^8M;#?ss#gP^hguRA-c{feA3s6tqbK^8`^gEKjDr=aY^{)G&DXKr8AWT|dn=F?}
z9e6l<OmWJ<6vDv#j=<%=tkF5{-qx92_Ib)z0Qg60(;+P;@V3K^D7vD#CicXuflyDs
z-Sn>3e@{2P#sV0m+~8i%f@nq}n+Q=eN_Hd0|L;DI80@Xetmbxff~zta&}8W(v35D6
z3^I6dcIl*MBBs*lW4lz%g(H&vPz(MV$G7A`Et<pUDFgQ3nLgVRDyujz{4wD{7C596
z(B3@om=HhEy=ORpwEQ@@%0(sP^MEU+4wgxuV*>pVxVp+zzD(*PclO;F%1{Vk6KKs~
zo_5-Oj3$}daF2u$c=1_IV|~sP{h>n67IMI>R_^#{<2~`0Hw+<zUP?56MoPG#D$NBC
z7TqLAx28mN>@3nM(nncbCpYDmc=(ZYm9{j2C^02wDhzNb(Qkezhk+CKr4)(d$mak%
zT%j1P+iO%!c_dzV^_$bKV=tr{qSJYXP5<c`@9J6%G@1Tt(CYannWdF?n|GV@bz7dv
zW|QRY<Ox3e4C<+Gv;S?L?txDurK)X_rSGA+PEX*=?uSVU%(XlYKof;^!6mUcg0fW_
z0tI=};rlP)LXO)c?1iMjRI4loGbMe+R>xDe(xj^7ep;Cc(ULHC=WYw|e@9&%#N=1`
z^)V84-?TbYiHOQ?tL4-h{=x`W(DGdGQP1!pFDe$QlfwFj0GzMrO8c(eXUHmqCtNnv
zjUrchhiIgE>WYWNZ&0Cy7@00J##j;mN(B#ChF?(eq-z@5>Ck@aSqDjP{%sf#fA!zB
zZJ^3a;-UE15^Rss&B%Z|nE0&Cf6sD+bI%!r%O|;kqM7L+hJCiYT-Ezb+it%&tfIX5
zE4O^kt|9w<;vq*ylE|uiQq?HySlkZR;m-CVZ8kOAKPmsU309yB14~$g@9C`M9US$@
z;dwD7b8AoN^=&dXw5YK3^fG%pQFMlPL3x&juOat1KT)S_+8>PS$%>R(Q)zIq7HsUt
zD!WPYWs+n^hq$$(EU4S0bfW40S$M`(iUa`UH$&E}=R_umsITy&TPY9<5>7a^FfP&A
z!Sm{K!APHC4U%sdOaC+jjHSf9&0YC9RF45Ca8ITkSWZhl{H-tRDMLtXS@JKrYn2QR
zp=ZcRrvE*#C5M6S6sgf194iV%xXQ9L-z-Os`jhf)y<pOTB*D*Wlm38J?$!BBNyESO
znWU5S`#`-0d?G~QzQ_iO7H>dPo(efL-zJIWWc1rKkfCA+i%juB@SW6*KqM2;iA>l5
zP~MeTzK@D91S9o`3nVHv)!(a$*4e@JLdHt;r-RuqQ`0t1tW@>x_d0NW$U2iOZMUUk
zNSDK3P&vgKqs<~I;#eXXf?UKyIy9o0$sp<W`sTCo=QfGW$bBg6xeSMxDq5>(?QF{5
z){l(L3m%hced-#gnb{UTBkYT}j2jIa7qb(3_77XSKbXw<1Zc)rPP};ot@D<C&kIbQ
z#Rwbr@D)hDWT9JjyQ;1Wn1~R+BQD&J??omNaqFq?7fX=>6LE@;4LmnxpA22MJ|hi@
zk{-nmg?CV528mq;eUsCm!!;QL>#t(u(v`04B#{!wKw+)aZ@3N<59u<Mc*?W-(-}}<
zZpJoHoA-3^q<CG9E=Dc)>Y(Ws*@R`UHLDv%g1Ffmf4ojoCfD`O{C7+(-rgr;mt=xR
zh6f!TSV)tywUe2u(fE?re!@?|tj$oSqijNPvV<UO@RHjJf&u;H+<q`%T(^|a4o&*-
z9b?>*t#WpM*oi62@utNXCEW+51?)k65aWovL!L@%%_u;Jm4pViYcYZzK4f9}L`e=1
zt1o|-o{)RI)#SO#w-&J>O(&xopw5>B$v{`8O0fuwoAQm27NVz`?$gZ>lG~+pO007F
zBuSwhmPTRX-|EUy_3ihh3&S%)l7BDKuaMA^YnwiyIDS9zufrj+j)EUCg+UYPIYCTg
zPX%-P^$sfUHBa5d5Uho;Yqvwlul7@JzY^FI`l@@ndk&c2!UmW5`2pieB^Yl#gMISg
zu-++lx~9ltKMauMK(Uknup`VRW=TR<ONFJ&4s<24W<*EpfPz32L#bt9$D0l__|k(Q
zS1Etm&tnk?T$jVwOBT9enX4X&r5v+z_+uGY`sQ+gR)gTE{?#41+^$L)3gtjT>l^#I
z6;c4#Seun13l!o?)aJwCK;`m9YgTk55W8-|yySlHG;BSxUv99t;p}@wY=ibs^i!ri
zI67?eMsw{S%ayDX<io(%lb{9^irhO!$-j~?2;Os^)8A-SBV4}y|J0Zd;M~AdY@HA@
zi4ynAyFaxxVx{(jW`(wg*y+N4-+va!7CI6GQK+$<COru6a2)vVH>sJnCy>|+MGWYg
z6%bN<_so?!;l5Fxi{hFs*>sioGugN=jEb5qDIv&ei;8rX_X~}nWTDS6Vk@SvLGv0F
z7B8%$+Ki_|z5e~UL1O^2N9Jx|{_~&fh=#zMbhc}9q!4)Z0yZwnZHa$ObXR^c`5Cj`
zOI*y*V6__)Ch`tWg2H<=Z#Tz;w&y$|T@xX2eoo7<b0Fzb_gcY3{DQ8tutN6u{YLDM
z{aB;uO*=ytzg;rYxFcZNTpLsydZiU5qwn__%cP*|<P@pe@<~=>ifMQ_kZ|8+njs0d
zr~WVr9a#EEfAq(1iU**n!Deouq>pCr&Qo5ilR4xB92;%KL*M#i{$AviHwIS(?lAo~
z!fx;uMf!b|NUvs?3Nb2re}(nx5r=hp>anv%rGim&r3K%&a4&E2wjS&>Q+bDpKVx;u
zYshv^%6ux4lgoC{-1Sz=pQ16!_&r?O_sB;$Vs5J5S~uy3L=~f%HDy%JDQdlhY#_n<
z*S)-6OpvtEt6&-D5a**TF546>`Mv-jDS$i(Qbc7^6woK*{iAD0rc={5Jow4|qt#|h
zP#g!(Mqb7v?MxWBz-?s*HzvOuJTB%I8A`%H*5IJ7e$j9!9~*HgSKq}TjR3exIX(;V
z5W4p-a|(Y!Y17p^6`Ugv6Zz-We96caTHqvy5}hrKPiyQWd`)Opg!f@7gyyioU2E+{
z7$$6_oaSZUoX{4OP8+LZuLI>U%zrL_ydrlqAS>)X#c>MO^XeaqOCKQpnf!hznY(<D
zioMGbtHa~B;?=@0+ne-nMZVuhc6H|62gXD&l7~DnKCoowNgn70YWAzZG<i}>!mxE`
zN^-R`TaCyNvkdB~3iCyZBzVTRMIZQcxpOTf$VU3|6mx8fWHxrOVM^Oc|DGv7m=IbA
z2HyXkw#)l7eK(7$2;nBi16)nkZJXRBGpOnr0oa7vQO8@g=4<8fmFaS64lUIBz*<XI
zV&hTe#CLzacIz<_OFa7yBUzkJjjsG)u>C8>+^=tv$Np1dd*m|ZjeQWx!jniG(2u+l
z)^cl=efLwuLiHyEnPwJwK%c+FX7#b>_dNA#U@UG~(|1IhmZkk8%gxxh5r9!!@@URr
z$~?S9qd3)77T0jeL7dGGcX~NxZ-@G!*y#Q*gZ*4VR+niOUWyS`Yvra{sK*B-V4zDy
zZ!~T#z-ovqO_@`!ick&Hd~ci^z?DdZnHz+zPn=lz`-`AArwAQ#n-n20su?%Bf#KIS
z=Z}*@ZS7aSRhK#DC{Q+@GmsFuaIVf=On{T0d_|XAAj_HVUYhaw2~y^ivdpI+qXssO
z?+W|U;rOrxc3I8g+kmUn*2!-8Jno;kUY2-V$g)H3A#xgC0eg9|kH32IV_4M~6Q>`5
zL5TXZWAILmK&%UdEpKV)LupdLSK<iz^OpE%Xq-%p#b1wcF#%aR@|nNziVU4S4$6<q
zSn*g4G-=H7qJ7{lbHI4JXe)#_rr~#S=O84<KEu14e!J>pX1wY13y4;yq{XbJx`4Xz
zQ;9N-babV>>k6PRhsTJp4i|SC>hyF5?~l_#a9c@E6R5&q7aIGngsu)u1=s}kBz){<
zvH@khN2RRkhL}mT7%tbz2=UQkNEMd8MmCQBEWDwCoR_YugO-*i%2ql>5w0LvIbL#)
zq+Rn4hlkN*=V!^iCrPf|L6mJ}_pA;J=s=i-#RGrLmJ+xv3qma@NtHd?=txVYsu<Yn
zJ!w*w%Y%o6p)g_E|2}5Mx)6oT?ZQlW2`77)$hXU{;&@aO%}px<Q+7E#j#c>KMc1Av
zn$)>}&I&hB9!SnJBm_90uUUdK_$1F);i$J&246EHdBrAwg-jSGu{Oa#^0eEG#~F5u
zYrl<dy>k^HS%aqFp_bbcC}rSgjPI_95OEbYmwHdTyd}!+ptx1_J-KNs)>{%`5KwLp
zJ;t+iFdvm;s~jy*${M!_d!#4cucIX2^<|(tB*Y?LYZZZ}CeE@%!e%8zg5(6rf`;ck
znawPc68|8jbC8v)7zR4D$Ew;yUar4a+l<CUyh>|t7IDa-PCxu(qBkH_IO}-xmMrF4
zZLO}dV(MAXA45QFG8z*XP(KrrTdhd;ZQ-x@`H8lJ1RglIJZr~_Ho5L1DNL+2VW6UB
zD+X4EjcB^{{k=`UEJfhfYu^Ed?(ns7^2N-p`8W9bO8<`pJFgDr(gWk!2c=Vq0<s@^
z%FyHAZPQPD!J?4zg7mNK5tzTf+2%8H^d58h7Bsu)o<1Ex4jzX$DvJp$!Pwx^7LBjj
zzB?Vim!)AHPvsu4(Jvz5Z)_kM)8zgdUQ9v;m(39|0c2zbwo=8P7|f5Ri>I}$)Nh8P
zeDW&=zZ{@U-3SZ7K?S|eHUd_mEq***1j-7XDs3o7ykWc1uop85n2LSe{i8~E9K;T2
z-kjo6nqsXr2_1^7?XRpHG5vSVA=QpTRn^ij=X90Pw-5}99`xv1iu=Y$G$3f4vFC1&
zTZVHec9ZS9_3)d2soyi~C;uc>y&~q@f<d~=aPK?MWdcIZn<D8}be;PmS^4SHI3<<o
zR6}F=!~+I-dHm9;t6lKt_Cpg5mSHQnBua7R7AtzK{T;mJsnxJ>7z9q8>&Z;~4bSEH
z>-eUz;r_>o_jw@BV<#VA;4mNN3a}TM68{*n(+TIT(r2BAA4`j!@&)aeRSONg<GVfa
z6f#fb$MnZ?V9U!`;lhM)eCG23OsKsb19w~UDu($vk+BWei@g74)|Jb@;#h4Exr7h$
zr{^J&#Z`LKfm1^~NK3}{ARBsG&@H*|0sIOLb&zRehXLphvHrfJtPT{&=9T)>)DrVv
z=G|nZ+vZczz$aF2aaRWUVWiUXK&!~OLt^|rM5)<+qETETe_SS*y~5D>T~%%4>H%Rx
zG6Gk*(dz@?^N~*<m1@`dGm`U5L4BGU(!>RD{Lx4o!y7BwUyN%>`rN>5*7&PJH2j}Q
zc0pV!O4{%l`P&|Oue<oS&OG$jKj9eDtf>bof}qgmaaahdq16rAsSz!O!!};Vy-9sG
zI{n@e@~TUw#tM23f6UM|?%mE;Nr+nRil=~6B<1p!x4Hp?XcUBhN-k}NFM!rUrhmk?
zA}_;2mMzYZ<LCC*VR8sDu}R6>^1I1Cz0pXs$@%#=KA0WF!$#-6j~KaOL*|9;Z&7rz
z6+XYNT}X;8Ax1KK#-cm-gVCG41MBmwB}P7glN=aN{(kk0fb(VACgv22tj5pq@GSY8
z%h9E&fGqt0#Es_*qPv~2G#EnRHHl&Utpc4d$s1?ttF*mfk7k#QSCD{%0JXeiVi}VI
zlwl|9HGb5=RoEcx-CD_j72Dj$NvIro^A9mMM*j>w-H+y5UB*3={ddV)BX3Pg_^OS5
z8t(IGut)p!)}baKjURlzuObF0On9|o33*hvv8fjy!3kwPxBI6y4$S;Iwwe>FL$=yp
z01D4jeGxkIAJ%w=nKCjI@31{9i}A?3JxP5}w{=!lv4c3@s_{q{Qe8%Fi(RjsL_Mr|
z^6T!nk~q7z4YJK-N5~}q$@|ne^2LQ8Phf#2i3S#ZHBuBeVmCQzj6LqKdd_>f$p!Cy
zcXMg<NnfI*GrSKYQc=^`Fvkf>JZwhr1da1?8yK9JC-47gxe-iAxAhIY%D>@9)xC+V
zY1f`O-wyv&eJUXm_HV;?dPWD&-+ouzYIO0V_cs$(AgL*FSmyYzc?UX^@e@rvLJVlj
zF=zZq-^lJSo66-~iiF3WAllNAJIf|XNCMrD36SK9J-tK7s%`X4E5tF)6Ku1m!q>v%
zc$YPMjvT#m`>3lqke49?U{7L8={GfK%S4hwq=+niThgJ>cus%X_58La$K&I$YWII|
znc&mM<4yj8{cSSx%~f{_x{Gpy4j?-|T{NmOxJ9AyS?8914m`Re;nV$~$ebUu5LcH*
zg*q0mO>&6b(fE&IG$#c(eo*DBk@pC;fGr#T@=8V&wPt-p21p5V&ut@cN&T#W;TT#O
zml*FT9HWY_zBL_0%Dg977&_!J+-Y&y7s?QA$mT7UtO1vreQaV6@EY#Q6@TtYGuQZt
zxL^BUE^n$i#OLqw-3KFy&$RH2H|$fvurv{*3Td1hT00fliaR%h_}f}tz`JDs*`Nqf
z!{}ngr@0>1N4zJ@18d#ay@R#7??}>jzZ{$PuPDCKXMJe^K*>j!mF;d0s=nN51zH{=
z5z!4~b6pzVXmwyOx;64U6jHp^d{0Q2Uc%Ag7C3My)@<1|1Rwg%5G!~XKPJ}CbXk1!
z=HnDTWYm(M)bA_N_5;+@B5x^!7w;o_rbgW+3w(6g%(+aNlV^4k)rK_eNq$J3&uvPV
z{tp|;n~7fFsD<|?j1m4RiKd+Nh`;zVvL7Pj;URP}*FheSgSrWg5?{13x@v!kn;0l(
zop1QdvA{@tlzBO^q=LBrB=Q~Ev)137%AmnLcntT#f(CnbDg6^2F3hURR!NJvz=T$4
zvn1`C+%(g6+GT?1<+xjV0`<f1o5%-fKcC_3&=+S2dS((Pb)Y`URhrAP8qz30C^cBq
zCBIPc!xbZ;#~avkN|)d7v0%dqKdPGhaX3E~pSX9zsXD!zggu3FwDR_V$Ob+j!ZdTR
z{8YPh89L)BGpFmOpr2_+c|e7Pt%{p{N&>@XDQizsDe&hd=Ud!yRpzAZUX&p<LTDJ8
zK#Tg?>JRDPKkcJJ9CiFBi}{1I^n&(z$3GsewC?b|WD*85;nKN5)n%ICLvrs(N|Y%n
zb7-tW%3<y*r%-mSPP){UcW#U@W!Soa6Wx1f%&TmLQV5=&o<qsxZy=v_rexE0dKKB%
z$bPsSH%MoWPs`1#Nn=9ZsMOX1uPYcE;(i^?!kbD<t1rBV5PNs&r2}vV{gV{QhQk8t
znR%koG3+Pl-a-B@&jln4$9?;6z;2{O;8G(z&4AWw7Y+EaW)R#y%gg^TK(wMM1#9%k
z<iRVE{8zf(rMOfE&;MRaAHzCRMtd7i;dEFYlEhw;X~K;@I)QR|L|=)no;_bowaEnh
zhNPJZQ(E1m1M;b}BF=(8^sfzJX0TwqE8fOH<E&4nbqPh(Z)B*42j!L(f`#M$G!(Gl
zQc{qWqYUkUZT($;0?(gCC)20(js6NKN&mf~T!5}&6SE65Pk?hI=EsddFK34r3<M3d
z1S?by;C~Xj>N#T+zbOr#5M6|Q1?#vWU(D~Jm1FL*aun}q<OQ_-D=6eAFUtRvE5T)=
zFyvV@dHd;v&f7>6qWeN%j>Stsz?18h7A;$0ewX4Q0ScMpQdcB%Y}8ly%ZYOBkATwG
z>^}(F#M<%m`<BF5eyQy*i?74mGhMh!ae^sgU=l9pW<D4dVIQnszyA+pt>xm?>?J_m
ztP&s;@x@E4Ij1UNp0R!ZcYHS@Y&h0(XtDOpB9*AoGhcGE9z{ZlompS*10aXbny~hR
z*V8|)rPRY+m^z8a^uBBw{dQpLb(DPpq}jzK`q*Oz7i6>d;NUQyUvfY<ChT|%$AnC#
zyNvs)1(5oew7)IMY$`A@n(ic*v)fn*C#UM%P3B4ciiY*f{7%|dbm?=tXkYLZ*{}T1
z@ckHj1p_+aq>F1bV7Et205W@oaCBv970`K(%J`5-CKifeuBT^>M#b)CRS?^N%a;iF
zoJk8{I@&)v-C=4z+sm3sZ>f)d;7g}@=6ozrP8e`7XzcN7C4+<5kVLcvDt${HFI#mC
z*P^Jp-_?W*M+5{1_Mh1cg#df-Z8&w6HQ~~EKihc=a+cQK3EaTRvHv?%*hHfYB+qE+
z`%*s3h4~wF=6TY2t>rZo1>-x4s?MX0P5YbV+EKnE2S?T}mFCzU`mG?BFW>BmXDpZo
z8DT@{20rLDk-}<pZ^O3;{;l;}nh;t_vfCYDzeh2&s1vPR>Va@4dO$>`koI?d=_`Pl
zt!N8c(RuvUVJVvp{pbPuBc%`3J<$SSlYaUGrUPV9ws%zJxG-@5lBG@n#72ko2JVM~
z-nqM?hda&I<zlR{S%5lOkI5{6l-;v_Y%lkOFS3#GKT^I=4YDNjaMs}@6}#m$dlouI
zlRcue3{piz7$Q$Wh>OQn+{#E_8oI8uAm_6gnk1PE^qUp05k|VwN^}OdLtyp{yn;Q&
zPuq4UTPbUHY1l{))nadSv70qix?=kwjuenk<T8RX_YJAHRu0$nyzGgzmgbf@TMs^P
zjzr`OtDmR_C2`Pt(j(&S54T=<IL)=}A0ej^^_yw7j21AKa6uxxSGjA?MNuteeDTZL
zWTgPE%1AiFHbnWc;oHOwUej~&rDU&I@7hw#84=}t3*YnZo;D<S;XTDPlTUdDN><Co
z<w*d@Or?^OIkF`|Ffv<NSobA)_fstzFB&KWhFk^A<LtSrf2J|yEol#m3L~c<mnww>
z_2QK46zRjKxCBRw!Zp9~M~a5Jrik6U6LV^-b-FGX+a@?wCD@@xqjsmu*(>H+*g_Kl
zH(9E}X9OqdPZ2QVWX=#nj-k@T`)b=Bsl19d#foS|mISdk@L4KR51J1~H}`<ygD^Dq
zVf)JS2q6;otfs|2DI+@AiLt%Z`TuhPXa}B08;9+WOo3GlF#tDi^|2PR5GAob^Z>h&
z551>RGvPb;MqCACp(;6ObKL!P%kYl|G6&Ez=S}b`dv(rO_Z;jKn*HHm#H8CvpzOc3
zGr`*N=Bgx$F8W`ic3<liuFJCm0EqhkdhAfURf<$kA8(<^QZ?E{-N=gR?28EUBs|4?
zlLH@JK8<H5j#!Qid7OSj*1b>foh>dWpr^YWG5dHK{!fZ<d;-v56F1^Qn1mTX&DNdw
zX;$u`;A@;41Aioq<K4=<2P@cs=<F%wEr;Ohf91%3aKN@c=BI$G!*gdU`BS(6&?e`G
zWDTjc;sz%Q&pO&&&af7qFN{KA<R$~;FAUcmR<3QujySX6;{A7Xyd4+|P3wRf!<$^M
z$`&!mkq=zZr1~W!rG!FIeg!>QAn1sla&IqeGzVEzii|FBFo+>7;bO#73NnQWCjnOi
zt*%E{(=KP(Og?-m0ZUin-OgE#8XFly@aNN*gSqfy=1ec%<YPHj^a#0c_30v7L662+
zWW)llYy%F)tlRUxs<vhm<QpygvlhQ9X}$+-zSKkJde|Z(l?Rac(v&+Pjnj^1s!!ML
z#`oHJOj{Yge0;mYlfrHk%$_&Lwgc@LJP6#{tBW1yBl-43RaKFvY*r;LhZmca>~`{<
z#fl&=Fa_c)-yYs*Vi(1kG9-$JZ&HyM7iFkRQ99Jk!neS<07~C0=JudFEY@5n{$@;<
z(A<4{hTU=FEWe&RCu@*B;C{EB$wWWbYtp5Bdf^?H_8<YLD>Yg!XR*m=cBge;`kSqJ
z7A_#M2X`;JPSFdi3<L9Bj8O0jdref#alVD?-GDRvCg$u9$pJLh|3}j`heh5;``x_R
z)@HkAa&6jNn{C^+ZFBQx+pbBQZJV3x&U^23pJ)E<nSPk_J?EoC->yqsol$VJTGJVB
zJlSzHiG7@LhYWN9UG0pk3Ff-4)2puc$_5eCyucGyb+B4_Y4?@Y1sUYmt-4oNB_+9Z
zgp;v&D+so_t{AVK7BjGmKn2vJatB!9)zHZfRm-HoF^LhQ4c<CrRATgu(%-2iW0?T;
z#RZ>QvlvMB{8#4ts0N?2ot=0vCcW6FQdnu+(|bDq7~7r(3w{WN(++egNRJz|=U0~`
z?CeNbx0=l{i##6hMiAixqZFC4T1k%xu0%<U-?dB3<KHhYiQSzhD_C4F`P_Y1m<|3@
zH0{Qwa9D3T%xxFoTm}i>xx#i^*dI=?Ua!6YZ2mcyx|7xJrgG2J<oOuoV}ygx6=GUE
z^Zh-8#vE@fn>GV1Y9cu{HeZKJHVs84IPkEt51yHunl!>VZAf*ZHM(Dv-N5aNj<s4E
zt84R{NnT&Hk|ejF(t2Ij-?;SDF(asU*EeqGvoxOm6?YmQ;w-R1vD5ly1MH6FUn2%t
zM8fd*R{xXRi>blE=d$&Wf(hT`+Y<n)v?ye@qYanzS+JjS+LweW8b4Cr4G#d<F=;Yd
zDh)&7-Y5*IXfb+S6Zf1|w6sH4>z`+EH#eMl$Ht9CYpqWYLlbQj7m-+^cxFwMV1W8^
zNz!#IhoB?sQ&Tbzi{WQwL3rE+z0F0l*sC{_`4EB+%aIuRr$onB{Qmdxyxgd6Sd6cg
zT-d6dNS8asu?U?6n!KvLcvkn&Iy&T8qnWTde2*k6W!H>WmnCviB%969Q=B<a>h6UJ
z?G9}mE|37E&E;DrcMEVIy}|)BHup;hyP=N-^}9(77@*H!R0is;m>Psq;9R+T(Wz-5
z7qY2w{3j*_FA<vApZAJDl5)^2Amgqtrho1JN+4%Gr&mq#;=(Ftb{c>DRcE>y&|K^*
z^6Bw!AC|T7;&VKaXl^e1xZM<(Lw^SGEk}+!3~cqb8`Rar{|4$zVh_MMS3ir%bza)=
z#AEQ*K}_K$L{<nw@b`QeF7|!fz0bVl+?pOMrAZlU?sOsiSET`ovFX}obP99GzxA}=
z0m*A|T@MPi*si>_hYv0-sD%2sO>Gm86t{FLL;M+`pILGRiIHBw)_b9b^GvpBa`$r&
z?OWv)MGZu%Vhjy)AXt&%BsBAVc;p*PmS+b3CG1k)Ni=`y<d5ZJiLYy_0(grl406dy
zB4j9BtR}5WA<;ipuhR_1J%OQUqYIW!Od=S(+tFE2(h?smXZPVzN6}cT3{2WKm+SY+
z*!XWz#Ccs1`g-keA%|4Uuj3UgCy_(3e+beay&oD#!ib^-V7!KXXmlcA$DhWA9Y7WQ
zaOR8Z65Ic@4*QmGPH6GTYF!{T<nmV;Sp--@lykE53WTV#Y}?-gP42&I^=8i`WFo;%
z?BeDX0!doxlK-tYvS1mx?^V?CL1anLggZKx)hGboZq!XI&n8bY$S`9Z?zEn9e6!ij
z`f1o1oCLsICaBQ{%bl7{6x(xO#dYbt^~=Wtp7lcikjoL!Pt^1eZR)N$U0Cy@bm+Mg
zQ!%%VSC$0)A&6S^`+gy7cGynR8=sU_&%;pM-cA^btGAISux5HE>;}&a0jkMBRw_jS
zsELbRgM_j3+Th9@WT4Em1xakEx5V_;-%`JtB)iQr&QU^`a>s4ZCrC7o2(y#y+04m{
z>rZwHRQ!+}>U}CHHUf6(pT_~JO5+CQLX4kk@qwjx*-Bp{%<1)jwCDDN=)k>lr=j%)
zjU{kM#A9;JETy_{wUhK^n&xYAn#e)N2a11y(vACk#?sc8S3Nxh8?)(UU23?J$IdtO
z|5z3<Di@ZP`UaR-fBdpdCF~2G@W|&lsE?HGp@2{ulm9$UiBXvm;g19>$`ZqY++WMA
z%cjqGYrHnSA0*Rb?VCM~^axx5D;@gSR#BggU)XMdUrm(+v%<589ik&b78#0^5fOi(
zp5T_1qRKrDctCkfW^+oFeuKfAL3Ms0)=6^8o-cJ2f@7Cd!;{{Qo}=lxl%4@+Ca7``
z^LwejLQHMc99&kwevFpeelUP71O{J@n3A-?jEYhkQ^@ee+V*}QWcGsfUP&K<*;H@q
z^WCJQv#g6+Z|B1&iGDS^&dMQw93DGV^VQ$t0GiwoSHG<e=CD8Pp<2{Xvu}f|AUPpv
zm@>&QM9f)rrc3Za*Zls)Wd8dcGbD&jSHoJ=hb)dxAN#_AzW`|;>8^jTW`yXKKbr4J
zMw9V{<s!70x6Bzar|0}5`2M^0?Nc<#ik?6`tmO!X-elWC_kh{qps95LJ8^|NSh-_9
zX;?@KVOL|8Wi%xR&f<2wwQo(R|JLx^`Cm@9ag)SkCK6;QQc#dxrM&b8(C41Txv3>9
z3r2pCfvv)FV)NOK#njGnDEl_|aiRm{)Mb`)WX&HMXr;~S#wWun#1294;GzygCB_o-
zbqs27-QR`l>!k6LWQ(;f88d9Ag%NTPTQ9?%BBt8Tjz8Y}4v+g9p%D~V>GZw91UdK`
z5`_lu?~~zqg`t8uKIl|Grfy}rJoYzupPPZ}(ldwwmua%WOjAcZ%FX<CdA+jLG}8VE
zBk$XLvQP#}8;}$AVi+4(c(+nwt*;~e#`6`l{zq3`a_^U#;gFgNm6iazn9-0MBPZv9
z&yHvZe~ZU<?GCsIG+Nld1wIcy`Zm@2UIL;sr_%<`UACH19Q&1HS4$XcZFhR;(kch|
zj&8`Ue($I^9OT&!3POm)tUqL>5p3B~FryiN*MOEG&qGuoC#um7YziJ4lvmLv&jKv%
z?e`$k6lO>3nTmu=GwXA`>nFvp7!Cw*vf6p9|K1V?!%8-OZV>G8uwoWrDWk+CfqL&&
zXZlYjN$}kt^*?Bm)i=G_J0{<y#5s~`;Pm0G1%7ErTZX;fF2yN;eN}R;yNg#sc60Ds
zDqQ~=_%ZCKhE4#xL4%k`+6JYd-pN@I<9~<Du4(@_J1z4M2La$Mzx`XP)tsdOip@H_
zw3Z-T7LG5TeTe2kCW}Kl%oJTfB-(V*&A_ZsZYvMN032ERFl|Mk^1h3^#h$`2D2gb9
zZN*zu01$ry%}}VD!LJDRkbqB5Pxcdxk)dbPHRi@nlW17me_n;&0$@Qxw-s|CvCK%>
z9};Nt61jlLQ{SJnX@*d8+185t^VV(AUoJl_wVqS7GaIc_bX<!UkUP-4wxhq6dtSxm
zUmRWCJuC<<T`v9Z|Jk$MJ5PJ;E~U>T*R%6);}-{z#E!SvcDtnS-AXU)<<Fe{VDg{C
zayH_n7y?-8i>Ib0_+XD|wa#n2W%f@IDax~wN#buc)f)&p8~;@7FOzJ+{wU6%azuT%
zF?AI=VFNVd2qAnk8c0u$yJV_@qOAt#o)i2FOu*-P_U(ef)E-oFf<!dd*w{J99ZaBo
zX*dollQ&KBp7<B*0ZPIr4p2m@uz97eU&sPK*YlPpXQuOR;N*KqZl5Cf>InpiL-9Wy
z>O}N5$?dDF<~rh2;W#1EhgUzc<PYBnJTjF}-gwLKUb;yA*#kGnwR&J|;(Yp-L<a7o
zZqIshS(o{}@}@^IC?HGBmDio~m{XG59j;@L{Quy;@pPL38uR!O{v$=f;-K`mZ>Rud
z!%g;-$~BIwJRKzfi;mHH+3(nnY?nz{;&=B(YMs=lP!XswTcJ!-DsXR26$sdk>(_z)
z8Ci$=?mGKDt0h`(m^#p;kroUof;G6ZZYHin9#1E8R6Re+S|*P2npc>k<XzR}Y?q57
zSls8$ZGW`5K*5ti5YY=BSlzINj~Qj?HrW3_9mjOW1l!0w409bHa!{TirrqNuR}#H+
z)N&?e1BOqXnQ9`RK@Fa)+m^%ol+xQ(U0z@M*m|yu`u^SFObq<Gw)!Chaz6|e?Cs2X
z92g{`XH;d-0OV6TorbQ_1|~6Agn`LlhMab^3%F=}9#qS-myC)MGPu+L3FHz7(~RDX
zGIpH5qx(t6LDsnzd}MSch;kxqg2tU>`l&bJ9Et!EoRRbj4Nixo|Aur6m)(Y6_+NJb
zpyA$<#5OQpMA>=sE&ah^d53SpaSv1J9mxz>_fXihcM~V{*k|J_3YkEg<>Ex@shom|
zXg~bz6?(dLrN(K&M*%F$Alh4O*mD@EV}W`3N1`A2OzLc;IV@CoDn2kQ>)$rz{MUfi
z=xC}zRhVnK6(u&N^Xq2wp|rw^=ag~re2h($0CYM5W5;^ARk9l9JV=J?J+MHh%ebDC
z1kHX-s0L#c^D8GUmJ2#Ri5vBnNq6h>q;NS6eNxg*ls?`=xVPcfK3ez>z#c5#-z}N<
z9~I%kcd}YKC0|M0a$<ML&51=VVIP9_O3|<6i3d5X;Lv*E@zZZJYe0ii2!fePchF(x
zZ-Hjs*CtF@T%IU)6DL7`z}q~|;9J$qu6vVd)%2iacwHe<%L)<hatXkFI9K5&`=R(=
zNX7K@#<kV9$r+1_(MQErFxs9Tl*a!)s&r;8yGp}TL1)Z|<4uPqk6ULWBT}b?BPx(k
zKB7#aJC(vIRAt6VWoE{C2W{G9(%s#YJx_CvagOosmvjUyhP}IjUydu)OodE3M39B3
zADoW+sPDdH!5)j?WIS}=7O!^^k3ac+W})h|OKN>z%@FhTuA`wlUVZL0PS}qH;(52T
zPcY`=r7HQieaMMRY_|<)Aj1~=b{Mm4IP-44q%=DnO?pm`E-9wcXt{asH?f$#1F9Cx
zTIOTg8b<o=YR;viw!lj-b`G@;`^n>~|LWBgi&1L^S1enydr(0LAQvT!e52V-$0*}E
zu)ZdwZR=Wp!{$dbis!oQ-4=qus*UCn^~ji}<16T)d;h^eB5_x;a9|_OGkg<89)5mS
zU19VDpM@Ri0oAVejNRB{E*Jw`QtDqntS7AD+9N~&w^f61d@X_>9tTpmU6a&b^bo<A
z7O{#t&Z6g($;F{eUOrv@pB0k{ct9y)S%oxjH30za(sKtoz0o|uqX+=AZN3p>e;Nw<
z&(R74HR-N+8mjJMq#&EZ&GZ5E-p5>bJ|kR03NDDgJQ9F#Dvb>sn|n+7En+7o6vKfx
zBhv2P_SHH8prp4@A`=Jq!-VgrQ^QWt0JD1PO45q`-IxV2Sx&E`&K}R%K>aAjk*`7<
z2sbFw7gGOeFhapfd1NLnR)|&Xi&N%ZMy_yOb?Xko{KQEY*eE8B*5}mYrRqti5|)?$
z65BJLTukjyx1(Yi>O0-IN|+4US{^;(5=8*(#lx#V4Px$FUU8}dKxzB<+Uz_U^p|aE
zhLtERAgzn#o%CWd5IMihL*r@!dh2e*74Bv|-^9M4@U<OmI8|cFAqSfH%fL9B=*M>M
zyo3*5u@QLljW%cHvGHbrH<OjZr~a*$i8I#<k>G*_X#Nf|(bUn^tB0G?COpT6FRsf(
zdtmXtHMVjPmi);MjUQkzP~h-GH$N#dqQ1=kaD0f~8pxQo1D57S2Mqpl0q>5PUw<oo
z`SwkTOCi*f_c3=lt-&nOqKVsFBAW}D`2PmLMybW$Iy@XEe_xo@JC1L9lg~6i@Hf?O
zk(N>R2sgYc>YzhWNRENkuwX}uiFD~2A?nvmIBzi_iGDVr7^4D#UAK-XE|X5*`|*3y
zNeel$o-A2B1umB;sYIU)rVJ9Yyy%g>;AHJ%eLU(Imo&G>oJ<!J#Q9>ts{v+#Qw(m(
ztM}&dgzlwe&H$j>(e_HYLGG^R^2TR5+bwDe-CZhQ>YITX(n<_Ci+=bz-6c0B5|1*H
zn>)R}ufkfJFD+}2UqvkoB&|#u@`3;fK%Q#7<N-EXB-!kdrc@3TwkN<cgP)Scm>;Aq
zb^&=u-OKR8v@ao@#3nqwu<z>jSkjTf3K9&^>&FK3WJVTTGzS-yu4$3E{mhb@dqhMp
z5<iBzjM>JQ9E>v8%;Jd>j2gO)Nx_U*8G-Bch+Db(SZ4#F<IK~pmua5@B;TMcS8QW^
zn5h)GhqXmBIsFPy)86n&VeK;lEyTrlk=Fs#J+1mVvo*49Sq^PnY%=>{XBBM)QS0O@
zgz1*D_IEE2mqHqUb}?M1G%y)$LHsR))(lUr6wSO)X1Wfth-}{LdBTn6ghZ1zh(W5L
zTBg;({<<u|-u%wAHyNq(xa{&TyQ#(>f}d$s0zDVY7}veEMAvqcSD^!4BJq>$nevq5
z+b5^^sz1s8_Ij3Gd0hQ)5=-wXH}+S6mf0(6B^DU779v@~5Z8kFW#Ve=@Ld2vBxh8g
zJ_>F!jF&Ft^udTpk_%fIfuHusqRK?552cb)>C0GP8IW6Bhxk7ri}N<^L2)@!7zDXC
zPfNbj^k8?`=jf0zh>^6G_u(q{=+P$DdU`l*>e3yOWxG&U0G{fT6URWmKSywWbo8qs
zr(dm`DO}2Vj2|Q<$T6exIoQ9{o7fwJuzRu0I%|z{7+ge}U%WjIgr5xFQMUE6KH+n%
z<bB?^Ru*hdPq%vDg_gEBjL<?z{2cDI%h=?62c-w`$-blph@;7X?$gItP!yR7;EQRe
zq!+@iyR{c(YtGV*6@L7ZBia!#=uq!vOG#K;GsAb*F6a1Ah)B3z9QoxaG7{5dx$2d)
zxZW1q+lcpo4-6x8uKc^+X5uocI=`T{@${&`^D5^O5D$z3dt3-7JC9JRIq1!e=}Kom
z=6gagJv&yx^UU1`Z6sfROb`Y(K7j>(-0Jw^*e`%WsA}Gy%RB$>oNH#6xIQeL!+{`1
zfG}vt@)r^gQ=^v6#;6^Xy~uWv#ej`}Mvx|6`rh7g7e;F`m_@0QVU9W2yX1iVn<nGi
z5d%_<&UrHFEMV@Oj_auAZY#}KOSLG>tBTH6BK3$TT=jHW`Od;hYScdbl+XlDCE|wJ
z487Xj#d8V3Yg_KbSoOOb`&nc<Q#4+xlaMy+WSqAZg$`8uGBJ--7#ON`IaQoHC`CIK
z?82PoQ$f8DME}y8F~zXSidm_+(jH`I{e?j#EKTVYX2IuUO1f|dLBtRCq1bFE^&?AP
z_~7f4<y=nfUUH!Dq@nm1r$@T+Up<*s;wtEsGU)dzw#!2C!%lHSYRPcPLk{<n`@)&<
zxGs=OW#r+~Lns+UD|})C0AlLvGm5=I=GCH){N^r}f)lLXa;{e4mKo;tV{m_KONIqW
zU7?nS@SZ?FY4;MHfDfb>lgjxfee2r!uVS>wH2(-Lb3@KmHBW~n`;Xg^{{4_#lt%p&
z@V?0EK`r0tV!ZPyZtPOG$CqZhKDFUg)ZxM7Hj%P<!AuG~be^ZX=)<3^d0Vh~KUKx`
zZ6SN3eRytLQTcxuyo+2XrCR;IxyBkSxk`lFJ{@ZUOnlLnbZF<Qw($oCVn9I@$>-Q`
zPKM`CBK<7%hfB`s&5vH<NPpYGnRg^wY20|jelXp|G8#R|G2Sq4<!eG?h_m&^7w4?x
zvigJ3rErxVS=R^5HwI(I0Y!$#3kUsP#S`hg_^MT#s_qz|foS`bwmp>3<OY77a-~6d
ze5|WZy%c_GZyzQ+H93Z%-k;sBu`kaYO{YI`vdyP23eQeXp}Jq9<1n|Pp8{yX+seva
zFi-!>I{e!wKQJHJGEusvt%HK1{Q|7qOgeB>MAVhm)#-HHF{w2^F*ad^nt?^h4mOfy
z)W{irYMbt2M}muzvnOSYz3GN4drp_))$(XexUdT@?_AT91rXUf*5hJ{tN;b<DkJN<
zoOzaiu>ynQxE6J)S-D<!0Ol`rop9HW+5K5LFVW5KOYknH0ql_#6UStMrn>m*_Pll*
z1$qvL38f21+CAY?!%W+@G)-Z6Ber?wkG|*4iH@*TE2r#4>1@{hqPUz^&9aU<OQPy-
zyH`6`Fajtf<sQVA;3%Io=}+j;e?7>)9y?M5c6Dx@i><n~?tI&Ror|4~C|uS8#|T1K
zTR#00PFz@&e&JtKVg4EGuOhKT^0e_=c8q;~o_Bq7Cs^`edP7I}3ZdPrLqW-uNMl*|
z?`QpitYfFBoFTo#I`C%lVRCj}_H13Nqvg|P?#RyWb)RdpOY)A_BBx|(H)}P*h&P33
zDAGitnL&Ed(5kR0K3VkFI{5Pqm7wp_TLcC81*Z24?ia}Ii94ZmZl}dZp}{D-?yL7f
z9?Q-Vp4j!5;=Axgy~cmD-N)L9mmt%X)@@GMGQ*ikC_k+^l$Y^gaphW^_l;dx34e0b
z$S-&ZIyHV;PO7Gve^e#eCu1<oetbVB-7<SGj~jSc1?-&2d>D}qWWolT=3hv2Zyo)f
zqE9C#60u2{v~$2#yV)Dd0rJd&m#x;3G%2ZFtFH)Hw`eD)RAH>`uf7htzX>N9|Jl4w
zfO&g5X)&alUV@>nYb@p|$YZ|{W4S?+fH{EUY>E?S`XkpKXKw|ctJc^=b0KjKNDF)&
z<eckB&lhD>LCn#O+a0I-FYU`QHIJ9@GX5%yL9hP&maeMq9av;cbqV>IPdkKo(%&eP
zX_1=^ly;MLtftqC37i&02<k7QO)O!}nS~XdP{5TIDIAwnFvQC`9oNVs!Z5nlwiHQd
zKvg>Iosc`?YjSTskz6nJB1SUv4Y99xshc9#I!&XX(X}&3bj;iAf_{C>i=n0M71~Pw
zV!*P3K-KGL%$hC(PGfxk`AFR=f6}F#Es?3!dULQyqz%aRb(+-E7?io!>K<f!d7jgw
z|Eh5=bhz>BS7hXiXY%}_{aHPtz>2o>OV_2fZtiZAO439{Kly_EJeC|tPxh=RgK9jY
z!LMUq9x>|!q4z#J$psf1wjDr2R2ull(tHQ5Sz!9vRFTgC>)Dof7@>l-LhC8KEt@!7
z>(ixkxedPs`Au)#w2gFhapOQe#E-B`P^eiB!79oGxhPn%qYN`8Wr!4A277;JM>0BE
z1}u@7XULeIzy+oExtyI!->gh;1s~;ksj+%c(Nfp1qTZwu_Zch1-iPk-?g(V``ACgO
zKYFw_K~2*qa54~Y#$|p06j$8%tk@r+_r#X!eL-}shVd>QiXiyhB2nafLfW%a8ClgW
zYyTwl3?Rzem+q5iV`EKiC8~JSew%;=b2Kre30h(+dxp71fOSN1JJp#~)%2cEkC{`p
zzlSN53bA-@6pqm_ux8Ku#Th^Y?^$BZ{Zaqm)LOU=(XIMYE|d>oTWQ}eqRrH<H2yUi
zv!LGH2_G}5@_kV0mn`t2g9qj)n&5sfU^edYG4ACXr^yupRdwG*imsBN-rM*E>oY%z
zptpwrk!P@QKiaT5TvOw#{l2EAM3qx<5zxKU-U_)I=Tg(TkA{F}Xw=cw>=I$v|2}p4
zuq9umg;>Wa=B@>U0yO^(<vuGc@sl_1i+mYp+(71VuT0nM^oaFHg?-l@x$Fz8^bua%
z&%j0>x@m3)RsZJSt`@Tb&h4uY@TRDm#fzC_{OvQp2U6o7Jf3w(M6TrZ0{`+u=PX6k
z|CLU!Q#NcY6DN%<Tq;%6Q6jq+&VZJ0$A6o3N#+2<%+nMEEoUD{tO+{xFi|6?DDrSo
zQAe;WoOnHc4a;WN9mkI{MXe7%<VCm4+I#k@Lm1-ve8k$3C72Gtmzw83R@sQsG&NB5
zJ@-Qry3p~-eCp@0BrEz@z2F1qhO$AS>>`<R!#&v7Ari?YNDYO}%p@YQL<%bc6Cm?_
zg5yDkY?4DyF8f;uO;~j+{BaL@v8s2=SQ<}OEG4L%Re5#rEqg?fgVUcc4;GpM_4n#t
zulX2yHqaCL(c3V`Z!B5m`jcQQ#EpnC(3z;s&V1!XM)mNOI9vyMs7HV7+X?3gnJ~3Q
z`EhdPv}m{v6N-TO8N%A|MaIxGdsqLQe{G6k=kMfbaq?wW<g=P;XK%kCC^<((Xc73d
zRbk(Gz#l_<HALF+H*MsA6e}x-Cv`8;0e|fdYx*QpqzQ_CnO7l;#uzQ)$uh>BL5)7w
zZ;lQ)adc&PT3GeQ)Q&}DUiy4@6@O?<7fpH-^x)R+^S7G~^lb@L$S-jDM-pvWcKP(o
zX1BU%bRHFUI-Z!DAv5oNn3~h=kE$v4_aOdX3qbJZG?koGuzMUp&d*<1j&4@GT$H#?
zm!s6^X{fLFe4UX!x=I};0jE(S^-VKPOa-yJjUf!koFw8O4S7JR+Ow9Wouf?);^>!&
z*n$`MCps~<)mWRC4WwU&exF@aukuq=U~i`Lpt7<?3bZVx^U|NGjbx@}u`g;xR%J&>
z&pO7coc18+KL$ENF@oq#K9>`^@6f1p9!H96ET$fxV|f<`_j7;amQkoF7ohnxg&m2;
zH6j_SRO9*z9znborN@&EqrG!I9qb*O1{!CyZ<pf+LkRAQ<u^CW{Oh3D{9vovpsp&d
zy*U01-KgHk@6nf`g`0Vc`204WlMn$*Jq7O?vrcPXXeKfLP&})~sKny=M8`Fpt&{^r
z`DTrM$-$)p{r0`#2&YfC4c=tJC1{;X-0MZ31BKlUSt8t;=)uvY8PU(f=exTCLie=s
zcfH~JSoRx*f@Rhd`<<pPGstywHis(9$qcQOqXd~yZ^8!|Uyw=EnL@ietJ;vlnF#20
z3=hhh2bP$IFH|RlTG1%|%W}}V40p7wmBHqFSF5pmQ@yxcHr8dZ!?SEO@Jdk~th{Wr
zbSN<<Jeqp&A5Fi{|I!EH^o>qJn?@@!PIH=c*mRhs-#f3k-o=oB)jj@0Na3*0`B@-o
ze*F@bUF|)X*eo$i^4d2X7<IJ|+tt~|W8_B~eir_i^WVRiL<yp!F%0(wrO3wAE@KCp
z>}@b_#*z!c$xwZ7)vOq;X8pJKQDF4szC&9j$GY1&vF1wtIn@)TPK{yfxLawThDbmH
z5o-|zOsi{L6SADMr4NV-U+p5<3fJpKJ;jlmbw276Yi((VFEm+V-S8q>ASX(v(Lk)1
zfhED@qcJ6ZBR}l^u9=sI-yu;e1G}X5npw2_lOSU;d2HlRR{G|U7pH(}r#HZfTO?Zy
zI$Ham3br8X7#V0C?A}Z7p+*Sk`+Z~VQkCI5MzqItX7Sx2-INMzK-4)zjHdOGU<R;7
zAVPnkdN8Q5vl-t3Ca|41a~oLLmO|W{3S?K6itY*}Z>@@izEfj~(jkv9EjxhtNkl%U
z^{|W1tc`M5aFW9)WYKtwCW7=xUdWD7M)O!Mjz|{xLh@p^zTQU29oBh>C=Dl>_M6Q$
zi+B3nN-0;cYZS{9pP`>pV=Ot}%eIem4%itgZV4wN=S}~ipm21L`s&Ny9vZ^nOQ)V{
zQP~gv6OQ>H3cBY$w4c}zX=^ZrAF|Sm%VU=<x`N}Qw|IQ&-cVxgy`?Q<#9EUd4Sk&C
zxF~YjL1+>7Hth!$*B_^TqGf1njLb=`Sudq=@c5y_E*(Z6oF!t6X0#B=grc!PrJR7X
zq-z#V(Lc^al(K(ZWkZ_;lsdMS0lTOO#NI_uq{Q@VVHc@UPlDu+)RQlrjxI<@Sy&^J
zYijyi1XYSlvmPciR^jX#DORs~5-MA)n6W6LI$q6csAG;o{@_62w#w$zGe)!5u%9Al
zypO`}8L~*S>?MB`3|Z=#k~plWNQyEIHg8g*lQNY8xv8h56kckP`Au2o%hLkF1k{~u
z0qZiSvAQp}em_1AVfEGT{0-?F6Et%8>3nX6zJwH%Co7`Z1=o0<9~YyToIn(x6=>v%
zT^s?<&x9`tddgmjec8H|>`&kXHP78fnbR9n8xHa-W5VJZC7Naw@NI8MqvR~wu<W{h
zb!jX$$Jr>EDrOj%Fy~v$pNNzni9&74kYMBwH7y7Ef9~fbCOYF1*efm-wBy4}N+aG@
zm|H`cpdP?Vkfy-N$XDd?(WTtsjF6q(mZo3bR;AwE_4)3`Ao`S+-W|Sv;M6n9hqMx9
z1EYS@c&p8wTCch@-%45T@Z36m_v8EHI&uDqv)}c=5iJc6j=*Z4NFGV%RGta`)%u}c
zo}kBKvXX?(YT;GSvo-UJT0Kwfq&+b37bg1y66NBN{$otm3iAU=tPf(*x2j<SlR@sa
z*cVDHw4~qRXT_DF_k{+A-Y_-YuETlyLdzgG(zopu*S*`uSCObooHiyb>Pvzykm+?H
z0o{8B>KL2aAgRK0f*jPj@Ymy$Z0Sm)B(~uyXdAqX4#G~;zH#q4GlR~99@wq%ax1xP
zqjkA@o}-#|a`kW{YdWnQw!`?46{r1I`y5Y|S|RDGnf89W{zeY~xH*if#L}!tblx=_
z`+*k=jB+Wu!sG)gH6wDpqE?<`O}@z36kig2HXfTjE$qv>o8G#{n`Dv%z^&KXMv(1#
zSJ`QkPVEz17-zt4@jKAwlfV3JFk?>o!K2AQ6I;aQ?s#+UTk^*{utV>stFeq;I0|CL
z%oGFYzm~^M*&BUmtRBkRw;T^@+&V-tFW<tno<vSmdAK!3BaMB<jT=|()`V;0E~c`H
zO?oK{FoJCHRWL5Lmk{B&;d9NlPl~Y1^$GtTIB+`zU5tFX<g-n(%|m{*B}5{#w<-^1
zyz1e%9JR=LL2DIWK?ORH&*P@h4ed-4s0Zy09S4)|=WZT}J&VSn@&QZP#qWKv3Fb&B
z9k<Fjqj&X&FQFw@hrDLi1wKb7Z@8AMJN&#(h31(FJkd#IV?M^EsRv#v=9EHU1zI>Q
z&af7k%U@z^^SeQk1k_5rm#j?vitiHk*aXTt5Y(XZ{xM!@2nfv4N>N6dZz3b}lx>FA
zW{U^YwL*fzZrmk{tJN}*c!#_n9R6wMUc(X7u^2}{)}s5=e^8LnCy;7SdMIHNy#Pnz
z%a(4O#d6s=5mB|+zSTGbzdY(96h<rsL!?<aWH(gFDCe!y+*>nZ=PYYn#_E&>pVWWM
z<#ySQOk$n0E)-&q%S%_Ou31<|c3r+-uktP;Td_4?(|YO$YG`)tTW{W+uEsxlf5*D|
z875iU5|AlUWdJWr2_3Q^Y$KWQuj<ZyA-l!CJ|^qh*H>cF>dGU)&1@By=Du`cmyq$o
zz7VhK+OzIKz8m9=z)PmBw7#>hl<w@T8jFC-$G7u>qd^qtynU#4p}%?W0_R~&_yHvj
zcj_?J6)tNt8`rdmD~V<g(#A3OBNr=Xo8PPs5UI6r_)6kE;%V(>ST#D;xz6$%k)God
zUcds^NVWFM2x?c!j)}-SbNR1>VwasJD*o*-O<w=n$bJ3H>WCB0!vuTyFa3orQU&Ca
z#gvb>#-IxaW`dD@5UM8``}2T)P<Pq9wTTlc)}GcGp2~w`j=1{n4&tQObEPPu@4|Vp
z(D8J`F%TM4OyJ%xb%pqiTBl+id?k)$g9?Y<Clv;a15)jEAAKza<1@71XE;*lG^2Og
zt1>D69!=keru-<J@TMyKPAcmLJdbX$O=f*W3K~I1kxoZv0$)ehakpYRAm}Rk5(XNJ
zj=w$uIJc_zGIUeceYB&^V$K{ETB4w>?T@Y{JdtYpw?Lb3v+p6<L?B$$bDBW4fV_rs
z(9v6KQ+S;dWAj0>gqILdC01ek9(cuGF*sIF_^Xov1I_(1V<57dH?fWDCAKZeg2^n3
zP8?Wyf3U#fcRyX=m}-B~P@IinhULnJc5AKaM=g{0R9&D!Lpd{_@w1>t%~ss(!VL2v
z?_6rj&(SZu=@(&AWmer&ITrVb;@~{s_R!RzMb#j?tAOZ!`8**J61cBkMdKd7fBbPB
zcNM;9Bfd$U!3}>I<y>~K$B1rn>G?gbS(%NA6fZ*N;nxN(vD;h2M9#6;UMQJ%WgCLg
zB*}=(bNexpJ5sC=3~4;mZZ^Gfh5*FAs^#iONaeh7>=F4?gcD!cSYt#QU)Y^fDqg=H
zCPF}oxNc#DMgd?~bK}B-@^6B-RRmCJH%1*3NaJ2RC=d;St$f0%$*NfoMeMUbzr5dl
zBecWX=R>Y}t#9w2?FU)EPqB(4*Ey)6+(kp&)9;f!;H^4Z?k%>2TNa2`a#^_+bYk_+
zlCrGNMB7Jwq&uvtb}!0h(2fyVvs5J_zgOM(!5_{c=o?3#j~QU@H<J9Xut=9F!K6QE
z_s>r*0Jz^$f{>q@2Tuh<Rk3M&S|rck@1t5q_%ITLI;yjO<Z5{oz`o29R(9ZRG(8Sj
zFvNMRyAz1sx=^{rbX@6%>2=$49A1KpFMFy~G`86EKMBPtKi!Xd7S+~?AWN9eq0TZ)
z4${1XG+iL^-^aIZST3>bh_XU$w$0Gl->TJ~XfRdkmE(>&GTIlROWZ+xImjx-o^LS^
zIF1X3Oc;4GO#nYMj8rVv7Vz#qh!b(4h&2r|r}a!fXn@9Q{U(w~WWvvMC)R8ENRd`2
zx$J@O7-nr&S9=Br`~5B%J-6cNBu|R1Yl<LPi7=Mye<%uD-OPtF7IV$`_ney%JL3dO
z=-RkrV*cT$Yhg^oM91IT!OZG?y7B(Xly`n1(Y?eCGu;Q|YY8C4n0Uf<S7v!8E@Ll@
zxv;^D=8IcB*Ojk*l%9&nV8S|+6t#fR$_y=LjN2W@A{h(YRh@c@NhU02+l8yl!^0DZ
zSjQRa?DqA2$uc`yg-OJ3Zo}N_DG`6;MgRywD@;0@G~rGqBu@#*Xm1ecl}9F!OFvNf
z#<(}gjhPfIZK$%Ak7BixcaBg|{oDA5pjBQAu*LZk3N*!!(BSh+vcaFH;DL6<_wNn^
z6lS!lk{W)ZHMxGGB2&>N!Bn7QG>)o7N_8PqkM<i?AUnz-gadMDlJ9al!mXm39(+BO
zT67OH1;KYK$G`4&6M_?guhWNCieC$PYreB|HAzKbS53`t%<fvewOzQ;JrFL(9Iv{x
zfq9%~Rw5uz&o10xs6YOo#=M5E-Q~FEN$bxtc!3Et^eQpStf)tzgWPQO^grT&Jsdq8
zwy$(}f%m8wH{_meYcZ@#KbQz^*>zTL_~>4y=<sx#k=vZ3m8&J}Hb#_vq6y@XWs^Ch
zl3B!D3)n&mUPE*L*vqh<-6gM3Dbd%4Ga%_TbPd;Yb3<Po$#z^uYp{~lj!d){Ov{Wt
ziN&u%w@}fqbu7M_I-latfZkH}7UWd9(C+~ElLn@Sh~n5~g#i4U(Q|h7OO4te^cU2r
zp7)~VUPG739WFy`61spEwD2@wlJ&Z#lA2tbK9t`t_ZdAFvhiv#q7ka3-9&={h9>>s
z>^@V<E->nOrrWP~6I>B9=nU#oV4}tF+vs_GBQvK&zG)y7FCGVs=X`q=4?{R6eQx=w
zaSDN5y8Xj>>l%Lom%cg*pYNwm;_Gl}WL#gpH(vGT8vtH>tdp()1f8(oUnh5&RcVhP
z_NTSN94_8XX_>FYlBbJ!C`F~eqNo`r)Eh(du?MCGM0?+WTzZr5WGCZ8^~)pwMjTt=
zN%CHGGv_A&95*AR0+ozz27j&v(q?g7C})7qaZZ<Q@#Z`-IVegWbOhHd2J<0}rT5`a
zgrmBU`PEt5aF*$|BMf{6sS7Z^NZ(cU^wx5Y;No`%g-F$KEvB_Hk1JGgQs*V|2{Jd|
zDvZ>ldlrdRsTz71XLRsM2z@nroCyLH^cqH|muRulBt|Cl4NXv-XHVQ#Qn<%we!Sc0
zEc0}u90A@ew5^g6^A?rl68FON?v=XB^QgzCxSif|Xw4;G#usOQL4AH=WyLzX>{y=R
zf%mU?b%)KaSbVzB$sJ2LyWPE)3r0u{tv}<ggb4x35!>`C0d5GY2eOCs8fm9Ii#Zh4
z!gyhKZf}6&akY`v?4>l)-iQXMXye?(vrwn2EqzyG7JXcF%q1+fzBz7YW|Oun=DqP}
z-;Z_Hx3jBu$QNXZf{34tXHVR8uX2>eA=t3wd80Sw*nL$Z7V!qH!89G0+EEK<I2D#I
z*hi9=1jR`{Gp~G+fv}QfcCRI5mFG|2o%C7**$F;KRCH7aJ^nm1wQH#pLarwg!q)y0
zJTT83cW`E2E2`~8H)(33sVom3Gfb%>$M($doN5E;hS|BQ(nvKeXk>>yffWU!6i;65
zsHhR_ZL1raGs6*%GveWwxS^aV6qpiN^EPsNj=R~r7ikRwbIig%LxvN&S6HS-@3WsV
z&p5yB&Iv!xsgL@r#VP&{orBsMZR-@|8-FahUaCDPcA=BWS|WSt0?`t0A`)Zy-bNYi
z>t8LOctBqbT^cIgCLk_MSEmyKVT*aD_HXN7)*o4obb_NM<EYNd$pAI#<K_Zl|Cqkp
z$NlWeIMKj~MH!rht^uL`gjGy^?ZE|^-uui%{+@z<tg&nDuA)J5V=x-jRC7Mqt#_>>
z8#x<^`tpvX&GCi|gF+&aQ-i`-Vs+!1l5+903e;)~a2xsAe+j-YD1|~7$wjlJvn}Ma
zR~c1vke@G8iC&n~oX*pslNk#imd>=sl-sZSkm_?@;xsaeB2MXu`(PUr|FwaH7TzDq
z9D~Z!JM#dt7%UKfmJb~E2;6yyO1eHgVPBXw1Zw>;SKjNlc2#dG$}w_L69iWCI5@|X
z>x1n-w^ku=ML{%kpMa<;L@}%b8l6sHxeE8{ufVFQo1&dqE^)HQwn`nc<D2FkR8*a*
zpYRRNPo1Y*N;|`oMUK13&sij2T9~NamcVt8sT<9UdWGjmOa9d-yEx_KCemSr3zy4V
zT+uO^JC3M^@T@n$=wj9uGe=8GO0m9d^|N?${*w&*fk=5WMZ^~U7s<)up9A-srfPpt
zLJH_3N6*@;7!MrmHi3B>^5^#YG2deW=dE;B&9{wlG_#w-%vfq++9Kxa4{{ZXjv{R1
zLNiVKhV5HBvelcOD1q^%+wuguC4|hX;|gs}wW!_l(<dC%5|g8dJ=`9y&)~l7qd|-*
z>N#5%<XpP6CIs<dy^+8#c(in(@^H3}&A+)+*5Qrt1eH!Bwfo%X)C_%p<<MHqukvv_
z$lb-p--aHIJ`TX3%-)NZ`ue<69j9$#{}UV;y3rs!&+Kh#c8x~4e|{%5(kQrRb(B>J
zKvhuRa07nV;HBKtV=n6p=o8nzVd%#*gLGb)bl-N;x_PI59Jt7MJW`&O?{tCZL+<kl
zng@tJB8{6q4b`j@H%r&U)bM}$1;18ReDamQhMMy?^LQ%ilc08^-rMrFkcNo?{}H4`
zOS9x_1uH|}MX(or0zw$d;*?oLp%HI%**0~Md8a-<ko=Y`i+29Gr3Pa#S*1ZKG?YpI
z;mnXW5Z_Wyz`#{KkEW-hdx7f|r+@-F#0Ja3UqzfWY~U1@W|xexh7MW%wp)hKx$qQB
z$i1D|DW38OLH5k%{VVAI@sF#=TZIqwyO+{)P+Q4nDDvk0Cv-%Khuo`-y$aKCLWBu$
zMk~toiY)pSyRz@5QcN-{F#|2#Y<Q<O8DVvrA`a(*)avGC?(=x5lWNOIF1GM@>XQ#^
z%mXJs`8aKS7(#B{lV4$LIh{}+RjY1&b+PKv|242q^`XC-^q8zl>9X;b@ZP%^2n0bK
zduu`wDXnK2^^DnuTM>4=s)aA1Sq>uudMb<IOH%L<*+6RO#V1}$hzsw7n*J9-(1T2z
z<$X%bj2h#>*joxNjV?j{2Maexc%O6{D?b~<lD0#Jxr$a}8EV#f&2Pv1pdzh;;v|CW
z)cgURDCtWva}(7Q;{*%V*Zc&H-*&Ilc|UxlbNuj)@x~sbUn3j>-ZN3t)p~OY2a~G)
zZm|kRM1E=0xo5RvGdUzMy&97PqVs)s?4dE}YZ8m5p&ocAbMG`?ioIcNB=Q7Pt7oca
zpQ3_%JN2%8;l^7oERu(pW_1@g$p8}Ak+i+#bx*={4N8uW2Y^82;Y-0X5151!^T#k#
z*-@@(Scz5J@jQ}XMLXX9I~M28wG}D3_Dg12Q$LTf_fLdp^uI@HaM>UEPF7b||K-fN
zw9a|AD>>2J0ogLaV)jkDhy<ryCsPoP9z?7To|I%wAZchuMTii(_a#nINv#}fe&gqa
ziuYxDBSpnqBOPonMEH&ui7F{;&{B|ot4_K+Yi`*J4Kl$fDin7^(sSa=xuhN96i91|
zLg13+Br6PDa9FUDJfz_6zBqBh-gyTvg&@=lzEi>i(<CoWD9W0$!0U};dEq^RjFw-K
zU%>cRf|L?J*-UPOSN>oKc`7aPN+!!C>+dx+T8=`A4QHAC6#WUDbRUJ*-`jb&iQd-r
zjfH(1vF*RvEf5Y#+4#qNDYS3r^S88Mhz{dNbe%ETesnVU={ARlv!=29s;#3fBSYGi
z4&%&7rFpnoqEkfPg!jyJeqIjrQlp!-hn*Ilv;x0qRGDLEQD;LHQH*H&>`9zJo_1Ww
zDIvzkn^~&myCcR4UM<V({k^q$qU=X(cJQk<f~et%Ke!87wTKuoG0}T|6<g`&ID)Fg
z0M`t2de6g=^`Zy>gs+}}SlWmjWj!>I$>MFdjL{tbK7>QOBvX)}tyx^?WQKOI)*$HR
zKD$-PvQ|IaYjm{gD>Ws|N6L8KgzV|8Q_4aIk3sJZJ{kTi?>jj8Wjd(&l#6uPUE`V4
zb9@uG<<Gp?vhFu2KN|$TjVFElmx^Lo$d7GX0|0(3H^I*D5~M0{KU0`s6p0=&VnmPV
zUAk&nprg$-UeJk{RW6$1LwPf-4(eUUMBTdW{t!|P+1Q&PJh0+ajh9lVBV=xrn15lG
zp5;_;p3qBs1-$<LdG?KZ$|6Ltu1^wn^{(#<{WeI>8m5y|f)w>DtACtxl9x*iEy{M%
zl(<wf`_RJ=o=^2D@K4&gbSKtivu+XmO?710>VoN{cfnEtPh}pIFLZk8bR%sMbU>e%
z^p2yCNmh~6M3`(~Db(RU@()|WT(`W?*Z#HoG37*Fo-2Mew0!2!<`XHziPXN8R&+4V
zWf2mH{*0NLVk!PWmZGn1RG$QCgYzyTaQRFuWmfvWwBp`xa*=#tDg7u9wej@w&&MK)
zSCMOOqMDL&{O>w*a&_`6fL@HQ=>dGuF>Xqd$c~CH`@%Wtj7_$t-G^L%<p}xmysu+S
z@qBK~xmpThVNt@=7pARX5<ZB`OrFemOPZ7kP2d=wD=y5gAELvh0U|wO6At-Ig+S55
zw~sE<X)nq>FQP^0x?XBrmR1th*K#HaCkMgqeN6U+o9|^}nAw~Moi@1dZ1a}oxx-3(
zZW@0jy8GjyLK5tjo&p#C%f2ef1-NA<{MblocpV8Bs8|!WU}_NHO39LjDP$M@{<mmZ
z?V493X;MrqnnH~%>n$m0DPz=S3$_aPw3E#F_X5XyM5Vl>7LyeY5hd&1N#eitCj%@Z
zG!ss_=%t;+E<~G&huz37j$Zrx%CKci=vP3`!-bddxXRm4LGVjn>W7lZ7G&mohb`BP
zB0MrGBm0|Rt<%ovd0qpv%|poG*O%!Txsm&j<Ao)miEf<AG(OO5j^0&k=uN`1AfUt>
z5<U_hip-{pMmaiuKGV8xg(JaQ^k#ofofUkeY&>8cmL`64Owss=IB9eoFDDhPmkjug
zYygb&(KluU(hVikmghK&`@Gq7hE$VFgLr#z<(yavpF`3Y@Phxz7OQsNR&^MjMJ>tU
z|5y8wR_|TC-`2(WUOO7TC*@Ooi}KM~=JSGa$sJ_-7V12Oz!CH1k5=@lb$)NxPNx_g
z)mQ?R;3j)uU-gq9vvCZQ)-RZ~#M^iS(JkY83MVT^I6hV^V@v$YbZk5$6YvcrW`W0E
zcYT_6dihf{k5o0Hn2l~#?=0aB!@e%VA2eEDw#3jS4_?LBjOq7Y46V_={rbk!1ZTgS
zt=Xx>l?mbw>3KTxK^vi8h%tz$A)xoMG5_ZbHi5*d=WR_OLYb;B@|QA^=)6$k&el~b
zd6)nN-9-A6+<4b<4Hl2a#-2Fa?lhXmnn*T!A)Z?89Qz5#=7eh(xaVf=K@5^jWK34n
zD3oos>&{Q|3OwE#-9NBXV3PKI&3=;sI8?sttD2yYkK3j$EZH{q6~<lq-t3Lo4lNS=
zh(Ru&igGK#{R*bnL{dDkdY<>T^v%7+Zw6^Se(<;d-M{E}l#knW?KC_jC8cW07-L7c
z#N%+XfFPh8#}&<ooqp)uxfj(1z6M_7!!M@=Lo+FT&pyv4=1)ygzXl5xprR;WaK!h&
zKIw2n@x&7L-H*P_eSL9wYu(E`C_iw<8(Vz<BO0I>lmUwgogY7hi(_vbcAD-^-RdmQ
zjTgO{MBSp~!jOz5<q#x4P-Q7q5A1ujG%82`l6bSN+vw)PfEyY)$m2iww@rm+*Qem{
zqp&3Olc7yv+k{wxu?Z_07`WMqu`Z*)u4CLhqmzld!|9HfTl<qpj0UH>lL^}NA?Y}x
z49cOgQAY}CDS=nEvZ}`478Y3eX)5^)FeK&%WN&%3a8RW?ACOyj>Svd5T7pY<`+=1?
z$)p|U$iUK)?d;rOJ6A6KSM~*Ffn}tNeMywiQphFj+8GMHiIeCFbp?huM({fH5q1Xo
z1|k&8F`I!1C&wFEtE(V#ktaPtFX<-kct7XjIFI3aj#bX{9IaxNAg(>$%-f8a%j$=k
zz37I&egYg;1No(cejArM!47#tezM@>UAL$CYgRKSOfszAy$+E`z|d%fD+`P8l8s#t
z(#T-CLZ+-mYf$Q+Q%Y#ee}K(^sbN@F1C8I7TPNq%l{GlAb*w<D(l>cBvPlWhH|OJs
z6`I!_xxR_}B5Me)6$<Hl<@3`qxzD(lpJyt2#Aqgc@BPLm!aibO#e)o^zF93Y1jNuH
z<p^xVfQd9#q_H{c5hyh&I;Y)i^z=E7(QF*Qcjg-X@e|oqsAENYP8h|Z(ffiGE1GmC
z#~(QyjSUTZk2$g*Au4G{JMC>>(?Smm$4)|xs=wI$qXU{^3yiQYiqU9yBh#xb*`r@b
z*zHh&%LRGE^z~;N!gVk|`PtnwXPgO^Ln>3M_x9^_&cIXCD0=a|(V4+f&FsJAgn3_a
z=i*}yj+Z<~S_kr^>XgwtTm<vxINt4V^_9|RLrzTXx-)Z4Ke54)PmSbuZZ!neI{&W)
z*xtZM!$bj@9Ak>4)JK9#AmZsgV&jo0EHY8Yml{yU?6>$>h7&H#>6n33zz+4oL3dr{
zxcOl=yMCaC7J>`$z5#M-Cyj3p?MBEtK{O`jer#QNljG2jmEfunji|_h%~ArT#o=gj
zrCE-3dKa9V+P$H0tBHjyOb&OKAtr5WUk<akoUxqJyDp6H)%dg1U+}~P&Yo3%Eu;;m
zM~TAKJ5M<tQT*|^I9HM3)pl$tObdxv2;W3~+9hup|ETr*51D3|)*_(0-s@=>8&l|%
zgGbvQ<POm<H7c8;B|%1ZSHB*?4dv}%;pR(rL}zTVO;DfXPtcxgTRFm97dp<4g#aeK
zgn9X5Zm+xJmGA}hSyolb@R@dc-mf48OP2yr2H2ljSR$JGhvDlYKfAj9`Gw}!6m7GG
zqbyKWa}Yp?<R7nZ6pyH+0DnB`VoL;oCm}cE_Y#=+M(VUBzUa%<SEo=GX$SJ2qknG5
z112anf93p2?e2&MtEw?(OIY*hUFyQyj6#q`U)G;LydCq+Wk)m57aG)&4qbOlzfQ6<
z=#u9e9Whq)x995E5)9^c{e@)hsc(S(eE6+$gE+lLfd(#RNpRJw?Y{pK5s0_?twqso
z%zW(<KFpaKXAU!dc8|c6u+9oX=PgC?ks!bs)Np&EfTNXUgefERuvM`K6!_%J3LjKW
zGD9bOec|@y^QVBrmGichpsTpW=&d6-dnjd5|1%!gK-z^5<jw|rv1)=AFBEZ?N3@`O
zn)D|+e3k^YKvunz9}*UPR=m!v#?F5MD@Gg*rE<^)F3BSJ;<VwSnL{N~l~X+*!xDeB
z^(eS@uJDDyy?u<eC^#s{GcQa-nsP#e4fnY-=HZ{==}UM)FPfy1Yz_~#Da-mM328#f
zT{~Wasc&7wL`?sPHqGIWG3SD)Q4mOk)hj!-bZKZqrS32EK$C%T#sv+_gkhBprkFTj
zvDVGk9tsF?N;;Fd6`gdLWnkBiQ5=OcU|d%|=YW0hSk;9DY4dxK&D{c%bdzr>6n(2T
zzmc8YERs9pmwn!!_DjB%FpGct_NwxcI0(1tma*~y{%-4aFd)tHLRYU_b(~G_qaVwj
zd|LYI=Ci&*7)OnE!nf4+4&K-jVb~BAWHCRx!D$~N>BJ_A%Ov6l66G~uO6hD4_ks|S
zbY_?E?$CQQ%S6>eyePvXvny$Otur}OojTWzpL{Zefn1kt-{b8z|9mC|+BV%qmw`Sv
zkkSts6jh>NmNATe)@bdcJBqdq2lYQ(edA+YT@!DTCXH>|X`IHk)!4Re+qP{tP8$0J
zjcwb`J$>GLzubRd?>T#&HEU*mSS0rA%@|k5cCZcJ7^!N7g#`p9E>Yib_m5TV1uQ5A
z1Xd(~r4pmYkNRb6s@lp>6j-}1D+-UPtkxQ+vrOWpD_Vve5%IM5EuYHi4c2{yzhWyw
z#~nWp2h0+{!80S%{X2wqW&HGy$q7V?C5iIr<^U(uvd`jCPDx&^%)I)Z$N>aB-thEK
z{fMEQ82e0S`o5F*uh&FN2s`A7CmeKto0|A!T=l|QI>5hMd0!IkGhDz&zL$=K3)BBp
zOVv0%g<4C}FcmaYbt3#L{n#KLB`uw8u4^ZJfTzqd<^&yKrF4KwzdN$U38pFH?>vf_
z(OSS}x4~nO>ai^*3FH<}{Gy1Yt*S3K5tY*_n83wV)Tb33Nf<y5`rgvK^*ssu<q!;5
zEcYvEj)ym<ARo+kh7L+)imyB=lg-IsZFS~+<^P>CljXvc%PuiDK53UkoY!zM*kB|1
zp+z<qr8>EU5^_IABioU$N&Z(PcZyv0n16%Z$_HbKr|fT#E~v4lWrnGUiv1Ij5dKcO
zk)*(R5GqrWS$71o&11<Wn>yo2Dbon^88o;a!;{|M{_rM5t|z$rSSGk*8%6+IA#6<G
z&b!UW#T|?Bhk}Ur*m^Mkbz(7CmCusm2|ngLKM7sP)-IGW9T=IB>!>>^=_KxUHVdt1
zzDT5SQMXzMrUs3b>Thr?L}rX=4hp&ACXctNHt+Yg(r7X+Q*}m6j4<_tJ@vv<*->87
z%uny@d>{dWuZ?OubDg!#&IyDpvm^e^b#Wg2#duy&Uov};)7@FPNAg)IL9?U+L8Fv6
z7CS&9DQkodc3#?TyC9dBugbLlPO7M4sAXKE^dvX7dT#%;Z?sDP1AX*9unIEaC}T$w
zEfaJ7XEelJKu~Dl%cyl@e7tm6Zuj3bQ2c?5Z`U-@>!s~ejpkQ8dDxzLut~fdR8)2T
zOrv^9V2<h6c{9D+F+%jq=O#b78qi1Vv+k&BajlQ%7mUQrZYCgZ;S%oRZ~RzbL6vMc
z3b#U=mKphPLcUTXF@=iT3w!v?B%7en5{^{3T}4#Zk<O}K=*8p576N<Rnmy+kpjA{O
zS=FR==NAYyOiqe$+gdnLLQi@|BAhzhh(z~Bc59KRlLI4~mmwhtJ4(wwidWBWU`}*T
zd}+MP?|!^69^is${~T-|dc8CfB-dwfEJ!!GM0I{nZ^?v!5tv@5@}AVRCh9$j;G|pW
zN><6C98MS}j+u=pSZ0<~^fM-(uA>PmX+(?N5uA)>R7((Tv6lHc0tE>qBn@A100@jw
z19}!b;JQOC@mSXpi2MG8DVGuvp-P*QN<m1OfO!m$LV?_AQH%Qj!h6r#;1H+KJ32j>
zaLi?t-XSgQc#Y+r@i|S_k4W28-s8asZ*QoON^o_oKI=5tNKDW4w*O6O>2q0%j9%ER
z3OpHWuFGt%Mb_C+GRQ@1#iMjFH~0q~M&U&~<E5b_r$V?3k?q%ZVeTJ-muSdoYtt;;
zq~bIVAum4S@s!(>PCNk2hSm3s?i2QJKVky?+9ocMeCasKD22Tt#ks>!q+qQWvw&A}
z27E9!7*+PF)lZ`R`ZX+)e|6g($}s)TT})>Dr`6n`!nk|Fh~dUf0I8iqL%?kf{aL-6
zVuB+Qj$ZwNzM8(HX9kjg52`#|K;Vxa-l9onq>UoH<x+LT>yzY=g_*V*JT7}9AzDfx
zp*xRcb1~oOVL!>EaW#<*zrs)NVo)0y4-4o1f>plxUuZ?Od_VI}6B4esL|K_GOAA&X
z$!bVzD-&4FTb3N?PH|SVd<<EnOhsJUGHi60!tvc|VA_9lv_#*-?{$A%eIvkh)-{vp
zHirnAY^}Au6Y8+6yvj~Ts-E5_DGU~fGK!bK|IuV;R8__1FA()q)IDKQ9BE^jKB-#~
zqOoIFF2LInVpsu@1<4vIaq}br6%76^T8Vc^put6ZimT9V7*b036_(+<8adX+w7eO6
zrq(I!`~J6BMpZ0wCC**iK~Xd4z7TX@KSCkb?^0)B^iCLY6N|;+6X5nJntixO*bA!H
zW#=)-^=|6<ujuK1F7v+MHm0J^6UYRb(N#!ZoP?nE^ZS)z_VT+E*dA6A6NnvNCQNJd
z$^f0D;#U8RvbOs};mjM7k8!u>v)U$oeLKU$4e>D+31^N=5!gD|h-<k-P{?1v9ma?C
zElO+%lJ}C+j>QVLV8U%Mo@m@QC0;UA!tD?|QgMQHl$G72_xrGx-ni7m;)fRW=L@?!
zup4x+&O&^Ktw8Sfr^{=h)l^W@8i00kZX;RDC;Z$sprM=Oi0ande+f<HTEB~MAwi4W
zEV5u%rnK{U(@jpJqvEGNO;?km{@0qLUEU*#owQQJyU`OceC)c<=SLfFE2$a^Dhs1m
zRz_|+dpg`0>P_~zLeLF0#!`EOC!MCShfx;sQP_!tdxnldC(0<(foo<`%RmPZv@-{v
z`Rt(nULKE9=<cA$zJ~FM<XhPVDHO?bm#rj$m^9PkX-m-(5$e2>;&Yodl~0{HEs+*f
zvU=Z!3Wp+-%9U(=nb2DSg^emu^qfiO_kFDaF_N@jtD(zSjQcuG-8S^CVFLDQi_4*8
zUR)xX5j;n`3CD!eKMWrSQ9IKKAF#S?TeszTCpLprWWxox(^E!G%rc-gf;-xqC+m)p
zZDggOSs^$>lRp18oku=oWeNmjYVs8?+Me(PNw&GmpW2e7MqJfqaMH0L^zj^FL_K#3
zffx5Y!ej;insCu*M-p%sgaVTax0WbEgze{*gj35KFCynD;PQLh7UQuu`<8PnJOJ#~
zt9Pa(7w2`4o%?dL*0EQ_a^3t4l?U%@4Y$c9A>{OWV|M@(bexlnGNMC_jU&~CWV+4M
znRo%V(^}ZK$Jt7G^6;?(ya&P4M@Df&F;sTC#U68bHXjX}lzDcqeF{n{(c|cB$XQ{e
z=g7FCZIU8id`Gy*mgaXD5bxb1WCvhi%0}5Vh|f{T^>}rktD8;F`85m_XvfgIn7xi(
zGB#8@V?de}T0=~VMnl}^b`tX0b=InYK)a0p&%+6`0x+<sAD+#!bkB>GGbY2%f^my{
zZ!cND;E^PccD-t2#mE4Kmp`Wx2-MiU_eldoI*zPq|E}kXF#Yxay|BQvfO5Fl$1L1o
zX1W|zVa#C$hC&qoODC-d`Ru;Tq3q}f&+$5}*`_Zsch>yVytdLGl<7$;wKCK+=Q!D7
zkJK@=*v4Xx)J78GER#0Q1d;ssK?>~<tcfK+PDF#~Yo+p^t%&yrj)E<mp96Z$D-oc6
z=#%86nSt`nJ<lKOLi!Ui(nchJ{X&PQjfNLC)Iu})2N4L@v$@K07k3C7qCY<XtIJ`@
z0K;*Xurw~GH)KbWxD%xg4aM(KlmepXiGkkV>3>6mb7jx@n>UpHER-P{`?k65jms{7
z9yT@6DDIoSsV<d2#00bs@b00kmHlHr4=w4gbWkd^ct)8mZ@DQdQsdt%^=MQd*xj5>
zM5^|MXguDL0)9TL8P4kG`IMTj!diaO%j*J?giv}zxbu8t)M|JJe}HUxt)=;|cCR@X
zi9EV-Y{fOI<$zF7=2<d^7st-_?LHD2)Q>TasZs4MCVhPox0NgBCYjkNlV_=Fg8ZGy
zMQ5o|u<W=r_kl{s{X{qU_(Dc+gP!Hkb^jNOeIBZ3k+nhp%ta{aAcmBr9-%Cx3>2BR
z<c0$AS(^0O`?wxTgmij8S1|QZoh{2kKuKZsNz7`w;pDpodoOS@MzXlW-tuoSyt)#V
z3?NHm%PA2s<ML1q;)rsOc6KS~?UuEl4PQ`WO&369-3*_Y()$x{Q?=ZEP5+&FF2uaA
z)CtBMEzTU;X3+DU7cD^C><ir`cV`CIf_?|o&temV&-)j+MC1L!5+E2$DHFHK;iwkZ
zP-!L}p;Tll!irrQ`R^x?)3v%)R6IE|q@As2EEt%({;L%B4{Op`Q|6!sxTq``uWCCS
zzEw}~=6L7OrIBZv<*<@J>5D;BShBTK)lY#$Zy>zzqrWKP=^Fr}8NFJ(*FqGS7azp6
z*T3C_a>jwQUw1cLeRE&e1$rAmqZC#xF~~usZ3k2S%=w0hMlcXZ=?3$mq_Az*>w&mr
zD+hHj|MPH=v&G%YvRQl48DSGD($m*t`mNF=8Rn8dV~I?%>=8Menojk$v2;)Ju>Xb@
zOI=ljtJH?=ShoS!23}9etFrN<T^+`8>Av}EvwGy)$a0`cmhEVXW14i-<(0@DWnQ+I
z#^`(crN!#e?aigkx6O3*2+26f^w_>hW({WS!S_NE(Z5Chz3LmGXfBhq4&X}RAlN3p
zFa5Lr$b>ZBm~kl=1Fd3wAwV*+p_w#%uD7?A0KRX5&A;6F_(|sCx|HLz?8Vj^1YQCX
zwHO*(8#1Xr_om4!|J?y&iAFrXDk}>}=7?13@5$mNRbROgo^QRnLp3jJ!TP`5_YSM6
z7*RB<0JD1oLaKIXbJGAm31^=EDbB``XT$3v23GW~p;$ymx%a@sMe}zE6a(L>76kE&
ziS2Co`Sa3A9}iFbTW8N>Sqdz5Pg726kxDgHtqEnrus4C5+h`#HdZUY|mMy^;10nSh
zHAsS?y*S@gNC?yU+CY;>6Y<cg40$zCaQAq9s`UGjOCX0`u_#|ytF(+PGqnk)35H^(
zfh;|qbVj!bT21Y|f9O!0B~)M}s%ReF{H{anyv<6bODDueobS*omLL7mm)Wiy`I@qH
z@DTemv0b_A>K7@JOm^#`ULXID*9<=#gmi@uvpSM#r4xod%2jdTF1@%?R5BY40v{NV
zPFTDt9zobLa=cWy9``}CmXbRX>_!Q=<aGMJxAZ-jI|9Z0G#Ctey3PX*HTC&JKd7{#
zLOQaN-*vZ<v20ber0oc`;p~r?)(oJPK<MQ99c%Wq(j^b<GZwpbr_j!1U1#|ACBh47
zGoCb_0({f+18kFj?SSy3Zv68bAp_4rZz-ye2U~Sq4}rokLvzf(UAmc9JRJ)OEf$up
z&C$UXI`fw$^3%#F^Dc<OXO_XwM0{OwYZe)*Mv!mt;|TBq-8`llY?<mW6fvX#0Fxyp
z!;!$^npmfzc4*!!>h<9;i7J8ftLb>iFXp%**k`GmKcL)Xa_5c}=s=DKvT9)7FLv$5
z<f#1)Vi@_aNmsnO6ukbOf8D&2FVUO$_lMRLeeQos1^V3(eiwCKd4@YH@_baxngS;;
z^%h!1I66b~;*0^Y2MOHB$0j<!yA=0KdJ$OR>0brQXyO@5A*||eQ(62X$S=lZ8)PMI
z0f(*I_#ZZR*q^(90wCXqKymC6XQ6nWCJo@U^OoAa9B;Co6ECLf&#2qptQc)QSkHZY
zg}MI)l+!J-CgV=g_gp~vr2^E6q3*G`f9<C<zltWY=FAm!bwN=iDaLDATGvV~YyJ?=
zH-f%}Sqvz3*-s>Fp7Tkna@)$)dhbhfd_tvGCZ#P-N9pt&g2~-sNSNBTVSxelRC<|L
z?tPlpz(G4^NY36rI2yQZr!3Ix(oq_hZwUvki{^R?>*UD;AL8q|_B<!@@d@HV!8gns
zL9M(Q_;v*}O_@L|$ecQz7I5j)i5Vvsv>HrLR->BIu}bu244Hh<xnF(j95NVIzuXU?
zSlPzE^P4NSh|2oimwsY4++6dE4Y4i482-`o+LYornr!%qc{*}?Ph1P?P8#bUiX3lB
z+65HRefHd9Dw;`u0#d);@enbhBY%0)0&xZ1cQzJJe$uHoY>z{6OQd+3AH}AF$3^AX
z##l1>7WeAS`2)!2w?UVhH`vaLz7(&4tiOUJ?(U|$qb$N;<9t4B7ffB(5*UgKuc|FS
z6#r~de|TmOg|@vF`}bCwNGoclsqcojQU1Yo1ZVa4pO@Vur11?{qr*g-8eeY4fXMSx
zRn*a?=7l=`O2t3H&Wx)h2amkHn_=RdV1X_*SB{j1sc$QVyTJdJ{$L@D?C89X(7F*d
zY!V?&iXHtD&9P8mJ%-|LO0~Due2sJ26?l@Mh$Z{Nr~x}r$ZNP39(yRhP0O0^-nDTR
zsj!VDEIVz$>cSuAEu&xQ&e`QDz3kg;FHAO7go<ELhl`=+|H%JL^Z;+wfGzguEHox4
z7{K0D7YnXq<;(JTxD^%T!Oa*hDiw-YA<Q3ry14G&v0$a3P`z$LRZOb;2YJ*UoC@J^
zrW3!P=JcV+z0kXE7yV^8ET48XsOOS2a_u^@0tmhz&F|Kn%-(x=93NcbP_L1a$cvhy
z((7PLt)$O>udMl9A(TDjun*PjH5|~;qbq}ekPy!!U22G071kvU-8{4z=6&S-Q(#;|
z-Qj{RlfKu#u|heWMh78H47Jjor-`Z+-S8k1XGcXtnNB)(%lD3izC~x?w8e#(V-&sG
zaLhz1jA493Nm~pCdpe6?@a-9D7}=Mw-$kO9gM1;X-gzyi+}t|y*@#M;uq&H1-Uce$
zvktfWqo}`8%9%yo>QpGi;L|QGu?6s$9Z<0I{Wvm>sseCUj0JjGnPB**g>StnO?&+`
zdsjB`wZFvuIDTjT)&=3@c>s(P6X7<b#@gC=u!_m@dU<!}!<_F4Rby&b#$^ww0fq2_
zM%LWvs)`U^ffV^I;t}xevygc3$_X4rc)LUoPh9OwLV&Wk@@6HGwEnOiY<xh+-si}E
zw7550EBkLRWM(EPORPb7$?hSLd{C}o9i(9=^7Ti3b>rv!Zw|;JDzkvcDbI|zVHfSI
z)4}Qq2xS9eX=(iQMvZf`xpD7oT>hYnN-_2G4WcFZn^6&V1c%G}P0v#C>i~)gXAMzz
zc8p4C!2oG`!hGc+NQn0fwvCwX;kdn9JST?$@6#&@{JFU7p-J>2iwRXUHI=63m4+we
z3O^w0I36DA{c{SaQ<;!&&s1=`^Xm^?5WS9ZHUwP6w#~O=b2F}h4n%s9Ebg%@$APqR
zlo2?|#*LyD7TbAI>DzyOHd)Bl&#7z0{?jm+5RGc&TiBOCGmnCG7oV}0pK#$Yn<X~k
z)~@}p?E2wi1(Olp4|W^H<5dZH_LDwBD%+0hCjC6~AoP?)bvN7>7x6jiTUeR{c%uIw
zx1J`3Vl~<*o#zYoD?yL3E=B#{dn)dL%YdWtv2e20SIDivo|nz|Pj|srBw!!xvux#R
zoQp*6qa_?*{HDu?f1A#r!a}mb!=^%*4B*BKpa=<#e0?p)hNf1qeQ3W5BTw2N23}an
z-bh>lLmu1iHa$`(Bn^WN3ExVYyi*6{{VhzSK`$PZf4haj1C0ftZ-qCU-b^Bv@OE8O
zdVfe;3`w5m4Bg><Lg~T}+di>|Z?B`J3w%I)vXAIMGM-cW6XEn)zjQvzLoD^$dI%H<
zet6DQ<W8Z=pG!T&f8O*Xo0s^`h~%p|zoW6`guP4QeF9dXH3}dzZAHEAFo7@@ad!&S
zkrh|2$izurSPlckJ9*+x99OI;7vs6eGmzzuKthDsgklZ63n@yVQ2rC{67kW<)p$lP
zMtc6GU}g|VV#r+nej&TN?GKrw42*)yE%XzFkZ&>fR)iXn*PZnARQhkbb1>;l@vefl
zM#w6g6$!;J;;M70QZB`<9{E&(zq_^nBdJIK4NirHLHqWpdACTOjM7=y1Ija?r^)~7
zeGMnRy_L&_lqmQ!yM=R3X)I;HXIG;aebsSGufP7$DIxRPi&_YnRM0?kEC%eWntr-Q
zYqQ4^_C&`@e<SvbPGS=-sYDpV(Oa#P$fpbo+s-mcs7fSVj`4Sn_wp2Tn5M)a2tx_5
zJ@&&!zv2tC{0J_7=&t|@B|e9_XI7r`4{K)j|8_sr*gaFqgZhO<vlp`23HM`vN?BSJ
zR%Sj!9#R6AP5=tbQ645mGfSB3t==E1`7DRf;L(!-=F6DSZ3PPQ7t92Z7e~CD@e{3;
z%gSnm%DwvR*5#k5Pf4;lHN2_qn{F#?(8WcYZwq&@KoYL_5jtkA%z=TbnLvG2#Pjie
z(xS3^@ji$p+%0(KaN+zWZ%m$uU5V$)#~@qz4gGfrxiw4sL2p`wn2{^n!g)XK7>_$3
zuY4sI<imdJNvu51k!R7Avzq7X-Ojx(_fP}k;E$vOHYI@iJ5N&Ro~qf!B}T$=12VT|
zYI48f7$%q!0v<;hOu?qhk!H1S-IVi~Be<udKG?TOk~kjx^_QXLn>@aN*VGU)lD^DJ
zt-HGI2opP_h7YM_cnaEoyxg|+XU1$vscIp4RA2#S*q<5Bg8WvSxaM#fvn1>*608%y
zFIHco>ll^i2n13bNQjOK6UtjS-Dz@B`GWzf(<(m$10-+QmK@}mwli3>CG*0dpYVoP
zq6k-o^8FJ`=3deNviyCiWOL#0I<yS;HLqr2pl&YD<YipEAzh%{(4RxBH8%(G=9`p}
zD|)iV^rrhp%n12Wje6Mt+GbXo&wd;-KmC?In0J4i^<!vse45zr3<1lhhzjQ_WfKE1
zG#NaF`Gx;eoHp76AsVv=p@Q-U5vL#ns!&;XgzF4q9#G2=y9nze^iz=BrhIy{!%I+e
z(TKMjt9RI!ZqQ!g5249(m1ak@*>e;qyKV|85<IA9<$~*rrbLUpokjnMZ6!Q)T&w?_
z{oCtSP_3s;m(H~-4bh*4GZhe~tj^_M2Ks`52_dUZ9v>OaKO1>~T{FhJ=X69eBw8sB
zrEm{)iL{n|v&(m*MbUb>ruz-PIL4R^MQ%J)qhqcYzS>+q*eo9`WeAf%+%S%56s>^#
z3!Xs~kMvByK+~0Y+uAz;9n{pRD9-G?riqW#B4%a}Ysn=Gx1C0a>>AEcfzW1zew5cQ
zWko!dCeGq-s*LXBT`k(0d4bd(fAY|feF4Y{grKiqgx>KjFL8wG*xK3GY=}rQSpC7=
zzzk^s)zle8VnmP`>8x!)D*QWRo-v%Kj5|T9afgIG4CuAs#-|}8$VstWWo#-!H1k-2
z{dKJ@Y*c-odF_~Go&H-@f7twHt1Jl<foqn@8PPYj7j^677_$6=izsV+)jDr*&dDnn
zliD$x@nSHJ-@O0N!<hdk5H16wpfsz0I0XJ?_zg0frQE6_ArJATz)7?g@-wGqnSeP^
zD*ZW=yhMzUY}%A7Sc_3u4lulcbfo;RwWQb?!u7o6`E7Yu6y>n4#zW+7K0xMH;k|tP
z8yAqbW0(VqJi-3x141pSygG8|60_{EL{qY$tSfKXkb=8@n%iG`0Ku5)gJnzj=U88#
zF@IV=)W(8Eb-fv3Wa|upsK@Vpgb;ny7Ra2cKVNTZ1lQy`^lt?Z!>hU>R9>@p`7Prq
z&A~w1ATk9msd_Zc@8~44W)HDZCTGz6RC#Wb)B?32ohBxWMY@)b`ER!{ks{#<3*whw
ztl`}iy9X7M{PD?7g0XXT2KeN#*b$CB0qvOJC!YJ>4d29#wEk2hTCvZMEK-QJjCI_N
z+sTVLi{2c|mRqYwvSnq>t}Vu8L<mIL=T5^ejUv5H=LT6>;^>MM`*3yF2a+^lvqjlp
zHQ0!V-;t(QS_Tho#(<5apTk5s(^oS+?6M4rH}uUzcm}hxYV{ZIL<@ajFLk|zTkj#l
zmSwcyggc}BKQ2I}fdd4vj3rUI+dAWW;5!7*v-LxkAT9!4h)UOE@PqE>T0gw+Fc5Ym
z18x<~`$cX{;a9b_R!(CO-FQ`C$1X?vp26bYt`P>i7r@n#!Sc4AbCMY9nEoK<tkYeK
z1y15P9|~2hiqOb>wc&(vm2;_t_O7mFBMjS=P1wm8vBL=)-K0xw&uGu<#OQ9O@1h+x
z#5*N%g;%TXX1I6|w045NPDzDy<-OqZJf*;bXJjM;>xov%^YR7R66&yS*dG1{BcH#?
z7^r9(L-2?Wfq;E!>54F+_A9LOjDhgv6b9<w$yJ%ge&^56FZCWw8jk}m*FdS-7Yg0@
z^XqTkCRCKi=ZL$dX}`E6IoSFLXNtMmwfg?qx>BL|S}>2r=yr+8$2_${&D}Us$E!G6
zT?#YCPx`9xu_slBBc@+6g9jpb!$s30j7j#)0D2)NSTt7dj0blB<tD}^JOMY%2^*XL
zV?vetv?jErcrC`gaR1aiv+j`Xva{P>K)=zliaT>P0jov<zplHKdjjo9RMMJ*ajEEH
zig5PesDyEzNyFwtl^XH)cNNAoxTGPP&E2biXIdmOhg&VB@gRSP7y6S8f*^S(OxqA=
z=Oar+RRzt=j>uND9@vkHpc{aSsJ@~sv>ZzXSIJ1V9l|5@Qdei#oEXg#DeaxpG=AT8
zIkOTqel8s#W`f4yenZ-xX-*ol-DomBaz`Hq*BF+0(lZ`YBwun)&NAe&ZQcHOQpoEy
zOq)@zJ@dM%;sWb&RiT1VJj7ttkN-JX;{JD_bReyG(+v)thi<c1I;`wW!gTd7Tz0na
zDWSTaI&!SGt8iMfKj4Os8{G<cuTf$_#0B8NfiqvuLl_EPni>l4tFyIY(RUu$f8Yiz
zJDY|y^Rd65?@6F3n{QIfrS)-|b6}a68DsHGqofr9err(&4_|w;2vZaW2v*IPjaKUJ
zc?E&<L#XD;k$Pb=wlZ>Qx=My~K@33|T)=l{nN9jFwh+S+##`*M)K{Uu&m#VMzwP=U
zS>V$}2~_vPPTQo8R`k2*+Z^8oXA_XrfK9<>eF5=l++6=Hi~(TI_k|N^TNLjJuIpE$
z;*LX(|H9x}H*i|A>fn$Bh{>x815sO0K%B@vJ|Xpc#r6KYTl@P7#ZUQ1)gu8E#g^$_
z_7cWzs~dsLB`q=t3`Ec8LS_);kNI6&fC++iucNF3Vez2iXmkFU3F+Jpo8}H(J@l<x
z-@|Ff*PYHHi+lYu)aYFGK9O3i&)=-;4j*2cCF{B7%!tH?Yc9M1Z2|vJv5OTiX8j8G
z`ZDd_=T3Z3;2p`_|2`!56Prbq=9{=zePIJmp0~d8-0(I&C1Qa47)v{f_X{oed&l5d
zX2Ck9QsZ#qS?ixYfr1aNCf2x#4PB}?#_rZ^dRCR!;s-Q7m1lZQnS`1dSbVUub=mYa
zPxe|iUm6RGo|3&AC*Wcd@qy=h9oqXxlRw;UFxU?0u!nXS46%dRm6-gS?%iIR+uMp{
zpFFP<u7|IZS2PG%B85ihDs}C*H~hv!^tQE9TB0wD$eafqaJJ2M34<qtvlZ%?L6GJw
zl_dvVfv(tGe5Y+|x{gMo!dj=FV<T5Tu~-iQtWJwm4qrI{GcdK7KXZsuBI!v(Q^|LE
zK7dbFNxJbJa74S()hVf-C6R5VF&8>L6yHn?t-PZ{;V;m&|5Ey6qT%EBA-GuV`Y%8t
zYP+c8)oI3|)9fS_T`aPn)RhFOlB-Zy3EJbk05;cb3~3@xn07C4dN<&L>j2c+2}Rns
zb9F_N{y~<&2m5-Q5Ac@>Ame|!>s<~weEsuvSCNHGNWB1cz5VAGY3|M*Z;PAmxlg2|
z^GY>Dx%RK@9oq4!6If^^)x}+@5u#+W6C_2FsX6sTm+*_^p4QwGXy`V@)t}f8RPi$z
zb`Ov!r`~+T=8Ws=uNss|ow%5zzY`K{>-xybJ!B@)!OI=c7zxeid}3XXy_<j~ljui^
z+U)!Xk9BO+b!L<S`;w^DIIwHw0HYxlSUmha5pb!Ne7>*D-JHQFb#QQZ$tU8kmHi@4
zOaU$u#ll9@n0~#$-$PpZ*wy%m<bF_+#8h%f;ddl|wV9Z#FIc0Iec&qHKzJV(Gvwc|
zuNKQT?7Ob#4A-gwphciSJ7Te^64cwvIj(ubqM)N4H2ef0hhu{_nSs-C`-TI#dM{?w
zFrIz2#$^0%+nt?-&AY8l@SwI1fZzDCx1PZF!wX9iue&m&l}R%nj$P&|Ik~yOQUAVA
z9Cq>=0im%DVXCN#A6y^X(>d;N#E-SWtR6Z<dSUw)RsB<AbtD3dbd`<*41Q7pi)T*t
zYPPmYflP7>Z8mm5M)8$1SE_bMyMBFjV3?_@!PUL^%wEM{<2&dOIM)_uOr(`rL-XE}
z`dlyO_W`)`MNjvK)cU(^b`CH{s2MZUg?7Kb)4%dflsGl^TC~ro@YwmI-4`+J(-7-B
zw@#q;`;!o@z(JR4Rg*%+P=+Tf(-~=6nKI;>^P`aA=i8=vj0X&`rh^!l!Me=%WzfOt
z-=1;z*CSe>oOqa8Qw49H29Ii5*ObPh{>{wU$y54Ky`TOUdxv$6aVQXmTDZV{5Lbrk
zcUgXaUq{xgju*e$A(G(El_IkF$tUz8p5M5hU9}@N*P=IccZ%EFgyYBl=o-HJ(;zuN
zoVG6jQ528^9IH8L<t_&XMX)@i!nA}8noeoa3KFTN!8gdy$>=r~1Eqw>hk@Q4X6tQ(
z-eoDLR8gQ|#tEMXu;N3uEVnuL`86^C!02WQfny~e5QQLY$~M4;j0={%z_0O>D=Y#|
zMx>G3?52OnjPNM<ty}yUK43#Es@ajAtK@W(TRbG>|4{X(f3NGkQ6xZ0cghQXfdH~9
zi*O0A8V|x*9v&`wdoY7ob_mr*xC!jiG{LE!4?dFAgsKO|av?PG#vUlOV%7K0i>)YB
z5GY|kvkVu_ZzSenWIyateiV*v4G`BKOzIg*zTq~uMhH9sd7(f(+~Ix_9A}ZxS?^`)
zP$Z`5Ki;?57eQ4=zy09LDB|UabdIgc$Zk#vA}GmBj#}spKTobo@K5~kL|n+Rl@oA1
z^AMJJ$@;=j^h~iMGR+-=ef1U;ac;iXwJ_3J&17+&-ou7$XfiUZJ7xI~K5!tnNSJ6W
z157U4$+T)g!<+-irxMvDd@OXK^QsO#B}#qiE>8gCzEhO&hS(oAaEKP2mcn99&gO5%
zQ8mX2O*$s3cY1d`(n2h7er_Z|`;KF0Ea}OY@!&gSECMCxE(GGRTYYSln3BTb02sHC
z-7F+QHpASTtkZVstEJ<k@krX|t%?PCA^ZZiCc!n+`-pjGs@9HsRE8Mlb6%=AGE}xy
zFT&G|><HXmiV6jqs^+J8N#@80>pqAGD8B;1Ay8avo@EwI+r1x_C>;u%7{OuwzaK=l
zD?@s}=CiytlF`IvZdH{wd-m5I2r^;i*h8UK8LW_=*n4z+G8Yi_^LO$$K$jdW`umdn
znts(1jpj+h4eK+C^Qa+6&6yc`FA()HSH$!Td*G?|2oTh5aP^(+#KDv~o7M2;POY|x
zkaT5SY;ga$C)sxCocRP8{9@3F?C2m_|5$qy4g^n`WW@ajePXxy&zYmsTQ1wgU@Sri
z_}LA!7PO36fl+n?!kq|7XWmEg!2?S&`)l3{Annjl$_BG;xwU&UbP4Pj3)*a{Bj}mi
z9IOO!FR8g=VqS6#Q`)w+T&w$A3NhRlFNFGTWgVUda4j(d0H`BOzBV90mRa7y7o=)z
z58;#X7~<<`q>cu%#Pb_(nLQVkay}FsC4dB_{Rybvy}a-cqE!{j9y8e+lEdW5=B5mG
zDP==Af~BerN@gK=dkv(gqnsvT4!ClFK#kP##`;A?<1~LJenXv_bFG@1Q$Nf|ycqfB
zGx{s2L50W2U>(G=d1rryh@LPjEwFl4XJ83WzKU7-HV7~;(BHSo^!|3YbHD^tLLZPO
zCj8sz32Ui6lC;DC*3SQ!vG!>(_ZPoB#e8k9G`wdPDY>8j*ELo8du8)L{z10ik-xae
zHI9JZdq&3*DZHX;^T&my-`xu-0~?Nb3Y_zeQ>hVj8z^HmUc<3t@5&KFGT->Z>7pMG
zgy!LTt1G{QtNO`uo7X${z7h>X=KVQhSLC*iK>OBbmzFTTKd8|XCqC>l7~wQ*qv1(e
zoy_r<?&+be{kM~6EA5Y>ultiAP`+tni_NYA=rud5)Eq?0>BjnJ*atoa7#-9G>D3o6
zeM%IRn$~^TTD&|Uk_c3!&S^U2CHbA%Z>Vb5t!K9g8af0e5yRKDv>N1J6f&;4aKu37
zc&X%ZLZCddZ`8a5)6|I58+~<N^uPXN)XMj+U<5)?0*K@HWk4wboJ|X-bgUS=JO>#A
zv>UdUjRgqz*bA^Fs{}Zi{fV#4^K{f9Aqg!Kkd)I2Ga7Aa_2`)$iSfruRfujH)B0*V
zk}BGq<z9+sZq1gY@*lGP#-oCQfi-m>a_Kdk(-A-rzMm%z&&Kz)U}3|XpPrqsx;>a?
zzp&wGiIQ=udeTtSru~XnE}LaPC@0wBKWS}KyL3B0QS+v~p9mlpu)FVK_fxoksT2ha
zt?%>PNgO4$OWA@e^HL0*mbFGfs89_(ZWqwc(iZa~jCq2hd=MWf9(N*6vsBc2X~dbt
zD%|lY!-W|HZUF#7xY$}cHIg|!SQ5*?Nt!7it`cTU-q#QUan0}bC*8KIu41rZ?j>+w
zqO>x^Wi~@`^b;_DO!y23{(~l0ww$Mv6Kl?a#pcG}(efKqmHlf5RiQ$+WH&?ZyRe=q
z$38!;7}QJ5n(vWgXQUc}fl{W`Ap-qN8K!qDh-_J7qI<o3tb|F#Pu-X&-11|d9yybu
zf8yJfk65PBz%R?Ej%aW*8Z|6(j~Y`gS##<B&DS%{5;+HyNs<$Gx%|5&5(f6OgA+`s
zqhItv!(VD0c3d3i!i!$m_arCetQ#H-*p&|z1_(Rsy+Q6Z=RQNXO7?&bA6Vt<7471W
zOnYe%@R3!O5RzOlD0tUDgz_4c7Kr*JkQSWrA-YEqJB+5%ln5l;58c!9Sd*ecB{E5;
zGPWjU!GRqX`xEb_%EZTQDWLG<^^gYFK{(e5e(Qh8XNuFAs#9NtF=cWV<cY`{On~ff
z-}R|=&UvEQzC9!ZoXRzT4%ffonF|dtbJ0<!K&uohWfm9Stl4b_-6n>#!L?s?n{RnY
zEsmZUQ-H1SW@o&mIyBkR%<aq-$5M)owdF{E1mDTNPb(FcC!u>bmw(r1pl#yQ!ZR9;
zQ(KIc4W(OtRNS61M@HDc3Y^*<sIC0DZrPo$sbFw&Z8B4Ja6jFr!iDkyKDgE6d}G+#
zAqE8G7871iaimPme#;w9-IV>Cf*jj*ky-#lE6zB4*mA3bC7O~t96#H~_!p4YTh6uR
zfv<;hnUt@OvSG4XZAUtVY5(7Cd}AgsAGML|3Cw;<g3;X)N*4=5fdUUxo?5gm$D}*#
z$#VLeo!@g4ZB_3i<K7XJ8VR^zkXYjdiCvAcq~eUc%yoO&kwk*zv;_kF0i{~AoA06v
zV9-I5cYstJo|os}@h77`SJW>6*pqNfV+gSI<|OpBbnGa`lYp$M=;di8$Gad>B!qyD
zS0x|;&}f@W`n?Zv0STy~VpLS1@0xc!nDNNLGQB)zh@tC?QcK<Kc}vyrya7KA-GbDB
zTi^nkTm<SCvUyfJvbEVYYaWL_D$gohQ38P2@^a*UrByJ1sCjVP0lMdApHMy^I4!F;
zUSSviHNf|Ixl5HLM%99C$F8S~$yr~w9~tW3z?(?m*~AgKyybM}-y8v)h5uiC-vid%
zY7bS97?;Zx>0AyJL~%1^X{L|*Dt@xB(%Oc~`QfV4{oW&WM}#+`R3V=g8MQjU3oypL
zi4$U6QD4x!U)oGTN>KW?WPMW9Y|G?1orB5@?K;I~2z12)S}#n|$>T^Yx9qYoQEm`#
zJgfP6(FnZWYU!<x>#(vm4Xoxu{fEC-XK%<Ze23%o!E%~1Ie2|8L=pF|1|%0|K|)i+
znluxbMq^AapN3dMS2#+1znuc??z*th?_#KW!lo`QH;`YEWe&>~=!@}6v%*uqiZSrE
ziYuIA)mirGZ%nnRdPG^q-IGgCWc+Ph)r51=MPBp*vY0QkfZ{i+ysbCCL7Fq=wyr~i
zA#7+w{I+n(kw8^W$!VMJ0&S<WJ=8ghf7!-KuhCc4Z63!%wXb^XU=X8iuU6&witvCQ
zH=++q*~;+%#4F-+251v7icdPbH<yl{W>0%|sLGleXH@kGmnxs5m?QFc&wIAU2tF4j
z5N@m{$c!o42a-0G{sHx9u?xNpag`LXTRAAupQ_B?w}9SB@*PE~umo}Sb+A;(Re+xN
zC@d(CN&v{DfFQb|IVDvvo*X!Y`4)?CQyq9(l*P+yyFB9`z&>}Sm%$rOqa|6--@L#H
zNahp+cZ-(bRi{li)k!XzuocF%HEXdQNlZkxX37>VIzy2vgDh%}{`fs#6BOQ@?tkEU
zU|Nfh4<VN{6bZBg>|0vlG#hsKyIO3963tvv-g**-%Ii^Xw-vV25yJs2FXPJ)C}ROy
zvlT3fxo>@_YekO}wD#b|K>7fkXi#1mT0qKA-tT7KL@{PS1;joQV@8bQP#0YYR6PHE
zTMKZs#NTXU3P-VI3>C3>bTGtIMWDv%sm-m?nM2hQZ4gv~i)$*EKUvEcQ934xS|9Jn
zN;#L%DiZ~qcAN3|r9m|xL&`?dsb%uhlKmpyI+RYUd+B_B`?^QITsjvAey)GA?GlL_
zPf5XTvC>ASOIIhqqvHGt?gOauDF1E5DeuAnE^tWnOqBU8g1eO6DT~Mc@9D7F>NdxJ
zI=WoxvVO5&w4x!)6_=O?=UVYfFggzxkQbiGIUUGfm)=oubkhb)?Js#R#mJmx6XFG6
z{hMis{c(43K}I4wt&DN6S%ODo=c$6FzltG^7XdF83=y47F&djADJ=z6(AO`7Z2z8p
zUp*vQjV|GAq>H(Tt?r2S=CMCb5AH(FC1?MpPa+$kE<$*s4gZs_wz}W^N(Gdr<~#KF
ztD<!_2oztOGzVoHXoSP7e!@++CT%v|VdU^g5P5$TbP`>`NdIgko(=h5aJgmssfCl%
zB~ac++;7Ec@7h67>&+&dzqc0YCjaTt_C7w+@dzR%3Ug3d?3=`oYN;WkAzsc+VAL~m
zU-B}gVZg?Z3xR5L+*{2C!&+GM*!?!=V7ONUQ1Tt3g;=2a9a!Ly@O&bwv1^V0XNlm;
z$2bGDf?zlOlvkzu!N5;g8uQd&e?ttkR1w!s0-oZ9TOP=e`|NBnK<w7a)63D!F*)%q
zGw6)_2oQUhVu}hzVo}OAyXWIMODtY^A+w<0?1Y41b}u5a&#v<yj}ta-9@0CC(O)z_
zPkA4+dvGbHr30bbxf$As;*PMwazyFeAVRQb=P4<-na!*OoB#q^Q%Y(l9qSt}wE2|X
zu%??hvw@uCmeZu@jE6PiAKe#PJCV=WFN_p{GGjw;q$yIM>C0g&l^ezALynVKoskCh
zGzk{0W<{O!8|r0z;&DJMt@~BVKs{o^b~UGouSS!T6Bd59KOkTHWv(*bV3ffVu7u-k
z4J17vr6bBbyE78E3sYe?1EwM`8f>=vn@nv?GWRGAuZb3%)Ht(&)fmh}EYSQ~+Xf$X
zo7+k~4}DPMRl{D72j*!gOut+Q=I;-^ke(Oa6B{6u0tL(fM<5)}q8-T3%e&rP%O|^^
zfRjzAm}q&$Yf@C#n8{FiQeAMW?wh(Noj_c40TqZ*%GLAi;8`O?#iMv!qiR36LqpiY
zu??O38e>T{)m^vOPDM++0Y^RD)Tsyk7}}RbbB9gi6pW-s1?&p9_`3V%n%S#eCKDUD
z1WD92tDEkw2ITG|ek5%>D5Dw^lwYtcc3<f3eh>Wl3}2M{cD^9PKjJDE*sds}{sG`o
zKUr{7h57=Y7;n3tD!Fk}2bSWAz>;?+E+l!D>G~|L50R1b34bR4DwGi$$Qa+^T#k*c
z70}zKZi2rrBuyO5;h@vV^<ub(NfYZPfKWw+8LuYxodrljdT(mDoADewc-dp5T)U6&
zWavauoN@e2hBNdTW2>ge!|0S7<35qhx-bjbr`z8Z6t6c~wXIlk9=F$$&Ec20?tYJH
zvpQ6es+S%iY9b9vU-@FN$A96J$#NSgtr}>Hn>T*$Lk4lTC;CDg4EEQK2Y1PL%^w8X
zk>ui$m*$N1j2oA{11Aqnb_N=4g^eh%ww*5I{{urufcDF6&K6X`jN)2c3CCbZ;4+ug
zD$E$6xYN~HLz#)h)igMLhlzai8OnQ$$hLfQw+-ValR`QX*k>ffAd0k$78ddP6}E73
z+@Bg;7u9@uw;A<zo~WNjmbkTmRA?)eH*SV1(8<Ojhuv%w+|2WX;z*9aTtIoDyJNDk
zU3usm|E(+JCa;6!0L#9zGzHkO20N9JLC+cjDe2r8kL9E*9WCAvyy(EP;hAs(u}7lY
zO|39&aL2_(YY35`<^bVy?lhO48M)Wp+DVLi2J*4q=eCtp<sFyCKT4KHTg8~R5={h4
zjgeklmI-FI^kJg=7LMru<(q_ynT?dCNP*w~*BXtwtzakQ{=(D7y26W)sB4Td7)j(i
zWZFQrW2d$dUqR%hqlSkd^^6v!BXS^5G$|oCIW-gvGz)f7Bsv_3#Z+z-`+?B^4Z5X`
zwju+S5>U~{X(cAf=HSZow7Au<Ph~_em)mqGTx?GaH1p$svQrILsa0uWhGm7o954A)
z#f^&WV7Ash1WF(N(^AL%j%arNAgRcuw=pd4P*8Ze#)@*-_>C=R)ZSdVd&g3D`EcgM
z|3CWz^wz22$c}!J^xu8~kSL~mhoKt401W(hhHjyXGlBhKE%%YTso=}S=&4ffGg(!u
zTFX#Le@Rhw2bq9$CNnllf-b8U{8X-O`Vlirdd57TDP2mxZv0RtLgEamqpY~y!A5`Q
zecq*V*}Gt1>jD1cDUskwM^R1`v`X|Eq3~u{x|r3#mD#__!|br9#Afk?2+*ufGOcYv
zYO|Jm8$jJ=4Yb3hCQL(2e}Rgh$A1y6vWcnH&<)znMA7i4%Gxp1!^~7?P919creP66
zPH4r|)#TI$bYN6A6YI-{ZRLoYFiflp5y_8{X1@uh(Rc^_w^!NOv8AOk9ZFm5OJa=A
z%O79KpqtF{xcfI)n_Jni*-6=IW|N2PZ&p25ShxHB#GitjuxhiihnGYvEO{u=`eE`A
z`sNUTQ;`-pY}n>1kn>(s+WZ14LciO%x@ODm%7pjupNpPhEj>&D<d@znffISg@3l=J
zjKIp^mzcnP48SZx4JdM%>>d%yEy>Bc%chfjX6wznH@wmx?r@V>8k$Za{__nwWha@s
z#T_hJW`&wh9yDMo<-3g6QJaiTsxN^=XGYQ?-G(+i-9)Y)IPm;XIaSE)i$`g=&wKk?
z@j|_>tYwjXTB0KpfP#i5HbuINMlR~ZV$9uJb~s;FanP1rIjOCkckZmLQufK~(NW&=
zwLMx)KBB0o{Iy%zw(A~#;T~>d4L7VGj18B*GRb0?jFjoJ)H=jvjep_f9NIFy;+1Fj
zk}yrM_*_eRf8qCarblUVUYOY?_IF){O5CLRp^48d!%}<DHl_BI=d4OJ%mJQnfJK-U
zXh^5kSaoE=5wQ)I*U!n}l)*%*p_)P|djY4M#)Jw8Nc4vV=W291u;fSA0OO~5us>74
zusAt&wr;5@60W6nEA09G<w|GW>E7P-i(b0od{PNBC4M;WF(ougyana2^G!YI-7N8Y
z_8lcdN>|8M;&~%A-zjG&4lF3lkvbJq+!53|CtaSwDGxjCm|MB0aRnS5O}v%t=9)jr
z%WT7U22}Hju{7EtbdsTs?m_xch?<xU-1=bX6Jt>)G6XZOuyO2zdE%c;F;E1;9RSVG
zxghSk8D@OwT*z9hGsn#?JFk59^FS?ds_YGt2~Qnzh5Tw3H^ee`S9HfrnmWZ<*x#2t
znWc*>bkNSIZ3REk`{I(~0e~^+A!gUCrP*3rOvKLgP!rGaMhV364$Gm`(&^^4CZu-X
zK?guyhmfNgzucLlu}naR@D5)2$z*k(01rd$q*3&dzpU5Au0x~M6|~p_)N<FW4~M3N
zS_}>q%asnDtQ!49%iU2)!Dk^?Pckzxd@S)q^D3s)o6H&}Q7Y90Yt7$F-@V&mnY6Bx
zC+W-ky7o0Bt0z2A7_(_vXl^vbfZKjLjT*Y?tMUzqwQL=fy^2^5UV;2_Dm^ikw28i=
zBc2&t*8p%&6spnbCVDh@8uWL#oEaZNq1WM*D#wd{4~s5uja9APxYgOFl4rm<EYiiI
z1)UJxNr$EAeSUsyoQ3-(%LVzZ>mHuR)vJuhU1u-4Z`KjEkbgg?Fa_7>K(RZ*<~af(
zUwS{M`(K=JWEGV(u2^B%lg8PinzxIZA9MCQfsImn@rR+`Ejm0`8nV|gA4o(*9_&%(
zuk+|t`f(C?$GH|9hv2q^)-3oot+Rhv;h0J3oT-<sb)^~r*NVluKZr{^{OEvYb-1%^
zHIQpC8fWR);eZ5}*>V)GS{_O!Qwd#J-sujuxY@Jqp$Jx++fT0DMZ000I};aI$u?#2
z<Bu7c;E(Zq1j5t6TGI(Bo#mD6ghHI22?!^`#C6WC_}<r`xQk==?45tFdxu3T-a(`V
zq(Y$22CyOjKQ2J~?bypB5j-vF2y<IkFC0B}qO@8Tv*SBnGdncVE^JR$L}>szP=a@{
zSx_@4JKj=sGiPF9+m))McOtRpkI1IfmhxJ0p7))rGHjEc+wPN{%<Q>P!rh&B09t{=
z>=~2Dd?1e%3RE#<mlFh)+w@n6wxM$j$fU7-sDC&EMN*PVN6ATtfP|N<%m55;&r9&w
zj#p`IcB9l--dp&-oR3IjWT<<o-g!7X@Nr&Fy318$PzpShpwq&lW|IVc_09yDaWYph
zoBrz!mW^jvFp-fE{PuS<W3Yy=zhkF5xC5SxT{w~crl$%jaFjuC<GN&l3^lC<Z`2Xi
z4W=<<&{?scyYw>s769`>YCEgN_Sv_;`kj;>niY1yP$RV4ve`z=j)PNj;2A`lBVY`?
z<yNP5KIc@bT~e|#0<w-5=4gA0THHAa_{^_f7^lOy-`@pq&cE94&!BGa9F1?@p#yb(
zbSG~g7WkB2e9Rf#)ITHn{;io}Pt*<Os4D_&24BUtL;Sufo&1YmYX5kDU+*-AlJ72;
zs{cA6%#+9C5wUYJ$B&M^^ey}_j4<<va~98r#96)TnkW2tcF|e*hrl_;JpRQ6lSX+%
z!+2Nfbbr;|5a&<ov6^<BHi+L0r4#Hq#rBU=zZ!}n{yjH2+ijeww#n^~B*}A^9r!R?
z{Tx@RdWF{3-%<mzX>p6VQ@n*<^W&z}=aw!sr(;&@U~5T#QOe~MIUp^$IMXG-sd7mZ
zZblx)D^dKMle^*I0y1Y)dKy_NVF)lR$tVrrX&-i{{L;)9sDF(7s8^2UF2XIc2OOVF
zSNAFNyt(ysAL^azC7yd!gO|!x#j;uH-%WMil%lce-JL(*Q;>5eT_pjNXYK_hHwGOT
z8lW#zKDHDQ3~3sIQ={=lJFD5{F(IRsSJWT3(A|;?Yy;bSX;{|T)duSR&%k%~%e%Y~
zott5c&tix~_C4aUiuNU3d_xB2NRSXGF<Ev<STLjS#uNTs-X2HBv_nmCJt!t6-`bM$
z;|?dLb#gd;lJh$4imwJ3x@bd|x{YiW7fq}Krevj}WteCs<AKiz+*Y650w0HMGJdd+
zG~=Hl(aj;`@dU{0nCkgRfb>jp!4)EQ+Eq`m<$W!y>qMvy%1WV-k2u<*<lDO@(VsoC
zg8s-|%C$&2v&p@r;Vy&PUhtT@l!`R-#nmQNP~lS`@HURMdUz|@jihx^^4y7Pc1hFH
zFN5dBjST?49WFkdybYWvmk>{bs8`h%H%+S}kbA~I+WNAK2@L?6AzcUmt?;El{K%t5
z%i?6ZE$A!r*7bpcyuGQ1gmVD1q|4{01Xu$wme>@uJBU;P!PwD_ahsssl{e++V;m$7
zv(xT9uM;23a|1na3;-@eRO;e=$`8`ZG*<lh%A&kt>*{qdZj<trcw*qfgbFm`_@Vl{
zf6kSAYq*ul#<DW5ho+f1t=v-Ij#p_Fx6}+wY#48O3xOcBQf1o7E(QmEIi3xcklN?~
zw4Q&2COAbfRk_50gfw|sYcwH3(^ZWtcbB?9-p==(b{VdqdH8J*M4B<kC(FMML*cYi
zRMuY<{=$DbUW6DBUoReHR8URB0ES`F2N3I)0oiSD)C===?Ae&um^|*0&EV9Ql8Q3f
zSmkwI8$GrYattC=|Mdq7=Vx#d!l!`anx?f{BL6<d@)(OT{3eOO&Uwv?z8#N{KW>S;
zF-f8Gx6~ubTunN?bAHM&gxsV(%F_eQq!s>G#mu7`5TtT!J#>G?blm>>B0{XH=tUDF
zDvg8@C}4837Fx!?LYl!XVbsoXI~=T_gi<bZ^H^*9%(q^39HqvsOAufHV=;|8cKM;q
zAj9*xSIo|1@q>WsrKy+f+o3~JqRa<@Nq-x4Hj6<j=WsAIqqa`}@thJ;R2K~Ft?@-@
zw}nMs%xf6PPcCfj%`JX>*Y>h8>pCCPd$9KZ!`@d##nm<2LZ`6+!QDN$ThIW(CBX^q
z5Zqk@0fM_j65QPycemi~?(T3m-!JDp-uHXQU}WT>yY}i@C9`JL+Kc1(V>0`Rce{uM
zH99;SfI3J=nR-nEpeU?y+mkA#%MTX%V-s;tc#XTB=EmCmo*w7yaNN`TuIEh=={C;;
z;_ZzVXL7!sqVDOaABpU!Qz5v90bA2c9k6u91pTQ0o^dzI>Pm}acS&Racy7QWx4*)t
z&~y^HOhFm{AyKzY$Oqr2c)^0(bZc<rF{HBSg$=KK)2@G5EtG!H!MH~+C7Dyk8#WW_
zWefh9&>;X8VHHKp?PEf*%2~z&?(rO{MwJQ{)|MvO!Lm-(pk;S4BMVqOoC#$}JqAyb
zVZkDEfPkF9VSCW8Sn96|hZr@32E)@^M56{aY9#s*`CPXID(DroU$)mvaTT3=)V@NP
zD22m{f;>w-R*(q+yf5<-eUOVC0;7YGtS|Ows?_Lh$Fac!9hB(i9#-m!b(^DcjJ!4W
z4OOWsAIG6&I8g9Dzjt{z7z^#DAtnjJlK=q0MsC_0u8&YYEah@HG&>C%KM^vlr|fzQ
z`xU>i`lja8!_4Mg$)_kEO=P4V)lOW`2c3)eU8f~iDw#NOJ6{Zc{b*NbR!Vz7gGhH?
z4?lLOT+T)Ey54c4ilSw5-%sB!J+7zIc&=a*cE&leDz}f8v{-h*?>kHFCSo-)O6z8<
zRrNPV-lfx&Wu(&KEWj-f*j`A6Zc}xJ!mu7EOOnPSjPcUOnr0eui$Hmg-b4;18XQg!
zZ%Tnq-MmkudD&cg%oMI8)1`_03Y72~H6})^tm2MBU&NLnH(qxt&#@7I+{VV}lB+yJ
z_&L0xK2^c?j)qYShdPKW2*KI>^o_V_dbUhWgN&$r&#g}>g)1e17wzF0&F#O>Ytg6+
zKUXl-KHkf9^C9{6`}wwFtU}P&Ovk<#r12rH6fXrqNXi%6)EiE+4HdfHy0~zps)HS`
zFlMS=7|JdKNWh08!^M9Oz_5v!2+M*jSCqDAyI=MWcX#d3{@g`-U~9bO+9|~no4LlU
zfz9kjPMM*CY%-v<xPM?77ezUh*f$S*J{7aN0U07*g{so}h?rN&hw<*M#&>3TB!=x=
z8hH$-`?hD|#NKccT7}{aX4lvrF%acgEEMC2fZJ&pI1F{Hl)w=7RBZ_UGY;KHp2}Cx
zN8AZdeB@_xwTG_QDJ(8Dfrp$*`PDQu=kPkZt)-ovA3h<zPZ1S!6$S|)jIA%m!i`F{
z;X)HRWdTvhQl$~uz7x~XEL^qP%sgC=J<Mfgi8is63$G+K-Yp1<IAV8Y(L-=QlBAAl
zduO{ZwupM5n0CV5bR7$w)DO77wo~LiAov(n_?Fr-NpUW+OkjyD&9kIjQ0S?|I1Cca
zFFiWVqJQ}Lo;zrG1VGr?d;7+Cg7w1Bo8z>Y!}^}C(ja*N?xp9S;v?Wv`ywkH<3%%1
z$n2r(;g*c=qdIMzN#MXcCm6oP_A_V4{lVlZK1y^ia-5<=?sIv{K}*sDQ?=&lDh~+R
zEnE`|oRMs!0yzY24&+1go}ckr&m5tzXBSPij4p7I#Fs3*8?4Dq$UZQF*`xB6Q>gED
z9_4+{*cY>+A)xvLS*7v4$(Og6*`4g@!Lq*CJAIa}L0~O1-@xy>ilqeAf{p$&>N&wp
zX8@66bXm@3=v4XV5*3#}-^U&Q+N|id4(<gOxk!<<gf>OsgUn%q+A9KdGMO!F!|=Vr
z<9pX}-nl6U3ch>joB6i)a5%gd(x7I%^~PX?U^DEt4)$exb%mH3P-+(toN0Q(b0pi~
zp}%H^>p3=~-vZj4JW^zb#QZ+c%Cl<mL!*F8Et@pu{6^1i$o^otKC3;7wrZHSlN&%Z
zAO@PhaT^Te;&sWQ2kuDl9{N7`F!()kC^|P+mDl~WDUHWYN<OoB2-P_wlQ-H9gWhXQ
z#sv~=CMYwRFsJ@&2r#%Pejp}q;#I<;vxq<?kcutOx)q2zYa1+@Qhe6V2OkqCnY^p>
z{wxq!jTnWjnq06@v{P`?KLNM%)|t(i92%Vkh(dFOGSgxparjjfB4@(KP0z+O$g+91
zyy`kJG!0p^&5Ze6-3eyhg)@qWET=-MI-~k#vUvf7M>Bzro}EkI;8ltYAca9;TKrW?
z&3@rNKW|CEMeokSrh97iHw~$`@WPU!#cOb6M}o_q!e19YD2}nEmSYIRLl#cbj9M{o
zH`e*1yRqEZWD!gO#X`HlpVkc$l@`YXZq!E7Cee$j{`=r`KfYi06{dKb5#wOyy3Ut3
zUpL<jk0hmo80(O*4Bow{e&)!#9qid=i-3B;G+m5yX!EA@?fyPnOHihl3|vS}VDp(j
z&v`_ZAvK1TAY*j}{J7AD>QL|$QBgBQ)z;dP%y&cBFbZ#W6W$ny2&dD}b{7QIbrGw;
zu&%mHo5aQd+qFLYi>Aw{rxnI_&!8*p*C<!-4i)Iq?)tA~IWr~d8|b5!_FV$jw)4-}
z0aYIB1>NV}&kmJ$v7%Z$yB&+d9B^K&q>F6Z2|EMXAGPjwI@E$yVuUjRr+N_ULZVy+
zY_5g1f>Oe{Xw0dR$>A)S+qA%?3{?qN2z89a^Rz+B;@e(&h}t@J6apGYc}PD8mO;cg
z+xoM6ITQjeh47js5*0nU-y}vG$>&=x`OnjNR#!*%*?wyWib9AXHTf_c6gHGSqdIT-
zK9sij{CHCQ6@ivvA5qCRK}GR-Vxavt4oJ-kG!aYG(0tI3AGHA@gYymF7Q3<E_VIV;
z56gl-EZVd8IyVD%xb1K{z`1V|wtxLJIlSk#k<747Jp{MlGKheffc)bOpe`yuM^xXJ
zi}H0(`e$|+#}6i?6<lZ!cNH|)iu}F7D6A<vBX|VrJ|^(n$pcj1e{UH)im(tMWWS=}
zxB6AI=#p5_jK9_(XXXOjpQW&FjONGJ-G&z$w^qj;`jxTzDuajlfmoHyWhka6I&AkQ
zR@mE#H{Ta{_`BHvdmc(X8mN^%);DjBR^s2)_?D;hq0X<lp5m_GwM%UG1)Utlm+(cp
zmj~_bp^LE_=u;HyqGEtAE)~TG;MW)JW!nK|5;pN}1xx<Ifet9`K!I^&=E!KW1roFU
z(LD0kjH$ygVpZxvH953^3&gK~^xn;f+4Cn00~sB|B-xPCFBN<Ua(|h6*Rc5a6BjWR
z)ZoPdv(2E{8R2C8(5XH#`>zN6=`39#=TNXDDG2(Q5EmMWIaW9dL0H&F%4#Asz4ufi
ztv?ko_M!ne_bLabn^#?1nfR}8K8`H0lV=hn)jvMzUs0}N1H49EU!@NEDKs<#c^8S@
zBWsyzuk12~?Tr^Zj9^Q09E(o9DVR#X#1DHYEDkMBV~d9^9!i^&&1sw08oeBCH=-k&
z5!p(6QL?+I8!_ngNSoiszj&#9260JBiUd`l^F(lX7T=@Z9e1mTiuP`x91UF(4~RK&
zzfD9bSG_>)6!9=v23A{M^%dWvdH7kZY&F^p+GRtkS#8?YQ9dgaBdVWr*9z73$cGWj
zMEl;nhFYJ9Sy6OJUg+j{?%E4+$_NT%qQ!8Qt*WuAk7Dz{i)~}l3R<Tx#M?$zCypd6
z*l`|V4nzCaqJ+T!Fh^@UWkZM(=7uH;$!ckk5mc-ARZ;L($sHgylo?IAz?wYFn#Uw7
zCPxvhD&2b9wX=J7>3T(!QQv^?7Eq5|OSa{KNs$s%wh$J1PzS7ERIKx3_6yQDywrbr
zrSFX39)-%uOCweq8AwH7DZaBYO_KBlvjqWGxjn=wAq~Vg;rBzhX@*Z}Ez7iUWlnuk
zS99W`)o3l_bnPSl@Dt9Lm)&M_o^CdJU;NsLx{E@ljmt)m9zKBzsD#(jzAjsagF(PG
z@*8G*sDmgesfG1I&F~Y~RXg0$7ODQkzZ`ecl>~V4j~c^}Wz4j%H!&-eD5DgCrPFo~
z_ehc^fl1$2GjoW{6MaT}ix=IQ-q6$0&`{OWm&+KmXikb;BM*e<4Qkcg-^M%CPDWt}
z-?B)>QuKicO$sgN+o2J=N`nyPiv~i)5AN|JY5o;eN+>?|gHEHmT0DZ7mn0aTfHRBU
z`09yW#874iT8p)ZuE;+8Tzgxj)o~cKKm~n*0f5VA!ipfuRZWolM>1GnGs0dKEwi6y
z>=Plo7&grf+NGQ|Nd+v(HLrY6{Ee)P=-`svx7Mv_x??uy8^hO?__Y9O^X60Vjg&7C
z-hEld*8|8uxMfdK$ZYlJCG;(n9i46&d<@?1%}{@@ews8OR*@z>Zy5^qe+A604g+Y>
zzB>rBpE$@J2rUldV&xWdSe8S+?|N=p)STZBE^Go3;tX3s^I196<dy-2`455kJarm!
ztgtWeSU58_UQF~;3TUEa(d-49dq<@_0Dh*w-AEpUBT1$>X{u0iJ`uuFsHolyRA66Z
zU##Vzz!!J2t3!mvxy>;yoO_P|jsc8;G%x-;IkXSDuY#kSzDt-{K>hdNAG()!+wu*|
z+oP;Gg){1U+%36^(>~P-Oxjk`Dpg*!v6+b!7(MT5?zc@gk<{bq-&Vmq4|M*l#QRV>
znye2CC`RlRPl$vY116_M<BRIzocK;`*EdSgB}-|fh{U@7ug6<b*&^|{fg)z>GqEn@
ze^ECB*1o4IrMroY@%wNh$<ZwCJ|wPzV97QB8JJ$kYES?>jtH*i{F?0%Y53OMw9nSh
z7065(woCOcSF(nGl11j+THNA+KvV-HBBjC9l4u~CkfX{;>YC?RzyXLD3GY2XW^sz~
zw|q-rPEMQ5rJld~Q|*hrb=$|z4M<M~zPFS;2m$s}QJhpu(hyEmJ%^e0#q3s6_uJk8
z&rJmHg~VVW;*4U88R{QAtKJknIby9S<Gc4Ls@?;aUn1fOFMwxervQpA45<<TG!cg?
zObPS2J3DU1fzi#uF!~1eT4oM=DL4g9jw2}4i2syj|C6*g)hs;lcxb>qU`Z?A0ygf0
zYKuM*ylnSHBl{r!(><h8v2&QhY}(v%T;Q8yL({{tyc&hIay&{L)gZOHE-Ygd+?J_K
zg}X1U8uk{Ov0Tp|g;(@}eo7Chu62a)K@LQz7XTHY<-)L+F=hVIOD@D5eqW|U`|t8x
z&L4NjtD)Uk_Ct=_%esXAo+~)xg9wVh^SK$g2C{EL-)5^nk;ca(NBR5n_cQm!@Be<2
z!^5p<B3~BQey5QjOq?&q24f$lmmCgkl?$LjFK**Q79g_G0WbM-3g{`{AVX)EvLe#{
zJG)FJw-47pxQ;))15`yZM47~~(+SQ+t+ylrw#Hj+V{Yae6|%&vZ6BBihGU-hX_)gC
z?88D`XZ8N^Hd!E-rJ1vjoW%g!O7<&oqkO$@%DjlvzWRN7c;L?uw2`QTjAEG^7&A=t
zWY%o)bT|lGjN=r@bbB-xtsJzD$cX34r*G-UzY=zIym{5m?kC@l1{`!9;J>8M?BMFD
zuQMjP`T1pMv0%V(CEJ9+9fzpTdpsV}AWGr?`759V*71)czkcdpe{FTIKb+6$kOv_O
z>OX(NbU=>Ci{V=MCv3ew@pVfWfh}2)C*qU;ABmJv(yu{1d${(0B>I9qU$JPNQg1};
ze<cF8_3E^A=2eXU7Wa?P6@aq3KANo9_`g_Y{Z_3_-bf~q(8V;Oz5WEXVS47-j*nq+
zsX9Fy>yO1+YT9~F@$&~9sl83s3N9i}-S#k;7`2Dbll*&4b8c5n9L=`^S=zfVB`@aH
zEvEwz{C6^HowNt`GdW&=r`!ejWxkF&d?Su4hBl<QUK<O1I$xhoBwtqO4S$AtV>B=p
zqB@Z^ATfALw$!_~D^ob{={C$d=kfUM@9<NQ1htfgl(*tl(`g+^f1r|DOd*9?;LF5|
zJE@uIx~mqT_Cs<l)umJ@u?rj_&6Njne>IawH-DJe_rL{7y!+atk5U?{%%vwnabh;s
zSXN7o60jcXJaxqn4O%`|&6-5==kHC8k}nl1-+7#kOpr+J%$59g+RAs-E=!CkqB*J)
za7z<#v{qj8EPl>^bj{=4w*Hu;uE-SMZOqo<nLQZ+kyDk`IZ{<JFSvQIIf_P{(TuTf
zgfvNJc-wj^UQL$VX~~ncL@5(}q?cVItj2S4;l~^~J~uw1T$TKlrL^f@C29A2<0ycs
z_DG1TeBUHdnNoTANMu-(3*YLG-g-dx!NKg!$zM13s_M+;Xh-d0QajK?*)*c}bmsC3
z<E78lOr_(jn8t@%)r_x~!(H^(!u^mXobX^1B;(HGDk(%$6jFU0$+nqSOBC=rY|2DP
zYh+-n9XN#7n>xB#bs19qmS0=zC4n?GsV&#45=pJBt2Rircl*d|E_52<ShFIT{|?(e
z6H1K3``P&d@qBmF_<ShFG^g07;Q^VzdM*A+0=U21rC5ha$4YKqyHh<#9Sy;V6q@PX
zP@B|FRP)DG#jHE;YVTvYLG!@}q3BYAqaN5aZ5@UNrG##6-G|!N=mcFsS}PBo9J7d2
z9rw2!)iqyLcU4|)10Ju+UKE+Qrt|VB5Nf}=?0WiBrH3}R9M@kU>rg?XOCr4DG(8gv
z%CS^o3(dsEzL7mIiV%9n1wn;B)gGTTU3|V(pLfi$uNR^`4>FeV%eE8ZaM~D8gYMTz
zgs4tVym~d$L&s9T_Y#CPnD0`rmbSlEoPvPU?HP_HNC}rp31wmGO<Ao}jhLlV(^)LH
zOo(y*{;+O0QC>Vm^2ohs3BjZree;6Ov*fAWQsVjR<#Ou8-5b@$ilennxzWPa`}B|$
z0SuHK4#G4Eo#$|VKN!9jy|H<B*W<zL4zPyj=~vqw_1*iq(7M~xx<ZB;$pSC|>8v8F
zId4b?4PPF!^R&5_SM+sw-ViTwhIt06BOp+U0VsjYl0gh)0$(i&oYzm0Wqd$MDrVY)
zq;R*SI}%;LJ*SDN%ltqr%2C(+K(v>CgN@iba&FK8uGQwJQg}LC$iN)6o#3&bTYQs6
zgucXi$l)sczu=oYTJVf+of?cj&W8xyxQLC$j?&zdYPSWhtZ<xZrtq)2k{l1D?;Ixm
zPEBONs6FqzM`sVaW%`l1kp%=H7G~&=lt6{{%T`{%p;i(k0N=GywveS47duU4k+mqd
z#0c*S@1{gsq+5zvd@J6y|2~D{55gt#t-#AO{mNC`mfMcL!-_{<e^%oU3@jZA=vO@5
zIGV&5C9DjBFERn_>(Yec3moc}$>5Kfti|vb)%LUkG{i3)5@NAv+<I=;KNnyP7(3|>
zP5w0gNOjcUMi8S0YMo@{lPKW<_$22Uvn6j9W3R~R&x1NuOcdjb9P<ayB)XQdMBz9m
zR1#3@cQ5V_%rQk>!h6c<-}g%O_5P^CAW*(mKG0@B`In1;$)sftk#5hW*pr=oUdZ;1
z+|KnYsal9kUi!<IemLvA4qg_l6n*wSTA|YM^NT5{gv}HAMbOqxCrSDl@WD5n>4agr
z2%A%al&C+bWB24F){it7@@%>_DZf2STH|XY2jy7xI(!*CH!(rGO(e3(wyMyhTjaR3
z%$5FiaD6KUq9her8?u2$a19Wq)K4<Ik+u1NZX=X(Fx~qq;<2EL@TAV`QK-SImLhqm
zIanLE_2K?S644i>E=@|K(Q!r+?;DpG=(QI?hRILrcZnXn-I^*nTD~Xw!3B+@`N^fd
zve!f3Hx$;8^QNN%@h6l4|BK^>P+nR$B)wn>Jr#J<O{M`?(!X1yZEGGuP#wj1F2KAi
z8RHwvA`g}Fr@DeTp+@ma89H}m(@*2SLd8+2`S+w&ZMa*b&QYD5f<|u+0|reUXKe9U
zifH^;fTidQ1N7S-rQV{p;lXw_Ql8rce%2B8h~{KbOT=hqR;V~LT@z6ACWez(-Tr=x
zDe7e|<E5Ti_@HG)(PR&?2*6n5MW}zxjM^>5>ELy8)c#2BQfbTNh3iaou{ud7wj*7l
z<6LM9hX+3wMFED|9Z?YTv34R`kwf7U$ED+#w&-i^P_NK-$mV_XFo-QlItFI&yUohP
zCo&v_WXiO{wHj8Bn8ml3;XhC1#>A$Ie<&HA`}CcvD1UMCG^U8A)o1dY>QfF|Uh#)-
z8Olri9#22@f?Abntel=F8e0$CCmI!W?3<DEV7(j8g883TUGp@XOOc88j$P_<3RIXf
zn&*p_#F*od8KeFGJc@-@5hCEy0E`DAEj^`Q7eZSkj>x#t<Hq!;%lox7*UWXsxo@*&
z5WVwDVr|Lz8eaOk%uQw4xWk_XF`TY3M)3us{_O(y>sPT0;!^{NE04)}5Vwj79p|)J
zI**a##P3%@#-05FAiDz-|EarVs!?eTpBpj5rQ=ZhHTTfe&S3SM=)sw-)ic-vJYG@*
zVy_J$pBMW3vG`FG-6aa4$K%1C{pW#`(l|ci6oX<xRWT6Ie6^KNARUbL)d!vKaMGs9
zN!0CH)XNb}U&T|EKETMh7e0?roYe0xAhoCQ>DO$^K0!_b?6veJ;_%j;x;Lk$Q6Nxc
z$U;WJnss1~eJ{fui}T4pHW03TwP_Nkl4Er`Z;K77yXNRv!~Ow&0MJOG{)%)!KUI$d
zFOs4jkEU7@1Y1WJmOGy29rJjz&M^fI?1VAIrbP=dju64EAT4owMJcgqovS^^Z;sq~
z{WO`drI>^xL+1Xe``EKG*oA(>&>{4E84oYO;?QvXd4C3d*5mm(BKBH|7{FQeK+v6|
zDlbj|qf<e-gg1qlJShSmJI?)|S%9V+gFj0lMTCz9a=Cw~2HllsG+MPkltL&_NmDM4
z@A>+fGMBRCGO~ZP-I8H!<taS3ah@T)>?=f0HLxW)Ld<ue&0UbaMic<}<^mK+jh*o|
zF~W-Aw>u*Wg`Tc!2~Thx%niW6igDJsyGMgLAzNd3-ElGYu=j=j^b^VeP4TGp&z+t9
z3>TQG+@-x5u7i*5+TTL}u&NFC)<0k)3f0fHJ?S%5t%z;rE>Fv#b9cIEY|C7PHGW<l
z{z?7TMAK>HZi;@Ir2&=*|I1C|i#qIPa%8q|V6Zi1S-q5N4v963Njl{!79db38R7an
z*b=-tgSckKR&%UwQ3=s;s%d(O)tQND0EQRjBiC&wb}iXRj_GaYI~_-NKYn<k35Gqt
zEPe=g0f&NDRxG%lx&LUi$#o!x9OLUG!CaF8eVerJf}Mn!HV6R^H`IKn6V{k(Pv)hX
zB;K?I7agB&F5j>0`KQ7fu<gh`91$O#b`gNRUG7}8&Rle=Rt|3_EsoK24kEc*<PKpF
zl3v-u2r3J5=q#{oqeYTqmMwmMf4R%tR(A{^m8Je_V6*%%wG*RFI|-#qM&J_FDV)r(
z(B$EVx)rlLp*e>UGp~MCahGr-gi3w{F~AWC(@{=hgy-SgwBLaK8nGS8|9<#k>jLj#
zN{2O)F`h+d>2B~L1r{4@g3{P5!N-4w4c+Cz)qJt`Ty)h_yV~MfqvD*|xK3;4ICDDb
zSk4X&pjN^*Dd5PFD5;h(HDBfQQYN)#prPBu+VOpfd$j~#dfu@3f$$bD=gpPbcba8N
z+mlVca2Ly`c9ihRyL-rn_&%#P!V;$}hOrl=Z75XW1tT^;IrJzRJWf=<bGh5Yh*6O(
zgALnaIuNR68b%2h<Szfzq+e&3QGHF@Qj~;hd`gZe|KeiV7>#B*>oqMYkW2r&@rU`J
z;z+PHz%sUU`%6%+@`6q;IQL4&HGGzCnf&1$OzrE0Y3Mz6;tj=jsPj@%({N$-?TuxA
zD6r0Vtc6O%mZDhWJN53zmiC6f-7Q^%RhgCoEdl#_Cr1h$iD8Q#)9={e)ceEe=MCLL
z<=WLh)>3b4T((YhWx{RSPu!BPM$ojX4I1oaW}R=a!cm~oUJ241q0|m)_TR!NI?R(I
znb4*%4=DBp69T}1&tUBn`sq7>#ZQ(!NvX4z&vh-Uq&^tjPo#INLAf%^Dq6FRbikmQ
z>wKRkXDh2hBx7;RvOM`4vH#PIdfh}?YERTU=q|jmhk%Fzz*p-=#+0Z)SDR3z{SEpl
zH8i{wytGn*)eG%j(>Ie)y~*$QvN`^|N**dxQ+lP|$`NMxi(eaYOcZq6to?JdEC`E)
zQYMoQ-lg6ViO}RClQ7gw41huN$^a0djVTTSCa!+j9`}DYrCgN)$@{Y<N7NQSxUqGM
z8p};((K8fl7ovz}rkf61SEUOW+2>mm-v+hKR*sAFX}U0+!L$E6--TV7&V~I})vno~
z51R`4xm&7_NkdNo4Hy7>D?H`un|~x**Lk;4B(MLjq;{bQP-)id<`!{kbc*6ME7nx0
z(!oKHEj%4*N_L!hKQb6Cp&C)ySCYg&^!_8n0=K(=K(Y-`!4NYD;no)loFnCetkn2e
zbHry5rbAiezH=wgGcWu}<4q7jNo0J^kG50As*BnU<@FW%SZn+mLmQ1`HO72&cylGd
zt#c(SiP7tUYWfZvkvQ5n4?zq)A;b~C@ln67)d=00j02+i>wBs=+f%4AS5a{tiBon{
z(=icf_R|<*f%VO&ENlF#-oz{11Z~Ni`sh`_*wMkg29&6;BOPFtCRvaUMU;?J!Z;IE
z<3i@1HvENk<BXkWx|%$Xj>0854lK1alls;>oyMp!UmXn}j^FmtZw2ZtjUqMYR~mY|
zDdl%dqrv>xRM2ohM0@wyH3Zxw`ActZL%cEm(AtOEg={EOn_`ZtPDf(_Zhp$U(>rC7
zmHZpaU7~M)A3m~G34&ifpAl1v_))$+Eq(vVcqHF;a`=v{OE|Hsn+Qni6$0av(Uba3
zqWYExv~k3jFOWDt$xyZrziC8Gkq*32#8l%g>r+nDP=H1EP3;LhVC)fZG5YGlf$R~8
z`WJPQ8Uyg|zed{0@&NXnXoV(hErU<OPAcJ>rsjCw4!5CEW>O@Zn7))eI{kn!i=bs0
zW;1x`av(n!O5@dJq?%($B=0}ZY8ZyISBggQ-h{#r7f39<P$3$UZS-JS#38ZHeomVO
z6C^1P?r@;&#j}9JGDg{d_h4MDsG7hAsw&LGwX*09xituwA>uJDeqzn(R%EyLHS~%G
z*BEZ&GX2%2fR!8)0kOUg#-L$VMf>6a(UHQ&pr)d<+FA(LMcekoH#MlhX#OX0jqY9d
z7n=)e1Tk{kQ`hgpzA4i5CZDeBf`0ud=Sl{QIEMOQzzEofWc~-jMrH)ek#w8t6?l3a
zg>Q4G?)-BPL#aX*ZkcXLe&D}yZPlk*Ji~6R#l)r@`Xv`#7g8?au4b&v|AX{>X%H|O
z+{VmW*OBRGD!D6Q0_D?3@HD7-Df0N1);ZBnF`;8S93<`hCgl(CT6sOoy$}vgAHC8i
zw6mB+niN14Kmd#g$8d=DHH8;86R}N^J`0FHXmZy<w>t@*oB<L%`O>-*`YDoMsG4Rt
z#C$R$(%P!0jjWigT#kA|)gji)P@8fU2lzeS$+Xw8K&F&^D<2Ra>V(r?tFTgeMSdqb
zF@3S=(S<Mhi`#EzhUmnLHH038nTSYfPaW(mi?uXl^!FRFLgSDO=;LcZ%AVU)XZzu6
z-aZF06qP-E#rko~39^n8!GY)(u6}JlmQ2H)vUr&D6u6O>W}6U9Hh3ohM7u?UK>2Dt
zcFqh^Aae=;xg6R%6dkA5Sx1;N09A4%tu8~d?AKvK^GG*IQZ$h5L_Qz3Zqtt0(PYHY
zf}CigkI~k9zp?-Bkl5=E@f;93kiuh*WawKGT0JNrJSJx4)I!v94#YjG8SIwWtvDl#
z>`bWra1oN6C)ByJZmj*rA?T4~5vq|bH(!^&c=Z7&#}5p@N&Wf9CxF=E7yh+UD3P<*
zEZ5`yxAJ)B?@9ZKSaF%#L`v|@dNZ^IZ<cwNO!(czN4*C!2iC#ym&9q33U33V!G<1!
z3X^bJl3JwR={2uUSHT5rCggrduQZ%r><Ag$cj4nW<Ag?7?3-lClFXCO9lZ8(XJRYC
zS~553NhMo%PY;E3r4Q*lH(l|W$<3cuKR=ztx#S<)UUIXZIh)-y(4S$mX1oCO25nln
zl5t?Nh;J!=GTQ{9DJeo1gRmj47srdS*b)oX*f^H@prO><R+V9Y4?mvBuZo=UT}d@r
z9ip0+#2Lca11q907q@37OJr3jZ@i7#1Y?E&067r@7;vLpg$4y8s^K5NJSI{E=7e(Y
zi!-GArNS0($x^O5fexHgLUY%0D!#cOGhc;xkwLej0*v0EY(*yJsxWEUHCg|@>Bz=D
zKqh>sCjVQlpT2P=#*|VW)4Sj;@#N<k%F-GHPuS#4meb67lpFnb<YpXqk@L0Tv;K-`
zgxC}MY(a86iVwYw%!@D%x!3Vj+YgKn9*BcL)t|jl-vFan0YFpvoj9bKpvNIw5D@ix
z;AI&?h$-+8cl~#Be?$NMSKZY0+PV<b8IlA2bv&-jdyhHI#5qj@Zv=1b@olMM`ZaVG
zZ`d`OZ(>nRnYgY2;U-Xby#WQ@)4h1aj5*XH%f^(*F_Zzw!QScW8nA%-jr{80{`_6)
z7m&NcAQsh@uFY`y+l&MrX*3Qo!A|@|JP>1+<!8qlt?)Yd3lHD65SiZa>pwmpFM}DA
z^`epQJN?F)k?1LaMkRngR7m}xP$k8p#POX6msvtDuD?fD_IL@P9k{C69rBfQ|NUJ$
z7-Rr!9x+Av$)b0b0KTpun}L_@gzqP=U0Mv{cm9Gwgu45$v~32jCMEO+_&jffP1UqW
z?tNX;3x?rUhuoX#Ggl?mFUC!u{%oH>?12hq`H~uXf;BA&IjrgXN~2R=6GFRf84&7l
z-xm$S1CE5pI=qZ$WOv1K;z%tlK&yUTKR#~OzNl5Vtlk{*wYv&7MVoNeZn=2tFD|Jn
zW_;Uk8Jeu+$eZoczkMr3kT*N)B_nlrd!=g&1%|y7ucVONDtpb`pXjWxcTazFZ^EW=
zWQ^5Zl0%Qa3L1(q$XzGXG-ivOo6cM&GfyLZWsz9Gq?(2}wumiqeCG0)J%YTY$kaaK
z&MsOjkA|pAeS;^2l<UfKr_PZn-MCH%H{6%^?zC?p+izCR)w*-6R$xFZTwmw*MqgWI
zkC+wT$Kk8BS}>2C#h2*Q>iT9~HE&2NxkTG@-%~68vBrlS9sX&*AL}#SDHs;t*QY8V
zJ3VVh7AL@0RcA1MO2<iWsTnUeUNGy`&JR1rXA(A!_j~1ScK5bcWiiZV>xLcUd-mD)
zO8t&%%APJ>xTfoxqzq#U={-Jg25N;UFS@04@S<7o(N$e3kHQr7UmHHI_F3bfob)lQ
zAteBu$2CFsdGf24Voes_7?wb8ShJez?;GZX^1Yl&&&nI%G0ch__v2bp!tMkYInSw8
zd(&FW!mJ*K8Kb19@iREh9Ceq%4+NjmHag<3hX~}4S?7Z!(Ky+L<wQku8h=o?E1>VD
z0>ES?EUB{-{I!fTx7tOnHxkc^*+nm8{2nOexW8r=48+v@Fhy~?mXNFVIdwSZIT!!a
zt-%xs_HKBhVr?)ah3N3@bM*p%JeoHYbOZ%hOJ2Jndhlxu|3I?r<R`2j*pR-u?{d9N
zJ|D;wRM_O+R@~k2YqI#nx;+L?)az;h8k?~LKSP$SSs2x8kKf$dFwv-|r*a&jSioqy
z6|bZuscF7q9Jz6r@zKe$trNfoYis@bGyDKZn^T&I#!IKoCp5GC#kf<7S45xiv(!_6
z#0&ZeS*M;Z!NhPRmfaq*8O9LG>qEyzeUSV6AxX)a8Wd7m_B1QL_ib%makw)y6Nlhn
z>xhCc%-Kplxr%S1yQoCbrFhv))4LGZSnm|ZYtD4QCnuzYL>HI}892Glw=d*k24Fm>
zMaPq?n-=`A88u9V<R0uQt?}KVcsqCYL=7{EGKEZwnN*E3+9J6g$6Rw{dOkF=zY@V`
zwU5eVOPr4!nuiu25H@qG>}u1K!9VGO^8HRYxo$5=N~57;w{eS@6+f9SyOaGb2`Ehk
z9>NEt!XG=j6}r+3tl0FgKZP%o6VtmuXd+KsZ~>)>f>)`PNdv}2+a%19sgvHX=B~T>
zAWTV}$Jx&?J$H#CL7_P2=f@|)1vb00$9RHD=NCK-mB4-9M)4yKo92qQP7!dFt7w3e
z6DJ4D?)O9mB5s9dmT+@zvXgj`Z?sqyLxQo=HL9rBo5ie=@OK^uc%mVP#;c{#04)wr
zn|N$wUt!>UXZewTXN20}7)zaU_WA53s)_G~dMSU_8`n?B`>L@A2Zr4H!Yb=q=huC~
zr^id<-h&XG$3`K?T1%R*x~{zS3B^x_RUOI4_-k=)@}OpfF_kQC3>~{<;g<?dan1Vj
zJu~S=PWh5K^Xe5Uj3N7Smr~nnqzCi)2svieAv_Z<2y7eBaUdglcowh<o3*t81!q@0
zSKpP4<AkS8HL^NwZE^N)x@!h)G>b7!N6;x`t7{n#NS_<an%ij^-wyytxCQ=95Fvlb
z62T9wC<kSY)$nu?#`SP_Gh{WrHd(>!ecGDcTrs?-+A$+-IwD_hsSEP})V^g5r7OMU
zpVV9Ba?=WQ*)_EDOKXWnX%U~U$9PfxN~}&8I-T#3Jom-F^K>|yAv(~mo3P{v$1_yg
zI4(C(-3-?vgv0OrBt8=ijb3HM0kjChGM+DyTU^8DgcwYx53wc$`%>gBp73~2V`r`k
zDikKrI1h;i(<_JUXUcVDt||yZF{@z1;?vMC44|0^N-Um>95bHZPbJM}gtt9-tVEmY
z+_7&k;hPy4NHv6!u#Hd8mvllpcWlyec_1#)cPIHub&F;}$N2KuZsGr}rD-WY3wBzX
zu{u6YyqpU(RPPCFWX)ddzMy=(d>30_bpJVaig!BR+68$6F#9N=>faO1%0wMy1iB37
zE7#meN~Up^EAjedtGX}u%v%es_ZDhm?Cz(#mmY+C=gTyvVG#2+uRkW6P`$OXx?+5e
zGc0oPSJA?bfDMk{otDag%Z0}r{pq`VN1u~f&yZv;e$bF$t5?a<a1#@DJS54xAaes8
z^Q}F#mt3X&QmE=!?T(81-B#@1=YHM4UZSEddl=SuK84uOrT4uwo4tI)LYMp2D1C#8
zdDvb`nb)_TJ_~!qJN+55c**ndj5Qc^u}Ai0=Z-VGfdM#@q+-5bIDzR*pr5I7BbfV`
z)@r(FgS+mw6zfC2k>ROC{j{7Z>FuT>(&_&?lpw2_;$-?`<sf28c_vb!-oof=Fdpa{
zT@TXoG9{Tkhx<2gG+?MAf=e|Krgv&ZrTYp<f-et0QcyM;{4~N+k)Cc#EnB;Ipm#~S
zeVk+z+MgCTTpFQE#pjG8W@zA(w>+w9{<4)t0b#w=sE<SnVmeGTypkx<qG8UAo0u6H
zh*0v_T&+PY)ky4YyxiUFs~DDRq%KZkXJ~R$8$Bs3Nx<PE;hO(pPA$&YJ0M9q{2I;p
zp_3OMqhL0)E4KY<JC=w%tYVeR(#TZk)!qiU>mmsLh39p#-q4u=v)}s$MqA%=FZ61#
zm=<#QJSIOWN=Gc!I#XP;8L)`1Lej^?C7z%UdXH+?OIlY)gc6JO>m(Rp$B}M=WaZ1b
zgsL_*B*0FM%x}r1lBB!ccq_ksG(b5BxrVk&48p|0B?^itZBH5r8F@PhlMLjeDASn@
z!=e_4WQz_w(V(ll*&@vnlIR!B`$)!k!Yepxe*FdOI)=eDt2IItqyp){8W|;~G&E8C
zX=`o_E(W`W{X>%%4VQiRwt21T6<+={^TdGp3akQxq>`NZGpzG%Q<{H{1em;kJrdAP
zDNM4XH&Vnb&bfYO8Wxlj%5~b)nFyn467-U-$dToPk?|~EZH?coR=!S<(D-9q`XQe|
zB^14c=tb+&>DVx->F5n;ym)x`Ib&m+YvQ9<Rc~rIcEATisn^;_rndfkOq+s9Ybfm_
zpmTP!%4VKP<*&NUEZ*wPN?W<Vdl6m2m8t6hK3`qWX(aW!To5RLF3C>;){)|CDhjbp
z6h-3jap~P^Wpwj>)K5g)0^ii&{%oMkp6-W8lEMQ*zq5o25_e7tJyMX|2tJoaJ`L>O
zB2sC9j`0hm3v4IKBdJX1Z5q*M=9Xb2pQNDdQhfjhlHB>ZB%Fp~S!NTqqeGv&)sc#O
zQB1b6GCLF*m???{#L5A6yd6dxlYc?5IiU-1LFZ@nFO(ZCPrXc&gv|p@3&-IZ?W;@5
z4d%0rX^kvZ#4U%0J9b8(m#5?1R*RDp7W%ZM*>2|ZUH26Yu1-%&SBS;s_7FW#=?t9+
z&m+isUl&y0kYN!eorc3HjmwsC0i730A7{C;18c3mOy^XBDe)2vvbFPEeoRgj8xw55
zm6Qr;YXTJlyD{r}bB45pj6gs4sK{c%t)GoOgBB5c)UR!(UjSclVG-pFg2&=ETHCF@
z=^TF7R+#b5qKa4vxGnpBtVW?LtjD0sF?*4>RUt9l(f39#)pq<AXm=3Qm=ZXrg}MtZ
zr^3VS1ddrL1;zI)TnS#3Ex6T=@(&4v{)5Fj4b$&do*8n`xD>+;rhV?dD|63fHTxS!
z-tf3=P)>!ScJ@ZO%&as(vh-X%_-}}z#l~;<Vug0A3<;c5vA(}buQp0SJjM?i_0Tsv
ze1qWn0~qo&%_Xnm1L^LhtY~3L!pFXJHq%CCqrGFp0`^fW09Q~jjbcFjd7$-=tFwPB
z^bBbVFu|=W7WU^inpV?19l&5-6-1A^VWe${5ft!8NG}HO+clSn?v|qUQnFaA0(AhK
z_s=KxHmR38X3$P0G!ACiAYy?Y5bhw+^4!P|zZiUW?ph67yEc%>PCYJ2LicBiS(^a3
zBFJ%#@M6GBDRz}lWUSLQq1}mCH7MvB2rOC6#Q&)re)9R~riAF(-}en`Dl2hWwm;P|
zqr7Zg>W_50k)#Kz7p@(~grhrGLPC&F?I!nD!KallbK#yP4lUF|R%A*MYKVmxROcY2
zUN9h$EULH%$?x%;PUHH*=c2ilU5vfQ@645ID=dwPmttvdj=^W2p$utaUt%P?Osak9
zg?WjKpOb!@@6AmZ3IAyxYpBWP>cvE2xN{fs?-W_M#Gyjj+<HR|hy{<qS~t?`yx(t(
zlP!HwOQz>FUHEn42UfQ)92OCuWD;JJSAn`Gu1(NdEp+BiTFlxRwy`$}s_-S!Ek{`r
z;X|&-=~N&*NAe<SVgF?rkha=Q7;;|;FM!VW1|%QOBqO@ERxn=u<kq=NUx4ixaU1??
z`BGTFR_cd4uI{<pZFI+9WJch&@x9@6GIU63Jg-bv-SX3DFm};g0ZDE*379URa3+Qz
zwPKPL@?$19`AG@`fC3zTrd3`TV}R|`)7+%bz2OxX;7~jEJYrO6Fby!Y>(r*eV*S(E
zi#E#p$7+-5wiI*mZYM@bkSycR&;cu`oUM~{qm1N!P%z7s$7!kh7C7!%uEcKM>U=_e
zVWR;Mir41GaKL$}jyVd9c55Esn3v9JR%XAI&z05ay+CJ<QM=G%$k5%g&<B(bLp4O|
z6s$-c#|c}J41q)E{?-SH>5h*O8y>Xt;~M?IZhshG8~z7Htx~*ew1&0Zv(vxh*VOzD
zvM$J@vM_w;!k`9dLX03xVGsqek2sy0AZg>v{!5%;nf28We6K)*@@rOgogW~4SM+aw
zWi1io!%bzXcs8{D8madO1s_Tx-6qDPUM}&0fdg}D+tDPA^BnUVN`{1^r4{=&%fs#U
zCPIZnY%`LqC*0{%io~)7W-2esd4o*HAa!Vj>{lWd|5sIi7kt}2QjzsA`@-hM$&(Fl
zm5Gf37L^8Folgdm28&Ma^BM^Pm-U(EKzgNY<zQ$|mEfIFp>@v>=Tv%zvD^NV=4`P`
z@jd<mZ>Gc8kz5&T?%{h1R9|B^6P56xk_@Js(OX54V_dYt1c<1KOIs1qfn2M%fVX<*
z%Hv^xAm@3(5@!>7-F)9KU^=2p3MiH2^@cLML8wKxD`q7^6hSXr5BBTna(`WCErV(E
zzL0<I37$eUKAdRXphv8lPzTOUvluuz8&>>Q@>yyEZg}tV?m<yh>(sycTp<BrB8e%b
zm&hrqk@~QYQa=|MR_r0KQFF<>%e*ltF_1kF!sd1Bh$m$9hn`{ISvL+C{t25F@Mz-u
zO(f@3>HIYriI#~)(3+t<+m?${rnol}TnSM=H|(9;xkOERJ}inrKDl2LjhIn#s?cBI
z;(b}eZzw-vZ)mzm;>fg-hM21-$mFTX+!fO*KDBkWR5MBVj{(D0j)w}%pUBF9{t~gd
zr?pN{Xpows>YTDg(V65^!>l?ppS){t4Q<AlU35=epOa-(v(-tr*iUf=c2oOgy*xo?
z8h#cnD@)G#?wIFqE+SL|6ge_GWrHK(z2r*~5BFi8vW#p4?@ry%9J}^TbRaL=-4j0B
zoUhGwhS9c}?MoDR*rOUBF}?5>^*WuDh@$$EJb9Uis{{M3P$jgTy!=XKpa7G6ZhKZ_
zbAr76u7>gcH=*5X)5m`%+fd_uCsqW?o`yp|KrM4f1Du+;T_sx?s;`jaRCA<T4&Nc%
ze+T8D;8_T@p{S!p|1~4`uYYYbf!OvCHV698$AB?7VJIM1!Rev%g8%pY+ka<A{@0)!
z*cUL`|1a|2YWjazm%^nFa^*1pHJ37U9*1jx4(__HrRLwFL6WT<poZTA87fX)sc%*0
z=v3e^5N<;N!^<=Y^#VrE{xQW)u;Bfz60fcg?eSqJ1>ApT0luDSHp}*e5nG3BbFXON
zxZM3bZ$F=sI1XtfM2r&t0My|W>6dd{|I2)RZQ#e&4^s;aB!Cg)*PgxU=+t0J4Z+ra
zjriXFekK0k@NhI=B4ZrFhD5^xj|I)^0!UuD;C9N2(AVkCRW3gq9dAu0o~BY8o0BM)
zl@(HoA1F1C8cWA;2mgGW8RJ(GaLzc$i{kami3oc7GcPxhFU07>8XDwV#@~%b!ul0i
zvl%AunRUGB9f$rIh3!)YRRhPHha47xhu|qdmEtis-^&MgcAaGzPyOuv?Les2Fi-k&
zeKC{syP}}iDdTm@RhSy+2#s`U>?8rdhAqQao5Pfz{Q@IWfb_dG?B%+@MLB<FtL^w(
zz8|NCvagGeDgaK*MHjG-+5w{$Mhwu6W+S7cu`jnn7!iLo-arm=u|7nrH$&ce0~5Yc
z!l;0*2m>@WabJgGN#B8(Tc?jQSnI3BGH&;uyb=PY@LEP6P>5^Sudg+H@jO4RUU%08
zatZIz6JMxf)w!K_*-hV;L%&5AM4P}fKY3=}JROgnw#$CmYnp`$3;{oi198rVz2`Z`
zR|1~!iw|MNouB-I;6l;!uLw?bxAvCU<Ywe0Y#|I9-$37B4xA#hX-4CZyzDSB!u*Xl
zSIqk}8#lzlt1VeR3JPgFOEeVeb)#Vb;3a;YQ*D>hPn2v!0qbeZS2`ueAu~l|4dO^`
z)WF_NrIPej7ho6<WLMasK#vnmmp+Ker+u)9XJo{~ogcQmEthUE+<VMF2HSBb`N5R;
zm1R{a**n-9RB|k!VPRwT4eiAqB?5zduN(EBleg5s_%f;O!u&j=o#u!7Nq2#TSx3=#
z`7?KCZ%x&cbbSuq6t%cj<@G<#C#%xbB=r=n;3)G{yE&Nlr6pt9gbc0r5(w3h8|jg)
z*0o-_E~-3E&ya*PA|d{>%VV!0*Gin17~M7+Dxf%`@N)q<V;aBL$TzFO>GXu%?*7$}
zZ-_ilkLQ20{~mE!e(0kUGL)8+<k%3Lhp64jx7-(Ts%o0S2_~$KlT7<4(|Di|M>ig8
z5xN@}9|R#H6t8_4{(KhlW!by=r48B-ds7{$-fPAivAt6lB9rxc(Bst_(tzq}1Ke{+
z85II1wXU<glO4%ueZQentgG%-L>Ckgz|JuG;bXvlfW0JYk6NkL0o6A<l&cQK^u$E^
zi)|;(6uYGi)iQ{7hR3stjF!!#Q7TpL(T&S7uUUGB7v-W)Wnm?7WZYw{vR1?q2Xj-?
zjae7M;DJJjrd}US)3-OpJ+0oE%I#5z6wadBW$}DiC18KtLDevAtKHz}C>b=*SuXQr
zs=zxioJGp409z!dc>J}3@{-hewA1i4h``eUp!C-WL+>XLEJE8kqMe71Reu;s@JN%^
zj*nn9e4v@PhE?f!sTk)#$H0M+(=`35e*5(F;c4l{eenM81U~rlEbDZa(UwldFM_lC
ziPw>}iM<_po>Z4nKi-v;543Y`jzwcsnzg41Yo@Snb1&=A&vFqoLXVWC2g+c#LLfx;
zo3pg$ka31|bw*r65$)Rs@AKlv3k6+8ONTA)OxO1uB&@2PmjA>U8PJH&ohL7*a|#fo
z7(wOY<luPhhVn^qY4<=u?6-f?<WbAkruE?Ug1==;G7&>OHTHgeeh8*1KJZt#YHFA;
zPPd;C)<*ps?O7}n?<$d6?rZAb3cRW*f>?<0)S?uRTipBJrd2sCr8plyg<i0#stO$X
za(}~&%~F{EUJVhNX1+?jmw?+UHr8}xFlSJW;Vdk)WWRm4ceI(|)NdMvz4`6=Kd1m4
z03>-~YPR>C<w0bfN!2_ZSKd!=8H=a_Py#Rvg4R)-T$9z+;|gzU*1Rh#;sz)AHS3Nl
z8LGX)Nq!eb>1q5hp3gv~=i=|K`YrzHlSck+__pc+?h4Ei$o{s^Zt!Y-w*mt#@WPmC
zQ%JRx7znh;O*weJs{|3?(DPi#(KPFy0jwf7k?fe$qnZ+=FVA&mYiVQJ{Fu$Be>MM3
zFP8?mCc4|sF}Rd{#fOak6KLVd0XrZaDU9sB9PiD%M%8-Ro1-ZiU%mfKm6ouqV-aEk
z-jhJ}f<HKAt3V<N9s7)<WC=#KwjS6-tkk}1`&Lw?BxQA0vQU}TYc&{oyKRDSO%P)3
zJ62Y{py0MM;*|g|o^IG1Rvl0k+xWv8<9S?i-bqv!<?qh)k%!SvX|ug{gWhhy5_E4^
zafIgkgLC;z*Hh2y)X09J$5Yekqu9I=KI09}e=HUgSgEia4V)<G7|j<^d}_=%p{TP*
zPLnZue{vqUsqP9A_Q~UeoN{WmnMJGfL0j_%_XhWY=UV1<IM!(o)ZXfvVFF+9k`ova
z>kPX?r>+zc1$YG)v;_~j9e8hTuUpN#q{pcuhyb>Du=PUrrZdk(?;gVj@3c+%R~IkF
ztJj0mf$beLP1>M<?gXzAzd__+XgcvewR?}GU2Q^ITx+>vg|LmsPJPw)Qxd1#U}H+i
zyE^|kS}L9N4;(r+h#aMh6`Od!4!u6F6S&{{T{KCCco;7AZ94&?*!p`CMdI-iil$<m
z(Y4OU<p+|%sUPA+9<#GXwd)>t`<%Rvw*d^AhtHbvs#}GA9L|n4SN$(AsZicFj+m)>
zk#SiR(53+AI-=zYoH!Cj^W_PIpPKHXjkOR-(5mmBb-4+JnU%!Cqws{DJ4{v*IVK5)
zIwOb_nk}Mh9Id{N7jIoHvQ(gm=K~qYNI1+0x9Yp~ki0~4Sx4)uh?4(w@>tUO(Bt3g
zGUI8)I3(JN%KGNH`~LUFgV23)&8*G*B)(cB@CNVcX|AI1?ojuK^2T`{`q%XD#{r-P
z`8SSSdjg=c*1Hx$wY8eGzucMk(##b>p1~~TRy#U*<o3Ns7sm5(XU)f6_10B~%P#L7
zn-xDXyIu|!iSAr~-_gB|*HWyAJTi}%Z=z64ouCd+<VzT?GgY|9J$~nLy&bs7wu%lN
z%3M1|oW&3($4y}%W&wYPb|u1mi8LC@g^K;|xfj)-)%DJ*!D1^W3O>N>hw;MCi(=<@
zV&0qId3t`(TytnN`28ZTw-9sa1}2b;7s~@gi-pd|zV-6f&Kw0Aswes6IICNJF~G9M
zRiX)x-5SKc+?_95(X?r>@TBUvDU{fzRPg@o>>&14osX|SMLq5HD9+<YAg7aDbm>53
zSKA*{)ft;^2VFK7@7(D*<`bvhS*%Te@!&?)hZ6dVM3Hgib+br2HQ<j&`*C(%BGkGX
zX1sKvF41GX0*=9FT{piEMwEWg=xR6^#?0X+L-hRjPXk{tt5u}4URh_CookcHEu5zJ
z(7}DFHgiY77Cr*-icV1HQ)I>Ow_04J?qtC)M>}PjAfgV!b?h6`QvEO85kBsDBK2Cc
zhOC`8Uuo3i)|SUxeZH8r)1C|c*~U7mt9H_qChO33J(FvX@Yh)$dC<-D8Mf~i-mT|L
zm@Ev!F||0yp%c7X#S{y7d&aR*$F}KO$>8(&*)uYVi81>NTmFRCcJ^mc#4a@v5z+aA
zf{%(7?q!Ojlk<R5>zQ%8^MS`TNrLgdpZBV-f!l*Nzm`+)@w*ji?MC?qPNc^cwrJl>
zfbn{8yJjmsf%n*QE>!h@Yv4RP9L{E7Zc~4r9EjE6WA{aQ+_DT&q)x|*97NCUP1=%P
z+>LttDZkD=skz>Gc=@(pRi=JOwXwY+SuZ`1xqRYJtJx9xBHn-qM{8+uHoPBf^#0~g
zKX=lf70q1e0~|%49VFKhz6CExf^b@9ZbPU99yBAnV;GOzrb|{B8n20en(5cNeAY~5
z)^7Rssc{U2>6)K9f-GgcXBT+e`Ljm?pK8I=upNdCVw&qgFd+og*a~Us8Qle!$L?rd
ziNo1VNEG;lO2eU4zHRf$tElMz@PlA@=b<GEfY=&4x7uRM(pM+it{o?<n)}e~UoX>u
zz?-o^VsFUU=}EcD3AngUD%v#-xnF2vsd!W32thD&x>-jqiEXd%sdyg?&7j>!?|UD)
zir?_-6ulR{j@y?~2~gfIopK5Pr@gO=YxCLK#af_Pa3?@16u08;RIuXih2rk+E$%H8
zC{`%$PH?9<1cF<U;7)>^wEw-oy}!G2bM6km{J40NnOU=Dy)(0(dDa>znk;Yk)JKUW
z>(OrZsmZgamo!T}wBn1YPn^Bn3q{=?hs7gdeTm6)sKb|J2G+1LHtDBq@@;<GHX5Y@
z8@4@Q1E!|2H6^J6begRAoKr%KRcXU$^BK3(DdO9q4$y|qo>k6qI<kU_4xYq1`4V89
z=ojJu@JJhF?=kv7+vPw2lZ=+j9A+U^%&*hD-YQyU@jc(MF8BvQ1<Ks2m@i)ciwsj!
z*&9LlzORApM>Wc{m&oOPyaobm;-)H1k)n!%*}S$HmCudm2S}i&a9aVrfNi<`+=Elj
zWzr4e^1GvPJiW@*0p>;$k!x#9g62pibr?kO$!UGhbgm47O4F0u<Y)AwY~0v?i>5Rc
zcz+X4Q~pZ>N(5PKT~75`YVtOFFRK1Z1phZ*2f~fGf{s?N2Gcn`ig{w(eW9!nU<R(1
zhmiG6-W5!9pLmtuS8+u|3mC|><nsTX=q7g{^Af8xUNEPldzk1kYxY=XO4EsqR;(ph
z`f5?WR4M!0kTosLsC<BDC&%t~^vpi)Mdes1I;m*Wo;K_RePC{X{fIjgDxPe<FQr(d
zczfYb-AIY|79xiZI=zZy%o4aG#Ir%yEVHK$09;Sf<W=qp;8z(57(oM?LL7ztHZRH6
zT7r&0``dYC^5%Z9t3>CTalO0wscMl>)Ahn8KSw{&e1aboeUjDqh)cV=Bs49vYQFW>
zGsN-y#U-O8Q`^s%2ihiCX4vy$@0qsx{6gN=Sx6(4q40dvH{g%^FraImgDQs<Fp9V2
zS=4qhVyrJd;t0AA_S+uu9AFPw3PGtx;cm5BHLcgF@f{HN)t~RDnCm@ZZ#eX(P0?Pe
zR(;A0CSvQAAroQ~j;TY&{IV9$Z^Wz5{k`!B8`G+Zv$L-++s39}N6UHU)198I6dA2!
zo!2h_vF~g*b8C>aP*?*i+smq6l3uFr)jjsUL;IfDr?->F>U$CoSA@>md~Ge=&t6Ab
z)BC@Oxy`y3-(Ob_=J_Jiv}qFjg$9qrW?ulQ#b}GNWDYrIlGH8-Q1%720H5W~M0byt
zH9}y_v!szF@i*0V;x`YJ+54&1a7c&a{DizjO0MXc8OLD-eN&vPU*M@~&}k17nfD>)
zP81cdcQ@IHHqB;$lpJm~IR0)BJ-8f*bm(3H6;$Xuaj<Tpu>sZQPy5hzl3kS^U$05M
z2togTThIH%JCU{m{^o{e&9mb{+TU2$cRBvb7tG4bnGmFc@N3`}dGo(a2qlTanrn+j
z5L>!5uYTZVWx)1Iv6|R>CA5GsWy}ps(>2<tdXt)+P>gB1X`_HkYF4~nOg1%PDMSu$
zQMTwS4K+2!*f&uN;aGk}5+L%HF^<7%x@Zr0XI_){`3IKLP3DDiKOxime*U#m?PZ!8
zy*z3RlwKG4j3VjfuJ@k;?3NowYRoo-`*FG)NrM1RiaVM!ajv8s5q?e6bOp31iGTs2
zIU<gX>IA~q(MnNP7*2L1s5HWKKpI)Dan00im`l8hy?uY*WWKzPb4~Rv4#EE6TfyhR
z6;YE@ir<eXiJQ4jQ8|YY@CZfR9iRorhVI*ugt+Npb{S3aiAjECz9V4FV=~4s3Oj^o
zxCoSUA05pOriXjt9j>ioyW2n;H78IXr#Z-5Sn<TF-UIc_$U}6CPTF8T1u9TaiSFaT
z0-iQy6e$FpPc`#(yU9-hiLRYu25QAd6t>vUY3!>FNE(ZNt#3jXbE0thnSemJP2Vf<
zFmHjQ`=o?=2atK|y<8d7!`>3L6lBqXyN=2oUh}f(33i2yQx^qB7kVilECQ$lp<Mwi
zZ&oWD)k9>xshMRXi)HlF$U4Zs$~bPvwp4BAk_1^}Vdgy7w7_HwYAb_f*sR}$2Ex1t
zZ3N%zo@RAZ=uy|-<GB_+q32i}zy&{P5~E!=&Cjdp7VdxK;%_CxTCSp&?1lcaqE%$?
za<66cl5yHJl{0Q6#-TO~r4%b_!syo{T7;Kl?V{6svrPw5t%9txXK`T{;}ZUu>1Bus
zBFgqHFYIQm-7^AG8y`4u<ezrZ@T}zVBWs=b4#JC-{O|`Z8TH;N7JGw4x5rVOdZNRI
zQA)D$#+R-nqQCG7FIzc4s*2X;^yhh4V0TL-`}aeH-<O86{S;rxx_bk&Ivh<x%lBD4
zyM4!w8@BCYSI<Bhj28P8ri&%3(|ahblx|$-RPjwMk=-rC3s*%CjlMU#;Vxpw8A`=L
zbftZWVS96)zLH{^+Ab5b07rT>&FUV&jTRvZu{qMNG?>&ax0TSq8eI!m-$IX#aq^x#
z7Y!JVQq^tZvtB{Rl_wpGdLP=C=#jHwO?ha7Exd=$^v%|pM%+=Su7nU~o#P9?;g0(~
zr<FBZ<CCKrv;JX=@A0=PllLFcN*EFI492fTo<^(k!o>vDo6lIaRZ<^fMVEaRy^?^N
zNnuzQRGZ6t=cNP4fBo8IBMsLPDJb`?qL6XZ(8STy>a|f2AY#J^H4(4imkRa@wM8=E
ztqT`u`{e|VJAq%h+Fi#A)4>uinEc$c2+&A9h+<MyNtZ<HR_&T3qcz56E`R^l&r=33
z%oqDbx3+U!6c*=ryakOF3$3>m7V_1J360S)o>qnBAfPwwPwGoDA-i*!b3XRfVDJfM
zDfhgP*sJv}Y{w^cHx230ZU-7F1#Ro#ESk=XMZ7I)A+sh3b*+#^?m!QHznh{xk9ng;
zBsg<W=!>88P1v^Cdv1#=$VKrY@zcroF0+}b(%U()O$&<iE`~s811^;?V^)^oR0aRl
zWaj3sEqIoat!+}e*%nN}4pL`~$GXSe5C*BV93b4c8H(s5cQp3$!w@A;w=W{gIi)sq
z^ef%6u$4`Ay!X|gWh&wMi<9?V{xgx`(WrI~de(&g%$%I5=cF9E_w|l7wXWC5M4IR7
z+c#o04rhT3h#mYhtMtwqJzlgEJ>opf=*kmxQZ2E&+%QW_tXLEK&pUWU0mN9qd5b9#
z8-4TkWaF|+lg7?xmN--rS7|TCezH9P$~Ewbs}!p)KQKHi@~buwasM4crCzO<wIMU`
zeua5l^+k62&sZh5mfVLm`hH%cNfAt%sMXuy)p>`7Y0Dl7x-ipE5mAyhn)+`Q2!1sk
zGu$~XGk5u;9-#vy6k)X*{yFr8ig`$B+C|m!B?Gb0`JV)n@(0YEjlPrV_j)(nH~R(T
zyRLp4xpGWgsDU@pJsEb!DUJ36M>O5!dD`Qd9Kb!o=-C7DM&Z>D#N@KB^$O-rdf$sj
z7|!=zJ<Mf_yvbWh3hqnuebyf?u8*%cbB*rbYc^Su`<o;zPi$rZGRMN2&WVe;Bcl*E
zG^D;`adq*Dt~y4^`ReDg>|wxR`97UhyH5fuITtO11}7p3w-YCerYQ%VC|!nt!wRGB
ze#gEkG2NuEh8RYt9JW0zwOYq`=Kg+ppcOtLRiav*leqJKjEU!)yVh3qa^Rq*_-ed_
zk8v?a%Hv+g{glgc_lBPA31YW%=s17zljs^+M5671b&bQ4r0SsDNYr&r-ZA|JRkU-h
z91j+4K|0o?m4Jw;vVt-`Dxfg`VMj~P+pz?oKuTgT&;f`DOQ04TM+lICL~yVCt9iMu
zzJ&R&;}pC_L*r1(;}6na^&kSYM+eJTUwOopK!jy1i3Ht~dwn5^XT2`|%P^G!kCif=
zn*ML-W;_O?8})+yQMc6!LRHZ@IuxBiC5)2j^hzBqrP1^_G{D<iYwx;nAp<*^n?*u2
zT6&Rr^&5ShgOJG6Zk!i}dr%c|1`nW>&XekP7h^R(#**-F5N*md^LwYy&TC!oqm7%U
z*XS@YU2z4TidyVG`68T?Ds?~ZU_Td)aVh2}U63b+NN~%vUgrOWTLJO}^)Rf_QZs4o
z5fCgF`9?O7YZaC|CLVAhWgMv}8;BoikUpEJPjDb#X=aVOXxiAY9VyGgW4cLs<6wR*
zkp+Q>Y$envn-?jvXVWRuy&Q<A{#mQ*-){i5<^7?g3Vm5IbY%$N&iLG!TMoeiSL$!*
zHFA5-Y1n38UK{DhtDTB@vL7DB>n%kGqCcD(2hiDy-{{b&f;NOrWBa=y3u*K9>2h$9
z)MG&s8EEADw&tLh1J{7na>u*hJz9KKKAXZ{+Rk9DXRMcuDE7OsY|vMg#vzqINu!?a
zXe53R!Nix;VZQ6?$734>Sl;wRN?6A;v!e6sbpmTaNdcFu+937)_?J_AwL8?c-8R_B
zqDr@R`)+4H#x3$;=E&TIkF>jR07S&|5a&Lz+nvqF_Wi9&mN2z*5223EJ`|ejc!X)F
zxl8D7WBbbJSUHnN+zk)C`0*Z3qMFUzU$@DZ=-md|X1@$hS#nfWVrdxZZ}!BnC*kQF
z9p;#fY*XDRqU>e|mwmN3@UO1RuF;A(iU3c5jn&w<LsKC+k@Fdnem&ZK42mlc=a|!#
zdY4Z%3D>xjq}kx|bH_mFX|blv*2ml;-VJ*!l9rDG&teE2oe~c4(rSMwCDra<ORWwp
zBC-1=lHMI)9$^VDSG$2WGimLAMn*-bK2rh21BRO7xtyXra{Ju8R%FMyI{@20Y@DTX
zO=J>f-#mij(3M5Q)>f%5P#q->`~a8F4VeX)^jTPk*-zV~#J1SgBEobCdruwe@2_wz
zm*=#$)QiTiqg)7$(x+wO#8l+Gua8qeX~|jaIX?r>O*D^6%Z@@hN+s&U$u|N<i`QN5
zddY1A8h!?FYNc5-5-5@)M$9eIs+Aib@jBdb_W(7T(AVMx{b-L{T&pb)ccDPbS7y42
zDM3+bMuF%4T^lS}Rb1mBX=7t>PBLSxdM3SHtp|J>Olxtu@9+QXLCCpV5PES-8eqwO
z*Ymn(ol&T`?6v2Tct`q8pMO)RfjSnNdkD0TGxf4BTl~(tc5Q%p_F|o6LxXX_7Lf9V
zRTdN_5G+ElFS%qDMbSp0F!PM96HDdliA7TpX<}E4UE^n&f{(}%;r5F208}vT4GH_f
zeW$tQ{$}8f5&e2FrlTb6v+$&XJ9BG0J)2FGAs~+s0KQqCLZBIk-X^gT@L1&=>S+!V
zz)VIos8XW3N)=1XcV5^(u)4-<ADv{TXy+Z4O%~^Z3LCEf+k5>yIDt67?0%<y$|Z|O
zbvv-d@B;mzeeYd-fOoy<^_?vO-VM57W!YaJp3v@KneEwZc3lQNq;)u@Ww=`6;Hu2q
zbFs3HKvMQtO&=a~EWG#5_-q@8PXZ%N*ZUd8CA9G-j02oBdyi$jd=tk^w;zgf+AL-T
zBz3=M!6<&RRVD|0M7Z)lKGleCqKRuf_yHII(q5Mxa`q9y5*0V7IHYmb;w1o8SiM_a
zTpvL{Yn$j@1mP)loIo$tgXBIMDeght&ShA$^r#^dR-5{2{aW~}5tGi-dP^ca6@ajh
zIwJMxojSaXzeF2x3Nf=T5*l_KPcj3LKlv5mCGEEdKh~vzq2iI|T4RR12Zw^%Pp|Zd
zuN$qf_pP^=+t53<tY5E2dda(Cv<*$-v1tyY11qTXZn`y5E2pY&pPZSLBX8(Naz5)h
z{%$1j9qPG{dK1|?Jbxa{Op`Fd#6~nLa!WV$v|{g7z@0UDNw2ZjXE(#^^2bj7hl3lA
zE+v`c_fgg1@xVy~)u*|H+yQJRtkeb>Cz&ZH4~)JKmrNjr@y89Eg}Xi6?ek2lPfI%c
z2H(r}IDa1BMMC-Y#Sb^-MJijp<eM3~Eb|O+4I_(Cjj6QN3?YSzx2%y&c0+NItJJO6
zrS%lR;pBJ;K=z9xS6Rtm;1xmU$eX3qDnz;-T11pC5`C#tI#QW3hDx}^h1X>~xD3NS
zyw4g!zwLmh<4cbQz{%%iWkV}&;L5{^Qq_T7_5|Z%MnVBiO;(NK^rO^9MKWt^$Zwf^
zWzjMCbcE?3b+j|LwAr*}bi;KWZ2^|?Z5a7axzU5e1*nDFOZ|is>r#ma2BG0$4>oG6
z+JRjC)|5$-cRHraGcxjZM7EjhheGw#*$R!e<CH^BUQW6D>KX#YPn{xi6fyljX;>BT
z3>if7UEU1wij_!OU`K776aZIDChz;+I_hGfy4JXM*9v9#+#{QZVo}oYq_cq*xq%!u
z>7P!p=TWHd4QsE=yBxA%_E-Vh>mao2MDyYH4I7#9$O+u-oWqp7@>|IOQBk^Z1;T0$
z#9k~x8eyHxVZVM5Fd@;+<oRK*!#@X})NkWIDw=P8Itc@J!uafN#IlPbka~%XcDmok
zc!y$OsSM`Jx!j0-%a1|a$ER*|AGx?^RA&p2*Q1h(p<SW_nO~tfhd&8Mgcva*N2&Rt
zpC%wenQyKSlUu0iCW{y4{XRCmjv-&Z7YQ+xr^~K>$vO8*zMQg?zKds3TFg)@dVbu4
zT6LY4N#}Iy&=Hh?x4yBK<7ZS(9b`#9XWN^raFHkz8LzMP`|8SrH6)^fG{=zT;+xn*
z6Dab;Ly^q&<~V1pM}^=7CD@MBEySNTSO-HAL*n|bG%Xzev59^isAGRPmE-kGd6EiC
z?fSfP3f-36fUb&yCQYA{$uBbGMuAex{g=4t{?oDb_n~2af&+MQi3gwufIAGt5o#+u
zg&cQz%s6ZoN@g&PIdJ9bPn}~uZbSlhs({YzSOYI1$w!T1mD7AFQt%d0#Bps`k%Mu1
zUy<@U_~0_AT&$9V@R*6Mi#N6Mv@cqF3ohZa0-5;XHW5T~+B>Nd<_LF#S4_06BNJ|r
zn{LX-mf-Ti$1>u}Rwcq>MQj&G(aHH<GP?eo3s9v9=*aZ38ds%ruhN{2RdtC{kGma^
z{*XnxuXe;+Z%w+2tdmxaL~i>e;BILoDW8Ocf4M0a_Q-3C`qk1Vw^%vHZiPc<!dJe0
zcHVQX7-Z*9^WK(z;y0^}VW>K!lH}utxb!l2;vZJQ#y4e{ornX97hlEmCnmuJ8_5w&
zaOs>AUr`k?<feH^3mtB!SzFUKmvH?jc|>U3N}0md3MsgpG5DwH>v18f#y9tq6<7dY
zK?OT2wv&?g-H70qD4r-Ia}_|UQ)yS|6EpY*pTx@3wQ`hw$vT2|R^XYW#;pAUo!pzz
z^<cEutKgk0*rdYEn?<0(T2gqV$)RU#<J&afU>o41>dA69S%S(j1qK+^K*9ecR9+)Z
zbX)lQDodGqHz6j+ODE<a75@J4U1q^Zu0F3df(Vrxfl$)@P&~f7?MaoJz~?@rT=hpt
z(b_ws86SYk-2;F+VYYATFLtA|CE)&1J0}49QtcIo@hRv{W6pz3rESy6mU*Iok|`#h
ziZDYqC?NCmo6QuG(H!<0=QooO*?=%(3wmVEKNN~?wkkYK{Gg~Gipm&<!&rNb$Sv!#
z%Pel*0sSgS-TX@|&$y%d0s&2FPFiLsYh6cuM(dysuP1nF(aS`Y#o;bdUU`JF(6sVa
zAGZ0P2<CZag2l8RdP<`l4$3{kZc0<)^-NsTvgKM@$f+wl&s$3^tSoG>&tK<T{4zG)
zr%W9a4AujS`Zat^JSSl)IvvS(w)mA2YEPrz(}C7Mgt=x-knB%;<JTxh{;Kusa+m)@
zf<Q|{B)CKosiz{t^h!pOB`WiQxFeY<03)x-HZUcs8MbP{(csf7S6^ytJgf{JW~m^^
zbM?-U81(VweZGK#reP29D#(5;>r)&GYkQpXV<-k`)l+C#18v6QEGmabZ?piNya*Z(
zzgCyD$-P*RQ=gfNtgMvuE9`dr7LvXecG+!7xG>!m7X9HGIv$Sl(C3QWhHwQ6F`F>W
zj<Lv>;=9R5SiL;3S|3ffkqVCSKUpmv3BUI(Ji3w<<L_pbpzyuK{eh0GLt|Rr+fNWL
z=2BT7tmOJdFrq44u;(ciqy?N$5J}ip65%wxK0#~qV7kmW0EPhEPUS76a@IbWdA@Mk
zjj-a(U4`5uWrY!wno($J>lo9q<X1<`lF*ZA=^SK4o|s&P{W1Z3MZ-)pY_eXvCZ{Ll
zz|pho4v9vmkd?-?!t|tRa_uBW9=Hjh#e;-Bq2wR{bOS&>3h&s8WiHZ2(m6&jiK7jT
z6(suO$eF!u-dZ16f4VYZ28&GJZy6g``(^aNgKkz%4#Qb7?mX%(DKCR(b+?B@K-OS9
zJDk{K8pKhh<Ojgudsc&19`j<VLQItD9RJzs3`2>6$FI-HjNBN4PC}xRLbOj$O66V(
zP?vfZKwe-+XV8w)`T!J2z4=*BLj_v^GHD%k1db<65%J7rnSfCz($0!NJtT<CV(#-Q
z>F12xvP7~pVGP}~rj0UDoOb)j_;~m~&0*aG9$yIHl|PkS+;l{ulo>84MXEjq+F$H+
zcJ<^=7-%u#-Qnc{@z?;BFjHBHZ`PCO)mRQb!QV(y*0B>SRWaobeN4wIp>`kl;^3vw
zDr2yk)7ta0#i#UJlX$ATkw<t(xvRy5;)A<^Ub<o&zOqEijKeZqn^M?0R{{Cr7hNl9
zsm8jSljA3_S2Dh0)(YTHHG6N4+aIgHiMtxr%vAW#2T*hHnOu#4S8M5Y-Q3)wmWyt8
zPp9Wlu^BoGFwJE|?(R3X8@KyoKaiJ|UCwX7jAu~g(<n5GhSSUWWu&@)yu9(RrxZ5w
zH;;uO$f>M;)EjaFtJc-SyIx~nBVnuScC*@|4BleB-w<rWhhMdzXXLjMEjAWgb*>W(
z2}US@ULQV*G9LiVpOgQx3B#<cIg$auV3o3&Gl?ua;)I5PM$oOd#;aQliJq*R<o2Sw
zR`GeLYA4j6?Pe+jmqa^7ZU!%EHGbx*4}exBZ!Se~=R-G4$4~_zZp8()eh8fvFRLh0
z&TcsChI~eAL~k3r)|=|arB{>CSj#YM+J~Ct-<nXZucIWYfFt2ocP+|rzO8Cw1@F80
z#X0W_v2U9?Oj~kUEsyJ#m+rnD?wL(R@uxo#2{v}F8l2*Mn@Eg+M|&`*7>mkU_?!L5
z_~pkqmPcFJxy~Oiv#7JqN(e@G0}EkdUxh#VRrW;#O1+SVhlVgeeYm&_h9A{+L!@3k
z?;_r&BWCq!{v3y#PNQ>h8iYaTpkg&X$I9(D7%(IZIzAHF`V6D)cL)*g$`RLQKcdkP
zo;c6bz5bfR@Evg?Xp2bo^*0Hd4thN&TbbESPYfDztXqDw=2KQEE!+G1R2}&n7KXM6
z1%_1op3*0oDQm(H37f=X{L)=Q6;Q)R6HM6<KKx2prICG|pCuim6Aoq`SwPkNC7xw<
zsOh<q?N5Y*Unyq@&ZF`V!DBn43pDM}Z+#;DZmlICymI6`8EuI%D01k{rIx&eYNugg
z?&;joSKpayk4lL9IJf0xTM2YR+f=hbLUK)*U+xy7eGJx}$s45}re9_TQ&7k_vR(W-
zfnB&aHEpp>j>)0~JeLGob^IcOzePc&t<ur;xdduNSrz={B=H5vH5*$8M}xQLrBple
z2B15pnj`G2Tt*%F*~BBQ#yijP&Y)ocbHys6&C7@4M9n#9K+5E`iSA4#ED`;3lLi9(
zS>{p|7tVy?cux^nRg*!-X>{20B<vH~l$2yER_>uzz0_whO}tVk&4MHCR?4{;YRmTf
z+ucp}wCKoof2v~<rzVBoWLq3?Ir`>_AL1gP%0H2}Fzo6;1^^cdp!`aEQ%`cS@*`II
zeTMkltvgLDQ=3td_HGN?@Gi{Bjx*Zauu+|Q-O^nQ$l9@^<0&Ld{f@!pb7$D8dD)X#
z{l;Upfa}efr;M%T=yEghhOm=OqQC`NxYw?owVehO-z5cDtCQI%IIEw>@A9*xX`7{1
zLRy2csHROG#pusfAcYGi#)z|aM0(0RM0&~{c-**ZD-QjkMrOMoXeTH}E#PYYfnvzv
zQv=8R1A620c3O4g2*8+H^i{Wd=-Hm<8_JoMppZM1`U#J)?Gj`uKZK7~P2DIa^=ff-
zg-RT9nDLi0b%KgOCnZJ+;EI3Q#F8j#7?Ps4$oAt`shJh3SGF*(-V@WBpAN0{)B+8N
zRFD9U_gEG^(>U85$0=0_9y3uT2`|M9$Eqg;3<M+7!`~7ic%6xo#{+L9;zIlx|3&O1
zh>AucxitkV^lB=Qwd`aED&bO#SCBkju87}_HXrphbof9+RVgu~B5_O)7HiFyyX3Zy
zE6=<9(J)1XyE9_L01<~QXXqQw&mlN}d86!Oe*#|-jF7)+L9q!WXJTq?WWKZOdk(YJ
ze|^XBmb+{^%D#e7mTQUgAYh~^QuyHZ8R9a|woGX%tq)~%LB4|*%zP~@0ZBoX7*41}
z#!A{S5hxJ`O}iP_&BZv5?>~7+O|t)Rs~>*xjxTWXO=<RU^7j4ZO#YkyT#$+&&gh-i
zI{zW^{QL8toC5z;^7sG8KhYgpcXog26aPr9*vX0D+THW0AN)hT`=?S#J;Y2<L@Yu1
zmt^{{N>32iw^?<J62TEsME_H1f)>J;`Pet!|A*uH@6X+zSc(5jF$XW-DlDiFU{D0x
z=NzWWM8@^=m7yQCs>6s$dNrSZT{Ui1yP^>!@NCu~E`AAQLmV(?Yw)GJiF|48x5aUu
zRafVgFNwg#OkFUz{#sX87dB=t^o85}>~VFa`5vOS?CD2OY>v1LiW}q-cZk|Aj*iNW
z`E-e_ECI=$qr6Zfaq5ltE-L4}D=aBe{QD2Zk1SG)-y;e%3|K1q9H<6JV`7iR8@hgP
zn3=N_!dtWI$1_%qTy;lW<F2#sb(6k+<%7a>7jFbQZpR!g8#!7EwJ&DDD@R#iv1<F7
zas?p_NhB1&AP=@xz!N}|GMW~LaYp;Y@3G<)2|*X0Oo$-9rb2jp&X>3fnQ*S)x9J^G
zMgsK=Jqv~L61Qx=R$v-WUHHp<aN+dyPR+6}Udsq%03F!Y;i3CX8c}bMWH!?KSW&zR
zS}rTJEq?>2b0xKVY9SxdlD20fimf-dg}Xi)7=#l&Z|z#vBP52sGx2~z_=~dFjT^q1
z@zf&3%MtrUZPignQO^K~q(FLakb0U6u}115LObWFS@WLyTpcK=ZEN0i2KP+57puNo
zyV&zOzCT-Z9O3gi&}loh4`fw{AD|rDEZq!nY?g9GM*h=X@(5lD6bu6e6!m0izEAp&
z4$amVH`r+NmyMKi+5~!Ea-eaR?d53m4f^*~R;w7JA-S;tFLCgPtmx`$P4~}gPZ1qM
zLVJc`B1jrUM~UJpMJrc<50yfI=c5MiEE!;msqOE|+Vck#AX}UrcO5J4%gOGxRx!sK
z&4a>@RPOLXDl}RoWQ<Wjr&G3R7dcu57Bc)9$Ogbov(*ky{HSpn(+q@9<TVtUU)K+z
z<Mk^DR718Hx9GXgf3W>6Y49Qlkthin1^iXA%;0%{=r=PHLn|uOW&cr`=*+<yOZinl
z9;4^>^+U`qiK!!2((BFD&S6N1qQQbFWMj^4FL8qVF$)Iy#EZ#O>H0ct{6g4mxPIsM
zDfu2|aC>IF&ZH_;)3!9u{Ki5`4pdlO(#KH#BQOI&r;{?80w2b!*R)y{&NrdvsmpKQ
zz9nmU^R6Ci#$Trz2L{>l|AepXXYW#C(7Wo_7nf-J*S?Asf^by#oe|#Oht!}g3T_d2
zmiW2pwwKw)9n09sh8z~_qDJtzNTS$2#TEqLDLj!SOjx(d>4QMq#J$vD>3Nmez{mQb
z>1iwb#9^q27W;VWM`?~E+XSqCYF9#-1wYF(X_G4q!De~GP>o9H<-p_aXdDOQ*s%^1
z=r~90_KQ8eSGj#Q^hk#(|8~+|KjaWw|3g+Ju?W*4?p{%f;F4D6H+oXtYO}8r*kg2v
z-b&IYB5YUINh4Hp8ZB>|;)U?eykG|iw{g$AK3DAeJAp|0C-=>EmY-APe>HX5vwr!s
z7xV7`$r5%t#mUP*!5GC2p5LXgth_hhxO?-1yWZK%gK;)VOTA%mZpPo<CC07aRQ;vM
z-}219yz8(HjK3B-i^lb>lw1u3#Z|g<h;QG+qm7|#Au)dd%AfrDhZGBw>Q%yfjcJ=?
z^8(%qCCcFn%;qTD%;$fb?j@Amq#BQ*0DR#?NwS@}S5$2s-@EKMPIN<!o}8kecAEJd
zZ;8h&S6{|2WEpMukLZp24d(t=R*z{}k2mU>hCb~9W^DQ`ZXd7ef2kL4N8JCu#mcc5
zf0*WUnbvWEb{{B6+!=9H?r$s``qm1ptfaCw_$^EwjIe6M<{uBI15;TCo}kfkd7+&M
z;;m$G<>4~|Siu;F(xjz@mpz5CT(It+(N@DG3m|?WcXWk+dY`;*aJM>+wPWbExwgJ(
zC{tzY<{2~6@Z&`^spYva52Hlk3FN#LHGpvWe4tTc$?Da}OC~8Klu#9<9`6%y6n-2a
zSf{!FVj&*3+iU&t1yj?<hgg661a2AH3nv~|0YAp&rbk9vOLT?3B!uEW8_2FBMNbVO
zW-ZpNMv|Yev-gc3_Gf4IWz-Ov`BuMC7I_U38VU?8-hK5JOdNQa+Gh1vT57ORGo~R2
z1a+?AcRJCu@tMj#S2l6!$*RZPz3ia3zG0SsD<TepR1SLwbygFwdw=&57}@Vv_i(Dq
zJ$S!GZ^>O(B$9n$tHhISN!7qt{w(ZNWY&AFIQin5XUjg|1bS_y5PuOE>NFDa#66=B
z8$qGZ!rWSwG2H2z5JRI)u%zUFXK{Q0QKt;2w{8NTR-5iJiufmf#*8Ye>#H8JNrv|C
zT)|K4hkm*wUP8gQO25Y>;B|pUcuN9b?ftic4TL<at&ZUfts;SoJ5pla`);<EW9Pq@
z*M6*u1i)U4Dcw{Uc0kQvIMy=Ini=#qgYlUVeU&8W+!Re1>HaSNHOODixUW0@J3e6q
z?pJx@1-9t*y~f|R$p~}S(g$6&A<R%hJS#`!vHI8WTp2ZV#Faer$I<YE1r)q&dF-_=
zEhbZ=Z5pL4Eh^sXRqgWB(D57;>c72Z&xjbw9wnssbEisRz9Vz+C84d2Z`M5q1qk0T
z{3)WT+&@j}sUQ5Al<MR>6fzHN$JMv!M;tqT;V^r(IpwZ88Pzo1q1Knc8}hG3fCSlM
zt{jWazB%owL#^$0w}%Uc)SPx|N$K!y@XGM=LniR?`-uGY6}4bWmXNPI+}q!vadSLp
z(7AQ)LFW8^*RQ@`cjZGV5bU>YuLWz(xifcA)|?bBC|&7zRQ1@JA6ilfhj}?qZ^7&~
z`$c^1;W!<4!}<%^_s7>YM87sUJw+E<uT-*c_gsbWyww&|QxX&!>>LjD(R1*3003mf
zYKsmalSJWm!5KnYx>`hZlaGCsRZr7H<9Oi2E#?3>!D{R>uyb#igc@P3h{-gf=Nz6y
z_W?(Q+!0iLl<v8c8xL{d2TuGf>%W7hKd_2;m_alTMU41<Tw7-HucX=s9^``H!#5LE
zt%cL+{Wlwx#=nS`dsGBQGr^RB>i4CbmwSP0+Qhogx9rn`TfNW4me=?5-(?PqV`KlC
zodr{T&VR65y;A+rh!Rlqo-n)<>xG!fjVpGlvp4#R$d+=e!s$m}>+YqnW-@OSMLzil
zCZr%K2E=QgKt;=lZD<D%9kgy3AnKV0Mpr7$kt{Ih9`yHXU1q#Xjow`^eHkw9#LBy3
zAmV#<;Tz{@>{8mgEH>=qcVrx(Y4ZwByoK+7ADb1Rb$qA(JX&YzG>Viw>DlRz8H-07
z6@S?4ew)D?cgqzYXdsN{%@(l--izFL9vR?M8=7agRzQG(#MR63wziW1y{4BESM>RT
z$Dr#wo8|LyrB?vU;m!u<4uR#`+)55|fHf~N_6~m~hbyJcpUF}w3AHyYH~M4i5g9~_
zBce#c6Jc&Oat+c8xBBh^bfl*PMXt~xO`}0Cb<kTMIUvu*+-jD<gz+Jn(_E8_JrVUa
z+C?Xei2v^r?4H@=Y+=C&2|u?Y7p1Gpt27SXG8qoTml&^^R6v}usxMX-%b*hqU+LH{
zIUnw~t>X~nXa|nru?-1DnVHHY|B>&avCRj+f`*$caO{5B+tdUQ(SmUmFtB-%3n+Tp
zd}7*X(ag3el2D1flcf`e+Y)on^1pz?{?6&8O3I1R^W`mvFkO|3qqmM!Z_b_udOb)_
zG$rABTn>Nr;pr|jUGz_V>}(e!4{ob}oJ@4SnL;9WD}lc1q=*!)n455Qx65=`Qo1^Z
z)EVh-lgiC`?Q{fgmR!bx8g;)G{TRPGY9=f#BylZT_ONuj$j^@2WUT~!<*n#@3$r1u
z?rA9HXS4cP1Z9}5ecF(vpXKoqHQ1CXXkGMxb?`?2ZRZg4ewa)$)puo~E|<dw5(61R
z5$FAPZf}(kbICS+fMDAMLm88{Wx&ED&cKgyyd4?vYgG14Wf}{s3l@UNN96C=hnJoe
zm;qe7*MVmm9l<~0x$#Jb?Tm0wd%eo?vrAf2^~A}e7#-~u@ijgBW*RbSnj7lXK>F(h
z`ZyTb88F))G@$2yW1cK3Yh>C;(&Fg8p9|6$6D!y?7^0Y;OAe?fdJLC@W10vy2F89}
z2M|Vms!;RH7<^Uoy3ir<&^Gzn-ED_?D3CINg_JpZoce6*M~r$Rk)LBNWTO5-$9%FO
zaC<nNpS5-G$8EEy+-BA5eu%n?C}bLI5&lQ1TA6b<ZC=%ZrDxw87j^@;UtGoNTvnS%
z1$cI=%Hs9~&I#HjnYNkTPJLsVk5g&|!luZF-Wlj8t<Im$i!aslRe@v5*$mLNBcG!|
z6cL-B*wW!|dNQ=~2qC97H+g~Cv*o-t+wU6&8z5UL>@}GfmPG>3j%4a4TBvy@UrJjJ
zpD`p?8Ew^^UG;Q?M#f!v^UnUDc7177Q-BQmmS1M<$ypz$ZLDDH2a6~^BK6L+ulqC+
z8LTC9=o>TJFttD17ii`9Dzk1rUd{3geDi)InpK<f@b0>5q08wmj#%iq0iLd2dP1<$
zyL}pYW%(GSi1R3R!D(s^6lE<n?FcnK5%LeL+84eRCHt|EdxU`H|J!aM#CxvxLXRz_
zmPdCE1=sa-<99B?>4`5w*$O7>Etl(K4RY^P+bsH6E8Zww9$)82JqMls^gW{y89#i>
z*|A4@oE@2`6ef;c2RTcKk;mt?6NQ3sv3N)_%G)}c{F?ROZ(f5H<!fWSj0gPornW2A
z?PMmJsR39m43sG4j6v&5``eZ-_OufP20i^`4Srju&%FEYT(}n%O3S2chzoD29p}E2
zpZO?$`kJ4a2^0`=|JOe1tB$}?1d(WjXoiYEeDWKvkWMwddIz;!$VqJVXO{NGIcN#x
zoN6jb)9B)o+hco|yRtOC!nZ9cI>^Dd%kQS`zic=!OxN}`5jTQ=#C0zuY=84x)=~V<
z!dys11%$&c$mi{O_1eVu)&!$PDLH`jn~X^T#@1s*?99wpHnfOP$z+$<pJSy*B^SfC
zj!finxgL0)F(2-k->iC{Rp;r0**omoEJM#;a53v!l-eVvHhNMu%7B+5hKocH`bB{i
zyW18p<fNA(p#<7CE1W*yb$xX2Pq%0%$nPy<uUBu@-&uF<wrug(a9?V)?uTu=)4Y0_
z+1}vQaQpBRBx<P<KsJARVq{yB@sW~vD>@Ej@Dw(=S>$WDmn#c&8rDB%@>lfQoj?zm
zo4v%|Eq4uX3X+We^NM*nf7KJgc=ZBY{Cqkg`?T6#zlPMv&vI^EX{EhlYB2Ls3QbsW
zZedQ4U$<e}YSR;|4zfkmJXkZPwQTwNX>x2DV2U)o9>0Z21{~N|$;0!d175TUuN$GO
zEXBzr`JCKVXMuJ41K#M0Ev7T*YP@BCl^@mA@Jb%4)F5ARFZtg6MYLDVxjZf3UU^*3
z#p(BZB<G%G?H63p!Z0t1kJNTvU3rlx9W*<CmJ5E#kLUY5jdK8NdII^>cRgK61MlSz
z?HI{3^nte!JCDRAkG~sR-Tu7=ym5u=<G(YSZ5uVZ3@S#r7uH_1jQhx0Tt@t;hF0$W
z0eYYrVUo$bOD>My(E(Y4+|RuSdkKmh8m_w;KFQ~8zvwViQj3d$awjM1iqON=O5sMl
z7cehXpmj^Z!?&_tiX%Agmboa@^yP)>8e{__SdA7j$v!I`rHafB0_L%~^Jqe}I}FL|
zc^s@*?Y&P*{pDxvE{Ufd_W#*@N81IK9w9u-T<=KfHr-8jjmn>*hx=CQiXo-~)``wm
zv9^oKS<sgINJtGHnGaIcR0>Z|%~)jczV`a|T|vOs0&ZcT|9auU*Bo*1a?O%Fp7VA&
zhk+y2T_f&ijg2X#^PT4DqscKxGyBt?w-xlH0Bdf<f=j~5p>B(Z%mh}<O2kRknvIJg
zdd>gg`Es?5H$TEd;?Ds6=8p$Tq|QWCDk(3}>u15<A$HGtn<aSch(%rx$BrOKvJkww
zw6OS~%kzZX`>MX9WFa1$V)gtjn~`5>P5xB<1#o{*z6c}L+YjE8d*EQCm`9KlRAy9P
zQyKf{cYFEmK_}v3Jjb`1H9ir)&Gi~|1l$f!x=e#x@;icxJU@z!wNqvh)~}0B2Z-+g
z$oNohJ*D&ceSc#G)6qMzPxAF^RYqn1Otw4l<$QEjb(1lALhh*&;G3Dwt5Y<ftgL`H
zXp@}WFv$PMPNQ%lNi(9BsbnmN834Kb(%y8N`YXd7fxmNP`dq#FsWP{yw6Of(NiIFj
zLl3T@=Nv5*(EDRwq1)*afc;fZC%t`hjCy$B`3v@q)BeTQmcX<2F@qsiV~eKMC2k*o
z&f3yKZg{re<5H^gfU1|6XZ&+C`A<kRJ8F9NzlCSeD&#7vJ(M7<b3a~CE;fjNfk*lz
zkjOsOf75pG<FBW1dOBMuC<EL0!2tMtXz+WMjrvP7`IPBu4cFsj#?Pr(_GOL7276Cx
zh|l8kbhVvK!k?7@w%%B{j9<^sy_dPzND=|nZ=z*ug%BYU=WY~Nzf;m0n200iw4~!I
zLpZXL?%QP7gZPT6wu2+k5hSxtKIR27i%$_>3jQvocnl7;hmkdJQ={LxiSM($-Qo0u
zcrNaLgg=wD@PnQ@rcn6u4AL}JN<18IkD*5ANDChj^YI^kEC~!`duU!f=pA?6(Omg$
zl9Dj?;kn%FmM!3c7mNCrrji045zwRTo`P?#;LI{taB!6i>@i$vN-Etu-`=}|ZsG$1
z3$Zsc{5>a(!aT|M-hx70kZIxM+-w}(FfaGEz%H@e=Rsy@kN5sGQu7Dfr&xqBCPwed
zD2wT&7OHN25^h<p;lKCgVY582hj^vuEb~2Ba6R5sdRl<8=ZI_YUoAbQJ_5yA%9pZ&
zH6AP4C%7#Jhbi@c+iQt<#0_H&y-hhzirE3)pZG33*XQ??nQHd)B-%M#q}fmVx#7Bb
zcUz%%EZ9tUAO$itdO(%>#aZXj9w8Xq8HIl{d@S;@_xGq<WpHP^iUhFielh1xjw@&M
z1Lm%~@5?exT~410@Uk8f5?aV>St*S^151t6S+Lk8yf&vJp^bZ30O-qLWukTIDRyMr
zC;`1MqljF-t+Gq+&1?9OADagnj6_1)zzlm>-dt_-inISY`KbJZo!6WOU`>bI&QT5_
zci{hV$Uw<VnUCEf+jDx>4&vavn(u|YLXcsbnDPEX7?&8dv&93S-UAT+0drEUX@Ygk
zT{p@fOb`jp1_6&$#C{Gz;DdH4v6T*xK?S)xyFU<<<aa)V#pu_jyhB5&Rq1@qzkg9z
zMwoe%%YF@Jenb8j?g{EtMIdK1pDoQ20Ay4s?hKSk)}p87j@V=0$^U_0B)=mg#ui7i
z023pIBA8_;f`I}|F=RgTg~#}B4P<_ZuR}ZJV*n_q3g6$=1k0MQr9~qnoKS>BG6(@E
zQC{=@dF|<jo)W~W!HK#4mdlqzU=4M1nG4e2)ljq$rSjF<UOoNWTU2?(SDhqb>yjp4
zFm94?7$|z`Tw;j-_VztSm8;1!zI_DpD29?`w<J(6-g*ogGNM6v*MC7degv-Ouvx;x
zgfYq;tk7uxb(3Zf_is^Ju4}Xen^7@WpTF?k-=+X$(g^8gnyj(O|M|@ywV50t7Si$m
z=fCT9rs<ZW?4?_UUz~>uvh}0=BN3t-6wiYOnz>V6{pSho|GZ%TsOWEJClL2>?)0lp
z0sdlD2#p{Vp801F;O+jE^1q&$qk+!g$~I+u6d`|9^si<l-(s#gJM4Ayu>WHSf3^1k
zQCwgodG3FH^<Oyge?7}UV5UoKDte*+sn41jAxs(v+w_06VV#B);aV&D5$8X;{a*OT
zBwMQ9Vg1)LUBnptIxFD&FG14&4CDV7?tg3U0Z2)eCZu<zUx0-8do8CdTOn-{@_zs{
CFqW?X

diff --git a/images/add.gif b/images/add.gif
deleted file mode 100644
index 3f40d591d10f17eab54fefed078985c9d339dff3..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 986
zcmbu8?`vCC7{|XiImtckZuL-3TuRhtzF2FgUI#YGrgWz=T`LtG$_x_9ttPFZow&9v
za|`7*lcoMDI1m|>+H7O<MT^137k#0}xlyO~MIwS_>exnLsN{zrGt9y9X8s92uMUUr
z_xbXC9-cjePe;NdK?LzPnx$!)+wCTVcsw4j*GnmFX=!O~Z8Z#|t*x!Sz1{Ek+pv{V
zbQ(`g`RnGI8i9->9XG5;5^RjoBrS}xQdffQVgmGx{&mH!>O=`4-5b{taf1gyMRd0w
zs@hx;Dn*POav=n8fWD;<?XW8bQB+9Rk2*p&I4S6&WQQ5yLePm|v=*)?t|XN|#Ym$H
zT|r0`Nq;Y9I0%J^_9Hao_?#TX{BW&dB?>D9#z85L;7g0X!k`<V4SoAd_A(*9h4Nr6
zvaGmIQu=E;zpYyFDkY>Wo*wS4dbkQibsDl0oG>bA>_z4ZC%~E0F8^L(Teaj>Oh{o7
z^!+!6gR;N4HdE;#3OEOJ171KB2q^)~c>#?=(JWOsQ+0wG;OHc<r2%1rax4_FrwT)X
zI={CGG5~4X;*CPxEGf<;6~elf>g)1xXH5c24Ma?Eeq|Ba3<!Y`7+k#V%4DuO+jO)A
zcfAC~Ae|;I)Zd44GzFR}g?$c}2N!WCV{^luF`;v$O7JmgPUtP*XP^@(Lt6!U;1Gm$
zXAA@V{5f~+AXEZ!5;s%0F^SG3#59Hys0EcuNO>riPX1#}-OZ*!lc$foK5LDE1!0V8
zNCFc;0toI)cM;13djs^<hllo~7Kchgnjk6ElTZ_o6DTLKI)a}45K~B-uu@=?pyL}u
zrrXiWgAVooQx7%G&0A<f*XX?i`K9wmJ@<9TF6DC<vYRxqFz`X{;yeD%;q^;<udU5@
z?8+>6PnItp-`acTFKW*|^>#r2Z1=Z6%zicZ=$I=KT(XZM5O{1ZdT#5w=l-@}{>>M1
z&u5=KbvZtF_4+qw&6W20M*g=abJqELKJT#q`2N_)h4~K;-`ukA<oMakzqJ0jviDPR
z;^i-P7RIlfxQiPD1KUSk?;h4R&+o*6?>>1Td5ph?-=8`D(WbG!)sB<p^^eW$@Qzyt
KZe%qLkN*qD6d}L>

diff --git a/images/animate_load.gif b/images/animate_load.gif
deleted file mode 100644
index a6563b031e0578d30a723f0a4aca512aecb3292e..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1893
zcmeH`{Z|rZ6vrP(6j6yc6W>;Vief1mCh9Uo@hw6@tv1sEnTjVDHs|bZseytz8)Y3d
zEHPi3&9aWRP9-XtSegpGw@r=8vUalCT03O}%T_<^oc#fNe!Ay5_j~W>e$Ty8F_H8j
z!6#r5cu@mjFc@+^4%mbFXbc94Mx(49>>c1<07bBIU194KfLTCXLclwcT-=F-g#<sm
zySuwD#gDL@=)Z&(7#L{h8D|v(P$J+Ki=!kg2n`DjNuhdgu<;RL{PPzCg>yrbx26M-
z4*+8YBPnuu)RusiF|o0632ZK#$Ki3gackDBN#UpR*QKsc6^b%M8*?`0WM=Km-}FuH
z*Vtqw_KP}v`aw#@0ebrGWm(m3+0DLNI;f(0T3$=gwsyvsRVkZ`qPMkd-&VL#0;t7S
zQ7TegRRFhqIj@i@tclKOiOg(|&hJRxu1Sg7bvj9YB&oFXbH%xxyW|^pcdV56epabV
zuD+gi;6kR#AU-&dr5^q2@KA2!RS67M;owm1>!YO8_pM%Cv?H$}GE$u?Jeii(k(Vdg
zUQ&KOr&Lz1+^?)S)K*`kRM)6m)dyP}n%j=GwYStAZ#{mx{l`-srwV_#Tzuljp3Ym+
z&IeWJ?zD~8boF#KcK_Phf9+`h-Lr<rT{ow7XHH!1>K^JjJF4jz)LqmK4(oOOh9Uix
zk)i8Xu4%758oo7kWBlQb`!DYI4@_Ow-@kfg?Dp8you7?=-JHBPX?!?6_2=D3Pk;a8
z(ZgqtpS^ne+w0e_-@JJP{(t|$0m4fQ2n5uC#pYb|O#t`|2+Enz%CTgQivF)bZu%u9
zgw$8&;GNjaPz$B<SQYK#WLrc=>KXBfkcGhEB2j=3ScH!Y(!%Y?tKPOqaOY&f`{jHl
z`lF_U%I-OgdU=BLyj)C}QZx56OGp)pC!XJZr*gAmHpFImGRCyl@8R_}Uf=i009KSx
z#a7Wr2!$qju-9tOe+D!%2M+rZnU#K|It-a3^_ZzIJnS)O1G4C%xum1{CJsD0B_e-B
zE8j~j@Mg#J82}XE_q{i0opwws*W=N|GifH(-p1IHd&lGuzLYpObCbOfeS;H;DUtac
zc&loPQ5b-^^1GR1q&A$Y0i_?d*p|pDMLAO3GC3}IVA+~KJ#oIwQB2R_jTf1dsgA;2
zPi|ef6yb`%MSmx^M!C=n07PTKR>hw(VPMHD>3gsbf@%RklCj?AA8Rg=6Q_zOgu)P&
zo=F|Xmv{QnwIQl>+xVGYofct+W>&)WcFw_B&!3F1zknnszoNApGi=C?^KOfYidha6
zVFD-V+aDZwB(sIbx>&P9NnLWfH^x!}vgtdbX9tsE1Y3J{DH<0BHz%r@=PM4e%3c}9
zY3nu492`$<_O~uVS+saS0>@T3C#j76o5^vXe7csd!w(l@VL0=4Bt|P5Vcx%-^fJQZ
zq}YmY>Ek`HGAzd*dU8C105f~%%B$1?7-4C;1r$>dp$<)G75^lZ?k$IGqI2J3lpu48
zrlEbMI6H(2wZ}v6gi3hA=yoF&IB`g`82Z{Um7!M2D4c-;+)~4^;CPRwBm|9Xur_f>
zajIq6HzOudO?Vi<Qw31VatBXK2A|4c7Ol*HH=8+{*j3A52yKg`$P&9wn;+KAm<Qlg
z{@B>~s3SC+&RyFr(l@S>AhP{?Abd-hCEOMT#xxKc2N}+h`G5@xP<gWP#=cc9ejDiz
z1U(iy3xsop8@D}8x5p#1k{`R2VMn$%ohXP01=e#r_2IGbZR2}ehN~RB*J{K7FePv_
zF}{`|U`njf9uYE>fN24c2JvWpb37qfYG{=m(?Be&kS;Xw+hCDtg9JULku(*Bz^ji|
z5Ll(q5p{S8(lt1D^3q%|tRO`pn_^`YFJ5Adg2<BE+EWtyQFTlyf<Q?$l@6C79C<ht
G2>BZ`)RT+=

diff --git a/images/animate_save.gif b/images/animate_save.gif
deleted file mode 100644
index 8b1081c5433266032db537c41285962a9226ebce..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1966
zcmZ?wbhEHbRA5kG_}<6B&c>>$p`oZ}rfy<mYGJ`38p&(nCS>Eypq|2DlE!3KsA1vA
z=jg}d8X@2pFBF<>>f&SU6J-&WZW~`_mQ&~AVCU`Y>mME-77^~9kQJAZm=+zHmYJDb
zR#sL~!R9xYGi)xK-%_s7WxU~=grc_brcakn+N+Z@N3*2guDsW}ZkAg53eD1uR%MF~
z>bD4`9aPFXq+5E-w)UiRd1pv%cW~Ww_lk8Ml^ZJS8yxFTlsDEHw=A`2*=aZVxKq<S
z-<CxI9ScLItxE5nUe(f4*)^?x!i2P*r72TaW=!8!IcrV*+zsi=ca|?)Tef1aTgwU0
z-b)_SPWVl_8a(q{+@i}#3y){6yk5HUYUS1wmD_K&c6PK+pFVlktgbnWn&xfjU%sMs
z>E0PDR!m;KY1Y=QOV_Mfwtn-9?K?KES+Qg1&i3U8T6SHXzUkzIJrCFJJG5Z$<<0vK
zbRNDo{mlLDLr><OdNS+M>lFu%FF$i{$I%meE?iiC_SuTdFV|lEvg^j%t+zgJfAHq$
zi4zCUT|RZ?_SIXrPu_ZR@Xm*8_n)19^77QnpEsU9zxC?NvsZ6ly#Mg(!}~X%zx?_8
z_y7O@45NVBA)xq^g^`h=k3k1yFepzjaQtJCHqE-SB5<)=uN1|MBOQXuZaor)cntJP
zIo?0lzU<u&%O$uh)no#h<J!p~`%9QjK+&m%Lr}@ZV}gQX6AP!%5s0pvi6R;T3+&5p
zOi?`S=FzL-HKpWb(-K#1QM0Hm78jR+y>pS1MdQMJ`@AD9FOyP^@kpEH-Pw_te6&kc
zJ8VtF$Aew7fL{LBy6|v|=Oh)kmJ2f)A2oK#`K3IWFj2XEvW#WXlM{-ET^JiWrA)b+
z9vx(6=arB!IM8sg&l2QApgWY^+IS?5QZoERS%GS@npijl<n3yHd=O0GoZ}1$O+jUs
z=5IW=Hmm!IfYbyvukf2|Rq)`+$;-<Fl-znCAr4f-#(QgX`dMM18bhaM4v<&5M72s*
zTv+7VBVn2ZvI}U=o12@{efd~`=Im+$s&Q&(lC>&%5wOUmTTBzE2549Hmp2C&r}y%J
z?0VG%3bJ-SS*s};7eVGg-0V{G<Hf~hc5fb_IflRfF`ej@HBUd*!?-!sdzy}ONyUc;
z2b<aXWULA}7BBY#DKh=V^rCOBeZre58;zGSE66rllrTI|3}97?=@3v{w4BR9f<wqO
zNrY{Jw0YjC9?8wF-eNLxRs|OhG%~mIikW2`IKb*JD#0NFOk64fiVpSXc36H<zaU`W
z{pZKU4H^LpbmgQNKV42cD<T9k=L_qFxz^>Uc5ps9eqO-7@y(o@6HfHc*O8I<X}CD8
z7px`=s7BU0?+8%MvHm{e#&a_^3Y<7EDI+B`)3Ev2DPgEN6J*VD4ovxZ_?R!db=|q1
zjRG&uNb-Z!d_LMWSrFu&7PfyAW*8;yX^`B^>eH?hWmU4GVKFm1H_N7i8wyLj`UOCW
zzOYV^FihH0aq-Ynj&>#S5&?w+iVW-w++sQc4hy;XK?Wt2JnfUvPuc=j!=@|VCBX4O
z(Se<TMS({^fiXc;PEkN3>&lC61;eBx9gNIuJQ4;`TV9-4wD_<9pM{af1c8NYQ)Lx}
zRI;vc@GejQMnmMLB_}7Th0luFS)}Sa%OrfkmxfEt?EGR7HJsc6s$Nqx0vEY-3c(o)
z4h;;DoB}o{fDxD&6+9Xk7@0r}pd^CP#Kge`v}=W+Lcb$OTm__{iGfi_1emBm44?>*
W(a1n?aX?J5!m<HY^k^tBSOWlELaHMG

diff --git a/images/base10.gif b/images/base10.gif
deleted file mode 100644
index d3069446b6d24a350879a3821746fc0d388af4f1..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 978
zcmcgr?`xA+6n>K2=H9lcT+0oMpcubsR|nagCg~Kp#h=Bf2x<e1_!`>bhlqn!sxy?A
zOcb<IM5qWQSP)V0N6?6NsF%8u`JrQZ!KpZvcI^<x^5%sN4WsPtP4{Jg!Oo|{<vHhh
z&N<KB(zQ7rI}k<~?@<qr$K&()2q6s(4Sv6$G1l1F*wob2($dn}+G@hoT6drn02r{2
zh8(k`36w;Z6C(~Tz!-}0tCpSSno7m4P_trIDA7_X--}0xry-~dN^Yc4+Z38GB~#?6
z7g0dCT~E+Nhgk^_&7|@^rHGmk1WXpGw9FVMLQ0kiC#$iF7Fsd&JVk0YsD@N1lg>35
z5FiXP*@@`Dzokfp;4Pt$gb~GY{An<oLkFO06x}8Wfs`1TO;qE(T5!eK>#SU9_xd$2
zU!GlKj|ttLq{J1H+I`g<OBV)<D{V_EX%MWR7mdzxX`s?hG;jxK1N?w25@G?|&4605
z9I+~bYlol;xHttYt3gJ<1V+l(Vk4lz+}AmR96;)2J+-0}u@sZinC@S{A}HK*NL;ER
z9)S>V@;H@4SANc$%T2lWa#2N}V?lHDc;TFN6|jCQ7UY6Sh7a?Y@B2Z`my3W3^jipW
zcwC~tfS-Y-z$`ptKs($W!I+mGKt{l+LTf06gYV$Hg<guD1a+#e6NpkWoc+=<;+l(H
z$7#+mDJLx8uJd|7;z0-o7S1vlHh6u!aN<AnlTJ7Tn0F7K4gE1z(k^v21fx{<Rw9II
zsqV=CF+Qqu)&Bz6J$TsSt%XbD-!v?0OFb!!es`+bBS*HL8=W}KY5UXr3uiXuC=mvq
z`eoD5!eC~#X?ngt-?nenMq^caa_HkjZ!&Gp%#M7=T6=X@c&40>?DXGw@#(mE;<Llc
ze^}Hv?mfBpMrrcmk<q>W{hvIZ{Cdyy^PA}<`F)=cUwe7`p!MycC(km;(|e|$ZQs^?
zYyZ%(%HO}9IZ{YHIQ;OpO&!C(|8#e-`F!xzu(NaG!rL$M_xrxu{o?|k7i<3kBL*D%

diff --git a/images/bookmark_folder.gif b/images/bookmark_folder.gif
deleted file mode 100644
index 28ffc21d1d71469dde82dd93f5af2bc5fd074f5b..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1302
zcmWmDjZ>0$0KoAdui?uIv+0`c!k5xCMQJNrFPf27*S7SW+cj@#iRMX~W(tj(5BP?X
zqNtf$mT!q%CRFAd3MmShf}x3GrXV0%=F6<@x-Rzgx$nR54LcDUxG&ZPxPX7bX9xrW
ziA17MNHpBey%;nK;~9vl?8Rb`SPTm5a|j#5!|gp~vjJsuD9R=*!S-OJ?T@K;LBHBj
zD!-^!IBwkF#1c7Y+;=&{B%IB2%@MdmcXr0*d)|1kt61orT<$|~@JX%kO{?<FtoHNs
zBSv2$GU`dp#skfopu>lQxPoK1JHzXGqJju#b8F+{DM?95R4O$knVOQ4k_zJq(qMw_
zjEoE#jYg+s(CKsrgK@1>^(RdE3W6XelgVPSvRP1ec6M$~c79$io6Rm@=M@wb6c!e8
z3fUZ(WTdEfw79sK3lns3yA-9RrMyxukH;%3D=RNAuc)ZFIcC09S#b*{>irAWr>=%6
zr>cjhYHDg~YisN3>gwz38`LlF-MiP=*vM~e;Pd%SO-+KPMu9-k+}zyK(jpWJMIzBd
zQOl!8j~>H@#@b*iUE74Qy{)yqy}biAI@Q_!q!Tt_>gs3{ceabg;_j}FZkT#P(%mKL
z7E61&r4uIU!n<B+kF4*hOeX7}u?+P0$p>U|xm+RdS11%prE*Xye>SKXdZrv69#*MT
zYSoZht$seD(mYpbG@4P($mpnMOfxb*rqR9_)sDZ=YR4zFFLV>*y2%Nh!8|poozhLd
z)J;rJP0mbD&AfU!J2S1HnKtNW3<krTVRp`-pM$Ne8|U;!qtRqCnN4$Mvw0r2vOd4M
zW|=oxES3d}dExEa;==svMa$CC(%YritIJF4Yb)>9*WSPT$7;0#JOK9KNaRnb<qTk@
z07)8np9$vE!1hECoDMh?uuQO8DG~{7fsyDRbJ-Q24seO}e<VXO2>?$3_VEai2mloT
z3IK#Ch9^3P2=JEx?z}JOB3O<C)<hs70&*r;Nd#6ZkWhg!5p=`??~5S#GGGuvOeVmm
z0bG(Zhlp7|53F%O61)5J0iRz0xK99q+jd-d_bF?5vv;(@zebun4E!0m05=eZ-{D8L
ztknL^am-<FF(+vQpdur;<ml;|(3>9D-su`%JNW|dUj&@hhZJ(@p3LdaD<zz8(hs!I
z)DE2U1E}}v@Yt0-rf|~1oM^fi9*6fm^WkIFiWHsoeUg+DP!%~)9#ZuQC#-bK(e6rk
z^YO!%Pi?pO#c<aiN92!s9=?o;;-7YwJBsO9tN67{8};@}`zbnJpc}K_=S<*rBtN&v
zEz)*d@aAI?4JXUqrelK<eHaI?K*oR4kP-pbuE$6xzu9yrh_#qrq{s@%b7<d3oBak`
z%=0{#uY}wiH{LdZijOBCu6{q;QIpwrK|iNN?M;3qL#U~@9>y8J%L*($*O%<%d+>Db
z)(c`$^qyA2`BMR6Bk7q>#BnZ!bzezPprUCd1JDzDM|)zcZ0)2^eZGdh#gXYB;=Cyu
z%Grt_*K=)K6%ls3A3SCjcvVxGHXaV~F@5OpV0}Nd<Es*!7yo|Rpx2?_&z*4RY;(&;
z6Tw}w(+0n|EaYzll8r~nw`n-n>q3&;9-sqcfE4ACh4|SLzQYv~mXddog_B2-sZnG0
zh_fVpn0MIF5wZ)_JR5wMxs#lQ^ykMGd|e}?voPGy!I7H>hJugcZeJb5*Z2j-EeeLw
HXmH?v_?L%V

diff --git a/images/cdatdemo.gif b/images/cdatdemo.gif
deleted file mode 100644
index f8ca3e6bda59a9fdc35d1a5557c8787dc9a0cce6..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 413
zcmZ?wbhEHb6k!lySSrV``tTK<$hJ9KPQUs4uW!X6!=x!o_g-vUw7+Wh9{<WE=^ZP#
zpS?3>?Xm6Kw`=JcJ$v@7cjhv+;D*niKc7B*+CFzyQs;)M)(J69EAu97I(p|>&E!?_
z83md_RSXOa|NsBredaEZq$yDR$->CMAjO~qv=Qhd5e5dfT?hIDI(Blf@GvnsALN;T
z=*Y(t7CQQ!EFl?l)c9w3H3jW(5NBuGx=Tj<UPJm52L%JQ-t0$z`)u?jIGXt7<oJap
zY8V;)Mf>_%S|@c2);UK;icD<fVU&>(l<}|07n(C~{>q{ya@~vzS1t$;T(@H5{AvCS
z>o#ncVT+b$+q+%xz(FUjBZ7V>&YhQ*<&&4adc{dro=>*WS=veF<_$41*+tjyIo%cz
dxN`N<4S#8AJ~r9US3b*rlevHXzas;KH2@Ik=3M{)

diff --git a/images/cdatnews b/images/cdatnews
deleted file mode 100755
index 277b2d9ae..000000000
--- a/images/cdatnews
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-/usr/bin/env P4PORT=stargate.llnl.gov:1666 P4USER=p4review p4 changes -l | more
diff --git a/images/cos.gif b/images/cos.gif
deleted file mode 100644
index 32fde08a86176a89114e17bb003fe09c5e3dd7db..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 964
zcmb`G?`vCS6vw~0xy^ml$R6=2OHs%jDs8TA*~*fxQ?@69b`yWhwQLc?xTU-8;yOE$
z+723G>|_HOj<GRBf^@JJ2kYpI`(hF0nQUYc`XYhI)T&gBog4PvymFZnA8+wr@cZ&`
z&f$DNd{5u*m$&r{>d<iyYbu~1<ux$^WjOI-Iv+l!a*G$UAvao<{SZUAmkITz%fX`{
z<2aQKHNE-}#29i}n43UHK+>>>^;#$BPH<pOQ&$VnRB#Uecgxu3fJqRMoY&&@0_#u-
z=c&dir4)z3z{!VNvA3`qR3+g;scVQAz*G=!YXh}H4^<iG<V*c?w`Wx{DTugrS#3Gc
zB)C9TO~jCAPziW4fq4GEQKUkCrHpxh(nOqs5s)bxW{psaAV&-5_LnWypnp`axiL#J
zDhT;$*?!ik1tjHyR|Achv&uA?)NaKKEv9OzfvVCI17jd$;YZt4u>)WL@dzg7%lW#Y
zc5u%y8bAp!0B(cUT!%`@g=npBMDLfFX7DPSF-QhMu;CE63$n%rMFo98(A)MmFkOaf
zc=+rVno(5DtC1Z}JCnrgoAJX1Bp~p!VxQbzQv-B&6-unEGs7dhaH!gJV^T69h^=gH
z!jX^yqOu%_^g8VU4H5tP*462#qcK^rl}HWk6g~y`3s?fe-r=4J+gGZ=pqSbN(*czT
zip0S;{O$y#1(Cs@lenEhI1N6Iq6Jr%(h}lWC6|6+X4lbiFB2E1-u=KF0o4&4_8|?7
z0v4d}%XO+|6x7Sp(8l5sxCbHA5K#<FqM3#qg&0L6jnW{DBj8!&qA({xGa!er7o&c!
zUrczM{ue#Bwq@-XKrX>Yp4_!C^Y!tzwysoV;llNkkN9|T*O?19J_>~oELPt9rTlTn
z&gsitdj|_=pW3+ng>SlNZ=UFCyYXtIdu8!Vcr0)<qThb~Gvl>?QoUz?tDbmbY}3{+
z?Ah|%y7zzB`p}j;wb`U~;H8J#SN2?b`}jN0eEQz#w^xFv^N&8*`}-?)cjwB}htJ>r
s{!0I^cdezz=a#;jxPQL>R{c)@Oy;r9eYsDz?7w+>L*Kc@%{rd{7hz2}V*mgE

diff --git a/images/cosh.gif b/images/cosh.gif
deleted file mode 100644
index 977887a1174029e8e7dbaef1c09c2fb0929fca2e..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 978
zcmbtT{cBrg6n=8=yWUp}`VQV|WI{6#+GV;>*QJ@k^^ITqU^w={40_dDy0pDEt);DP
z$3neEVQ%Q4j4IQvbh6b6MFQTf6esWeAa28t-B9QcqI6?JsHBMqE@<KPM*jrQkLR4{
zJm+~hXZy});;rprgmDM8aNsC)S$09|Og>?cHH;FG7Up2694)B*kR7-)5ZY=Ng8iUF
z*gY7k`1uaVSCJi@wz|`76Qf<!wewzQnl&=fMrY7nayA*HUx+?^@+RUs_&QLLI$*?B
z9IgnJ!X#%kv;rKUZyDxRrxYZL3aKp^h&kZIH8P$PXM_tuv*GS?Ye{h>sa(vE>Iw`6
zA#t4~w_uEeP>56#u`wU#^S}&2E8tUIw*cl707p_OD;8s%2Pv(t;yHs7KmfBdd&{L4
zl~FGR{aL@f5v4jP0eRhZw|E|Bj0%x5&c~b*C(1mqsI^wXjr^p$amA}sOh{o7Z2MOR
zl1qiLQiLesZ-&o+CZNtIaS8E&s=E~RN}MSWQ~(9|P3x-=QBV#_F}Z~8D+nr3zpzS>
ztL-;bbt_R%aVDv7tQ(1KjrF{a`U($K5s!k`kiSgfXbyxx2<)6%)pEIMLNp=zUA^K#
zF-U)ohRVB81diL1@Z$B1Q$ev|3N;#Xe^t*#VQ{}n6CUar0=_$c0Y3o^z%2ykfrxJ+
z@SmYUBp=$NmxrO!kOTO01iucT!G;)unMOIRR9ec>AF}q}*5JKt2-H4%r0)%@3oMLa
zX8<;k0n$MDV78H1Q853uIvz25Q9cA^L;BK2P_dyhkQv;vG2f0zFT^0SQCK5j1E4)c
zGphUjav1+39<E_)zW_Ri0DW*re)`Kd*Q{wWujJ2see~{tnA|ab=9}Z8hL^s(viC;e
z!~<=)*``OGtK;{yePA9d-u&vMW?kg@wKKcl3G9npI(4#R&-=}<pWw^QO;5d-7%waw
z{^MM6qU~6*WjX)x_ots7-<3Ijp=ba3$LDrU*c06gADLGlOOFn}^8Ce~m#@8+i1jUg
z-d--;zH9Ecr$0RS#d7Zpn?G4P_xA7iU0Bx=DSmsnYaib*zy9{Mwargnnmx6-^T!EY
GL*gHGr!js2

diff --git a/images/cycle.gif b/images/cycle.gif
deleted file mode 100644
index d472dcd90c27f838885b355f0dbcb80b5f5cf5b1..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1119
zcmZ?wbh9u|lwy!(_|5<V%6`f!K;*BY8lb8fsHqvQsqLq&<F2g}q@xq7tLv+y8>FkB
zs&ANLY?NtamTYEKW^GesZPQ?5mt$w&Xzy6%;8f=9+~Vxq>*CVl>RROP+VASt>fzSq
z=HBe#-s0)r;o&~T!=ur|qs_~s!_%|f%d6kZr^eT3x{u%70RO4}fw_T!wSj>Pf<hWX
zLuZGE&JT}lii(~c8`B&cyDlz%MM7#_TI!ax^wpW^YqB!7W@T>4%37V1y*WE)drtnm
z{QTX81=|XXb`%!vEH2(zT(Y;cY+rfBj_QiN6_tA`EB94Z?yssjP*;1fu6|cj{lSL%
zLrsmV8XFHaHEn5W-q+lGq`75(OUsd#mZPn$`&(NNwYMJXY(3W6daS)|e|y`(_O>JK
z?T0$rk9M{n?d~|x(Q&Y|<5)+>@s7@Yon1%zx({}DAMWlx(c5#lr{{QI|LXq!ll>Dm
zPMC0N;)GL^CY+iwY4_wwrzTIHH+jmLDN|QXn|f^O)Du&uo}N1O%*<(LW=vl(ede**
zv)0a<bAJB3o%7~zSTO(Kg83H~EjYhq!Nr9Ok1SkxcK+fWix(eXvgFK?C6|^hy}Wek
zmF3GeEL*m5+46PEm#<&8eEss}CswRDy<)}1<ts0(Sb2TzsvWD=URb;C)Vg(7Hmtk8
zZr$~b>(8#=aAU)Uo0~S@*t+HF)~$QCZa=hr$Nn9=F7DcWaQB`|d-h(~d*H$TgAWcI
zzH<2RgJVY?9zJ&B*s(LmPCPz!_WIfL*DsuZe&O1!Yd3G-ymkB5t1qwq|Njq8J42q(
z0r@}*2pKs3Gca<<cx+hU*t}OrE9S(8hfWN9%3g0+3m3Vzit0z5xv~1xETi}>F)J56
zX<yXK{qw`B&{r2^bX;<J1WYC_o*mIK?@NcG(qVS4hAcf3VJ=U0b}k-~hz5rKfYt=L
zmlX$+CN1%rrg64vR=}ABGQ4>iM{a1JRQCvYQIRRUoTHt)O6Zo?=B9wdT_HYCH#V`I
zogl2irP{LfMd&n<B8jvuk2$VL%FD$Sc&*Tytm6?|uwufef`tnm<{F%Em>MueExhy4
zp3F^$Sp_z6OXe<c)K*+(z4({T&&{dFIJkwPRGz#Dn7%G{*Eg3(LQMx&o)gU9n&diH
zR5~cdgu{dN?m~4%pE*7*lEv)P=L<3KI9yiId3>UR+oCHsmmG8X^^RBiYJ*{-6DzMu
pgMcH$D_-?Ly#j{`2by?{3Jz%f=zS_sD5Mp(&_yI}Mt}o@H2|mC%A^1Q

diff --git a/images/devel_20.gif b/images/devel_20.gif
deleted file mode 100644
index 2e1e1aa9b92fe0b6d21197ba35df8072300caf75..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 825
zcmZ?wbhEHb6k!ly_!h{($jHbcDkj9sA|=8mE-o%FuOO-@!=s`qrm8BhZy;i3rYX&C
zrNOPPu3>IrX>aeO9vNlg<7*xfsg;&)l96FvT;i<5<7&w7X(AM4BOK{05frGO;;)q(
zY*-p)mYHZ#l%i3UZ&g=h=<eYa;O`L}9uXg(;E|pdmXi}%RG5|OnwgzlUS1L2(HTCu
zH)+b0n#q$BSFg_8vZZ+Mp8A6a8`HEqb4|O7EGL%O&Z==-+~R$3N&e9lRUI9j)2C1G
z-?3xjqD9M>FW<RyXXoitD}d<wjcwPjA79;kdSm;=O`Vr^Og?#F%B3Tdj~zdL`}XZ4
zPoKVh`|kh${|ti$DE?$&WMl|r&;hv!6ekSq{~EBmE)WcCbsQaa_?Q^L95CtRsN>|Q
z%FD;W>0s*^7zh$`w6oRLQDtFZ)YRu<&;g4%IR^OZD6=v!YS@b~G6<?F1J&6%1=wlH
zGBGgf>oPKk05OA~ubr)3gN~1)6j0DvM94*iiGe{!M_E-x!3QX)>1t$bBFYYQp0ATO
zA1||_rH#3~o&XmYP=t@c&PiLDfq_NF!^1+F4dewuUj}|{ZC(amWo>ycPjhhw79Aa@
zKn5lj76t|m16OrfPY*M3CZPME-dA@s7G{#2-r!|p#=~R_6!r7-^%Ze56lG*!lJ&Gv
z01Em6o$agXZzK$I1e2VnwJ8sSEl^C`%?RW!CKX<wpp_Ie15iv=SAYYkN+-~fm&w#j
z4Cs@<KxIZC1qzZtM;2}gZXi1_(1{gDY5Rgg)RBRSp#dxy=<DR`;s+7~h7(Z4v5~<V
E0L2Qb6#xJL

diff --git a/images/devel_menu.gif b/images/devel_menu.gif
deleted file mode 100644
index c6448aa2569cee7d65e5dd16646eca64f6948cc6..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1493
zcmc(ci#O8?0Dynnm|;)0ylvKo&_yPjXXde~bz^28g$Q{rlV@}jVN<L<LQ>=rmh`xY
zN@qhJIfRIt)afF6sGOT~N?Eu2SKRL}_|Eq+{CugjC>szK7zBWyKY#Z0^fWg&XJusv
z28UQ!SUOQ@+sI_Rt(~~OULX)wR#qk?CF$wufj}S_Zd+&?KUyZk?%ut&_Hl4%h{4>8
z#}l~8$)fIVN_;$pwLdY|CneBc2cZ@$5YErfPYp;<9CHr!(ie_Cb_xk?8GY=3fSDF!
z*)ltq(9v<c_rWQp(&>0QoJ2OnLaXyUAGMq)O`|15QG=yr7NKE_i;Hr(d}wGW=781c
z?Gkai8#}-jXJ$;J(Sm}4($dmAJm^#1(#bnQAA1x40C`d=^Jttf#nHsXq`bVmYj)Nu
z?x?e+cGUIj1Ix=9VY`)$$4<vtH=Nn$Wdq+~q~S-=`BmV5#O9YY@ZSqSLzNE{03a}=
zNKGB80n^k17m6iP*@cSAs*BZ^YA#=?t-D&^&{z%D)`COS5J(gnql1Jrb#~qD?zwmW
zL2r>R7J}Q>uBUH+LK>>!ddHtkOin$$uMN`GHZnG8$C!>7yh2VdEx%b=?ZRVQv>^mD
z;=*<V64~Iz+s*Gkww54$uqrADd<6!xWVeJU>x&BclEk@N0bc%PMh%IF+Ni_O{IeK)
z-aRFM3Lzo7<-mO;I>XGJIlW(IMRWiP)#`?=gMHx!4)M(M8x#2?BR6w%^~3!`EqKSg
zHmvuV)~C$<G{2^F$VgdRt-ScTv*riFof=8wKupCO_aqhE$~8HEI|FIrlI@z;a=L&-
zG*05Ad>`2n21JiW%%pcOM*9noVR$vdG;$wr5)`GV%P$1u^oL<Y(jZy`0YIy~W8{oR
z_dLRS$Ra~z>H(1WR&$Fyz)1p7U8}N0bGJQABTF~BMNPMIIUi1w{GMIA+hl#};Z&Ch
zFFpyIUF&E~KKHkH;F`&O=fIUeOP)r&+{*mu8J>H-qwL@WVw0OPbSQ2qePWL_(%4^3
z%Quzajby+lK|5t6Pc<I#FF-#B_+b~pb|fgVIz1`sGpQadN&vtA<f<$fucQta_hyg<
z2x3I=6#-zQenb$tK@b#;Y1ZQbSCz{ApwGc?`QpQ@l=wxi$B7~tK{PV^y2R8h03!<(
zYP`r+V>>7!kos58Xzfc^c%to$mEIa!A<_LPCQOsd6~-<vXVH1tGg(%N`iE=Xzg&|g
zo%Orm&9;WNY_Gs}c3Fmr&7j62{Ke+wW)>!a%VzH=l_A1g9Y?r=YH|>_D}WP6!?$^P
zLibXX^j$nHP`~DJtIwV!hYjjvP@eNMvvQ`&$-GH?OR$Ql_?;uUo<(;SH!dSe6Fad{
zRzi;H;MWCI@r~&vesQ!luCs`nUYH!=+b2IVSI}vM2^}{?sH3UkJ{_dnv{IM82cxcw
zoEGAn-To+Q3uLG=tBlxmcZ(M&B1W(7+-jl~YzT$gWfT&?H>#@;@_+Q7oHp%TdK@Kx
z9}r#0-&`3hOsyHnE=dVmA2qACSx+U#zO$vkEyZ*%=p}!JlFl?M&_-WYf15@56z`?8
zLCmzr=xb`Oos*11Gjsf2)7m+Aa=hbDNeen>T6rb$;i$cltF_N_d}AFXxzZa~Ia>=0
zU4rnFHx@$uUw_T$eNWb~?%Dcn4GuD$NL561!0a-x43=0#iX7P#Gj07SxYWpY8Jp$Q
z&YSEi>>kI-?bdlNOxs6|xeR$j1p6H;WhwFl_VQBgr@qCP3F~(qUb(y{n%(?H!MQ5N
zyxg&1!X5~R<N3&IzWJtV^C9{lG~^EiF(2~k=$LBj>&g@1eyq%{jif!NP5Q!6fDJ$A
R!&r4-y<r9Pr7aq8`4?okzeNB5

diff --git a/images/divide.gif b/images/divide.gif
deleted file mode 100644
index 8e540dc04c054fe9128d8950dbe316b3d58a364a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 987
zcmb_b?`vCS6n<{*o7}fCotN?!Gk>ht4_mu#dgruB#j?GDxOEVz!q5moZfnzWw^r;A
zn&CpZ%?5=r1qVYXrEAB`rihj;Qw!p|=_nJ1<hs>`$;JvDevtg|i-v9Rdb|A#o==Cv
z^PKaX=WurK+0_~CHDKUhtc9W|s;Uw~{C<BR5TKMcG&BT*!B8mF*x1<A)D#Yf9XL`-
zI*zWfaLrtj{SY>?6U4Hi_k*TUc>b(A*r(eXAE5bGr@XVQL2@qC(vRru1JgihGOk4`
z4wHlmo+1Z)hyV<rhqc7ZPB}y*<-&JeL&O0ipuC`Q*NN(caZZPfWHnlrObRM)q)5F2
zO@a#)Nvs2t41|IwV~9*Xq9PTBZwU)YSP|&V8;*mhS+t@<h9K%1-f}<)ILFN1*Q=dr
z$#g;Ktt{VCNe86P1udRl?Wim=*`Cyhrzo}etS{QPaI~<c;&Q<)f*p_4dk)5mOOxex
zB7yTj8xR0^fe;tay$q-qil$p;x~vgY0S_mDXY1f5D8oz<yDJDuP_O!cAP0~&T7SJz
zGhIPBw^lMcTAP`78jiU-I!!Qkdlu6<?3sPw%jNEPxARa1wdO+V5FXcu+J}(df(4p!
zqs=;_oMQ7V9(I4CuI5Z=3>g>AIuvi*J>U<Z1z3f$2(-iV3)aI0LWr>*wK@Trh8V>^
z8T>hl78`sVi8QK)lxZOje37;9TVs#w<DmBGgYQjQgU}6x1{B!95Re9pH?yt8GNHc@
z`tEayVO0AeZ3q)2gPILF1TlnF8;iYY9|j*o)`XRTJ_@@3R>D+0yqNI({vTuOTCcTV
z0KEu>KHa@Acj=J-nYPrGh54VSA5-{D_s8>>kAz#^xq9V|m8Fl?@63JGHoAKG*rtyB
zQR+;+I<?hYcepgCBtG3R($r<lIft-u|IMrGuPKgyL!+_n$k=@GllaETxN&~v+NoU)
z;o;KteXo7}ltN0u<(1`giC@mWbbq<G^yT&o9}MH~{I4foJTbf6z3oiguLdi3pL`(}
x&u{my3;Z>5vG=p_o=x9=|JygpPse)(e*fX^^x$_VW7GMrdn5LR&s85@`48`!9;^TW

diff --git a/images/edit.gif b/images/edit.gif
deleted file mode 100644
index 1e6858c6610207f5a52f97ea5d09d394f6de3d26..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1500
zcmWlY2~$&N7)C?5#K1(0Vi6TBaadd^f>2<DD#%hnij*p%r9f~gIu>olscUHnSW*-P
zm5K{^fv6E<i~EQymn1B*rXj|Q1q_=IBmxN`WchfD&zyPY{DJdLM7X~n%ZcJd(NjK+
zO2sTKt!Oq3n>iLR7A;!5Y?<RKS7*Pk)`bN5golSlQ?cmisMzhn@jJqH?+j02A3N|R
zcF>z~$d{3}0XyP}9rd(gZ@}1ThuQ37$JvSL?8E7&&iu(ceyNmyB{8xfDWcdO!>?uF
z*SYx3v-r&%yyy?SC>t+6Uz!wHTFSqB>r8pMP{`*AZ=V<5y(kpq3GdBuRd`<@6kdPC
z-c?yy_40dK%>t|@4y&oDs@*{&p5_zJ3yI2N;w4U0l@L{Th-v{*bB7>mM6sAC4iky0
z#J{t|Vv$55YTR$%$lcqV!f4q|YvK5|aQxb{L)&?&9rf3xIfrC2d1vSA*B8<h3b{f-
zlC5QA$KRyv2`PU{%Ab*nYVu7dN!F3%+qVa>w_WxB?Wc9i@At~?^~&${DjxKb|GewG
z^G<$8*-@aBmG$=z4y4fr@btk_&d|`%@bK`+r>WHaRqFQ=^>Dp<q(QA}P>)L0>hAGl
zwDFU)$%{dnQ<x^5p()RrYATp6-ls2))0ZUZpQh=><uk{v3}ZEhj}3<LCd2qEgSNw<
ze`A=@8Vtk6lk<$+#U_57iJxc^#F+$%rU$7enb0iBHMib2OCOlKtIfUjW@Wuu)rM5v
zNZpM-)}V<xG&zVguaIU8X&aEX9Zk(4MY?vRQy@Kw3`5BH4w-t9sUMj~5E?*eboSA{
z*{(`xy#^!!y|tjMg;5eF>R@t?MwsjajTkhIplJeaJ!l(1D}|{Rm}-Nm4w#aHt_gH)
zplb(R2k8C<oeXrHFfD~?1x&wzX%h5O(92<_6K1+#rUwkK!O#On1sLaE3C2D!DZoU6
zsTWLhJ^-dcFb#ug1k6e>4}keSnCDyts0WY|&@iAm9|iOQkRH$sAOp;f!t4itA%HOe
zH2_d*_qu;7;8P0}`YgqQ?zpuAugkPTYZ96&SRpjVc?YItK9iE|up->qyD6_EGTOnt
zs_@}H4r8_V39~~i^VOvVmSYvMS?xIt?k1bkk@5ZEbX$9ym`nR3ik1hY?Grq?9K|Vi
z$(Uu^?cl8N-}5mwxU$SDyJ%yvF@f$A@0;4w?r8n`Ugw%^u?<eD@{EiNjw#EjKkv$o
zf0%mzM4rvg)jL;h`wLGvSkgzybaYOl&VL@A^x}|g&!mG@`DK=bR~GLj<z&$FWBd+#
z`mOH?NY^yi^)qd*xQVZNvbMBZhGbR>PNxZe{noMi6fzE#g$2mf;zEzf3z@+o^QiU(
zUOzmW|6{=5Q*8DzIulXp;`sY)@<;{F<eZk}?GDI}?s?F5E6}Z3a?20jd?1;_384fX
z?eOX5i=*ASUDI7md~)K}((Qp^_-0#jAUELA;rGd_Wa<OfpQ*m%1&2Eab2`@btA1LU
z;vY4G(N>-xl5Gtn8?4jtBDVo6H#x!0UC*L$T{GOd@viUl=}NoBc@tFIgcvWywzbMj
zEv}U@j|O=zn|`UIdrT4aD=*yavx``ph4a>n=mY1j7Wzl@VXFzH*#4w{N{;W5Dv2wz
zTB2(4$cYh!7B1mCh1e!3>*#C6f!e5L`D^&18*EJ}oyl{LWLddLCNh`&<{#Va-R_gw
z<|QuNa+_MhUKQEoxYLuBV!hthg7(El0d;|An}asj>re$VZ}Saf|GB$aO<rMkQLQIL
zj1XR5qw_P4w$1;;*kjtVmPEeDBRhTLJo|_&TPn?!n6O-O!iSsV-9tQ%UY=oN6GX8{
zF7dGU%}UO|tffw&OHKv#M{Z~gEDK-SvS%?dZy{Fk@ukDoiaaW7V&N<I$-FY!3tyuh
vg}RVoOQCu09ppF*W5l#(7Yo~9i_fo45nm&byt8)8<pRywFt?tVAWFdhYN1;V

diff --git a/images/edit_20.gif b/images/edit_20.gif
deleted file mode 100644
index f853cc7a04bde3cebb64ab02224f8688d7447c88..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1258
zcmZ?wbhEHb6k!ly_<oszfq{`nR6>e@L0nv1OiEf(PF9JR-;|d@RaI3(O~cI0%*M>h
z*22`%!XlWD+gC>_ScSQmfuT;8!Nb)m*TE*&*QCx*&&9>r&(|X~$SWqoKPV_HCOSGa
zH6tc5JvGudB{4cPEvBj@Iw>hJH8VRaEj24EIWsdezqqg@uduivt+2SL$B<*X9pgj`
zktvQ0^IX{GxpMRdXg3EMPYTkV5Ma2-jcu_z$3hRzEiMcTeKi(FYHv)nYAlQIElTOB
zOPx_yv^m>-YmUdJ`qVSYe76%AZWPI%t}(uyr+m9o^J<;lqcp}RISdbTIbP?ozARvW
zR4(1unAO@`G_g5rYJb&+mgqIz#S3QC{cq#=GR5xKWb=c~As70JuP!OSzq<7Q+~EHU
zeSR$p`Mo6S_sZC(8>;`TO8CFGuB)fFZ{pOxsdFYz=$JHh($pC<=g*%%ZQkMqb30ee
zZ(T5N+LpQVS1p^pe$~`Xt6O$2>fg1oclXw5Yga8=xoYjY4V$;`Sh-{C>fO7SY}&YS
z`;OhacJ0`E@X)n+jRzNZU0&68XLI-SCCv|4w7pv1{$OMC>#bd{cl5p7KJneYX&+a$
z|J~93egFK2dsZDjxZ>!cZ3hnQzjbcatpoe7URZJK%BriEHr%<g_0H8DcW&(YdwS0M
zv+Ez;-1_YP&Yy=C|39|!=cP5@uWtEyZR?9iyAB>YeBkJbqo>cFJ$~Tkxyv`N9lmk>
z+O6AnZr*?T{q~_Jcb+|ac;ey1`|qCK{r>vEmv^WBJwEsI?a_ZP&i;FO_5a(u-#?!F
z`}y>jk2ijOx%B(noxi^>|NnpE*|VolpS^tg;`NK?FJHZW|Ng_rPwyZ6ef#k5*QdWf
zzWMj}(Z7FhzJ2@p_1lmC|G)kH`xlt_1{^5<WC6QD2SkGM1OvxE2EjQJhDk>{1eM)-
zfQ;5gW&suxhl7bnwq#yj<~!Rg_ZE=xWP`#d)=s|2%Wgi+V!2XVGA}N3on5xfMb0ex
zfJNcKtRq`8fQqY4+r74!rgt55I{J)bnpmcrPq$fbgp9=kztD}sZ-Nyan}VJsJWM-c
z5?*}ojL@^59_832$5ITRs5!E5U9<>TblL0H=JYtRw_2fUK9VY;W)=~qxlZd6F3s=_
zK9VtgrD5VJ70slB&Py{d8`!)S^_jKMZMRo1XRsRQWL5qUpi|#E&5yr)!sJ$J7S9Yr
z&lwh#8`GnJj`wqjzTA3IFF5&7kLRHyo;>nueL%&%hfNf$&zwlnR8I2|Qu9l4nOUOl
zvN03r!_}KOrmO2FPckSJQs$CtnAwub)tDpIndVer7|q7%C9H0^=)uDj?db+ST0Ijd
zv~)<W^XQzSvr+J<|MIwfH9xDu+?hKaH<oo)9cg9n)2y@j%k=EbO-(^%Cf<k#$yXE=
zP19-~Fu$6%K5^EPj2q0kQVo2JDHk>*rArx_?w`{t#OQP$6!d}`7ZO6(g5qHD)aPDo
Z4ky8pF{{ZQ9xGdJ`9Py*87m8eH2^1+K@tD}

diff --git a/images/edit_menu.gif b/images/edit_menu.gif
deleted file mode 100644
index 913c12821be01a5cfea7f314f577bda7fd3165aa..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1781
zcmd6mi9g$i0>FP*+o)7aJ$`j$ab%M^#;Yl69$kc5wHjsJ&?l~rH?2^gG<1d_jtI%f
ztSXW;LxNTiQtDM)q3APR)Df*j6Gzn1gDOeh-hc7F|HAkC4LFDP@WeXmm;g%vpw(&-
z2t;CHVoVGcmzHXC<mi`2fKWR<a{%!4@VJqNi;Rd+tJUo_wPaVwSgfJ8CUow)Ei2gm
zl~lRC{r>&-hhlPRQbK$)k6l^yn}~Q>%Q~-0K0x&_nR)tbc23HxtFA1|pjFZo%9WXy
z3Q=GG=<QS50?4mEI+V)0N~MCypf}X!%Vq@2@~)P=h;DY--nMjO{J}F`L64~K=5<^<
z3ZOIYO+TMv))Xma!|IREG-|n`C3F4Xbj|Yk?qt&*H|kx5kF>4C{xkbD07&-r9_;IQ
zz^j~@>ep(wHJY`Tjp-^uv1UcGBWd5~MSo;pP7Zd%V8TNU4aUX2f&N~*uLsrd6$^Z#
zN=VtA63h{hbJ0N71?NcYwcg&xJRVOh7B4O?UI-4{*?PLtNqzMrAWzT<b~GOx9R~m)
zEG&#dp`cJ-3k8kuwx%omfcq_N5)Pi>4=GKJOG=7eURp^_P4)Ejw6e0Q%1UK@=b-J$
z+T(mr$w}%LHFWVy6B8mei%->~cR&7ex?4b>{gbz<+K5m5*WT8e;KxAqYnfVDBF9_E
zk|9!wa7WcEnP^B$Nl+}SWRG~VhJu~PwX5|cNtO>AeXzTmBb|PvemmOQm=}5F;uIri
zXRdFz@7@-VG(kz@$9u5?^{K?9kMAZkZbo!<3C+yRyxqODL);|}u{^|;c<EI9cfp-P
z?z{CTTJ5xCvL&CG3IIA`@jpQS29}06&@fI-C27Bu7b;&qjKfB7dCjKg7AIg<>xy}q
z>?N}*D-09VDeT_Z+;Vq!Z{_oAYis{%?Ee<Wzx)CJdjWuG0G&^cxiFa`;wOhek8F5O
z?g79i=x!jNJy-&sN|Fxq?}^LcUl&`RYvqg->46QABQ!s~Lr3ALa+e&@NVP}Ws&u5S
z{$dRhc{$oQM>N4Qb&eipV)aOdu&kh(AOTO(=v#V!AFx3kFfv7#+fzjY`X<O0Uestu
z^ISEO#JeJ5=a-nd#0*#Y5z$9146i@3IxlR&HX(7@!>rgtWiX#CK;I^A_=t@^`Lf5@
z!?vaWz|1WDXs8hy;`|*Cf-d541q4$U!i_oZTQw(iDA0i?j19l=!fUsqaAdH!*U#af
z*A+of#1$~^4|&`-$DT)a-P+Yf#}9ldyrKCvQZIXa-Cg{TI~&6Vw!2hSdS@;8^F#I1
zIC=%aXJBkq6_SEGXr9=>pTwK@m|gm5O}~?xd%~|U6nuMbVBsgwP&s6g;xa+}c`Er>
zUbf|cH7Q(h4m5%hT5=8%ltrun*h+I%0PGgZ1YJV-(D4HFKKy*B^s5d&I@7n6N=k9;
zf>>K5qy9u^nzXXSxR6!RP+7#Pk%AJnz8zbBg3hG_CWS7C(x?1g<tHpBE|IM8mJr~U
zY=%y%a4BA4G7R3-ktoFbcmxm|i5y}83)IC-(q|`!D7P=5kbs-6i#-rZuP2qoz)l||
zV4GysWW{UK6;?urmqb?sT@wX-de&DauEuF}0^zyjh4~uUF7i?>_kx{m2F95XQF~9`
z5qEoNzQ~?&2D8k}d&M&U0}L}&H7BEI!)0w)x6C=({W?`>&=+p55bOMfAn~UKmaYBP
zxo}#H|9hiM78suOB!=ONiCE8{G51D4Hu;SdP*K!X7?8QuT+bnJ!hg6bk{Q*ni#-tR
zVH;w!DA1PTO>49+N2LxL($1gN&o%Khr-B;I47!4g5jqWaBMV8O`&&~*=Go&lwl|D1
zAGks<-Ja-){@D(0ok3_%Oeq{6jIpV_k~;|hHJYYxerKPr|9SjP`L;3R_M51dSi9LF
zibgKu&y%sx6<gRDrSPiY7BmSDl5*>>f%&$ph!=~lPw*C}6221D;g|SBoPdK@9cABU
zI%NbFWg8bCXOqtcA?xn8>f5C-`}B@FbN-%v(rdEj46YZaa%FeEwnAr&;P$R<o;JAl
z=y%2U6_z*TmDQ0Oq7uLT&C|y__m`!__QMY8?eZr!4|<JD9G%7{^_>LMZ;CAa)4$N}
z=g;SU<J`5@X8?sZOc+>tZ~C9i(t*+^HnJfDK`wdJ?2}L;VxZU8m5hVNwcF)@W6>9B
XEHb?*4;<ZGc;FV;XIyLt2hRK#VYbk8

diff --git a/images/editdelete.gif b/images/editdelete.gif
deleted file mode 100644
index f0a0da26f4b6ba50f29f1b628e2eab8c135296c0..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1313
zcmXxjZ7|yh90%|p(TKKgDd|y1JxnR}c$+k<?L>x1PY+Y=UbL%RS!q}8G?YG_heVsX
z?sTQg+A_vUQBRc>ZG|9`NHv1AOjA^mNF#!XKmJ*~*!Mm!zW00m`%-BUq=R%9aDi3W
z3I>D0VzE|MR@T<mcs$<L*4EC>&cVUK$;rvp)z#hI-P6<az<~pPetrP~0fB*mK|w)8
zB9TNQ1qTO*goIEil(4X{@bGXdl}e-0qN1Xrqod>E;?ggi&owt^v)Ki#oQK7QEiEmr
z4b|=3T7f{&SDxEfnJ;|wi|FxfQ3qEf5{YZ?^w$?l8ac0;9}aNK2L35|{j5UTRyFvd
zR`#-5)=@9(Y98w54vG20g0>OiOL=F5ysu3z?v(fU$mQ}kuX^4LNk;oSM+f>w2m425
zB85URF)E*!n3$ZLoSIO~^8cFau9)k6G&eh`?5$D?s+7VS<$JkGSgR7&sYG?E_rvqz
zhWUPz#(7DT`qfkQ09XB*t5&N&NuPZhZ2Kf@*O&}F*9>=PMmjWd6aK;oZ$aLvebcRd
zYto~geXrH17svV*{}t-S1iCS??wv%ZFd5KInaFfAiY0mX(u8zra&T#3e)+@j@`sV-
z>5-M0w<|NFE3;$znsWV=Og|;p&zOwq=f?HQcY38luTreePa4!844RJy&5XfdSX)%C
zEvk_E3Pd)5=;jff23gi1dJTdg>&px4E86u{-Nx$D#_IAWvbu>FP;+xMJ|0a?MXzL{
zH?E-V?Px~_s$W78J&GXc<_3x)D2f`5#&IH%Kp^07IIt`QG6gIwfK0Y)!Q>Lbk^rXj
z{;zT4S|At!#{)A900iIxfHMFYU>eM_0K5UN0we(70d_(tBO0U40AmvH(m;P1s?xwY
z1;`AjO9HKn(U`>JWwNfNtS4OBj%NTarSfWOPQn?$yTHyL%^(HfCx9dXmt)sFEfV|z
zp9na*Zyf`)bTIk@FBB5vU@;7gr+{}ErY}HN8hD=rvOioq4GD>Wrvr}BROgS?#(<Fy
zywh7x|9^b8%GMSDhk_Z-F|x6&D{mX!i`i?&?!Jl7<Jh}iEDtXLpHy<%q8n!)j-04&
zy;w=HcBJ`P<YluWiyNc({6ZTKA8g|ZaZmCf<p{@ypmO(m%8xF#E5cpRiuGVsxCe7$
z?%z$J71)+#sHLtLmgNqt?u32lIVve8zX-pJpBME9UXoE{ag+Tk_k!=C!mCZi`$N8G
z6bhd=nw?_Z2u*Ka&KGazZ{IWQn4~QJ*njVMyj!{|_Vc}kbl>VV`cZF5!-*${!p{y{
zMlds&$714W%I|)Ue{gnlQdma*isD(lCf4sCuQ~YK?>AhGvt#)#y#EjPhifPm_YTUs
z`vz`HEgzwAuS+*Em%hnT2_Nqo%g#JTJM16Ln3I-}&L=bd0<MpEL_JA5PP?8d?Go(y
zf*C<R{N%-yb9R0Fagh^4LKoNvGKoOF(bUZL_}hmXmR==kx@mu^6Mr8fTWrLhHQB9>
zZnq3EC0>b{UA;TXw`KPfJWR+Hr_Fu*nflqt?&jXzeoQRblqOQx`|8bdBKPKx2lhD<
zkDg%>d`WC((Vf-^=3ad)2kVe5)OLBrlFD;Acga&ix4ezP635iK*a-WR$+6*tBVXRh
z<z#rf-6~aa%LRX)ObqO`Wf607??&JjF`orbXLnl%Ndvy!8Sr9)vctPFn{VYwVoPgr
ORwt8&>H@Y|K=6Mo8=LL`

diff --git a/images/editdelete_20.gif b/images/editdelete_20.gif
deleted file mode 100644
index 8bd6e28d0bd14b070e93e57f9b19ae4f74a01daf..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 825
zcmZ?wbhEHb6k!ly_{IPN9Bizrs;b7u##UBVG0ys~F3!Qh!4Y90F)=YoNl7``Syd(Z
zB_$<QRaFZO^xJd7JMyC^l*D$GCihn7^wk%w_48gA=({d5Y=fuE#)5>ijEwgf7_JNO
zUsI5K%*gnJoBNHV<ZlLs$2vOytt@VMxLhnReHs?}CO+nWV*J0%wEx9<{f%X_8`3AX
z*Dh$yThvj!xVLg`PvO!D_1l_CceL02Z>w)<Y3}Li?d_k?H(}D`?w0A(r!Sk*x@=nK
z%2|E;CQVwkVA`@}%MSMS{qJb~KYh~wdGr6TU;qBpj<*;0e!0Hw`>nmduWb8wXV1~2
zN6(!*ck$$bOP4O)zJ2@st>f=*p8R+0=%=g4zT7?g>)z4-S1&(*bnorcTOXdD`TO9=
z|EK4^KD_+>?d^Y0FZ_FX_0Q|uPoF-0`{~)c&oAHqe)ImrhwpEne0=lb|MSNmKR*BW
z>B;{uum1mj^Y!c3?>~P0{r2V0ug`z~{sjjxK0@&)3s_nQM1takf&E_t6E`O_I|Q&W
zaB*{Tfj~*21uzysz>nD+!r&??;46SI{VZ(QOG*k0OMqhLg%Ghon-U8y5F4ZpDi*|>
zQozdr5#xo56<C;ab1(wU1lj=<Yk=r3DTxZ;;VUTwYUYJn0+cAuw1$Xr7nG=nf>ai{
zs^>ZKg4F@VoTc4?&Tx?z&$H(PTg(l##6jAv#6?~rHP;TJ7wjl!X%!iX)c7P@gji9D
znv&e~hUnywIAe%zPLLBr75Qbg!$KkqAu2fvic2CD#L`WrH9aB>;9}XzVrk(CCem8o
z`VhVBS;ZQXso}w%F;c2Nx)61oCB=F9$w_gszOi1uMqqV?Y}R^uIyxdE!oq?ALaclZ
XAg`1-Sy)1V1u$1w_?MJ4GFSrueF0ov

diff --git a/images/editdelete_menu.gif b/images/editdelete_menu.gif
deleted file mode 100644
index e6b4f3cbe5ed2fda84455f5f2853ba0868256a85..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1568
zcmV+*2H*KdNk%w1VJra@0LB0S|Ns9bB_(WYYFu1gGcz+&Q&T!RIsgCwVq#+b{r&g&
z_>7E=#l^*hgoE|<^?G`GmzS5)($d@8+jMkv<>lpAR#XTE1?S@4(aFGibZ@Gvs^jD1
z{QC3v?c|e-ggH4m<yKYx#>M^i^#7NV|Mcprpq3pS9l?%{%>e=budn}zh2sGMu2D^?
znvQaEa*mFURaI5Kl9K<PpRQR}?egaS^5mwLiO0vs|2jC{008yj)Bfqzrj&)zcXj&!
z0M;NL?d971>)Y<x$^YffrI3H@85yB4FV_$c|8s8g)yDeX#iNaR|JBpv3k$7IOYi&a
zxQBlKVPEgcxT1)2|L5c5EG+Kx>Ga*r_wC!Zf_46BX7}2?poMPn?C9)YT(C(*@7l-h
z(Y^5H%&~A_*hobC)V7;|XaD=`{ous@+`RPHwylhDMMOi9hkcKMc8z;%igja%Z(E0K
zSCDjFTthFWrlyULkdBa&&d$#N|NsC0|NsC0|NsC0|NsC0|NsC0EC2ui04xC%000O6
zfB*pk0|W(!hy?}*2!sfa2mq3kl$Dm3n3<ZJoSmMZprN9pq@|{(ln4n53kj~T2@C`c
zu&$`LxVgH!yuH4<stykhwXq8j1+EGT3BS(I(9zP<zzGfw%(W2^2@43>&D7%K<mKk5
z!4KfC5)%{-6BV!L^!4`l)WH+$t_}+g0xDRb6%L=lg9sCLQl(&Du~!!`WO%S@l7<bG
zHfV^pG1rD0r8-)=fsvBPOCU#5JE@SIK4K3OaOmKHqJl{WK2Z8_AX?8(H(qek0Hf4T
zOE6d-wX{K}r57v#6y5Y9C8(t>#|0BW3`7W6B1V89QDQ<xNe4`nY>L%aTTK2$hfdX%
zw9;EiE}}{@Vd*ZVx}p4Lg|?O02^1-msaU~+L<@=q54aEt;KhrdFFwHdfbl@d4;x*4
z_~4>a)6E@4lSV5^gJ#YEHo$;kqVxw7LS1jhczX6+0G=g*2H;||(*qii|0GHqr0>z4
zTfZ*JH}>m}FsxQSQ1r%iN;gb=xB=sZiK4|vOWzH=cx_3Up#xxU0wmGVum_LYnk@zj
z83`;l0}VCUU=vQ>3AaH{3078Ad9m^3LTOS_C<%l?5l0~mr7>8Y1`<+MUtHm}#utPD
zX!e&-8}Ow8g~M^j8UWS3@Ssx@0tY~e^YPWxg+gUm8D%)_#T!<2(f)?fi^6RbTvfoa
z$RLiT)mRjWP3pIwHQ&5)7!ElMsKX9C9LSnlKlTKej&bEkng%caCfY^9<rSxnnZcQy
zj*^fRSDtY8bW~gxp=aR@AUav3BpS^oRYwcr30k1@O*LmpmD-i*g(oF>5~3z-RGUag
zsR?OHjb1s`np%DmOqU6WkU$SU`~XA{LJUC!WJON)LWhqo+LKx|+Nptd4$3$xoFMfy
zr=ENkO5CJyMoVas#DZ8}j77G{VR6B-<}9@vU2CI^n-+O3tDc&;?M54jhFr9!W^~?!
zs&d+DD8UQ>P6$O9aRd@c2%*FhX6oqOcz9CCBZp1_rU8*s{-sp!d_wg^sBAQ*R8?FX
z7gZl#zf~A9c8an_Xj~ljsguT2aZG4-GhWMDi<68zNxR`DMP6$~m8xN%P)!B$s(S64
zNiY>Sz(E2`G~vV(P@oWn6iQ5y@J}XG*Hep7h1%Kylr?3U&LrPywt3B7nAd8Fnl~TA
z9J*Fo+~-x29A(A5C>gywhH4&XFp$kQpLMTBW7^<`i_=E4F;@wPvZa=k3xDJ0TFN?t
z4dM;qO)Orc`YP=S(@s-Sg%eg>fkhT5Xt4zsytdY7N+Q9D>3n+ASqY*i>27<U$4;VA
z?kOQ^)$wqdbgV|a4-fo&B?%w>j>RiTyh!;?f++Ec$i{XNNftfd5wpWzZ~gQaebndf
zrfU*R0bOw5#TPVO0Y(^Nj6p^jW=L54S~4+|SZII<tVF+LLV_~{VFyD%LmJSahBU09
zb_rBS$?yWd3?h&rZ16xE+TaE^yx|RSfI}SQFb6sa(1S3Hp?*}6fI2L|4mixA9Tsqh
S9xeb6wveF^hj@@c0028XqxVt(

diff --git a/images/equal.gif b/images/equal.gif
deleted file mode 100644
index 3aa6e64035a58aa6243a57c8735f9926bb30980e..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 194
zcmZ?wbhEHbRA5kGc+3C-jEs!T%*-q-EUc`oY;0^C92}gSoLpR7JUl$Syu3m}LJ|@Z
z4Gazc!9WM31EiOMrToH4&((V^UZ36lKS84}C3D^iHU^%pVcH5uZ~i{_NrbsQz@l-E
zYI(5;WA{wPM4hBOzlF?>B_```X!m(kc&&}p@e}`0*B2kVYgyabT~jygxH;ughFfRM
o?%q=(%+}d@R-DcOe&Q7j%$gmV(QFeYPnkMx`iz;gGz1x}0Y17w8~^|S

diff --git a/images/exp.gif b/images/exp.gif
deleted file mode 100644
index af87ff8a84c1aa33e8a7897bf9de93eca2080da2..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 991
zcmcgr?`vCC7=CVYlKZwz@eoc}>Z*&N+u9A|22C=Qc{<jv)5^NB&J2QFOU6VuDI%oM
zSgx5`nLofU3Nw~UXX%Fy3X8JtgJ;7MR1k7e_DeUcR5m~4=ED3DWN^HxU;GPvKOK0V
z_kEuCdC$9V|6nHld;$slg;p4b5ex<yW1&ze91asvTU%RudwXYRXEYkUgiBiMU9VLm
z21w!wGw#_{&7dTToLL!g0U}f~3ywR+wJ8<Nn|9r<n@mfog4e!8W(>mgLD{`#%C&`N
zq-2gw1dsv*KyR4CyX<;|X_6}NgNc+4!F;lcDIGh_nUIoBB(jZkT??&9UCptk3sXZX
z)L4HXrUV#4X8VzvYL({V2vl({3oC_5cow-FIs(-+@vIHTASKF&_ck)ewBU;9X7TlH
zZaLg)C~Ife`rKuqd$T6<h2#z|1kz(m?^d?CPFQn`VfVkHedGFT)v0<f)4+LPD-Z@`
zg)s-<{tIYUYDuRqxb_$tfR9taL(QB_x+FA_D%Y^jMMQ)A#65-*z*?30X2nZ7ilnqw
z3%j?(gufJt*(Nec2ytr$(<SVmxfdvve)hNKqbfnqf#%2u;JxRIpn{W*aY2&MGlTWM
z8=ApV5~e`GL5#!jiT(zD1U3O{FqVN{_-4UcS0BL%0j~kAp%h;D6W(p;O~?<MUc=U!
zDJ2udanH(VE)GtbCCfHBqloY8VgZ>L1c4Gog5`p@#!C$U#{$+5Zwl*wh_A)(E?2cr
zU5vx3f`H*T8dcxj|0lM$Xj-=bT7f~`Lrb%lPldX+=DuF~^sDI(Lzb_N&+d6A+H>gP
z2`P=R!CowrygU%3IPY%TF}H_xJ#sOB{gwHr&TKq+W96HX{DGM#N2B8x?yi>RIvzb;
zSvWL5cy{FDokNc$R-bwMVC=!s`@g?EIz80&)&oahUA%H`d*RHnr6Y6SeSY%h>4C{q
z&w-D|fBR+S>GaDtE{^<hD^!l%8Tg{qsekXd;tsC7zh&oh<qNC%Z$H?0eDl!_Fa7%A
YCw=#A?w&aR)0>|qPaRp@k-+2s0L85y*8l(j

diff --git a/images/fabs.gif b/images/fabs.gif
deleted file mode 100644
index c39cf67954189fb90472d22c77f148b05a7de505..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 988
zcmcgr|7%lM96fox=3O<GTfM0f2g#Uh&dTzGG)d<dZn|Ra43<i<VaDw_I*}?^RETKE
zo>44VD{O3j!LIm?iqt`6`9TG}w$jia_CubGtsoXQI^4v(JP<U9a(#*agP)%++;i^v
zoWr^89UEiOogswq64hyFXfR<ar8LA_kqv?fhy(&<6cixg!U8QQ_y7v5(*urKPy|vS
z!-Ns@Fb+zQk0mVIW=aVuNA<~)Y3oD@A?s$oLJZ(la<Wa2*d|wmN|7RmG(_HA9M=;q
zX30ks6;k^{N5ljtpdzme%ZxI@g`j&w$#S%$xRO-<l_C`zx`L1>l6W(+9E3t7<A`M6
zQIQHjE8y28j0le4AA{C0EcZcF^sjqNjSvWd;W?CJP#lutamLeR!>drd>S?yY6{ti~
zKN>NIsSv4dU5lFIr}9m<<yA~bVGwMnZI5`dkS(nuZdAVkD*-Pc@`P9bb5B4eUkqC%
z&Xhw?20pAngh4rmi)gpub9t``asa8Kdn<V-Y$?v9t7`n6-tdW^dnpyTSV1fdUd?)T
z1OqvAjJ(!zx%-67bAkCf$AV($)!+=d@-%Hl{ftvV@#zR^uP?3C<-*W8(iZ#-9#`%a
z@CXP3bMV{-R>4gQMs05&_HsC7C<Q68`vsgQP(k!2bf@fAg-9tvnSRHJDaIwGT}Sks
zVd{)fpPTEAG-7^m3L45#7&cfn%}}D|-}Nh9vvD|C)M~`$0?%(3l&hWzz$hRQK)F!u
z<$IacRh8;r0G)t`ez0l$;>59e`pVSw_@xH}3q3v-!sV#}?*H=lLpQIqo?hN`_!-@Q
zdHRRO&HLNtcm3OOrhZ53ikbe<7w792^(87@z0<>s)Ai#m-A3oo$9ud(n||8$@KWbT
z+K#9ETK9>z^PlBEUn0!GRB+eSe{1@Be*CWIWb=YZY4d<*X7u2ge=K!1`hVlkYa7ek
z?r!<z&o_g7#r6xCvx`sM+4^MmUi&intm)Ry<hjA`zde0DePDKS-}-N6Qx6ZcPmKn>
H1Z&>{lM@_H

diff --git a/images/function.gif b/images/function.gif
deleted file mode 100644
index c60f72277df2b0bd21e3591365e1c86649e1d317..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 957
zcmbtT?`vCS6n>Jt&3(lTukaS5QyM>|%cj>4E=gJJ-UxH;X#Ih8=0g~_bhp`MX>HTm
z6dKB1+$<Y)XtA&#ZP|2F27ahpaIBzjTt^)KfL<6V$YdL((ho_hP8)Ridei<1o==Cv
z^PKaXhjaF}zZ8vh2N1wI>S4hu7NtnR-5Y<=9B&#W+~Bbxe^o2V*CBeap7FPsCEoz(
zFm??2*PMDU#M{XAbG^r<4uhH~ycM(iE9$e7r^F{FE%!nRg71b0^N3c!B&bL{uY@ZW
zlY|PMB%^MG0S3^cO02~y`-r4mxNj*4TVMo~7Zh$=5tT5`X)@4Ojg%#mf{GtINUZ`z
zf(tB>_)d&75DK1%BRsxIMbZy<36~Sl!%!J0#TX9gv_ZvIA9zhU^rmGHqE2?Dt5HKT
zRZx06?b=>}S7PbIdpj#0CbuS(BVlWXa-Qr`+!1R&SJ+;$y;9|Z>jbTv>KzB;#nN~=
zL?m|v;R?_UcmZA@#0FF+18Rk$W|x^N?+{c02{_QFYv39v!%Pu-EAZ7#0v-_L0aB-R
z)e38xEgS{<-E3>Hk$EZQI9Eed1FLWK%N!>1AOxJFeeQufpT9#0;r!KtXU&FGA)F>|
zti1>6U<x#AM;cW|ImNa)Y>auV=E-YN7_v4RRk)nFe*-@QO~5i-*MJZl@1SpN;zOLg
z;;D{68W0)$mBp_aG@0PTh#9B`q%?#WUC5dL=tGZeT*ig5{v&z{Y5=}o7fc`x7(ifu
zE=Y6@s&iF6&&2vrJqT$+Xt<w6)r3q#q_J$`S~o&{;6uo1(6dl8pohPVX&whJMjY?|
zXY8o+>%Rc{5nS}iw)t~kzthmroV+=IesSVq7oTbS;QWmf{-y)p-+b+E>Ez?P^Pe?m
zmTye?Jmk4;xOja!)VVOZ`u@uG=`E>y9S;qD`psNx1L<77a^c&LcTN00SzB89?!=z6
z-ZvKRr+&D^W}led_os2|-ItGzG#>sf^wa0@tNCMR{z!LzVI2QzHrjPCaruR(`j4Kz
cdZ~2w(CvSX_NjsH#o(jC{jpPnixzhM2Nv2R4gdfE

diff --git a/images/getmask.gif b/images/getmask.gif
deleted file mode 100644
index f448b1aa22839cbcc903502dc9782d627f1f4fed..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1096
zcmcJO;fvdJ06=3H@ysNXpCjN8tzG7_UAQZfK{@MD&pL8<ISkTCH^Z6j3YJ}kz^Vp}
zTi9iIgTihzNQd3%NnsHShP&LXUJp9xW=;Z&u4>V4c3R|yopf5Yu!23^AL0A_;C*`I
zlibKNGk(O6+(r--MSVUW$8kzrDT0!&>+{6CX_{`yjp9+y_3}QGM9EsMRwZjZ#W$PH
z5EhEl@jBUPwOR|@f<|ZygwZ57+U>SND3!d!^L*Bq>vTG;KpR4s1=$vC(I~A5O1ImU
zgR-V*uFp;4NsZDXSR@DX49SR5u|hg#&Ljw;4LdYV=R<kdcH?+lV1!<;$6~BySp-U$
zpwNL`gUu+cnvpUIArYVg71jv{!>L3{z=d==9f$E)AVw2RGA7%=UZ9N3d`2Y2F6`+^
zods;UT$W-Ij^aXC5OGnV)PBDoVnPswvMh5UP77%aWE|3U9oM$)dcCfzdN2?)VWC>B
z@)%F!v=8-3lBAJ3j^h@v6iF$=N{ovchLMs}HY{QomZXv{=@~?(2YaPrNoBO8lyn@Y
z1WF9X^t=5kta+rT1Qlb!h~cp?7UnQcNhukS!B8xS1q+~HnO1{ruq2xeX5(T!LPwg7
zW;7C|FscbRRG<okP$tWorU4X4xa4`BAP6)@0|1J}Vx!TpNV`VXHvW?dRVVZWp2!1}
zLMb^UtF+1hrc$YhqS){C(?q({?j-1>jHi0AAHdjltF6dNhREoe?vd33unK0u^cCv0
zx|4TwMVE0o8jkWI-g7*elGT(d;870a8)Q?&r7#wmC(;p&%Ys}C7t3TtB{UurYGi!_
zwsc}XfhSwA?fN_$6d~%ffX!fm49Ft?KRfPic>qT?k)ivk`%fSPePm_e@rjrGLkCi$
z+o!)=njsISMxSr}vP4}Gaz`%acdh5whd!fB^*!d?i8BxHeKS3A=iMn|^~m79huHkp
zb0bsL{KPlkon0I|T4f$x*?CR4bfSN3ax3!5^y=vKV|#x3qI!DgqR9-tRk*siio8Gj
ztATvW?q4~43I6q(#h(3IxaNNl+_)Gx<-B-)*U67weqo|?b41y5=+oSEd3e7w`16%l
zTFv7h@a>sxhj!n6V!%KA>e|Q}z6JUC4e`KZv)6$?z4xhu7q@0Nrh2vEvE}iva^f1c
zbYpq)F89Mb19P`Ne|_6->yML14`1H#BU-%hHgkJ@?w)u2`{h5^Zm!YK{WJFA=-ltk
l)91$*i@~Qa{5D%UwePa__Zg&f=hotr_U0y{ZGQxL_Fw-^vzY(@

diff --git a/images/gohome.gif b/images/gohome.gif
deleted file mode 100644
index a6cfab3dd368225d45aa4b7703f5c9702ebe7204..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1206
zcmaJ=`%hB`7(JI>+lzzQLOVnlmMBAwt`&$DA$nW2h+)E@;puF<QfFsbr1(HV>DmsQ
z!cbvGYDGb26i29vu{ibeFb0EYi)KT|OzRNI6ozi|!OZSpf51-8Z{H)|IVWFIvQoBG
zI}2fWg((PyLa9_58yg!Jw=h0FUZGGZmCEGg<kT5Nnt({>6Y1&c89_uwMusYwP^naz
znVDHxS=rgyIXOAGxw(0Hd1|#<6GCWb5;~nuA4=%;dV?g!Xfzf?5QT+>CX;EqgkM}-
zymRNyl6jo6#Q|kyWtLS*R;#tVynJs$cx7c}RaI4Wb@hP_`h)6?hh%<-;<+`;CdsR*
zsX2W3aGg4_uCC6$e42d)*KW5T)hu_=-1_?ZhK7by8)8pyOKom$K700TOG``Z!LruY
zR%aUJbUM$aPrq>C!o{NGOIci(%hjGsUCyC?$mLzhqpsN0SM?DL!*pq=u49&Nt^alX
zyEpRvZ|L9Y>FMcp?CS09?K4I670vGJ>$`p66Zh9^J(cS`9#4ON|J|dT?;kIC@ZiDV
znUcZ5LGQV}Ua!}8^ODc!8*+CH4GleJ9FOl@8}4d)a_8EUdpCZ&-}`j%&iMHF^MT7R
zh9ACq?4FpIU|E(A(7<cATk`CwkYy=`83#rI7GQu#fC3yq5)d#M(pwT1fSj7if891F
z@vuTZL(wjuPZAw1;&LgPM(u0JB$IK1q8<Vj|FtJqzV%ybp@k$#(HahC49MqU%nX(U
zL&N3NNS4S%LLo&_;MK!y0ZD?UL!NOgOTe4j1lJ%Xc?J;+iUBcszy$C!FdG;G(GDyC
zN&pQosV4w(VJc^IJ6Js!6JGAX6BA||KyO8g9;0HG)iX@-{!)WfQxwVH3d+z_^2L{$
z&%hTWU?TwoU<CAlII}dG)JVYJ0!p_ir2wP(U=3g-Aa`KQ0M-b`h#>>owOCL9x(KBb
zXm)^a0=3zkA|Xe0VDrIfC!PC$ESxG&{Q{_eKv3apoG0Yg-9Zc15Ae(+0_G@GGI_`K
z@*rDcrEC1t<NJc&Z*FMx%vo72Ufg{DkuE8a^MR<bZ6K*@ZPX%_L##FHy1x=f8T~)@
zT0DLMi5<1ny~xnU5aXJw=GyN5Z{9Bmn*X_VUsjkZev`}YK1s%`Qmp!}>(nBVGvLFt
zndXb%+uNpHKHu5>Q(s<F@Vxc5k=l7=NJ;1;t&Y<LZjpaNRE$?DN(_I#wa_QCSP4H~
z+8e<mnu;=(zLcMAiu2<gpLX8KKVhkgIr0bTi&S*h+qQHJoaTerbaOW?F07HR5hScn
znd`k9?Go?<{UzaATl=?RyG9(goEM#Iw@H=Cdmr^2d$T{G=(a{1r0ab1=hMI2e&?KA
z=UALeAGGfN@l5pH?Z5o>+!DTdufS`56`K8?<yOPEa+W$@uzCjB80|})K3bs&+gRKX
zSs@s{q~@ik6s{Jfj<_TD@XC*9c=TMKEja0x^w)}r7MEY(vJvr0{}sbo^69cTc?T!d
Z=p?v-eD`&}tSL>thqrCMTR{*=_!ljD$p`=d

diff --git a/images/greater.gif b/images/greater.gif
deleted file mode 100644
index d8bb1375c3999d3ab2935937e2268e6bba57b29e..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 194
zcmZ?wbhEHbRA5kGc+3C-jEs!T%*-q-EUc`oY;0^C92}gSoLpR7JUl$Syu3m}LJ|@Z
z4Gazc|NjRn)&c1N>1ALkzi`rX^<InDXLtWk(CACaoVS9FfoE%&w!+byzt4RVVQvqw
zXq=;3UM#}cJ(Dp}Cn?WwA+zHg6Dennhnh=c5}h0xX8(M3=<eoYIU&2#ZX1R9TAzL;
rw#`;6n5QhLD%Y4Z(#21_lz~|@AXC+lO>^>;sne#<m^o{fAcHjkb=*78

diff --git a/images/grower.gif b/images/grower.gif
deleted file mode 100644
index d33b2ea196f8381e34ea75397d77c4c615402ea1..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1250
zcmaKr`!|$%9LJwWBV%%z<1%Kx9}Q!6=wU_}Q)5wM7&9i}ScHkBa;X*@9o^QwixT6s
zh4!?<S+}-Uv@6c08Bsb#IyO3|Tt;ihs&-~4m+VQ_U$F0=Uq8Lh>n#ll7W(XU#2hg#
zhS}NK(dl#sgF&Ow?CtHxlKY>9^dW?7ZEblxp1w=FI%Qbls;<DDZ9MVEmmfZl8kX6w
zmyxCw2H1%0f_TdV^PhV32DH$FCcZ!%3WdUlz=d0erQ)~GL06APU0}1hxw#+OHD9~<
zR-tPHZ5a%6k*}qtr7PM#p8EcgY_=RG!=ai5ZW(Aa0rg`T<_C@8FzAhna2z)>G9r;k
ztE;P_p`ipiD?n;C$X8ZZ?sa$S_4@1r<$>ep6yM}Ursaes9g@ZDOOyl?+|7W}n9vE(
zYLB)PE}Mb_#L;_V_<Vl;k(~70(}|fUqEfQbc81I2l6Omj1za9-5P^Fua;*R+kLSu|
zIyiWGdN#E-)YMift1h0cI9*Vhf9lMMqxm^8ifF|4N8(x#WCGh${*^pAGZ_G!5nrKD
zBqk<GrBaAg!nXZcSy?)r&dkiLgFh1+8><e|nwpw^XjvMP&!(oP=I7_Px3_aRoar<A
ze@@NckZ2~3=x(A37ep-ms`HEcMH<p~_0;RzMbrJ|bK2h)CNp(|FhjuR#h$-ECQtUF
zF&2wuP`&#%ey9T;X|Ip32-GVu&<x#HR#r?VvzOcV2|Y=NJUz@s!#-<k>w9aSQBn}f
z^+)GDP_8pd+Xx4dOAdG+L_tR$fHnjraexRwxjRr(7|M1AHzCAlp!QARDS&tdYT=@-
z#>U13_&dc{gX9<o@yKMdgM-7hhS$K)V6j;nHf$gWA|)kd;Qk!AWIC~(1_uYb8ngg;
z2?rBgd9Jq$rU|I<_3`oId%C$1Y_`)zCX>OS+t}EIJKyXr)NTP~rRVr`yCE1|jYH~*
zT;1E|h4&qcVQ?i3s*<2H5KgwhOai(l0NG=hI|PmH;PV~~1?Z<G(40WE1h{3Ribm9R
z4%Mz@^FL!PlW+>QigDMHTYH2fhLJyG*<^cJ+jVs@*_vYGD5?uA!6|f+zO7zlV(BES
z7OCq4ahiJ)y@sV)UtuCudZ^oHOl^n=E1R|=$%XHQin~<mNPStY$a0u2pT~wlxZf{I
z9$`|^6mgU~ua453*%a9v)wRa%`dQH}WcY4r5tDFL2_x{~(;DwAP3zu~#*A^?#kyQ|
zscEY4eu4QM<F-j1w(VPs*^ldFtn1sqv><!oQ&X%U=lJ2NI?f<oVq!%k|2|u3DyyWd
zS9%j4{v4{cWXc*&n_Q$hdqmXV^|(YbyXA9o{mxfgGR&-mf~T6lMs!ncrDHwU6yoL?
zA8Lfrt+-={i#wWBZ`==@k9uKW=~yGYEVsQ8Gr2-*vTz#>f8l(XNO?y$6w*4etOo&5
zrrcqlhc8wIbieC;a-3A^ffcR8y+U`VQ%C3lIQ<RPVt_g?_1cqO_te4oYpgE7>t(c^
z_j6HyvG?%Zmk+<CJQnQ~j3u!j;IF8Xbz6gR<L@K{StS;}`6H;@M{B0K5Nh<Jhj>S9
zW5W>BE&h%PrYtKd^wnV=oXf@=#ib;xKK3*7gve6Nf1&zpir7qZO#6e8F$wei4^1Tg
A!~g&Q

diff --git a/images/hand1.gif b/images/hand1.gif
deleted file mode 100644
index 382d9a103891750101be762c2ffbd6f582c15953..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1176
zcmb7@?N3t&7{yOZX)l8I4lkp_qQz=}3tA{L2V!^Yj8LdfH|Ayn%Vp9@;~PUG1Bn4a
z-il6OXjou_4H5@3im3u&*{!l5PJyC!nc^lQ$()OAAxnGrlHHX*VJGLq^Wm2#&pFAH
zZrJhK<~$Wt@L@?Jk%$NkP=|-86)V)M)PBV(<*{@ut(4PBl8*HIC3<xzt&X7AD`}dJ
zQ~4bi7ZtZPB}td6)ukrAp1Ns=HerW0F)Mj>R))@)7Gu=ucc$*#wPjaMZcc7)Zf#v{
zU0vO|vkgt>&i5YQI?!G+&|Z3_WV;=9yImL^9i6B*OjryPP1%zc!(@YDvcG)tnq_jZ
zVX8TM%33jX%`!FEaR1_7XX`%a$T{bCP0o8ae;oSUbF;xSd2!)EpZC%}Z{G*r{(SG%
z!#=CY_w6y?M5FI+tMAbz-)yh%c^}`upTBm5A1vf=f5J~R@^@SLd!O<5+jwU?|Dc<H
z*v&t>#6SL;f84{*_VV-J@C!ry;;<kH4(VL7D}a)%A{gd9S;ed6ZIlE#*)s9wa`-2L
zu{PM^T+>p4WH^SEKnG9&;jdNSH+JeiEqR4u7+@Y4255k>S?$xV7o;FC9LI;xYyKRj
zWVFWtcQOPD9DyhW7&iz31cx>;(t!~Fwts-rW025bn0y`{4H&kdSmz3(1j5hl4uBgF
z01m&1Z8fRW5-DkLK%6|*LD7K(!zvsecd{T*97p7fXFYf!K;XbJv(n-eSC~|W*nn`p
z;IzgiqXfn#E}ljh=?@2y82AgA0oM3$_vf&QhNnT3I0>*WkOD$DJ@saQ`ve3NxFUFq
z@oN#*n86%}(FC_j5KJ8RvAxXvH+xJ@9tUCWI(ejmJpfXLkPk&L1BU<;pxRp&Enzhv
z3qkDPU_1<WJ_Iv34J5_zm?0bjcL?)l4CjG94CWZhG+>KC7J)d}Y}82nJa@uh)&Elb
z620^bApV7jP-a*=u2%-W8<R6`?HsKM7BOvv9aR>EW-X)IGq+v7CPK_Eba&lqj*LF?
zENWCV+8Dk6l;JmPrF^yi{Y@-U(PeDjv`W{xs7}wQjv_;Io_v&7MPy%|cgz-6O?{En
zx-oy2Eof28Z`3cpeN`NEIWjn{FR${4mUv^~c)R7PbY>;x&;}-aY6<6ZFMO{s4kgqF
zN56xgfA_i{Z^#VIxKLJJBo1D0?Ae`jMUD85H;!J{cH1aL5LG|aQ!-$8bdD4xZkwsK
z1TNECH{MF=Wd`m2#zbvOMb(+DCa*WmmeR6rJDYJdF>21Gx$VoWxx21@Wu)@PsTHJW
zvGfE)RE%P67ZIx3QgA0h-%)mFqOCIXe0kKFKPZLL6uGzE>?EbnO&-!HktR20j4aP)
z-<Cg=QAo;r#-3T0_)}|jM8x=$a&6H3Ozq2K`E%9EQoi$GOcRlPAbM<^IvD6wd}UGX
WDtLNU^1SxTx3c!|Pb)<tZ21q8;=+Ld

diff --git a/images/hand2.gif b/images/hand2.gif
deleted file mode 100644
index 7b3748df8e54c10db1f15681ff0d1d250f392bde..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1179
zcmaiz{ZA7I7{|Yb0}esBl5@b;VG$;9Wvgpd7|iZ)h;$%21_~L(Ds)=|z6?-clBIM+
zWS{~vwF@+uiD4sPaUIn#DcboSN(-WMx*^8JX{J=7^ft|Ihx-He&GW;PJfD2?yyRJ}
zPD@PKB!v_n%t<5?kv`s0p@C6hK~b+n35t^h$1!n?B$$y<%&S6(nTQZ3>Sg9N3Bxcl
zsSq+*q-@1oN>!3Vm85(pX=$2bNt)u#jAaoS>8i}PV>4ALYm(MvC1({B78Dj1?rGT5
z(9p1NZ&T~O=AMcb{T<c)9iN@3P94VZ@Gx(;+ef}sk2I=BT5}wYYDbgWaa`{hXmnVc
zMi1nSnrlY~8b_^7R}Zb9Fy>DT?wc5DofyAx?WD<lp~>wy^!P^aY<K=_@8;R##j`(b
z@tC!qAIm)>EuJx>=T^7pc8}*lFL%6%8`#QOw{utab0aO>n2{U*n!DQ0O>}TKy0~Au
zxLe)a<hR`9ciinB?rtCV_#`)TisyN!rEQt(Ax-flYs>SyzFg1IgdfSW1vyTygk_*<
zkfgcEB75ZW<tf(2Y6z$R8X$$urZ~@k{*WPfmbC%j1F?WFz*-2x1yDi;a2BWB_0UaV
z8sGtei{!w{LCzVY-HFwY5XgfR3eWvu(1Rervc+7UTb{_%w2dTL&6H6Uvw-#$QWUo#
zG4VGO>J9&aP@4!Z13C%#u+Uh%-7fGHYy#*7_g?UVnMGoBB`lz5k|hym0t^5V%6S{)
z@S}w)X>?%$1;JZH0Dl8FfrT$xzYEbRfB|*^niul~B86V-J-rjW7Pd0XR^nb67V5xO
zAXAHJDbH(dwyL2T-7`&jFjWCkcXZd*TFu9xqzL>-1RYQcXaVW^nixVO2eln!(d(I8
zFkK8@2b&y3CEPmjrLdLat`4U*f!PAK95r%iDnXTjENRP>69RABDO})x6+cJJ{Q}5;
zAR;B{=FT%;_-u$RxM)6NuMZHh?c^}(8pGs^Sn2zBeyT@aO}P3*@rC`;RR;^IL$vm0
zS$?1U55=OD24#t&F8n}oMu#$Y_W8^QF{4LTl|($fuZ(ea#*3DA>2hMnThxVboJ)H8
zO?|Mux9r@5CsVs46p}yb3`>taNL58tyY!L7K=!5VLay%eeu*Z0H<i3@o1GMQ=UqB-
zW{MnGd_}R{X}#RycUW0uDySvKj6T$LbIR}Uy|g$nyXX9;MNj<aMdx0;7SJgVm0Jhn
z8_o<`x6z?#4%4q<R(I>9HC8>`{+H;Ue|#I&cPHSn5#~=OhkUNIO=oLrqu0fH&FrYX
zX{~f^K6+Xl8bBMCY&>0c|JjCPVLB6g_Uxc+B<;hEM0N`yUK0KDSwh^9+p!~c(;+Fn
zCUvSVNTOM;N6A;wH|xb-l-_GzU}UN|9sS<(m4{ijzCYh{du>R%K4qT&F@1_=evmfr
W-LU4vqV$CMIR~>;2Q4dEB>xNWdAHyI

diff --git a/images/info.gif b/images/info.gif
deleted file mode 100644
index af52cdd9a8945497fdd34b02866f9f241a3817ca..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1580
zcmWlYi&GSJ0Du=*jK`7}o|1Y42&WdFiWj2diEvy8o(Cx1P~F@RkLxO0Uvx2}og;%Q
z8Ek~OkvZE*=SCN^bJ4k6Wg7|n#s@4HUfYe`tW>lzb#3HhGctxA-+%DUd|&A<<JK*8
znNTM5Cj`N<u&#}!&Js+JhxMo!l?+CM7^J8X9;qj=R+l)%sZBPfpMbM;NR`Xk2h?lS
zLJ07enLGyuYc2A9nGMj=6tMI#swRr<$5}VZ1v$iu%TXQ=a9{)|^)^E@p{mEF2(Y!0
zE;qvl7}ifw9zqN=LWF}0QE7(fA|jk)P*u5rpO9xIq6bGzE=#oqX(YuN5%VyhAEP}4
zyh3B?riCd%n&wqjjJU|LL#z<t&21D2i-;bTqM{UH)YVSLPbsT0U4c#<qm)|n{K7m4
zkajoA_F-IzSJvQ4lgnOjSM4Y5os<-2kOCV&Ex|b!(oMoiN?kk61_g74SqQQ8n4okJ
z@ES8xjf-KxT?U#X6q0Y7zc0(vocahQM@6**r#zHA!}AeARqdL;E0H|}<^#Y-h*JV$
zM5S371ZY>Y6DdRutrX=Z=n+n5Me%lAvq7V4B3wS8sdD0Oiu41towT22H2VqKN84&_
zXd?=z8(bbnrAL9E1VfD3WL7tkd=QWqIkcO$R9T2lEKi#U=i6D2OKC)HR*SNcr28n$
zO^XqL<e0?>uQEBs5M%3MKp%$t0K5o6{`WE<)tC?l{1_`ubF7a*YH*c<u(ea{#n0ut
zvsfPse7G<zO0$yM>XITXQfP-a*tjun{=oxrS~RrL^ktq5aO!4?4{_uW4=w|4N<=DL
zx<Z}Z!@>_M*^nU5ic}v(o+ZFV+I7UK+)pw=K@8I36wmaNaFzk7`Y$S*G{v#~1nYLn
zQ9)HtND+?p)0Br4!>lwTz&U1B6-M+BpdY6_q%<wCLqLcKh~6nhCB{#K0L6s_Wep*P
zS){-&&j@@(R8?b$(Mh>UF~T7ZQV4TM4WWVl<Np_+gas%jK~*-;JA6F;++#NHq)}UY
zE!~Jc29{;Nv<S+zc@H&|)TXCd6>F}v^mZb9_j9n88aY+h{hr|MUbXw3tWjM|{^^FY
z?4z!j&7LbCq@2+14Z}Iqaq{t#6?4&s;f8Eo(&1*yFUe<o>&M!4+0BV*%d&5`ZeqI4
zkA_airmc9ls&*W*H*4()x%kzSg(LkrPU4N!)p6Vzr2TKI@{8CfPcDtVKM+aT^YpSW
zUs{E}IZ|6va3kx(majkj%=qxN$5W|ip|*b<hkHw(?>u$s+##fRNoD-tXIaqFV#TeF
zMSGGOUb~(9a?iTT->{ETnv-8&y!%dGqDUlg(Dvu>g`2f+7yb3sG1;+iyull%OXhFZ
z-g{x!iK6GWEov!RJ>+_D?84?h-&%KAuIt$TDZME!Z-$tz%#R&}5@Qu}d&_sOyxx?)
z;~&lS)*U$W>iGH}&KytKnIgQtOVLinm|_zrFDRB<xY!@GR_`NkuD?+<+Pm!&O;;8~
zp6N9$ew%pVCw)5l^RSlm76;=?<`O!SZ{E%xFUls1z|#C+)_J`J+S;-Ff%iv1EVRT`
zsCGJ^Xx|dIx~?^Y(R?cC9rEWOZfJhwQeoO$L-3&u1J}zFSAuvb9_tz)mmknD6WZOH
z)1~pc`8%b$kHM)IHLLNE*9gsxjHe!JQ<h{bG<Kzy75r+xH?|lBt;LDAyrYjoR`z`5
zQnvRgO~EFhxV9~_HG3lCyWUk5zAx_kTb1~Zm`fXvDkk;GN9DT3Yri^pCjkm242;Hq
zT-*C{;+Om@`vQ@`-HsLH!O@A0)Qdm0q{Wt<*C!1SEyaVkG7p*7%8lzLw>?jOGgg#W
zcA+F`2_7mR{DW*7+k7KT?J3SJJKYJzjcQGUVaK7A)Q564e*Gf}`e32EziT2s=l7%o
zS28jt4LK<r^UBiFY>Ky11Ln-O;oH^@FkWt%D88PwuYO#yP?kBoe8oGN%^Q=>WjVhs
zx9zyM<hWTok$3lM+zEB})q|xQqG*>%x%RbVqr=}nv-6WgJ+@M;`tfA4QUPuM7q`F&
A>;M1&

diff --git a/images/info_20.gif b/images/info_20.gif
deleted file mode 100644
index 83bac5312fd6d536f4c0dca1e8908cee059695fe..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1258
zcmZ?wbhEHb6k!ly_<oH61eo}F+1a?cxfs}GI0PhRq@@{nl^J*q7({jW#FZGttVNYg
zG}JXTH8nN#EG+aCjSUT~Elq80Z5d=-8PvU44Z>NB<HRk38H|&eEK^ynGxf|IjBVT)
zoC_FSYB`+?czkMD0_u4K>jm7irNirV!|IIu6V2mleC)K`?2Mi4tv&6G+#M`jU0l4}
zoV>lf1N{60`~rdl0|UZi;$q`sGxCy?lC#s3Gcr;#Gc$|x^2$m|YO1OjJewFp+Swv{
z_`=(HqI(&lCi2Bi6pCt>2yT>(=@p2dDwr}=D0_}{>J*vW84Ah$`o;4M5}Qquy6v+1
z><Xu9moLz(Uuj&vz`Ay+S5co=>9pXY?%?t%(RH)y8tRRjRysGW^yyg_-m@}d%C^Lo
z*@=B?ayk~|O<a{ScUQ*jT~$+-*G^ekHEUz~qP^LR_LeT$J)tV9p|WIhUDE94n1+VB
zDGh1UT5@N!7tQD@pV?c}($UsGp>JAG<CIBVbEox8o-%pL?0Iu%Po6n@X49<oz4KSk
znX{mI$*u{@cFb75eZkEBb<1Y1Uom&Xin;4nFIu^J_0rWFx2##TWy|K>+c)prwX<pE
zfzD0GySAL1vir=Uz2{c!Ikn*6<u#|SZ98;o<MGS;PF>x1>GrOR4~`$+apusb^Cx#-
zIJW)C=_Sw4uY7cV!}Ck)-(Ok#^x~EmSGT;pw)@r1eV?x^`FMT9$J^UK-`{uOz=2bz
zPF_8C=;DR5H!dEzfBn?u+xPBVzjXKJ#oM=Tp1kqk+LO2UUVM0T?ZmU&M_=DL^7j6j
zXZNnXdvyBMqs#AK-v99E%#X)Me>^|;;pwH1FK#`0{OIk=M{nP}e*W&$`w#Emeg69S
z&7&_LUVi=d<@?X?-+uo1@$Kv1e}BQLa9{|<pDbXDbwDI2PcU%&V~~EaqtKbXhtnY8
zKm+69R&Hj#iYq$`AGdAIytc-*DkXOokg>#2*;sj+Y4)|eh{<Yar4~D0TNAlC?d&X5
z+xZDLo1VP^Dn755dU8=Jw|e%KWs%HBd?b53K#JScdUa!GbJ%L8_|7tQ=wEVlQsCn@
zQ^Q&FLJw_eJ2hRXaZP0N)y3|8lFVf*1h;heN@nk##E|*u-~`j`t3{5FgcmkT#7^GD
zWBB+fyP%SNz>`^FxsvPrg*c|1{Nm~<p&d3&z%j_hSvlj<0@l@Al@8AE44yLQVd_~<
z0TpGvA3H^PZyCgEGsR>cX6fwryEccxQRtA9eAI<WyuYtH9pE(ZZaGo&BIRvIzml{8
z!=$M>5*tEwD=sKpROwifc1_2VG4NoUa>K$2jK7z5weqF%m`NNIV&xJO=C=4#cxCmC
z_3<ks?rbpYW^Cn=GzxI|(7>-0xW8^?P^;Ea-t22>XEi1~XlN3v7HRzT^>w&b$P$oZ
zV31kr-+K_bgk#G3&{Y9brA)hkiZ3sVOrF9nr5kymPuBR`snFHIYl47^b7z^lS^Rjh
cv-r55tX0Xg1BJruhaid}(TGSyEG!Jx0QGhrFaQ7m

diff --git a/images/info_menu.gif b/images/info_menu.gif
deleted file mode 100644
index 267fe64fe8e21e2ca31eb157d58209c688a6a2c7..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 2152
zcmc)Ji6heu1HkcL!?uP!ZW_w5m|7mAgGZ0#410=%J<%8z(Zm!bX-3R_&5TW0b8O^s
z<wyz9Sjv?m3PY~kDN1^t_pf-rf5Yc%YiDC@LO}0P0A>N;*RNkFUDTB;SA4vE9PAzb
z{PRz1D=RfsRXshuu<O_Hm+(J#e}3Ek*7UfkuC8|T>zAd^i&trd&Q4B3VK=Y5oXsv|
zBu5LLH?FL%EN^U<JrO#Slbplp?oq53;n3IZtz1^FXi5}8FTF$O4UT^79-gQc490LD
z8xUgBOBx-|Iuc@8_3c7>b5CL0pdaU{{#_n6xl+|FikkONYiny{M1;S;ucRc5&*uvS
zf~l#g(4e4*yy}#K^0(7Vft;tl`Hy9Nv%$Dz4WBf?GMHJ~u=RCw^!-S4Lv21I*$86*
z0Dy~&i_uA&gc^Y%l?PnRu%z<#T!@F`Gfv<mm*zzuM*2-nj-{ri0uZ&J41R_1ty~}*
zN3S=%Q>;p)%XntXU5p2r5R1#JH$IC#eSACqPH1t;R9#)&#KeS5CIgCSLTbg~@Irsq
z<BxNzzlYwJcDgHWgr~FakG&iCvO2#Y8vHcc34ngHC#T-vHmi}CfE~4EU{nelDPiE2
zMb8@U?^v6Azc|)mYi=wJ0UMqpxFz$X+*1InJL(=O5~f#_jh%;Hw=a$NER3``x_QXh
z2EyIqsl3+N<#o=}*JdHvdo6<jUHeP^L3I_J-tOkrnSuGY&30ynirPn2oTKe;Fw6o{
zk9sE^ag73$O!beO=d-dzZ@X4SJ##~K{e7<hHEY1g2aq;E=^Iy;<o3O2nCY*>njM15
z>;t5=;mTU{%*3j){GRrj*<K!n5@l~=LAiB3mKIl8&Jp!iO~2%(rKfJqzTTSZ+!$|J
z8*Ny7SGDry;l>+opr21}N+dHeoEcAM#FFTg%UMz0=@A!MF&@VHn*Z7RzlZ+M2k^fa
z0O&5T2Y_mLwE}y@VKTVJK1#G0R1Rf;s^WAesTc)}byYtQq-j_ts@c_WyR%Tvc(p1@
z8&yJDt&Y77ScvbH$Ll+G*OvAcn8(1>zh}+FKqOHY`nun=OMnb9GWqt8_(SEmr23vh
z!gDZO{Umy8LQXtZMjY|EM_B)Gv?169p@^Yc?UmPc<l-m_u`v+&3n-dHW7XvIzyGfE
z$|$;*q-@~b#azp@NkJS(j+yr~)z0>6hOf>|PC?Qo%&5?P7ThieZQFFD69|YWshJ%5
z)VJnjyPVl_x}mcu<eRlfEhey|%@jqR1hFbmlhb<@G@Q8DT&;UTS+EU%7uOUl(V97m
zWkP+|+1l_;txzwX^s_^ZajV4~4^GBU>)qvfzYGNS=sWpHzye~fbn&XJnDV8Ylby=W
z6xlcxpY%0gACVancS{L_@jSDVa=|;nb^C-1^%M^$o21<|ET0UkY>Sg81y8<Em2A}%
zR|)CDK-6!f69no9YM5dOU@P7df#tTT%8PS~z;{5hOdA9+-Ac^Cy3)Z2+{xVih%R3B
zOt#WfWnu)D4WYmTO4>?t&+F%cqk`7sb+7%w?!KIV^}NZaY#h31T<%rzX*R?2zAn!^
zh?Akpc3%fi6p;AeJ~D1`s&(@NVaWO0F5cA#5tRETo}Ar<J!Jv(CwKDn2sfvT1pB>K
z`)$)seP4rUM5k-g)MI2o9P6EJ0Sfq*dxBG}TQiRWDnNx`7CquST4{mQyuaZQh8M%0
z2rv~r8ppri4=3x}>173ASuu@&;q7MFKs!pr#)`a-ge2x2a%9fi<j!R8bh`(Y{w5au
z@a`t8LqGC|cg9jjQ+}WU4G*L1!wGm*H_bS;bqTG69X|8`0o@LTrl>7;X$#wj(IKv8
zIU0?qFuU~!8$|d1gw<^SQj)mEldB1P8N;F0hMckm($2xg(E(6-srNI%y_Y1o<?n&@
z@Sx)fY{AYYl4{bKe{9*cwgUTOA*jbR%U#Zal^rccfII_0W=LsvBk5?zgo8O=nF%_q
zU{RMw{!Q<&9FOm|e6~~VtR6xOzewzod4PI^Xqbd-m4t<xmgbRNr3>;JWFwt=>zhVT
z*z6D;$aD3o6=HLv5lcq2x^dS(UW7u@h}5%&<4F4I@-nAjKyc6;T7U-Gqd3WOFuh_X
zIZ>yKYuKg1$GsLk`#h6=z2e!BJ16lF)y(??Wjt-{<5{2lj)hY2cGKCgN>)|``k>+V
z&ueC$KX9}+$Of1W^ftuTxyzueb;hOd)a{GR5sqKmvm@msgcR=UM0=)7BvtVtP4lqF
zS|YMlOKEY@O4k=7xiJQ|-&I5oW<fupj#crk#N~$Nz?&7vm%6Vn^q)pl6=H^p&QL4|
zRj$PvV~(ky{4YcdWY_pt>bYWqb+4WyF$_3JJ|W?9pi16Ronm9BjA#J%ApfL_vF<C3
zd*5@`m&V4}v0%6t(L#R0cUp;o_EMB9>_h|*BrV23%1yWfKYWWMUk<@fz2Hd)Vei#H
z`l%D{?=TS(wxK#h8!!hsrRw=yVI5B)pY7q%4&RF_IRC!Jq{s?BVz911#$7jQuORAs
z+Ub25JD;QeBPGOQ{^ey@hq17&gcO;@B4~x?j>HnR{c0$eRyoQoTM{}et2A|xwn|&o
z*cty0*(|-e`&;KbbRASC<Y3Sksc8RL?Kq5-x59H3fw_%ggmG{B&fx-?lrq1sBj<?7
vHqc8P*eGvO%I~BYqCfq`QKbDqW}X64;V+?_t8jo(|AlLi!m|?t0jB=~EiFRN

diff --git a/images/inpin_red.gif b/images/inpin_red.gif
deleted file mode 100644
index e85576af15695dacabd7bbd50e0a8b8ff6d9c69d..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1050
zcmaKr`!n2E7{*tSU{Z-zrlr>Grj}hR7YS<xJ0e@UgJDFIHe)D4mS%*GX(yScw4#Y<
zQgZ2_5`v*kjSkj5QP=IROAwdZO-jO=O{E&ywfnuEb9(v@^!??W^SsaVdEZ}7lp<0Z
zoa)AL;~1@+v<O)rm*4N<)$O^zhtC^+S1{oD-YC~++|zy5U7+Cx40s9Dd&R%=_m6n_
z{^lbb75EK$2dIU7tze&y$Di~TObOj*1fmHazgdCTgh)6e^lmv4IC?;$J#cVd?4uKj
zbiTe11H<)VQF~ZO_Yv9T!LUhx=@Y+w+5qX4zf^Phs5Ve?Cn|0tFmNb1WZM5|Pk898
z#P3m%q)8D|8J9E`7N!l2)&+(9csx-fi|&bx9gr&w(x8b5`I=N(nUvfSf1)DkOigM|
zLuyua#;46GpXe1IFT|c~Nz1NRo_ijX{xl)6D>bhsC+C;!?6o-M@`;b0C1(Db^Z7zb
za^a<-i^Z1<i;G^I%+x4z$IhNBEiJuvv;5xmYf~4#8oy9-yW)piRTa0ZDyEByt7@tr
zU%lFXx2m<KPItY$y|%8gv9bSdQ-58px~aam<^E!QLu*$@TX*Mp%Y9W(XIpPyM{iGO
zZ&&YwuCXqadT3~HbYyvC>`#q)Vs>tBetvyg`*Q9N{nC@w6+NM!r}Q)H%g;&u;?~nO
za!GG68jMCGv9fG3tiCiF*VosrYioaRY!H7L%^Ux`+T1h}gxO>=na#v&)0V~Zny>=%
zCS}DZK@e68y=kE>BuUcb7D+KI^NOZvGo)#TG9d=gv=u-CKtw@?r4WS($U+)f5M)`F
zga9!#?l6F{B1l4r7zVNw1z85LET9Pl2m}F(bx1=NF$l3VjRgQ8AQ8*Z5MmzA0Kk$6
zcX0}%@f?7Qfe;%Ji!Ce;F%YZRgJ^`2m;e||1PqO^4>E`aI0;x9TW|=kf=6RKe&Kx(
z!XyN+jx{_3vvFYo-yy=S|1Cb;bL_K>)clOoyf3oymAph{1}{nx8_(fWb~a8N#PQ?c
z`tP%ZJ1lXKqlYklhAYW@)SOpzX4l~5-8-CJa??tR4!InbscgN=)Zg23f3~5@6UuGG
zWkO}djanYh^Ux)7Cf&~Aos?6u?}p#^8HtSa<dyM;x7oj0@TF6JL}9V3L-H-BnB%M8
zY=3J{n`?r5mP?(4W7xU(^p*0tg1nLHi$67u=?l)ElE0|6jo#@TKnm?$4Fh%_jyVs4
znTBvD-#1jvAEt%dyKYZkaB_UMqS+RZ&}UfuI?eXmU7?L0kEtskl)Npgkhl6Dliz61
dZ{9jD&ULPotI(k1O(x}z_#rsUciYVg`4?75nPC6`

diff --git a/images/inverse.gif b/images/inverse.gif
deleted file mode 100644
index 16bed0d8cf7badb4e2e47d19827b782390b3db82..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 957
zcmbVL-)~e!6#jO1cJH)q!`RG98)&!&lK$YwMU>q(Y;dZyfG!eE!HwDIrLd427pU8&
zOTwnPtO_w=B5LHNDFuls@KVElurChAq)}sVFELRt8jUq-;{M<TH`-*J#s9$bG&%Xc
zbLRWbIXia`_a_dRF!2|f5eNiAp%5V?91ce!5lU%GOG|5OYg=1edwaVJS1C2%EJnvb
zCy*TPs9TH5fjoxd<T*Q=Wce5!r(X7ozbVO%#DqgXJUmwEt{9Ii0U7%4Mf8WkSAmN3
z4kKQ5xgu1G3^@@*9N++b%}5Qnl_*hE$lx~y;x0Jx4B@*8Mz|0(XQpe3isDLA`E7<Y
zsxTCUM41e3#S{mj5a~h0r!-IZL8b%2MSPTo9S7r}6eqD{(*=gUC`7{`&$}Q5LSTAi
zuGT-QxFIS1p?G6`wGvTENLjx8$=2#9R|b_dZJdg`Nls!^(9FoWU?K?~(?#dcdh4rx
zL@^<SO|bnxG1}0<^5Rsbk0_zPi0eQP5CMcohz~IR0vcY~@++LFIzbIkfQCNYfUrP0
zrpwq_MO1<6>@q<KAWhn#hF7<I#hIi+*gqGxcgMJ%m%wZT{T4X?aRz5f*gdlxER}xJ
z1Jx)mRQI76q^1W^e+x<r_h`XS#I!ZNGq_)UWg}FwU~m+C#25m)-QU3Xz#3o)fd!xs
z+C<p*1w=8(_l9bdP+7=4?iKJ`9%~$k38b>9nM!4)JaM7u{9})=U=yIu<&(!w*+;-k
zL=Okx068EFnEQ*}#J0fnZXMj1I*QsMG<VAaDWL8^<sfrda<Fg!eMce2QM6ze!1ACk
zUr1RYjhB<!^8Xzhn#|@cfL=m?uHH32`{}FU&YsNY^A|rm^FTlpd&WkeeB+_D`@foF
z2Zu|mx1Ij;_q)IBimV!VfBED5=+oz|-kFq>U$^JYN4iQQowv-d-+grDrjDn)Isb#9
zwXH*6dRLyCjb9wzxA2Pj%+=ru+H=DcH>bN|6OXz0b<b)0*)2QV2XilY$A|8my*61L
kIp5VnUhnm<-oCcEb6v;VZ1YR+9RK5qebM*se&J%<zi+z|h5!Hn

diff --git a/images/less.gif b/images/less.gif
deleted file mode 100644
index 7cdd74fca35422f87d5d919fcc8b65013587ef38..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 203
zcmZ?wbhEHbRA5kGc+3C-jEs!T%*-q-EUc`oY;0^C92}gSoLpR7JUl$Syu3m}LJ|@Z
z4Gazc|NjRn)&c1N>1AMPxNy>Q^<InDXLtWk(CACaoVS9FfoE%&w!+byzt4RVVQvqw
zXq=;3UM#}cJ(Dp}$4V+dJTPO4LWl5*uySSrgL7-P9lXkV;L39eHJ<63w;x8E{|>wM
zMs2FwZufTUAU-Z;Yp3KCV=K;v;LNPfS_Wp7!q|RQLpIGhbLY)puyE1h#exjh0A*!9
A%K!iX

diff --git a/images/list_20.gif b/images/list_20.gif
deleted file mode 100644
index b26ab878afa72193d8a6031962d64e61348cbde6..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 743
zcmZ?wbhEHblwgox_-e!e0<xOw8XBf%W(?M$QUSU44h|lko)M9e85tS1wYB__jneUb
z(uw`j8B?VSdt{2{%2X|pZCn-Ey)1V2u7p|J=k}GYoL;kfPV3rb)3+_{+Pr$s_Kk~o
zUwLqT+r+E8rybb2>i&^AH;*p6d1l+)OM4z3oA>C<%4b)1p1*PT_O%m-Za#f}`^e*a
z7hXR+`{wzL4{slQ{P66{ukT=I5k)BeWMO1vFk;XFsRsFpf$g8eOdqw$CsT|_q#BSk
z1pA0^wYn}0&=8S0>as8(f`MVC&$5oANgE?{#JXXW2txx<kdeEm?eN0_2Zae9DoPV=
zlDHTg)PfI4AOwXbx+H)Ez4s&-Nbs;V9|Tj(hZ5GaFepq`jawhy{;29;-1_UUzgF#y
z0}Cp~=uORX^4J*z34;hdkV?g<b*Gjv@Dv0=1T7TAm>4FU6j*z$OW;IKkoGx{pUswt
z{R4^et-ac{<z`RVYSB&?tBFCYuQGBloJeWEnz>bJZrJLpS*1~H!&YB@#Q+p+yZmy=
zq8`8HmtU1w&GlP-`6WoO`SQx<SpqK0g$2NtHM}rkU`W|?@MOqL9VQ83hy!}uK!Qe%
zCsmYKbi^lm6z`1D6F+lY1thp&L5NnRp1A+YFF$MS=AU04BE-O8B<Y|az`?@Q2&aJN
i7)e45gOCnT&P*RRWMMEHDR$5!7dhtGK$4LH4AuZ(u?;!^

diff --git a/images/lock.gif b/images/lock.gif
deleted file mode 100644
index b63a8667011bc7faebb2d9150447743e1eef3f9f..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1124
zcmch^i!<AI008hWM^D|=ZdzM=)J5CdW~U_`o3y>UCP?hic(mPVx%46yQ_O3A9+RS)
zhNuwl3gQv15t-N;uO#GwcqI`kI1%Ep;cz2e6u<Sa*yn@w^YuhMz5p(O^*QjB55?a9
zg)1%*=@;i1Sac0XbN5TR=KrgEK#FS+bT*uh#OHY6p<8#8utAT$#TR>qG70xkx5Ki%
z!e8EwDD;SEjeB^RdLMWZN@x$jcTpv%m}DsYA}tK~5E)n^-G^A}O{}0l0!0zP`=_d6
z0=OGZzmmbnKq)s;nKe<sEvu!L3<8oGYGXiMEMUh0PAq^jkxlWSEf&0q2e`t7mPCNc
z7qq1SYyr0e0^D@qUEC6$+ZW9sVws(oGAX7)=wJC8sWO321-`6qVhu#Bp#;2=;_4(E
zh8?Lk>j|S<m=8pB5LmMs%$6lG`$HP!KQ>Xx<@0YVKt^?WJc}2}d6Q8sj^`FcH!M(I
z$&=cOqFWRhb<&Jxac-l$pc@_2IjCv`socTj4q5zbIFUb)#v7!)9`5Y|zwl+Ld}-z%
z{b_II&pU@$Vy|btwKNf>R|u$mHMD_Dx`;v(Q%l83**%K#j&Ww^*i+%iq!840j_394
z7WFN%#Eo?VHC1S&$tHM8Wc&1`rryb#0aaNa%ofcs24R*|T{WbslWN+8Q!KffEms<)
zpkrXRWk^#!WNMes@Fg>qies)^%ad!lquTm$xM2+Lm|ECb0HU#l4kav7E{aw2B9%@&
ztrJZz^X51IoP9r{S(MBe6dHZcJ6JMn=-2K_<_xkqgK--S&cZu;KsE=<bt@AK>oVPu
zLcczuGfwI@CYKB&D{C6vUt=qq3Ozi!d^o;*s4{G*4V$X9RmCc-Ufn+V0H#(BwX0kD
zt=*+<*s!~|u@CR;8}|;3I|uOo{`)=S!9T`>gF}<abaZ4g9Uq^-|2wA>J4_<!aWW|q
zks6&$Mud_{2&`WK9ypmS%&Y+nT);=4eGIHl0DRg3$EzR6HD6bD=MLGRpQTx!Im>xc
z`jTpS0F}(RR9^idJDA|$xlvV&d|^Wh%Dbc)4yy>(;(S8_xo0T86NiyFvvclUf-P%t
zROH2g`1F5!nqzIRn_qXcMIlMUj)Zrdk-wYRgH$T^X9f!6|InR0mfy|>dMBn`))G!_
z=^S|8?W*l9Xv4?P9zWWJ`ao*^<hhxpV;rqyP30WZ=Ju%?Z9b4g4J{G;9_eE58dN5Q
z+{909(O1wK&-}dhX^wX=+KP-nr7uE6onyBe^Uv0Qfw1v}a+KB#S9&^0a_{SqiqpVl
z7ZO+$Ac8gbeU?`+Xp7HpIy&EQPHa{o9~87!E{V<M!w#<BCkq&Nq*itwzBXeCmroU*
O|7PP$!W}a+aPvPnYmDgt

diff --git a/images/log.gif b/images/log.gif
deleted file mode 100644
index d570f0b3b343001eb91c3b40427b27749bca3e6f..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1640
zcmWkte^68R0e`?Z8p)$7yduae&ESbOK3%}T7^GHM78s3y^c}Qb4mHcbbMH8%Zw$G2
zG{h&eEEf?|te8;@8Es>VHBA|^SB&^T4ez)Lk?XRUQ+S6Rdq+icZMUUt?DPNU?(?(k
zSK7j#9g{<H=z9nP@c@{OJ}-!V2SBNcoA8MXysiS!W|}e6h?90X?ZBW3&hl^%$$A|i
z3kA(Y^tmX^`tTeSF_YndM_o%88;qbv%{pmuIqd3pU_BJ;^#~!Jo8VBd6=@)lX1$|_
zQ567<Rs&8DAP++<Bsi>L-Bw^RMi+Rvj}k&$c%3rlQtid5W{MB+L@fc5VNjtG76iME
zM%|3hZ5NjXcFf1Q8Q7o$ZTi^1V=4t2p7CLQG(KWS+Ki}$<i|b130HWY=e%~HK*f0<
zT%!>qJSf89!y4FPMEhyFjS2=>upQyXJh0h_IvJPKh4fOf=OV5^Q8UT*lR_knjoBT2
zc3q_oK0zRTlo(?5^?FdL5*NY_8y#I1xM4dt;tJ02uvW#d2e=fpTCAW!7o6weUJ7p3
zN56{-^FCq9K{k<~R?Q7D(d8i4OA8U6pW@U-Y9tl$4116!J)8()YrxZI6=q!I2@>;K
zbrm}OVFGhAcppR55NL}&918PeF1m$6+l?Nl6>ZWZLkv5_IC`z107GmfsMoN=c3>l;
zOF>nh3Zx?Fu$3KWxnV2ZpaboC)JAb54zL%4>vgb=49;-exX;n=K>BGh!bMjEA>hJB
z?4Z*Kv?^UC068dVA&^dzo8pibJ!m4R7D`wWVrwyc%#O4h;aUyYkAqAE?lhu9jBCh+
zc9L)tAufgaDW7`38t-SqkuX2*5*LDunE?hJ+(Sj5NAVmC>!F2EkagQZh1zag@7KYO
zv7FngQedcs3Ql;!^FeNXcnVCMXJHFTx6#1>hbu6)pF;X*F~osNwZp~)=YzsL3u-md
z<*+d0(I3`>0vxeXVBKQ_0oskx6%pD3K~e~c{Rdi&K>vlHTM!Pd<NsfPq-#)|6fOU3
z;Px5Gd%N^MT)VV)`&g!yDRkeJ*BNxs(b1FHcaJO`r;X)F=ZaIB)p^AI>ht$5$i97?
zHn@97S+A^-Wj=W}^T81n*@9j!uj?KxzqsMh+rKz=@coR{Co<D!>9CZo<S*YbU9`NO
z{#AJN$?kh<W#XB(aN4KZPWW$4TjWYziiOZMmB<@**pr&ZXTF(u5?q=(4IP(Aj|LR;
zDG4vN{@B;?D7WzsQ`UEk=6B@P@$WtCZm5!|N543cl#%+c+Q^r+K4>)M{=xH-O>Mb}
zuV1(AtiqAH$sPV~Q?jD)^rOW@|Dhunpwn+FcTq|8_m^VTlFgZaB|FKrv$xLHT^Eag
zkNwcBy>n*dD|!1T*WPM;=r+VzFG)Y_m^=0Iu%;(7|F@>NgxxZx*RO7|cEjm!#Ht;8
zqPi|~`!JJTvi}C><Tok_!z<9G*9I!nf2T{ytYev{PDnPIBHNb1#cNUJ(Uk4kH;I$z
zPnd7M?@SRjsr8D^(b)s%0^A_1dR6pad2RBQx^Ud)g)?WN3%LXF35L<FRro3wm!$eB
zWq8Ew4P{wV+#e@TW?r}oNf3i$PT1MFf7NWum~52g-Arrt@4D@J>`$#h-bHt^8u<qK
zsp!@+S(V@W(!edv9Vyn^ddeZq7E7hfpKt7g<GwJ~NK|Vd`i}Z#|71U(==w<I?8=%#
zS1VF~Gca25b0WWYlY9gA_tH<lrS4qGw^NVL<(IWS`K40(^{M5}j@C;_o2ET;=Aj0i
zX=6@t>LCvIGr4a{ooR1nS>{?x|7Zv?Z>|&{)R|Va`KraPY)#h&|E_zj$CT;se*I!v
zg2`8BiW<82mE79?Y^tQ}!GW`;yGd##lO7BFIxh!-)Ky6@l-|5)<#qLWK0kO;TDa{B
zejcZ(j*F!oa(_qKW#pi_;PUao&e`{|+>RNqaJ3|my(h0c;pt}+iE@)EVPkT6dDf;v
z?}0y+W&Wf3#|PPR&y5oF{zUw=Oknrjo-2B~HNQGk8jDS5jdkDb$n%!I0F$YUd!{3&
z>8k~0>l(^-*Okv|Gd{*|fD9M4nDFWc&#smfxr$f*B9GkbE-Jb<xL72K5ZOTTka{4I
LjBZQb0u}!c_>bhh

diff --git a/images/log_20.gif b/images/log_20.gif
deleted file mode 100644
index 8191b4af41ee5479589b794b504fd42df2a72cb1..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1258
zcmZ?wbhEHb6k!ly_<oy#L6TccL{wHrno&!V!9bot--N+bfz4Ed!B&aEMvvE2nafU_
z(au=HTESFTPD5QoM@!4j$kf`>)YjIP!9|DJ)ttfGl+oLQ*VBR_!dA>dS=v`q(7{g1
z$DSe7fg#+VDaM&K%7-(;g&{nQCq957(Tyd+n<?9$KhK{bEs7;8h^;(|xjKrYAe^&2
zhPyIOF5g+CB3Q60jxjq`E!W*7E!r|I%GJ)?!_LStRxB`r)7`;5KG4!RK`J6w(z{YS
zCB!DbjxVK1ys(JF)y2ib-O0zx+dniYz%L*uFeox0C@wB8CMiBS)G0a2J3TfeJw75g
zH6}SZIXxpOGc_$ICnv9<pfD$|2ncHHt13#XDyvyqQ~A1b7`n=Yo6|&EbHzJzB&U`u
zbQYRTt5aQESJ}vwFjY5mskPtYDBsoLbq%#blNyB<bxJO8H(NK!ddD2cna#0NJMy}l
z>n8P7Y*}1;Zd*%jX={B|UmrvNWaimZ7`j%<G;f!mwvuPo0=89axOQ(~*tk!iv$1s5
zQtx>OmDe7S-F=w*;0}h9N0?4t<a~CH;qe28=GK<(&X%s;o}MY~ZF8n~Or1EPw{2Q?
z@04jBlV<cyoIG*z?1{4{O`SJw=G<A6=FFMXv}E3dMYH$r?pe5K(aKe;_O4sEdCTTK
z+c)jpvwPRs6Wgwy*?alymb;htUb}d5$Gmg<SDZY&^X}<QPmWA|e028XQ%fJ8UH#(H
zx@VU+J-M>y#ifm}uI+q%bMKKOhmW5+aq85`12-?<y>ax$^((h;Ub}bu#^EQoZ~Xl5
z=-QFzHxEC)ed_hCL$B_hdH3M>y9cMAJ-GVh!JRjcuDp42`~AyXAD*21{POCT*Y}=0
ze*EhB(>L$ly!-I(^V=sM-oO9+<@2|n-~W97^7rpw208-8pDbY8bU-91PcU%&V~`J-
zsFZq=McJ)K!qBPZ1Pi0U2AzpYuBW!;+}_Z5YMXBKw%nVmJfkP4T;G;^dr8jiZM@NH
zxwnCA5b|Eu({rn>w`)VlLBF$(N4=+Ng{<(&y<FleH7W4+s*+s+O3WPGA_5W$1`G{)
zev3D~RSM$bUdp29H$y;?DWI8)o6qLf6_3+XR2bJ?u-xc$w3FXn;-$v{hQoq>w`(Te
zVrlGJXQ7j_#X&Kne;$*iNXp5rxwqDb39ahb^ziBF9;bRUjZ=jh{OY;a*J!>jJ$y=8
zIKyX!=aNL0`3{bMY9@)Djoh4Qy~^d)<_r%ddBvbPj$0laT9$TkhN1E{XD$OL1D+F{
zGZi1Z@)&a)#U!!m-ZV_MSd;PSfEq8aLE^q00f|g3Y7NI{s4ACbw~Klj-Puy8yo_7i
zK+mtC!G&k$#;7d?F(1FM^~p*n-@Ecsp}A*{*Vdl9i&VLfi@NuE1$I7IF=Z0RLI=^C
zSGcC@MsAwrad298xSm?y2a8!I9;X*Lu1+~QiIux!3NRcKZ*TI6+?aScYiE&acG#MT
z^$i|3r{&(>;&yG3q4TseH*}*nCmpQ%$^}$>b6cRc5bG&n4Zk@SnM_MOh2<P%SXdaW
E0Se3tM*si-

diff --git a/images/log_menu.gif b/images/log_menu.gif
deleted file mode 100644
index 47b6856430d9059fca381849a3e8a106a27fb832..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 2277
zcmc(ei9gd11AxC<!!{|$Y>r`$9J3rn$kAx#7&$^(V$P@$x)^FRV(vS#(4?5stlpgE
z8Z%NMOwvznW$94rQ|bLxzu)^;ywAVzJdcML(cU2v2SNa=0PyF}pXli5U%!5R`TC{4
zqWbUeU&|}Yd#A>Iyh(TN-bsj!6K{UHyF5E0UAi~-FsYDve|BncarXZFbVE(ur}cHI
zMB;zIpGu`>Uuyu7?14xk5b6vL+YgXz0Rj$iHUMlffR!Eys|jc-3Ma<dZ1%U!?-LUu
zcLJfOzb`d4wWhEbaI%1%3j?3<gHTAYPy+WRT*DEw#oZ3>VlHoMT2Nec<#GixH;0yX
z=2(1GU_c-ep`JobI$u#Fe~t_}9ta%rBc+>5R)By|7|3O}l9#<-aG-eMX@3_Vhs6qo
zLWxAe<#Ju?m|HT)DztED@}ccXew`1L>#Km$$}NDUjq6dQlC5EC)&+rJWNr2J(&D^m
zq{q?0R!2w2!NH-Vq$D^v7yy9#9UTsFWT4=z8Z!=bBDP`3@N$#9wy&9J8rC<WSW*If
z`5=9>GPf)qb$78#N={B_arA9JBocs;cthY&NI@IAj|ZGQzpbWi>;1l>ukT*0zkT*z
zy7X#Z;DNWh$iBqv(tGm=NJ)i;CThx&eSkPJrF!R3H}L+|)78b17fWJp3rY9n{#%1O
zF{voU^l(Jl!E^@v%iFn+E0b%>58pi-_H@~$f_G7^U>s;B8z*`oauZQ3YGr--+I&xb
zb-Ct^JXH=$v-;e&0xBdowx}xa<BCxFwExX4zl>AAr=!BLo1OLO9)^{YcDGr}XWyQh
z^GB=mt7`HkQ|*PU9G6|Thy4TFxZJB8j;Dv)yT$vmna=27x3H*)+zYg_;)^WC8BuqG
zWVAje$cdJoAzgVcS(;d$<jZC{Boi%CLG}BO?-OIgjzxqXI~o*CCPf4|*zVN*x4{1+
zxPQF@|9b&|{suq*6w|Z<faTy=T>J`<4FStT*`A%8(L#;g$DfaPHVaEJ`wP%s{A;30
zb#)x&$>7iysEUy%9e7En!w}d|dOrWg<3>CauH%i6$!*oNp{_g`8c|TzClp}<3>Yjm
z(?G=X#DCfsZc`ZC&>>E4A<Bv(D6?h~m9*`WrB6DU9^$^VFKf$7ck;HQ_R+hc1~E_-
zq<azCOds&7(Dwe}-Qd%c8Y*YRc4DZyPav?tH^YW1+zaVD!+O&|ycpQ%yiZsu)y?MH
z31f}AC&=TD@#~EP=4<jiN2%&_L*_x8g@4au#h8g&2c~iHWm{^;-IRkIAlEMW)$eir
z-?;|0R=$EV^nM#>ck72{=V4n;!?M1+e$ERj4Y)A63Ghr*GmSPq%IRvFvRU+=e!Nie
zj5tL>H(l1LpqCZ2BGjWp4D#{O<6{Di@mmYJ7ts2$%ItlewNXS1u018wStNVqh1%-o
zsee(N0ySWn@}Wwa&w+fsBCDD4JfO8H8y`t`MPI<uL1UGXt#$^9hjpPlG@|GXZ8b`G
zdPE(zd!A4ioozN&zdmhDtd5{KT%?hzypGVKc&l$RDValhjS<~QQyMuWy#;?t{%1^O
zh|69TwUglPxR$%tS7psmK5Fa=qI}bs&B-3x(F!J;jiLGyu}@llM<To8PQ4H4UCTOI
z!f1p?dTU~CpE+Pw)-LEY3j|7>z+OfbO`uVusvx%!d3mcS(T>^}4hv7JQW#7h-YaT_
z`&2Cuc#s2v@CJYG4R96!)v`<?$)0^^f6W3})~0~1OaQ}d(<i8C{z6U%MvL1*e{uG0
zL&iEhhCARV@6<k^&U$%y)QBGvF8FI^zHM;*LwopDpc%I^(Q(GiVibgY7$%H#YO@}c
zH~jlkvsx+W<W=Q;KP{*D3>zV&K6!>^(nwDwD&qSGT}^APQoRg0R+{T6;NEkwYQCTL
zo@}<xs7s>9?}qCY^b%p;h7uGVRTytOa9snGJ5#dXNzgU<)sZ!QS@^PUUchO0-%MIp
zTed=ng{X7@J61#a?vuK(mZkIU*{R(gn2FoilE(8Sw)qC(Fzpst9JS!p>~X3HEN9kI
z7Bykkni+*uXtUvX8@KcLA9(BAXd7#kzI(Lv+@C*xEWX$Oas7O6$mX}V72iqr&wdVh
z{CWYt<ak3&7dqSj^}NITQwC*t+4=cmDcVl@b}qv?>FkXU4evw)1MIN}aJ9^>aZVeu
z$<&G^y^YJfPOLu8$T>dOJRDTGOE9d?ua!f@2Phu7X`sfTjPAU++R1{MA7z=>R1HIJ
z3+IZCyaq9;gFzPL*S8!@DvQN?^SUNms0P3Bvq!e~92(pZm8I?fis*lYv}2~92=}1Q
zR6-EE&BVme678B38+kqY#vjAC6h79#@8M6knH+_{r+Lb+-AB}wv%3df;fONDuxof!
zk66x<mcq37@e0+|Eob#`)}DUFWNVYQVaiikOX}6UU|kZrHLY4VXDY$hZ<mfiz|)Zr
zuz1=9HG`ME)!ml)3BD}{ON|%oyhGgE2%p`yYq*`Z`hh!M-K`R|!&Sfz=FS9RKOs?f
zJg!?|W^9{sq#1jWleW`*dQ}j^Ksd;yz}stu)ax);RXQmCC4>x>x5!}>Mk*UB2rxuz
zaJphNT4L+|UnQ~kX*7M>GsO3?X?ao&U@|ZL!e}Wp5#s_M(^OZ3)nM_b3hrq$_ndCp
zG<uXmu<=jQKW5j<if?_hsFh@$9H^dEP5foXE_-$`1nO)$sIfONmn^l@ES@5uurm_M
zUCstNVT0(V%)}UKCi@dH;Ld_roCNtWp17O+`2bI5UKHDYDfYVe99NeF7~3mtdqFn}
z4g=>5cyg`TLr-~SqJSUdxpy}w=I;eoV-9B#Et^!75(v>+X>w!MbkNr0=w_`XrJEE5
Wl6Y9_8!zAYW)x~N^Z^6{IQ|13QBht1

diff --git a/images/mask.gif b/images/mask.gif
deleted file mode 100644
index 69ca60ba6bef2cf16f0f46087d43dce919b8b5ea..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1302
zcmcK3(QgxV0KoC%Sa0RWxNXNg=Q+;pfOFhNg@RQ(S-J}6DCAfgsqDaJ>qfz5Ua*o2
z>BEH`q}2*DRMLkN7FU;|RU0Uz#)^%U?Ia~OqmHVLHl$%AG@%KVEZB!4{t<qjzpvjn
zce1a(Hrx)iL-!yEhGCK<S(X(YVjN4zvg}2@;cz&eNLzt5lg@<5Fb?CnTrP{}ER>~K
zEE*A`gLdTcLa9`mU?(J0nm_|Ze6d_Ei>R0isTPYxAtx)9N=a8H2tvV>5>cXITI9r~
zr6s{2NTQS`(=OnWD9MbNC$SKNGrW~g;c9p?jG}0nsL(VWGKSKzv;#OehO1VqdPE<M
zMo}0IW04B66wn7ny(ef7Iyt9~(xvojuc6oNbaR07`~42WVbj@YlyTXF7#5qL0zq$(
z$N43qD!XJorcWl5cAFi*fa~RWz>AcpR;w8qBLNeFAh1SOGD-}=sCYW9rem>KKA)F8
zvO#AE6On8-Ye6hDph=jt+wBrA0|25}RJ4mpB4uN3fk415xMM^dK@b<^O5>RT8mto4
zL_FbPB$wT#s%ipDFbGpys%42>2G58FF)$IZ0b4K9%Ob4k7K2z2fe`~@h+vUuI9k99
zdR(s<6bJ7x)8=BKXf<0Ygenq?9?Zj`ToO-8l7zvS9oRFO499UaLSq;fkH-szLKKhX
z@Z93l81=}g>;%pb7N%fIFbW>p!(dD*mEw86R;~F_f2CY;(k=nGt3*wQ=*y+DD2PEc
zC`)n%&qlClBpeBok$f(%hE!RU1t3^^trnvtqh<t3@VGr3u(F7yfERgS??udB)Ne*u
z1)H=1K8dG1sANI79G+h!N;2wo0#}JBr^!qVixV&z#bOMi3u4NX|6j*Hb;|(s1nP#K
z`tJ!yTZ1lZ+xjnle(i*IHFMy`#i{GQrd59bjV52Kne3!zI)2`+(GZQ?V2HZC{>6?J
z=DCBi?*;oWfs@m7U;Xxc_mQ(2zq+<Lj5T$f+qZhOaZSs%(cQl<T)3y-HaN7~)#~d1
z_UpI*el&on^5p{;j-H-s(csrNT)60b(Dxue@$UD!b`V@-Px~9;cmA49@yx-T^3bwy
z8f@O*ybc;Uc1pc{puxSbb3xni>B_=~SJIc@KN^RQw<J;vrj2LG*7BXcU#E5}JJ{$E
za}11kpL`CO0>k{w`7WPw_)Ka{85?aTXZxnFzp<jNZcF#zz{=&H^!#w_8KR?MSJ(ax
z;-lgAP4C@`GmxY1&fK}7R;;zMb^n#aCl&K4=Z}WjkCo3xeRq$OP#t-uxv&1%!@i9J
ze?Ej?mLF3eG0mot6RYO74?!>WG--coINy5rTAOJ|qe12$x6f#O{UfWG75(16x;n&!
zn7?bgR1XGB9n05D9(`Z?%6#XJUF&;BWdv$JvTWQub#>Ucc~8R!Z1dW6rd?a+JHPN#
zk+G~7TD|w^&GFGwLpQdv>r5t{ADUCUMh?&4#Lw;6lmf56wG7lBn%=&Xh>VRFfd89Y
rCd**U$n3LwX9nx*_vjUkMsqcFt;=CLbl*g_J-+|tbkI0pf_nZ1nMp}M

diff --git a/images/mlog.gif b/images/mlog.gif
deleted file mode 100644
index e0ed411d2815ad7a00c78936baf6973588cde06f..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1002
zcmcJO|7%-S7{;I6o$Q|Btv#+MENG>f3~bYC_Qj?tx^lWn+d+PC*v)KY?oKPrnkth@
z3Ju|!={lT6WTK*_u0`Asm2Tc&2Ey@67dM@n3j>E^Zo*KR%gx1zyG-SHQ^o(l_xJaG
zKhN_%@8Fh!RL`CmVz`b*S+JB+5>$h+2|aD#0yK@;8cGU8LR?2nFJJ%^xK9sx78HRL
zD6pKFsxq!9m3GQ@T!$$pq<BYfs#?$qgut@%6e)l=nltxLB;6`kib|O!VGVx)VL&{m
zYrWRAK?oH>J2!#kac~AoZJjz67(p(^YdugLlH%KJv+qJz5E2#A)V(x2({Ch;-~?14
z8`ZS(G(bMcY%-IhjO&!rN%$sBFmMCZ*IOU5KnP}rn`<dtEHrhxSgufc+vs+|6{ti;
zKbEwroG2<}c6eOt83orDUi3TN#B#-jq$b(l*I0d-1`_(sa<MN?xDbIu%{E~Uzye`_
zZ#N~Jh8mBotN{v;z-oU?BtUr}5i=Eg(5)Iu;m;&W`#^LEI^wr_iQS5~NJ@nn%J(4>
z7<j}&>iGAwi&Ld4Axa3jWp<-hdh9aM!-AT2#dETXwaB1dW8QRK5ha6T*o%3`i!g<b
zM}b0chE@JpDFK~B*vLDGFoZ||TmP5t2E&Cq+Lrvia!=@_hq%HdU@l2AkmK-b2zaH$
z-rwQ<1l5L-G-3tjnRUgKq;l(|UNSA65o!bxcrTAs1e}7#3KXV`0EWs~n&3Z2NI$&d
zKdE9@qV<zy6%SyeFw00sQ7boM`u|i{V{GFWK)*qVwhk>E`DA}M+@3wXF!$NiO(8Ke
z^zz)|3oXmq&z<+~7?=vB4qWqZS@Zeyo#BbD4s&GlYw;cP>D~u__~NCtyPy7O$GSCN
zJ>R$O*R!u5-t)k-U5Bpno$K2j4DE||-?~0NaCF_+*Y@a!s~^9*cH_gpor!ex&t*<M
zxvOJ)?z@9e+%+(M+xAC>Pvok_$iZztUwn6kyb-Rfx;t}h#p;zeUwEeT)S;nYcHZ7Q
x?p}JUyK{4w@ydzx!khWxkJpAbY(0En@aWmc-<bb${{3A~jYt3Z@KX!-{td(5FB|{>

diff --git a/images/mlog10.gif b/images/mlog10.gif
deleted file mode 100644
index 4c64ac7dd8b8fa4f883011dea48ac36055995a3c..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1011
zcmcJO?`vCC7{|Z4Iq5yaAjf$^<EAu|fz55T-CUXkwVb%obr8X6SJM&oPTE<t4yTh2
zTH>|-=x`e%iW4bw^~EMKp~!s`5Y9w*-HJ*sR9BqSDFtPko68i9V{km#{sx~H=hgE(
z=ll7d@7cO-AeY^hLJI$&RSq1bloT*fxF3}c5CKs{vH?#461pv!hZhGZaIcvR9ZwNV
zvxuDo<De97Zj&F3F~x+W$IVT3XUrr_2pU~pL9PmJfr`==voCPCB2<bp*>9i^;CkgN
zW-;&7<3v#*jU^L(4mfd5;X7GI_(ZDI$kr8ClFD;s(hOiK2#FdgWKiKC6rxl>Uquh;
zOHfY2@UY;*_Q4pY@Qdw5nYA`9M6)*XNX8%>f@1Cv8Ud7ol(^7_t^jQ&gg`BNHG_ad
zQOJ^MXf5TTUL!jO&NS!3A-5~=+gg~yCg{HgfWpUKWyvD92tAJ#pbA)kaLFkjbp|CM
zo9;@FU**iALWBejU|kcU2b5#3hOGhO3RD|?6`1irEG=cUiLeKXLux{Me6l}Ca7A^~
zu_$uaox_hu-PswR5Vq|oj2Z7Dv0_5lZY}gPig79EsBtkoTvtRtt`u(Dq#q=hiFCBR
zIX=@9m|O|hnDmigh!Efka31K;+BJt8srs;54#Ez=6ykD%w}*{?l`^Fi<A2w}Uo)zM
zj~30aq4Psh)6a%>PExvk%ABzslM$*-$H__L65tdxHLVYsDqiEi9xPHoSb+|c6l^8=
zXVp{Mx#=Wq4-6<K(ePRU{XeU_wY>EgK$j7rw+=2G`|Radth0P(VgB>k8zN$E@Xh%z
zUcWioxwIVKJ}?`}z4k}*ruAPQz9Y83J8h3{K4Oi0P|QF4^Wq!5k3aY6$cFV_zuLd!
z@>lP^wd=u`x({CBJ2!Uv7~E@R)@-x}PHcGQ8~3TZFP(a)_rAyeI-5ur=1ZsdJenS!
z`2N7tcMj}ZJN(4ZClmEb;=qpIE_}2~UWwJ(?<#$`YTfD^&%e-h`rzR2J8#SH3x0Y(
z)3v!f{`Tc9XI{)UhE5(yKY8N4`kLXz(%$dR<)1&kd-2;be{rh1=Z6O#+I`l+J^umA
Cp*iCK

diff --git a/images/multiply.gif b/images/multiply.gif
deleted file mode 100644
index 7d5e63f5cff4ea0eea76d9200f15ba03424ae8e9..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 995
zcmbtT?`vCS6n<{*+uXNV>R!t2npLIQm$jzTyAMedF>G(=k9DGKV^fO+y{k*>)l^Db
zm$}Arn~77g$^=EFbS-GzV6d`Xk$#wOiY0dHhh7~F6kR3(QAtv8Xwc#7?e-^lJ{>sE
zbIx;~!`Z#(p+x*p7-9T{^-vT=RaHVrLqmh#@28YDHZ}$VfnYG$)YR16+#CvpY}ist
zI)-hdp*5o@hag@=E<?;QFbi}TPUf`xQa`&#<AZeG8m<gfv<}I43KgFt(GOh%rF~JY
zyK3u_P{C8=s1MzM4$xP$<h^z!NF?RLcUeQX4V{2;N8_#?XGG_mroz3octz?`P;uoE
zQm;aj-~y|pCx!_fgo5|=pnKvL6{!$>MVxnFx?l|BD4E47i^w1s^;J9(1JS`L-puW*
zRnt;3A?S~O-c?QeHMXjjV_4J?>C=d(C3Wz$FW$e9bGoXoUotMZNwDLVbkDw?LUE!J
zA(DG)Z~^E9`~Y_daRKIKK;0=AZbfIZL{I}H;6b<4!3|K|GX~QI?5-jxLA}Baf;>Rh
znFs66n&C>F3Cg*7ExV(mMfVQGF<VE%fUf^EgA;iW0?x5#=7ukyFL`(5e4nGPxsVLP
zYvRM&6r_i7Xx5FlFr9jv-!+4qV_NHK-hief>!O80@#g*w`~b8AWhje41fG2`Z!Qx=
z5812MjzFd%M(}4Azl@;W0v|&%japdBv=B!>$yxuHqZ`>6sP)nK3zOy`SQx<p1s0G2
z(m?p}Tn8}?uwl?=?oJM&b{NuvFmOGKngyAG$e?Ut@em?I;G@VHFtcDIpwC}T8mfmE
zM?CxgXKY*Num1w*3KV)r@51cT@rFA)Q!5K|U!2&W@af)@bIWgp+MoJnW&gF}Tbp<0
z&vuTKm*3eMo4-!&$p>G*T|584rEez-uWcXnCBn1zaiA^wY^O8u==&RzZQMLCyE+#*
z)ppUk@Y^oZYCQbz_t7rp?WQx$F9$xgmSH}<tNh&MvyV-0y!77G=I<i*x!XS56ze)L
zc;&;);@0V3`I5af-Wr(tpz!n6FFVfoLZ!sVhfhZD4@67b|M<!|wdL5q$zy5j>$$!2
Pn=U??{{2-&!F~S$BOxRv

diff --git a/images/not.gif b/images/not.gif
deleted file mode 100644
index 0a3c3a0559a9295c67916e62cdb6b840bdc948ec..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1418
zcmeH`@lO+X0L71Qk8All&$eKv6>jt_U8$s#;x16JX=^#4kp?R3ux<n!?6fL@PCLU)
zQ%YB`F-?V}E;t({q-f_lJeNAVrEw?epi8<z7N#3fg2u)Ez#MTd8iVWpgZ%+}zrN&s
zlK1i&o9WuR10+Hszat0$K&0_2tVwf!CK`<<5{W1Xr&6hY;9!kRHk-v{_Ivm4VWin&
zv1IJ2<>lo^j~+Qdrwcf8xttT&A!)V)>-hNi%E}5Ltbj0+mc;7ns=kMfV&P}cp4or}
zD&0a&_{EDCq?w7qF(A^5jB!UG8y7_geNgX6DV53~a08J#7K>r3z}ngxRJougqOzs3
zuC(2o#Wm6S`FSsJ(mgCxkHy6i!koaAE=UE8P8O#bjI_q%@nkZ|vTRQ<i&I{x4wL$T
zIB&^pn8L}B<Af=p>`Yp{8HQoZuC&^ovbZxQS6XR~2vshN#fm8$tceK&Kdv5u`Zz(4
z`9VLF^@#Oh*T{UQJF~E`kWQyDY6Pm>Hh)&E_fxi%*qDz^F-#JwyfRBdNV!ScFQWRD
z)+DZv3F(B@ow56~P~lXVqqsUOr2Jxy7bDD26@W^YGdQp6Oa$^EYD8v9LMn)nofu&e
zs)JA+!S!*YGmX;;sLIcj-=#<IU@=7xYf3nP%?TVq;KfuyToc2T{bG$j0Nk(s*MDw6
z@*FLI$Sb4@$?M-wAiOo?Ag{D3WZ+^nlvj26bX=j7XZ%bqF!)prysz$jJ*V5uE0?q)
zy0haA7<ag9X-41~1m+TPWeqd&no#t^gj72tz{Oj8tC;JD1dw`r!O;BS#XacA#<z%&
zzN=ugraH+tcgttbec43wta~dm7v$?NL3ALL+p?oxIXib<;CUa}gCDDv6wL^!QrL2Z
zMHIhrwqJGK9rcAk<%vJ+eW4q(K;x-1FwP;b7TSlaY6GB<C*(szVrL!4S2(mfygm1z
zXzMn9jhr_+d3^f(m3KKaBZW4!vf3|qJiOf~-5}7lKJh-jq5V0wf3a+(+%}|Iw`1+)
zzV=~G=^{1o!F|)>$2hrrtV{8u#Mu8A`{rP62YTDTyR&|^N!s1fC~!SL_}v@W(6>vC
zW#HB|4)M3-@`Y|(9ugFbJElMx(Q(#0z{Nh29NBky8ZGXSk{*OJ(R;t8>>+aZ^y0Cf
zJiMAy1&#cZrMl%4{AR9hbKxQ1gYMJQ5{%o9R;M==pVjJ`8f6pn@Oj;nPmuPLy@R25
zaxSd^wiOPG`g(76i(0jPgJ-C=iyn#p;hb+P`E+iG@E*TOudgxwva!4cy~Zg4Xm!K6
z-BIq`1V0b%`_x1|h)NAhw<gc2rz$7Q47U)D;Q01j^k~I&>w&|cN58kwlJ}5VbSiB<
zh|+v*bH!lqrQ2xav7&9V;ibb<AiLb91!>~^uc&g<MV4c#-*QY>vgumU>!$kh!S%bO
t{KHp|4iwNEcHLR>6^|a>Hm3`n>SDk7GqXR`Q1o=&7b$M!$X+e7^B+sW)_ec}

diff --git a/images/off.gif b/images/off.gif
deleted file mode 100644
index 3e53637a0a44d3707de2beb90a72c5862cf3c6b9..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1457
zcmdT>OKcNY6rF@IC01-_X6#_&k--=n2$o}vXPEH~#^dK1Cn<<U!bgEcp>z`}g-^=@
z1PmbYkwA#AKt$rxLKls?Xw(IWb^#TLY(jzn350C1fK~-sp{i;&N$;DnY0Lt#<=Vc!
z_nmw0Id7)eVJPbQS<EcvpLQlGK_c6c%67JBYo+`$*;tiW@pWtW?qtVCrK?X`v$OE&
z%H>@>U8{TAyEm*_x3PCqZ{Oy=EnBz9>kcgb?5OhjiDh5xC%tDoH=pbN>TJI6a_9E%
zR_?gEe%Hm-<_|sl2lwndwBg|3m&bqJcIw8aQ@3{X|9bH7k)y{>pE!ADU;i)X`!8O;
zbmiJl*UbyJ4}5dy$iT>%@9!QT7(G8Y`t9{c12;=Q-h4cCcj(s8<KdCv(c#g1Bf}3L
zJiK%NdFg4nRL1nO{G{~k>9caV^!tn7{&@BGU$6gs^ZL!$zhe^<6U@gD;W4(Zj%jD&
z)GU1`%qQb!6UV<Vn)u>-ga1(6n7qbrFq=iXz%Z()PIO&2irCU4`a@jDiq-@cxEPjY
zJuTUSbwJN6I8JIYcXJ>V5=DQrJC>_#uyGv+6;E?`&YWoL!q%C=pwlB$m2uPFfV+!D
zxiJt)ON&yAlJf)8{UN7Are5?S#}^492DkzdsjW>)Eu2q9#E{?ugXqMdxNeY`Gn%Gw
zA`%K9EQ-N~7%_BW@pS{-;Zy~#;(mt-1>F`f$h_v+={6~uh=`)i$BQ^yP`O|m@C2gi
zbRs%CTt!jB=cyVX;0U!!$!OG)t|lLw&o4Fb9TK9k;Z*7!c-$0yEa2TxK_gHvCK6FG
zSS4HkzIwz9xB+n{7>(5K7e&FbglM{@0IumX6E$RP1WyGAEkDmI*3wP9!L+%cfh7P$
zb_Bzd76d|~AUI?VKu3c>a#`LfhHCr?^*kV-paH;;8-*E@7Kp-B_hO<|aFWaA;tpX(
z&|euDH@(gVcN`!GZ#QTVZ=B-u`*0-a^$JrO;|O9<1qDE*BD|x)=M%i>t&Wo|6f#Sx
zgN)s%Qq0A&b*y`CTv06wK-PAcQ31aIii}|*nNn0d1i%4RRj^Pq*y4vKq+2ac;Ukbi
pQU8w&EfJ?#ktrVy*2a>ztx7nKZ{nJ);5e)}-n2W+mGiv}{0BnnK%W2r

diff --git a/images/on.gif b/images/on.gif
deleted file mode 100644
index fb75f06ae5a5c1d3e0641e880bbf14075438dc68..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1966
zcmd^;>sJza7{^JWrMsG%Yi)s$_ew3xbTjoRItYf`LM&HE-9TF}rINaZh)G^iz%<QU
zSR&M-ZEaRoUMRI%g|J$y)wGf{TkF_rz3}XeR;L%cf5FboIrBZw@AG`0=Q+<zAb|7O
z&O*UZu<0h4i@VoGZ@|sVZ}U!~qi?vIPq2r7h`T?<1&HtnAbABdH||Q_NJ`#BJ>mjH
zxe>S?lz3NKh8yFE2R+#<Jl#K-Mx-!9X>@AX-T*S2NZ%L2V9~<&(SC?!?q$cs#KgwM
z#wWzbAL1n@C8qLH5AibG!?U(V<a$O3JtFe9MizLo3%0Vvo>7t=F=gJ|ia=IYK$M6W
zof)z}JBV8tz>)gJRT2+KgX1f436;ANP5=p2)FdhSP$eU!eDC3MzxcX<Bsnfc8GNW7
zpVAPLt|F(_(bCRPGF0K|wc$sVtc<FZbUy3n8an@6SmrOg`Ay8M=Dj(Wj|xOA;kmdR
zWxS|?opT|&@OoTvYhtc~SEx#qT;qw=yt39<X-Dcw&B2Q9j8mOQtGk8Sd7`|6f+AUo
zq_m=<T2R_7sJvcLSyNbZwM2fkuD1T-#Vc1XUgcNc5>$8P)ZEOH-#va>ldsZBly|FC
z?PaR2VwJ93-CJ=%SKEAB@!K6`%kQ<9bf>RAKG%A`@y1hi>%Hddx*KgbRPBS8+V#yH
zPc)iax4WSG-kp0LJp;YkUR|%QPur`%uYdUH;lP7|#{-XV>0Wm0Mtb@U51tqXpAHTD
zIW{;1zF!YL9vpr;JTx>oGCVRmVi+}yjv5TFUc53GMkiiRj89BWOioR{`Rm=Ackid(
zPfdN8o|&GWnfdtl$JyCg*#G82e}KalfYUld6GQ(53^@zinU6HJ=@gk_aQRlQ?0*&O
zf;w|8oaib>6I=6N0)y)mAjbQh80v`DwD*fGof+!LHUVOZr3w_SX%<T?l?*hlldb8Q
zb83N{oOhq!(~|VHCCK>AGrPfR0z{syk#aWV6bQ?{)K@aVTFg&bZJvmpWeZ^oms(A#
z7wR1tP!@PJ&mi)kT9H3HN?6BM*o9P~v<ooC<s^_{BJ%nh!CJGOOhhaQsjUx-6JOjp
z;gT}dS8NuAL@Y0$w}Dmr9r;au`J!ZQzXM%m6*h#n?W^_4fW0^_k>iSN>>XX0$z%FP
ziW*|Zqz-h6%cRLcq@>=T|E7V%))W_E0tk_lzmDk@6cxxskC`h;lqlhLVK>>vJNep{
zw@nUov~{4+r_6@iub`Zt^D%b>zC90bk8&`r_S{qT#NOPA?zCZ>=n)CU0U5}L#-%CT
zH#e_omfElQZsYEs<Q!;F!nOohx%#DEL7rpAsP@Zu++2#M1`<l0cJSinHW$2&3k%bt
z^$wvBGi%X9U2lwkjqEwt@r?|#?c>~@(d)dD;GAL@wSlDc$roAlNbQV6XD0usi(XW;
zdM8<vi%^P}V{maWR%nfx52cooX=OT2aljji`^q~;bhc|<7lX`rZviGqaGBS}_Ngb=
z(ji9DfQKE6ROo~>XMZM1leWI$cCn3bgnEDRF#il`j&Y(Y@T<7~K>Hb*GovCxb45Rb
zG(&G<${8#{mThP?<fGKt5q~6^b9Ba`&?kA|oz@r-ui8MCM?SkrSWlFNfJ`2_o|J38
zv?b{|4h^pRKuac8RAA{qkc$%d+Ov2WAsc0?oHer}%J3w6qv)Vz-z#RH7)+lB<gK(N
z$gM~MGH67YH^R%T!o(<ecj4*-M+}div9lL}jC0?R;XY`DhZ-ue+g0V=rszX;iPyN&
zFYixm7v`?CwT$9Z7Dd+MWsWi<UmMHyc&Q_;5k4x&vfLVRji<HAUTk5D+|Q#N*J1He
z+iW9On+F)61PhdG0kk4x6S~@?c32398{_OxVga2TkgQFfXCzqQB$h+Q`bWE%+ndzf
zJm!|L7EI(ox!6pQDTT!6$J(8%ZwSQz(k~G#4P-2_ML-c4f`8`bj>TACFk%Mj!yrV*
zfQ+vIzQ_z4Bu4oPrjVd0`4j<)t!dz34^HEySYjR6P(2{EC!XBGF#~6|CDu_DP}A6K
d>$fa8(gfwhhb=JrTC9U2lg9H$Z_y~2*FQ5x0muLV

diff --git a/images/open.gif b/images/open.gif
deleted file mode 100644
index 2aa70cdf0feb246156c1026936f026616c18d35a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1216
zcmZvbZ%k8H7{=ey3q4jeXVT6~f?Aai6ua4C)pk>NyP*8B+n~gK=#1759X4QwY}poY
zf!2w!b5JddCe_e+Yn-`ZLhcNb)XSh{lUX8UZ5Ea^I!z5KH<m1&F~+mQvX6W6%abQ3
z=e*}V$$P45c3HL_(18yAhD9L;Fh-b4gISDNj9D63R>vY;sZP&geF@f=>h)!My+Lm(
z!6xbklgVtdmSSreg|(WkR*T7|V{K(@Eycjr8d-;db(mR4U9F?e;WV;Nqt0p8IqMvr
z18&bjkLR$bjnd+2Ki2NsqxW_B{9S(kX@4*f4E6;>0ZJ&;7YdyZg)W5n7At?A=V`dW
z^WhL5=J^OOv{;1;ybu<Ih#*|$g{y)vAc(D2aX=J@MDe;99g0S&Uynvd#OO#g{)H`0
zeIy>g8IO;};}h}37$uRINF*lXiOEEAGMSu8CT9}KnPf7ROwUl#=~OyBn@rE9sWPc_
zW|or4q%)a6Gx8#`oRQ^6axUV@4R~@iJj&(dTy7ypMIufTagzZz8KQW|2&IjTw3C}{
z<fe~|wUaR)nedZIN{~$X$qY|YA(G<BtUz*vJSS8LLFkTH_2h9O&5{H!wt%-47!`1+
z@mYzyaX--%NeB(Q6b(B-s{wFzfn^Xh7!XQdjCFe-GYsK4spjJLJnun{Q865HiFf4b
zy#yCYv`8M7PiF~3Bu*L@V-3@?78Artp#Kn{10bIc$jWEPyAf0x7#5*n8nm=F2MPuQ
zWB~|(1kW-2k%ct`=)nhO#SIzC3AEE@0O+nlXx4x{4}<`HK~oOohd|bV#6hE7CC_&f
z!blRQ>oCeL3&EQDc9qxFslp5@<c6WS0W4b3AV9hf1g<B52_Q*lXJG!n%Ym_h{3Q^E
zR>Tk3?2QRhPJ3s;H30~~&cgh}zmxtyW>{QX{0o3yf&!IPU+e$Lm#@|D{pDJCyjz|B
zcBr~F96QCX+9#arT@O7f?XiyGz=P@&%-V08KX=@(^}n|1RLPZ!%j-}fYv`RD3g>U_
zdhs+oT6FN&Wi3csp}F#2zA|5pI?MaqHEOR~`~2+BQ>%V(N$IuDv*8n5_sic7*uL|f
zT)x`e)ZeT4$Qv{kJJ`~uo*vw$N!VfqQ|C5)AXW#wjY|SccNHIj(vwps4XvqB)A|Cx
zwruwaCcO4id<hd+>5exrmFvt$)je~~U)?&h`Yqb>=uo6!`=>iP=N1ay&1`NQwSRv!
zR{c$JxqDxt?NaPqMbq8>`l5o{+6}cwF5hpu*Z**A*(a4an|CO2-h6AYv1_5y{B@D)
z_6Ghgb9~{R`{Qi<vv;=HD)Hvw&aRq6t-Z{O_wC`GuR877Vojx`N~zwGh}D7K6^Ol3
zZ?^3$sxKKlj%~ji$JHx>KjJ(i)GP5eW>D#UBgTy{tq@#!ii&dcD6_osX(zM$^|?{y
la!Ws_RNDV?)bDg3-qY>Or6N`Pj}%|-Lw$!2*%S(R^B?rQ1XTb4

diff --git a/images/opendap.gif b/images/opendap.gif
deleted file mode 100644
index 346345da29b30c14547776c5ee268a34086737a6..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1292
zcmd5*{cqJ&7(RDgpqGJ^Ws))xyWMRR=0|~<lp)Yfgl_V2P#Hx7>D-XckEAMUGiNw#
zirs=*V8(8cco0nvi7`hZq$DPuWF#I)NDf9x{jC}kO8BKZiI;>#FaL$llQ(bjym^!N
zN#18m_hw^lUjujm`~v`RK$0YY16hYjCCMwf7w45qCC~FTO|wc?MOsNg0D>%VU6*rS
zU6KtQ5@;qSOGFfN8pnVF)5BpbaEu-7$W)d>NwzRTNb?hqQb0h&BRjOckOXlOW}%(f
zC_q}Qxt=E>2{Nepf)GL}^?g4G0w;A)M8h!jSw*p9-~rq7?bP->&rRJZiezX@P*Gr{
zyV&rlk9^g^Ugr4-nJ!kHSb<)Zm|)ck%_!Fs-9!vJP<0|0Qa|%mm&v-PL!B`e^O!JV
zQd0_)h(%!@%1BmS!+@?$Z50MO*HMNHX1FBAG`5805Eo;N7)B(Lgv1lhavo<a5)3Dt
z<Rr=?#JI*aE3y3QOmM=eOF1SCabh5$BMxzH1wwN*#1hIVrYz)w3Q8!GnIea>j#8cJ
zvM=jcSE;Hwnj#dKLL<yf-!wvli6j&ipQS<wp68)R9b8qCIO8;9v}!Dl15Dk((_`KC
zkU|xfWh_f%A=6Z_jN2qvg({LH5}776k!K{$BregkT9O8R!*LbU=ed9ZQvwBsjAgl~
zBqZjTV=Qu+$<V1<6fDnKCPbbRky@^cNwo`6sVX8#lj`&Quh4j>SO7qPhwoi=FB3re
z7nG%?7R8;TbL&=q?_GW_YB*BgbIMmoF6}<kE=?ai^ULRZPuIW+cU9wwc_)XLJo$Lr
z4s}Ic5BZJu4Ro*HGp`;pyyskh{Jb{)RO7a@J@t>g{GHm?S2*y)_^}$cf6-+3dJ_G*
z=33L#z`Xmi0haDQx#G|`>h3FkFk0HXFfzX~m+zQU{C(h!x~{R>^1dt|f463GWZEwc
z|Mm0W)^7?g3>jpG+&a4NEI-}?Yp;LQxVGbKOsB2M_Hg@qV>@4;GqLNcM1Q(H_Ux9P
zjjvuD8aXg`LBqO+!EUAVgt`Hza!2!zX4~?|M!=E_Cu^2H@s6dPj}9Mu(0KDMy}7ph
z$Kr3xE0<n7UVm$>?dhot4}sp<VesgcgYx0R>5avvWm0-%<$|p9nJz00i?xMz1+`B7
zz5GgDS<<`e?WMliv~5_ORZ}vgzC-%_k<R8Rr*L?<bh~f%LJ1}6`-{5L>HGEO5G?+4
zSM#;i3!CNsV9j-@6Lid>Uwl$`6_16hH`x87CG#AsxwP|Ad16yjX+e8OtJflJ-F|-m
z(OR_RrB6%x%>2og!8-#JR_{g7^kQ+=4XgCpP5g0h*UaWdec<wq!m7Jx%f(krXY|O2
ZtwUdq_q4u6Hp$K2;Hl5v9x_d^?msFX79jut

diff --git a/images/outpin_red.gif b/images/outpin_red.gif
deleted file mode 100644
index c1946afbb23d497ef1323bf22bbf317488708d7b..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1016
zcmaKr{ZG?Z6voR|oi0X6B+Fow&_YXrs<4$(g)(qNY{0QN=1anqVsOArWt&-ui>49C
z1fq;>I%JF%>Zoyqi3p{2V_2ZJr3@J0XnkS4ty__aE$#R9-n+YhVCR>6?{l8#bIva}
zN4ZbFTNTa;=Xipg;=LJndHjLM*#4-UCVuqLYr?^(m=RvwXjH^hgwV#54#o<tg2em$
zox`ySzr=}0gpzwAsa4Fk3wOAp`C}sCxHw``81IaeObKJ1@#0CbsB2Hk$S#?ESJF&k
zoGU)wm5}grN~Sw8zDJSPzb9iXNimize<;~um&(VJ<+eAn>?yMEa`K%iDVE)76UkYo
z%=9Ul<kwW0L78_gztEvj*wb@escGN7TVTt`HSIevn5|rrr#km$FU#fE3Xk=^bEv8C
z1FcGZT~*dn@?q!kBDeDBg9C@Vip%w-r|0udJUUolRDGmXtA8jjU(PR`J9Olaf|Eb0
zKYeign5MewT+Qd2nySAJpR|=$I8L3eudl!KRb$)bOXFwHkDjf)(scFf=B6vnO%qi$
z&DxgV>gsx0o4d8`uFH)*y7rEaj)7LgK)cRr(3`t%&g!ps8+&i}_l<VlykqLSZNA%U
zHuaf}=AVoX;~lHTa&KgKZrJhIW_3<YPtVMFC+sWJzq_A2TwHJy?itEG>7AP=-LvbD
zmdPjXCC`$_;~^I2d`pWf9*@`S4K6P~U0o&q_V`!-dA7FZAqc<E=kxoC=f3qo;5iWl
z{xvFyPl6zV0eUS!2S}2n$#s%qSmqf`(|$<P4CO-%py?oh1b~Qw3`-#j5s-y63Lwa`
zEC~T(Xxw1{6GV`N5HSp7DGIU-U|B#D2oMMY7VD6PEMgF1X&MUvKtLjvp&`UPoB@C(
z5$@s?M&mgE7Xu+SA{JX%9AY3=u?NuzBQXImm<SjeVIO1=3vd#!G`8RnUImZFc>Kcq
zAcRQ>U>$3C24>^J0=`3pTmM^piW~AW2XQ1ET>t%v%uO>Kv^jGBEnU`y&q}|jJaKCx
z>{Y=QUgQmNzP|Q-O=zvOTYtYHTG2YmS6S-9Bn2%MZK|PdB2K4<A0yiGP2zifH#XaQ
z4C7jL@sa&HT?lvM)>jJ1B#Sk=;gleK)4`?M?ZUUJr)L}IH?(VuhU$2^GKu!h?c2^N
zEw-4btzl~E<U&baxb$eHrYXy|7$JGRBDzhm!SQ8`C~5toz>u%YerfDXW<^nE$O{{}
v+<uLqx<Pj4sk8RQxVL$V^O29sRNT<-FO+>;Dek`5smm$(WaZEBP)^#vGJB3t

diff --git a/images/pan_down.gif b/images/pan_down.gif
deleted file mode 100644
index 4ff930a62c592377d8f6295c82e6599df436d983..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 587
zcmZ?wbhEHb6k!lyIF`Tw1e{zvoIC=2g5tuW;-V6=qLPYYlJYY0s?v&j^2(YD>Sh{x
zHd@9mx~48BR&FK^5$4V@*3Lmz?g=(tDK1`No?!(6Vaa}xg+bA|zVS_d2`v$cC4ouJ
zp=oX5=^de&{b89CBD4EqvU}okdtwV`Bo$3cDV~s2Iwz~FC#`ZpX61~+>WNvki?iw$
z=hQDPsGpbHuq>}}d4AK1!lva#t*eXMR+qG`EN)v<*0!X$eQim{y3&p{r5)?ayEasG
zt*h->Ro%0ps&{jB-{zYBE%p8DoBLPRPS{#EVO#yA?M;(5Hc#5nI%PxClwHkJcePC0
z(>`rW%k(|1GxoO6*wr?3U)P+2eRKDA&pXsJ|8Vd8BNG=MoV4`hlx3%;E<Zhc?S;AP
zFa4)6Q2ZzAT$GwvlA5AWo>`Ki5R#Fq;O^-gz@Ye(g^`P)o<Ro`MWD!HVDE3JZ)$F7
zZENr7?CNCb?d$JrW6@EOl~Y#JHn4Q?_K|59kJ8gJvh@gxjExWAYG-tF)HSqr4~&S3
zlk5}?vp2SK@ehyoV(sM647Raw_6v;=>gIQ~H+Aw2QD^V2moPE5^EBtXThAk}p>HO}
p_N-o5K~;(OeLbg?q6qW1dI4$fUw;J!92Y1sunB9u5#wO61^~jC8MOca

diff --git a/images/pan_left.gif b/images/pan_left.gif
deleted file mode 100644
index 2e84b439214f6495cbb600048c90e7f17426ccf3..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 592
zcmZ?wbhEHb6k!lyIF`)-1k5a~>>OO2T-=;oJc1&UB4SdKQgV{A%JNF;s#+#my2jdi
zCc4J<miFHEE`D~dK`x#l?miJ-{!!k6u>mnf;jx*4Ni8wS1(7LL(P@<tnOzZ?-Kn`X
ziFqCI`4cjVJ5$T1Czs95EbB|Hn4ey~Ft>JYR^5{9x}`bw%gP(3<TtG-Y+hO1vZSzO
zbz$r3qP8`~?Q2Ur)>d?^sOwx%)wQakYeQxCx{B_NRXyt~dp1?|Zf@>bP~Eqsv2S@z
z|JJ$*TN@{AsGqc>Vbb=7$vYaS>};O0qif2Frm4G{r|xK;w!3Bep4RER+otd8n7*xR
z#^#QhyW3~&@0@+0XZFsnIS0Gv9_pTVc*24M6BiulTX?vC;ju}Jj?7rRbK;T{la`*G
zy7buOWv6DYK0kZy#l`DSEZTT(+2*s$w_aYk{p#ACm)GpNzG?sM|C9m6f1=Jssfi`2
zDGKG8B^e4K8L0~Hp1uJLia%KxxftphbU;xAiYx~9DGl{a%`L5M?H!$6-JJ|Qt;~W7
z{Y{)gcIMLkJR&9$mP+E?++sE<zS^oPGM(%a?x|@3I_esl^6f0bF&UZhZbo|g28N35
zvdLNL(GF%7R@S!mYVFMYTK)mfPA;w<Uf#N$Yyy(TK7m0Yp<$-o9K52QQL%9ej{U4$
ga*>HiepCLj1Q<LzHKn16#o|K4hJ}aS8yOg^0e4dwBme*a

diff --git a/images/pan_right.gif b/images/pan_right.gif
deleted file mode 100644
index a6a3a27e3e553b9a3ea4e2f65664da76aa11a2b3..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 596
zcmZ?wbhEHb6k!lyIF`i#1bhM_!eUZl60%|va^f;-60+)2vMN$?8dCD=Qt}$o3YxNt
z+H%S|3aYvas(MOl`bugB${L0$S|%!578+UxD%w`+x|W){COU@JdWIJIM%H?!&Q9KO
z&VDJ5{+X`+$!@_p-eFmOk$C}8c_DE{p{dP@+4WI5{fUK>l8Yy1luu2mn3q;DFTHAE
zM)ji1nuVD)i?eEhXh~MxlI*%AIrU3(8<yraE~{vqn%}gtsA+M2^UD0@RfWyV3xH(H
z%EFe_g{`ZLKxj>I+v@W6rDYwfOFP!rb<8U3Tvyh)zOr*gS=Wa0t_>C48!LM@RrPMJ
z?b}q-zon*sYwd(BwG+11P1sg9aa+Tr&GnOZG)>;tG-X%wwB0S!_q0vl*)jV-=bQsw
za}IXTInXm_-;{;>r!F}%d)3KR+b^x%ed9mHK=Gfbb5UwyNotBhd1gt5LP$ocg1e`0
z0E6OB7Dg_HdIlX(6oDd(fqh0peN%HwYg>CqXIB?Pce|*kRBvCiL}rGiH1m{tzLd1|
zRCUQIT;}dU(eZA|GK^hpI(nuS4z6~Ziei)7IP?vTP0S7(>1xQcwsTwA*gHDAI6FJp
z8mNi1@pyRq`uPV01bS)7akldYhlGVkM1^WAh<EbG#Ky+OS<4D{3nnBcZR${P@H`};
WrovM2t9SuRORIpgmyHMqgEaur6$DEF

diff --git a/images/pan_up.gif b/images/pan_up.gif
deleted file mode 100644
index ed12afba32c8bd20a43b7a0a982c97d32baedbdc..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 583
zcmZ?wbhEHb6k!lyI2OkM1Z?aaB4QG<@+z{5no6oVs#?bC+9n!$78-h%dPY_{CQioY
z_ExSj9)ZceL2;hJ={_+vesT3d@#W$11rZ5_At|j<DV6bQrHL73QCVGyIW6%8lM)Lj
zq!o6=7tKsBo0eDBmQp@HwPJo|&7%C8>DhHl^Xlei*DuYfUs}|-prC1aVaux0mc@mw
ztBP9J6t}G@X<u8~v8KFhLv`1Ritde7-D@j)HdXa*uI}61G;v-1r0w;Sb~H}f+CFJz
z)8x$!lXo^v+0{ICd-Js2t<!e2Putu!V^8~x-R(2?_08PSF?)Z{>>ZtR4osZ0scY_`
z?s<p$<{g-@`1qtHC#NnsJbCHK>C28zUwL}w>hrVLURb#P-14oLS8ltsVb8U#hwlBS
z3sC$g>RgnXSdyBeP@Y+mp%9Xhs^ISF8^ECWlZBCsp`Jkp6h)xOVqouXsBda+X>Duo
z=<MolX6R{SQtaz*mW<(_(!}E%>&ZM*B04HqWD1vEWPqD0d%tL?yQQ@)Uk|IdlcBMV
zhZbkIxQmgVse_llKo^^twx*7`qfekcM+c*@in5x%g|lB!xO4}%jJ%SDfu*y5NJJP*
iJFlRSn3Sx7x~_?pt6Vz+12Y%TjFk%xHZ?LZSOWk|1SH4+

diff --git a/images/player_end2.gif b/images/player_end2.gif
deleted file mode 100644
index ca90804acc832ae6f2e6bcc66044ad4798b52d04..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1145
zcmZ?wbhEHblw(j}_<oK72!x~bh2yk@!z_iP%!FfXgwmviGsT4pMMO&kMN6cF>ZOEh
z<fZBagi;lRvK580wS{suh0;xgvQ30Ctb|IHgbKBVD-?y|9fT5Hg%jL_vVDYdgM`|J
z#2Q3}8l{99Wrf?M#o8r>IweH=BxG6yC3=PBI|ZaCiO5bCmYE_fH3NvmRi_B3O%+g@
zC9E=EN@KpH+CpK?CBhm@M06Gi=qwe`S|O^vR!nb&tUeH}l`~x@XuMj^V2!--CK1z3
z!baQVEVl|-?i97(EupvA&~lZb$wmdc-D-}z#hiAlIqz3<+pFfX(a2}3iN^-3z#XQM
zd(30^nR{$>c03^FbWqIkkfhsD37?}fzNck;&qxNJk`KHf9dJQ5;;L-K4f&Xx@}7q@
z0?%kioly%pXBl=uJK~~V+%^5^+e%Rv3}Y`DB_Fm=xS*MGPciAbN$Pcrj7!GZmyI&-
zndaQL%DZP<bj7^vs!ifUrKG1y>5r7NUa4lkRVjR~QT$4y;I&ENE7Q97rlp@Xs(+|g
zelm|d?HqqPD(SLA@)g&-`%c+cg9}bM6`yskyzEqQ!>RmkNbcd>s$1!`cTy@}I@P^%
zZ}_F(@X@6CpJDqS%Z@+RZTCZ3Uix*uj_dvI*7ez=`@2`~U#E%x94G$qocbkZ=8xF<
zf1;Zn=5#&HYJFHR{cZa67kRVZ7R`BGGVfd3oR4|)zGcq;l0W}**@CY*3xB09{#Ls5
zL(Q_!bxXfjEd5cu{AbyUUxllG)~xwmv-VHzqI(OM++MljOY6E%-5Wno+xBJp&KDc^
zd|7n#{f>iQ)*Sn?{ruN`$3Gmq@a5!<_b;D*`u5@L&;S4bgHqcd1ByRc7#SGOG3bEY
z1<Df)9RC>Ra>{sYSa7hJLs%>3#D>I!ru{NjAy=}yT-v%P>1G!NDk?{8nBnNSLo31{
zjrZ-HT}xL-PHA3Qch%|ShCZFh9Z~yqX1om5N#%W9!{w?q{ajP8%SNkf6M9vB7R0=L
z)at|KXffAKGJA)sz*6sdQD--Wo;i8Iu)#twJ0jJ8fn&2;ZqU-K8G?-wKSG4A1#rmO
z*8TZ2F=bVVX1wbh)y&K65zf*X?@mlKW}mp&W4WK=vxlnuYuog8)mT1$*1OQTQ$e)q
z!J0NPgB4QKHcVRDGt(sLgh17yTanyaF(#%i4b2>rRlO##OuTseCZ|I93zL!!kB;>z
z+ZA<WyjydyM=?t(oa@JeCRQFHK{gSe50Bhuv@uGQNC+}D{c2(lx^Us*gafUt3OC9p
zFsd!M;OxvA5zxpYrSPPIN$$u60iccpj_#bP8xFQgC>>#Jmu=H{)S)sh<58y~8w-Oq
E0IRyRoB#j-

diff --git a/images/player_pause.gif b/images/player_pause.gif
deleted file mode 100644
index 9b88ec5ebd4bb5d5e476614f051c1391c546b891..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 639
zcmZ?wbhEHblxL7<_?E!{1VU+&Lg^AhY0^SzGD6wHLfIlBMS`Nmf}$l-64e3{wE{w^
z3Sv#7;;ka$Z6dO50<!G_l0Cwby~5HHgk>fP%S;xQohqU<Q9xyqfWi!6MIf3btT|Ue
zd%l4BLSfA%!Wzp(v=>VmuM#v^BW$`x$b7xL@djbzO(M2iMQwJ7*z6Lu-yv$ZTf}p}
zw9^h1ucPAL2W32tN_ZTT2s<wod_g+oqD<UP`LJUuktfw6PV2|tQcOK-n10?U>83*V
z4cn|o%Gr<A^B!uJz0$6DZCv%jqU4H0=~c(t$3D#;&D-uqw7vE2eh}OLD!%oDbKiH*
z{?DNk-p9@S5j*=;(VW*M^WK(B|B^EATiX1O6$`%RF8oru>~r0!4=pRcv~K=1Y0Kw%
zyFSl5@OkCI&s&dv-g5f$p|hV)UHyFd>8Ef1|NjSt9mPQLCkrD3Lk5Em$RtplFtFcg
z$Y^SAX>Duo=<Mq5>Fw*EFmcl4mf-lL)b!Sn=`$yIMZ`wWYVldPc!GOiRAgGK+v>Fw
zY`w$6Q(A4dZl7T38W5D+YO(*|1Vbl3|HM|klcy)>*tvKnv}#_yIziRM(K)VF`R@G*
zat788F|D%CUrvzJH8T%wmC*Y1rC(6p*wnXGz~jsB?-`68qDB)$f<7=VJmk({F(KmP
z;tPzt!rFFP2@jdMWLtScK5%@R*uW|6v`0h4!Rgf0em0v01qWRenfNkp)f`c9Y+_U5
a`uaz5%A|{nJtk|#p4zf9*tL;?!5RR=-tI>L

diff --git a/images/player_play.gif b/images/player_play.gif
deleted file mode 100644
index e610d842485044c82a7a04a2732d79f71a97612a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1052
zcmZ?wbhEHbRA5kG_}<R|1j14J!f{%{VV1&CX2LNxLWLqi^-{t$@<OQ!LfMK!+1f(6
znnLL&LfIxl8CF82N<xL&!WD`_@eV?XuEGg!LfJk-xj{nhLPCvFLXEP*ZPH@xl0uyl
zqJ0uFErL=L1ZAcOOU(cxakZ%eD)XgumI`RC5Y=8Qrnf>?f32L}HhzOO@|IhL>~~A(
zEjF}VWoWWd!EC#R-EK9f-D=MJ)!g=~xokA@*=iEF!!&Y_dF(!OkB!bw2gMu@N&23a
z2|gtsa6vZWhJ4@|?XdG!VHdO`F6zZy(~rKb6m`Kc`LKP$1<j=ECaKpgvM(ED-ZRa)
zZ&h@~yzHt?;zOmRr%G9`RI}fz6udSmd}UhtS)=-gdgUkc*wZfYr=yZCJEUH5&%5uG
zayh8rlvD9p=gP}Yg*QUW?}p?a&aJwYUVk^O@}*PVJNJfP`py3g+y7X0{ITwQ9oPNc
ztM{+d#D9(x|9H;-6W#PMr|W4}>%)RsZ;R%9%$xTubN=VDg}+i4e=A-3y<+K);^jZf
zR{Sbl{j+Ax@0zuLY8TyGxa9WAoi8>XeZS-U*L}x79J}%U<<n2!K79T8|NnnbgpC5~
zg@EEu7Dfh!eg++ok)S-m!10qoo>RtS!-9j&9Ku>LCpIi}R!x`js!=&6(BH{>Tqmi7
z`LUBx%@31P4-FTJI5e<XUJCq_FvYHxX=TdINehH6OI~rziY$IG$t?fb8^sWhi+%I@
z8!{3ftM$p**9$#-ar3a(G~MWH2To3A7wnm(yCP&~5o`A>>#{d8RTnR@Ow#n9=fIe(
z#w%x&?LCP@=?TYVwYYs#951uFoaVQy6EU2W!ZFbxhG)A;!=ct*DU-Al5sEugnFQ5C
zjwDQMYGzu(WYi)Nc!;@~(L<iqp<&_SHr5Tv!V?M{nBU!C;&y4cuz;zcQCQAmM?s7N
YEBBe%_PYdPcYS?*V{`U&IROT10BNZ&BLDyZ

diff --git a/images/player_rev.gif b/images/player_rev.gif
deleted file mode 100644
index 4fdabd3115997603c836f4b7a227e3f2a9fe4f03..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1051
zcmZ?wbhEHbRAf+K_}<F^1j14J!f{%{VV1&CX2LNxLWLqi^-{t$@<OQ!LfP6vxtc=h
zCPLXJLK#*<rAk7D+QJozLh%kliLSy4ZbI2ULb*Xg?LtD0QbLWg!fn!G?UF*B5~6()
zGA)8KQ-q~v0Fk)bQ~{OwQX2Cm)fNitEEUjNA*#JrOmBs({#rSMHS$K=<Se%e+3%Ln
zTWn~#%Ftw^g57R4r`>AK`_<g`s<~`5^4V(QvB4^EhiT*<^Vogn9vhvV4vIM*lJq?-
z6MRZO;DT(#4f((`+9BsG!!Bq?T-1xZrXPJ<De8h@@?rag3z|vSO;WF0WM4MQyl0wo
z->T?}dD&H)#D_{rPnEJ>sb;@bDR^yC_{y~Ovqtp~^~z7?k*A%LE;}S&am~B$lzlb0
z;FMGGS?9{jPUUw)au4TL-Ab>$lT!K8sqUS7!!P~je}?UUEIa;KcfOA6{_fTL*J<KE
z$BBPD=l_XrdYIGoG^_Pt!K}AMb3W$H`<6NXbJ@aQsf)jrF8yAy^hfdXpJgk46|VkS
zv*vfr+CR06?k!w$d*#j-8;`!<asKPR;~$RQc>nV0r*9v={`~*{KPak30kuOw@h1x-
z14A!^4#;3oo?zhk!63~k<FR4E!DbF&t(X%V79MU-onljSqB&qjCvUEXmkQhCLo(f8
zUPwMRT*&9v_~*yOpid7@I@EuA(|Kvr18;}oCkG0HiXZT~G_ySIyqtD+(!Ab0nOBF|
z&r6$SZDDxHdHBLy>%2=jHzytS>Yt_0b#Y-ZJMT2@@Kphs7me1)TBo0=Y5er$q=<@I
z)+B{Qww~#FzB6P5g<M?xIW2YwNKQK9F;U*mMqz3|Q)`c;N!AXBM21XdPU)aE9Sa|^
zbg;~m=8d?J=+eP2!LlT5tpjsAHwV+1Wpf`ourTeG4ovvK;MDk_SI4A4fwA%vm+&0*
S?FxH$eSLjnvpf$6gEau@t1)2!

diff --git a/images/player_start.gif b/images/player_start.gif
deleted file mode 100644
index add7c1cd99d223f47fe37d508ee106f50379708e..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1142
zcmZ?wbhEHbRAf+K_<n`~2!x~bh2yk@!z_iP%!FfXgwmviGsT4pMMO&kMN6cF>ZOEh
z<fZBagi;lRvbBYBHHFemgtAS9GOUD3m4phlg)0<=;vIw%U4;|egtC2va)X50g~S>}
zg&L)V8fAssq{Z4Lg*qif`y^yq1SNWf<vRtWCyB^T7M7VJEHwj&#8syVs7)16nkB3<
zUrJ-Xq}oDZ%_YJbOGI=Q2<R*o&{`p?y;e+bg{(dht(7xfCuqD{&R~ta@g@<|O~OXo
z<Se%eS?(0I-z}lH*wAv7p~*%CyWMJzyTzP#t2ysibK9%tveC$AtBJ=3tH2$mk$cQz
z_nCWabap%-=5$cZ@sOn3Q3;=;GQOu}e9uS*pOO!}ARTZ)HsY#m#0~kFoAREAGy>0P
zN1ag%IcFJmK|A82UfebP=-WzB7Yt)B86_XKPq?6&a!)box=HGFi;PRg*_Vwn@0sS@
zx5~R`TXe;|?5a)TL#3prO6iZ3vtFrYzf~!Gu2K9-qu{kk;VaX+_ok(vHL8E8SAH^&
zJnfux*&+FgYu<gQ?5n{Ar<{t<I#*tHs<`1)em5ldaBkJD^x8Wql`oy@-nlpY(r@@^
z()`b`{f}kGAM3XJAuTWcI$y{2es}Bo?9u(*tM{+d#D9(x|9DRQ5;OBh?EF8`O%HRr
zo@TW^ESUZ_efo>MS#OKxye^sdEp5)nym{X;=YPqc|G8|z*PMmFQWt+KUHYMB+2^{Y
z-z%2>C|>@vY{jp_)jw<2{H|I1r*_f3g-dR)T=AuK-KXx2pQmm6GJWTZjeEW<I{JRc
z!7po$ec68g>%QY3j$Qb2^2Ym@Pd|P8@b%~a|NlYhY)}HlpDc_F3}+a0K&}Jj2?maT
z3^O@pJT@#i*vuiU6?0<4!o%&oS8QrDQpFc_^3GJTo|ds_Q~drQ#hOKDrfR#&rGK?5
z6*y%Svo-7L%#JsqtG(G`A8lgg7Mt67G^8;qB;dJj;C!otGd*;!EHh_WEOB{i_f&Q7
zDc5{Nmc8ts(dgTDv1@Bru&QU{79SR~%=4m+u1yP_rfN=7aBTPzF!Mz>zqHDkjt^3%
zT_P&>wZES16i!*BV-s7(p%?l}P}#ZY(UFbC%Z|AO$8Wljz{cFR*twNsXJ+y-?&)H}
zhPg8jIXz%iZ$8%I7~IA)O-n`X+{RCbj~w7OY>@F>lyY){G>6(UL8Zlv2e}msz8EYz
z+{VQ&d@$rk!Ndg*7&w&U)_6R0Ja9zDI6%h0;sGOvpo|2|j1!MtIGNe6em}@0CSV}Q
wBdfu1&r#CF!I70U;m8FxA)kWA7U8T0#x~)+0}tC3+B6<@s7yPN=)hnN0Q<+RumAu6

diff --git a/images/player_stop.gif b/images/player_stop.gif
deleted file mode 100644
index cbceec23c1bcb3c9a2b7568532b9fe77a9e8c0cb..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 650
zcmZ?wbhEHblxL7<_?F551VZT&LTS=M*}|g5f}$l-614(CsS09EqT+2Lvh4zry~5HH
zgk>fQ%S;wlnIxb%Ls)T^u+mIn?fC-g3x(B}h-fSl)?6a2y;#a%jj-W5Vbe82#v6o9
zHi_8m61CkeYQIC&Znud29#NNF>Rw01y^e`{AC&PpCgFKpBJ8|W*ahj3i!z~?WFt?i
zMVvH_zonRd-YD^=V&ZM3q?-!akJWP@spmb^%zdI+@zSv3wQ<#Jv#J*sIoBLYuR7K}
z@Na%++jcji?X7p~2j{jAZoQv;`ag$Gcpo?6bNIBck$uk-X1^+$`?`4gmy{V_(q?~6
zng2Cw{>O@iUrLvJEno4jb=l|U<)7PDf9~G&ani=mQ@4Dcx9ijV{h!w!_`K%e=dFi7
z?>O;k*ZEJ!&wV~~_0!cmpPoGZ^zHxu|DYhF9#H(r!pOjo%Af->5)>y4?9Urgo0?l%
z+uA!iySjUN`}!wLoHV(~D>8P*%vrPI5>t8{0zwuqS-LnhEFyY-x2;>?hK-vx2PO9y
zIQs0`y=RZFe^QUGwd>Ji$B(=FCG@D7IbFDT>5{WYe2;>$<?TCn?^?OU^hj%(JbV7)
zg}HrHkFbjV$4{R>8(D<+@GENn`TOsmPL@yNl7_>b%^bpNE`rIN%FN<69&<b<K4KU3
zlo0HguyFD5etFX=9}Z?TFmq`HuL{X@WZ`#X6myv2xjD_3Q#QsxXhK8d`S}iwW-APu
gE;cfGOxB8xVqj$R3Rvtl*K6yltE(e~S(zBD0Sx*7I{*Lx

diff --git a/images/power.gif b/images/power.gif
deleted file mode 100644
index a998b721bf651c72ac0eba53ac09cb08d5d3d65b..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 996
zcmbtT?{8C87=CVhx_d}&^h!>($xynPWuqHj71x$n@hmZeB_(`-Y}Hh9Hz=c8WNT+j
zq*=NmqG4!6`2fsiiG&a(VQN3v#CULlibP3of^llpX{vs)KN6!fx_G?BKf(9Y$vN-y
zyzle8@7X#u7z~Vh;l(YqLQxb#h|A@olxmvh@py8`Nh#|_N$!N$gIh_@=6Fe)0G&c8
z<!RXW6A%ZH%}koHke+nY2WVk@#)^mZeQw@O&p$p<MO7t|bKzXxji3vx9h66&Q2n(W
zlY|N$C5IgF0}P-qs^QJKszxN`!f{iDKL<t%Dz|b0oiNU6!W*dvs**`T#aB_%tU;CF
z0%bDLk7))%!6O6kPun=V4x%17O1Kn(>4(lhDGsAz(xMKZ2HsT3NDhR6bIk3!P!Ent
zrV2{G%g*%H=Gt1!ynOEC{@NImDit)Mj{0*UM%<Ls=&ngeAOz;NNf&1ey)~;%>L$UK
z|CHEn4wOsNRUeVS2S5+d2JiwQ7NFZFpjjvzR+Z_pK~M+mJOOkxqui)bCMgwjWo)fM
zlc4s)DnT9~E#%&2p<!5(>4Ng6`D1!Zx0|(5TkS#<K?96^KaXR149%}P^7$2epq!fp
zXTyTjA#At<jT4Z>Zh)q(fZO(E!#r-Y-_vyF4X6xh3vL~XUG6X7TVOp<fl>r~upNYX
zTZ4uHw!>LZL&hMI_%nlFlUN@IpF%i>x>w4W5QpB%#{V``cj{B1@pFftn=!|sd!g-9
z5C;-K4Db$TyNPK)w-;;A#_$2u_d>=Y43G>OamWNj0+l$5qwpO7Pa$i-%s@|qK6@=}
zIPH9qwk`iZ(bodE{sQP{Q0TgCiwmC|aoyDuy{0Qm$G`>?%M+bfkKfTX7Ts@kz^S>C
zH$Qyn?JF;L?s)Z)qV?02H}4xse-zwwe*4M(-r?{XlW#k@F_Y99Z*-MTKR9~k^Czc%
z-aglLqO|V9PT$otYmPts{Pneb=3V|;va$gezkJ7k>iX<AlV0V$@w0<RpIY0ww3zzc
z@z~{&jvt&aeDTk{vkM1TUVlm6?av19Upe|pd+O=Fulp*Wp6cFo<F{qwm!;E6&v3Z-
W{{G8HhR*I@jr{S<jdrI4kNykH>@HXU

diff --git a/images/pydebug b/images/pydebug
deleted file mode 100755
index 30262bfe2..000000000
--- a/images/pydebug
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/csh -f
-unsetenv PYTHONPATH
-unsetenv PYTHONHOME
-set bindir = `dirname $0`
-set pyver=`${bindir}/python -c "import sys;print 'python'+sys.version[0:3],"`
-set libdirdir = `dirname ${bindir}`
-set libdir = ${libdirdir}/lib
-# setenv PYTHONPATH "${libdir}/${pyver}/site-packages/apps:${libdir}/${pyver}/site-packages/vtk"
-# setenv LD_LIBRARY_PATH "${libdir}:${libdir}/${pyver}/site-packages/vtk:/usr/local/lib:/usr/local/X11R6/lib:/usr/lib"
-if "$1" == "" then
-   echo "Usage: pydebug python_file"
-else
-   exec $bindir/python ${libdir}/${pyver}/site-packages/pydebug/pydebug.py $*
-endif
diff --git a/images/pythonenv b/images/pythonenv
deleted file mode 100755
index f19471f01..000000000
--- a/images/pythonenv
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh -f
-echo $0
-echo "This script shows you the environment variables relevant to running python."
-echo PYTHONPATH=${PYTHONPATH:-'not set'}
-echo PYTHONSTARTUP=${PYTHONSTARTUP:-'not set'}
-echo PYTHONHOME=${PYTHONHOME:-'not set'}
-echo LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-'not set'}
-echo "You should not have PYTHONHOME set."
-echo "It is ok to have PYTHONPATH set to a directory of your own scripts."
-echo "It is ok to have PYTHONSTARTUP set to a script you want run"; \
-echo "when starting up Python interactively."
-    
-   
-
diff --git a/images/recycle.gif b/images/recycle.gif
deleted file mode 100644
index 704e054eb0b026feb9d8c4ce27107097db317b82..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1624
zcmWlYe^8V60mdVKQH*aTNW@8BDMCsmzOEAA6Qr+*G);pI4ly!yDf0*CxGv+6W*%da
zD-d%SLPU8Fdzey4-@`P0%hK?ctMNS!;=2^WYl`u0*5$HP@h$V%J0|p<>l|fc&;QTo
z`QvlXo8_9K!ZX>DY{|bQ5|9KGZ4mCTI71dLM1eAZ`z`S~POVXMF^(Cw8yyC^-w1Mm
zu3m@r!QwL!J`a(@B$5XUF&eWf_z;1+?7|$!j8SBOq(c-vN}w7<SB>&9#`um=SP%?D
zn4$zG`f=0&Q63zq03USHF)|*Z`3S`YDPwajF+^bX2t7<-4h0jW4X5;cB5v~Q(Kc9K
z1JIu$!Yn7uiFhy0j1sybgp063gaR3W3X!k|qK7Q<JZO1k8E%27VFFdc!YnJ!acqEM
zLku-aA*Wz*fj3_?AvORV2E-VlJEDun;y?*>eaP}N5wj_nF@}$E#vwiF!WoYpRsnX7
zCWoAcvj(Ei9*?q68Q_=VjL&KBx9c1A@<xEH4kcK9wc6BZ!ip7y3#UETpklyB8LU^%
z&px&XjPeXQ8NEUIDB6EpToTPKX2c3{VGa+N`8nRwX0AP7i?k@%7>iWN=`q^mGaxkz
zyUmVPqfCT`q(EJRV#S!+iaPt9OoBFDG|@o{?S)Vq46*=Hg{XTKMA)u7tW)>HOqhmL
zkor1e9y7qFAZK5%zUnAk524KnejVq+4DB*QIRI-zg?V1@L6{+nIM0(FG9F{uAX9tx
zmx>lhoMj;;&{yjfR>1ghtY0oZ7lZ`Ehd*Hgem=n(>+1|2l#kvfJS6S&7-|ftY85Cj
zeh(Mrgy?O`=N9MVY?u|71aor}5pc#650{@mH+poWhk*70xDfyih|hB%50E}mSQ60d
z5Nd@%CZNJ3<)@ec!zVaM1N0a5_Mj0Dnz0g$@mQE)obERvc?u!M2y=0Il;mS9(eI=@
z&Ul1jf~zfFn2mE04pl1H5GyVShz-*9p_mO}=NO_7k4HH~xhl(vNLKUzFGx~WBx_P&
zt#@FeJ-KtI;e@)1ZhDrbvETaR&0X3-ndJ0jL++P3_k>G^l))oO;0o5LpPK2Kye56C
z>dK8e#q{N|ngEj2dN}R(`9F8azw5lJ*m>r3v`X9aUB%IQ5qogRaqK5DSIMTHTQc|I
z-n)*!PbKgE(-#*k%V@s!@)@$}x9_jp?b@tPs*{nAK5p#{x7RO$uUoycPx2~KLi=+@
z@8@pWCS%|G^Bvpfm(B0DY2C+f{U)vMxcSMyw`wEy;2R0U<oR`1KRA9f%Md+3-7&a5
zW!)}I^(`rKreR>99uMejnFkt9My-GNndH*5rH<B(FTA?9^ef%V=`&4C+uCcYl^?5y
zyMFi5$&)DZAl=<{p!7uco@2=ldwx;ZmTlh@KAag^`oWM^DmiJCWxhJW%vK!SfnE42
z{cpy_&}&`$Kl^ZdPGwb6Nde;0BxnBMWedO6W_RrJa*OI+9=?0+wmh^tWh<|4-%v8j
z%v2P#H$85DY0><EIhehCH=Gf2yVzo*xH<RjvhM3wU%8~8W%vF9e*C3sanD3&VY48c
zJ~-7qXxm)Qj|bOZK&{f9`}8WUYzZD=RcV*bb?j)?KD4f1NLib@u5RDQM{b(7+%E?{
z%%`mrCEJLsX8yCgFLrl#L)ZwvXL`NgdnL2*k-e-!*)uZ&r7sFozsg$KZk|YmgX296
z;;)oIs*YxhO`fJM4av!tk1l6?Gf?n?a?rlG37LQ5>MWioy#bj!U#%`Gj6ZEFY$?h<
zl4-*ay|r`MG|?)JcwS$lnl#+}w5-%wsVV9>eXL8K>^%L(h+=#^0N<Z~=TrKM3IJub
zvteq(D~8uMj=T*|B)J!ozQ3D#XSpE+HoQx8z_}j|Xh*h;NbGm~IV0248O}EKB($T_
zhW@vEVDARl%{J2TKhCa^lJA*&%3Rv<&OK9#cR{L>6c(fh$)_smiI2{SmEX=hm10lb
zlGM-TPfq>w>kR1LIQ!<-!E#JC+`i_;a_^&hx_aXQv@{ACe>}RMuJu|9EZ;Xw@BXXv
zoMS)fZ4Bi<Q6~!TZT!p8)J-@3(+)PRm`V;ENY5^q0KX;+o&$U3*n|1pWFUF%f5Uwc
AVE_OC

diff --git a/images/recycle_20.gif b/images/recycle_20.gif
deleted file mode 100644
index df7fa308a177a5ab0ef2ce62496e2dae5fd9a1d2..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1258
zcmZ?wbhEHb6k!ly_`Z*Uk(+^!kB>=!fnSbIN<xxdm6=hOk<pkzNRL-TUBlSW(AvhD
zC740nMU+2-K`Ng^riRfl+{h-_+&;|CHo`(HOU5b8(l5e3+Fjkn*~QP>-P_AMFw7++
zBp@OzA}%h@Dbc|z#m&FKKcLh<AwD59H8!;<zCJ%ZIVm|KEh9H4r!cRuygawMIHj^U
zqr9xVvZ|!Ex<<Z*LA9MxtDDuJhtH&4&R`<D(Nu1WX?!-b#Vz_(9H+`UPto+AZs4{^
zAz+Dd<Z9#CmXOeC?%@;sqG$QUP701);9fo{Gjp9+;ZFb3<q1tivAt!%U1d>yl~I#x
zVrSMzHx?(&Y6xv;s-4o1Hn}->a$CW)p4zrmH4}~%PdQ%Q+S1b9)z#ZSv1dwm|AdJ%
z`&%YXXq`T%d*1Zk`7`<^Po6S;(#+|zCe55RefFGL^B2yWxo6`1MGF_tp1NTEf<+7G
zEnPQ%?ULD>SI$|yc;UiT3s<jMzJBexjcXTg*s^Zx-gUb+F5bCm<^COOw{70GbMuBB
z+js2QyL;LB30qHZIJ9fi!F~Hr9N2Q=@ZS5!7F<2K?dI8ScQ5Tf`e@suGpnAQ-}wH*
z()SlvJioN%`PCgSE^c~peebKA`#xS<|MAxL&v*C!ySMT1;ln479yxR7^ogs7&R;x#
z;ryA47ti0jarWw!%hzvQyL<cg)fZ=;-8%H<&f!=0PCvMJ=kdLVkKW#W_2~Nh=l9>g
zxcl|dp<mBVetdTM`>TteU*Gxp>guy+PhUKH_U7&D*S{WreEsC(yEmUdzWeg!^Y@?M
z|Ni^SF!X`qPZqEzbU-91PcU%&V-Q~9(AadOiC5Yz=f;G@#wG@hfCUbXOs(tV_ElW`
z_;mGj{rJ6kPj7xW24w6D`0?mzcer@mzM7w(o}Lb0VA^oz%d4y0K*f8wj%GAJ>t7zX
zw<eS6N%m20uwvKe=hlTreVMcI>1pxhdb>+jRy+hMHknsr7}Uh<p>Djj=H@FQXI3tz
ztgxQW=4@{PS&Otc8N$s#ySAA<yqJ*2I4P*9?!k^gW~Yh%25-JRD%B2MA(X(7k~l?$
zomXC<tK-v>HZINBoe@uq7}E|+(zQwvk#u=-S=~2pM^4t%?yGCW;u%6-Y;5M$44h-e
z%5|lcRWWR1z{N>S$Cx?TokLD^K3bv~py(iT<cp)|X%St`gs>}y9}aM+`^`3aI<dg9
zPqATnKw#BT79P&{cs>IGp^TOeHjyW1BEPJfv?SbT*MbjEUwbz)3-U-fJV^6f6uex;
z^w1L*Uat5(H8(d2G&OM=6$#B<rIHz>>>co6!H27-udkDs&GGT!;trvu9+RaMtFEK~
z74NON`)bkY&@~ZT680V{IVrTnC-?<e@mju=LoEl3j(Sbi3Ycdyvq=%G`0FBO_C9H=
Uf(Hi*nbXfoS!KDgvM^W!0M1YomH+?%

diff --git a/images/recycle_file.gif b/images/recycle_file.gif
deleted file mode 100644
index f1ab50cd08887ed354bad17c69af9f5ea4ba0647..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1301
zcmd7Q|3A}t00;0lSvJe#X2WKYa~$hjnbci(y6a)fmrgy7Zsl>^!;y!i<<T)*orVu>
zY_nt;A$6L?ta(&3-&wXshM1Y}myAOXDc_w3%6+=O;-0U^<Ms3FhnN3xzr$ajBZA%F
zAMlYC7g)&vHWPtu0N5sg698O3z^Q<>7+4#C&97*iOoYuKuz3k=FI(6qqHM2O*meWk
zQGlnT@p6E#0{AIFFakRjAoc*_6xiJh?8C7324HUl4o2Yk%*;84;M{_7ZA81aqTSMo
zZah0Tjitw3M{<dax6<WElM5xzn-cF$VSPhQJVL$VeT?zl38rrlD<Bv;5nN6UesuJw
z-tTFgleDKtPd8Iet9;I=eL{G_Ay1D*L`2YwBQNpK$H1~L#*T%?#l<lgjCet8LPA2~
zpTAwrO-K@6Nou;xWHOUgaVfB7Mv967!IX)RCLS7!O-oBlhuunLu~=+2I|EjDJu@?t
z11n{7G)yj+%Y#W-ycQ@6me1w$`8Qw{+#8zoY}hE2y^)fWlOqraa$9*qp)e1&l5rbW
zE-Zvq<rQjg78MoUxpSukR(Dq<5|x&gipAoxva)iRq+BAAR8&+*rBazpR#{p3<jE7c
zTwYyWU0YjQS65eGU*FKs(Ae16)K%Zy+^ke8TVP#El}e>nt2G);TU%Rud%ISv?da&}
z?Ck7<={sM*db+w{y6&Ey9-U6t3p42SdVODCpTS`0@9!TN7#JKJ93CDX85tQJ9eoKi
z{xvo>_UhHE@$qq^(Ks<NF*!LoH8nLoJv}os^ZNDc+1c4QZ{Ez!&Aom5_T9U8^Yily
z3k!>ji%UyO%gf6vD=Vw3t7~g(>+9<q8ylOOn_F93@87@w@ZrPu_V$0L`@hteABPa(
z0OU3>LD~mAF7dgpD{)$im)w6wVIvU{xupB8Spd5~VVx{;^^eAcY9v;}?H77V4lP?h
z#NGCF#zoPhmp<!YJ&4(dV?PM)D&)}5Hlg;-ctsZ1kclW-G(~jJ%xcNYO^fk&xk$M-
zExS||)t#+Z(~#M?Pt)Z)lgs#o$6>K)4!_jQ;oSpM_~Pmz;qap~_r5~y^bH*HJXd?h
zr$`<Wb^N?yu7zV1vH5manTSaQWGHCUL3;wHd0YrhI7kaGVcG5vtvyv}VyPf_o(@E^
z#(SyHEy>A#FQl#Xnvt;DgHC%Pk&WzBmb^IHbT&_VP;qEtR>480V+#(S-1yd6O%ag9
zL3T3_Lj%l_SU*XCPX@&llOSWE*shK0FvZ^(V18L4@W*%*kWEnDTS3|j^x@wvEX~I;
z=A;6`1a*fyN5mtAt>c9Ul*|aK-7V6Ac85cx$EJIwkebbDbD?&5$ocwK(@&0(p&brI
zYAyd~8w%g^*_J5KoleL}Gk1(>F(rv2<?K{wm)9<G^W1?z7aGl}75Age<OjEcXrrT}
z+H4Z-Q9on~xmSuKc%%!WKiH{BD<brM1z70#LL$^_U5mmQsLyYb^YaLtE!FcqiS3Ji
h#AuEYfj=AWiLmxxzs+$<7t&`dBo?={z9uHX`(OD-M}7bR

diff --git a/images/recycle_menu.gif b/images/recycle_menu.gif
deleted file mode 100644
index 74489e27d5e1ac59f58e4067b33767eb6d57ed0b..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 2026
zcmdthi6heu1Hkd$mYEPC=58iO#Ovy5o_Na5^f=0~Io`~DHx#*Qm^mND7@Ng5Aw(v3
zdYWUIA|%oCXfi$H9hIR{z3F-0f8+iB4WF-zn={fHdkS<6SO<U~KYkb*7$(HWhlPfP
zTn;&FbJiW@?&#oXX>KkjC-?o}`{ys8_x5%h8<|`0HoJN{84Zk*;^N_l4;q;D1N{SJ
za@EN1kFqnf8d%I4TIJyVu9>-+v9YnF#H6&;w0j+$yW;nPcdtp0Q+gkE)>KkAgu<z*
zr{ecpR9X#J(3dpmX;CfXb=#7+Fe()OrLL-0N2k@$OPV?xHovY_O=M!)Ep6&xdIcb4
z`H4yHIJ=|K!`-E=t!-^>Ev+(UVPmR!hUCnGn^i!~O64@OK!LOivolla8CToyw!Gml
z^OvS$B7)0F6#xKOTU&!<0AgEKOeYFSkuxfRxYw91yqL%5X21UPPx0nkkzg@BC0a{M
z%h%U8IXPKgTk={<$Z-PD`?SlS<k47HolhWqSYO%QT+U1hFRdymD=RaK)pV#jNgb|8
zOG_9X>J|&<Mf|5<L<>17p%W7mXg{<)_Pl&F;7f5?<F9)AIaZMJ9}RQ}{}^SqF+nCE
z^(#`rm6vQTo$)}Q>$qLV<MeDTk8LbI{3saBO%4WG0uq;iV1kd=HBV2RC(KdV7LB~$
zN`L+Qk?7U$>k9)=y+h{y7K)}v7AE@yQ*FDi9~P$u*!x+d!kr_MFW1-nwlUW&etD0a
z70hDR4|cb{n(kYF-m~(U@nN><x714?HeP=fPKy8N|1fvwfX}AkeSLhqsO1DoX*QXN
zBj+Vl;Umk^0|H$P7MEWB<K+LcQ~$^S{`UePbpU_>DR>A607GOAeXcWrTnR~CX}T+$
z#Vt{_j9uoj8z;))4#k>o-M1&n%1}cz18;W-VvIG#ZF2yLS~G9a^5p%NnTGRupL`}6
z5d;<zi>f`(9L;U9Z$#Zf^|mcAjn&Wtc%3}r5l>|tgTcrvp!?6a1wZXGL9veff|t?>
zl_*j)`_&Ohvgcs>@rPuzvwZc6k4Rs<gC1cyL15A-#{sWxi0o*sWgP#%YzJ92%jg$f
zY=NYlER00qF~eLSIW`$yTO)Z<TSc})AG+?h@Wj&IsLf}{tp0@+Iyvpsn>8p2ZcLEX
zxj4>KI3c5obY+DfR?7L6e&BWWdALEjevtl-t7nBfj=X73X;D2MMzewq?$slqKb$|o
z7h0C_s<t2}X>V6^i1G_xHb~YlC<&~B31M>604uiB6<boj5-iwit_}|#ge_!=K+$t5
zoNL|+MLR_jx;a+hH(Z$265y<WEyNNOG)wKA<s!VIAFD;<7xQm^o~|J(1tE3n3c=Ri
zfiMr`YN(TK;RfNR87*2tUI3bHq6XpEkQ<vxbB@`;j+jWLq{LMO9g}Dc%?x*brBSlh
zahOBCaj{A=L#-YbNXrG|n<&6uYh;40*-!)>Ln&;!b-L{%K|Ssb=drSHEh$ez(Q%fj
zff)w5XdWboERAbMpfK7U;HC;~*#1XOC@e4+Jx9)?s0lNby{W{KppDdKRz>r~7{xDg
zV?`xp9D27o?`z8z?&9+r?asWmTAk}#r{f~i!zrq*-TyH;d9-id4^!6_m71Zfb!{mL
zs@;?f&w(6!%bd{04Ua(Yc7waH1?$D>oqeXUlVd%?8-tzk6QSXVK9QEXNjwnwz$w8{
zq~5?)Jp^yQ#Y5S2j~nJjS!N?<*h1V#ZUN#7n3{D3cY5=zFEo2!42A30J{dWbBV{3_
zlh48ObPBD8-*ziJI&(Q)<L6~A*XCG%A}zK%rgKVAsnL|VwR7^F%CKLFAVWNo#Xd+3
z{e=IkP7k4Re0q|<Wl^;b=@1faTl}r$ub#8+S#|bw!h@qvuy_bXv*(4n<+5P669ujo
zhhHizU1(Pts-8Fv{2Uk1Tx+v$;6)b|EwZzA>f=(@OrcSNyZtTZ*Kxg2`3K86C1>x{
z<309z_g<{PklS$ip_Xm(w|{@}F~WMdA*%OR3-rRyC%ft7U&wXSrv(YWso*K=?P7zm
zmxP;I{F?8>Jeo3<mwJ!=t^<+yl`Q6wO@!Q-fU8etCx<w!{k?x3fw$L;skrvQ*za$U
z9Q;_r{rVQusHk4D;<kOEoy(ws<CN&lXU<|JFFsePe1l*h$^K<N8>}?Xkjgg<QL?IU
zUH?gxuAoSERp8GO@m;!%S5rQ810nIunB6mev@XdYm5qe0c~KF*eWqpJwl;x?o3Xpv
z#?MAD*xLfwgX?VFo#vm;m%NaT;VB*sf<qHkxrVM-DTgXSK{FC!q~GPj+*n5h*M{mE
zU=*vn#z6{5J0t!!5qU>&l!}HJqUWUjt?-rS#C1+SMNj-H$1A0A9Ag`h40_yN<vs&q
wV(%^EGcc-Rh-)${C{jb4GC-`OTvZZv%DgZXtP~B%CfZHK23VSaK)~6*0qG()H2?qr

diff --git a/images/regrid.gif b/images/regrid.gif
deleted file mode 100644
index 9777ff3cbba3a9761b0ea02bed4a9ab332ba79b4..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1098
zcmbW0|4-C)9LJw`_wMe^m&u3xFl-aOtSvo~;|dN+vNwhvOqM^ul9IO1El#=wLLhMC
zVh0Gah8v|a+=wrRq7~aUt|1NjL?n`0+(%Fvfr+EAlDi`?j<25%{SSKm^7`TRe7?Rs
zwwEboUwoWd3oRC~9tMNKWHP0trR6G$&%`y<qv0Id+Hm1(^mLug`zGL8Uts^0q@RYe
zZr!XN9<FxXK05YW@$Jd8f8D#<^t;sgN5S7S6JGb^kk{*-JvTi!H#h&w!};-t3uk;w
zv(roS%gg^PuliP3msVw2HoEP+OsL2*Nv#q_fCuPtjqN4-OfZot6O|(x813K*D06Dk
zC3_r4c*ba>w#E}TBlEII#nD>gb3-G8iExp`Sd{S~6f7eV#<Is&WNuO^2jD=403!#U
zf|ddTC<g=sKIgj!B@5xW8_pb$WRW>eWOM?~NJwgM49xZ5hgkOvFUwREGc=<{yN@Rt
z%2;N)OBojno(H9l9%rOmQp+3@nLx1n@iBg&i5Mtz>xj(!`=Jxq4yXa<Bt-J3_36+{
zZl0611YSS}{JN)oV0zH?#%M%`$uklxKZTVkRCI&rz}RUY_3GV%FqN>oJB-&+%21b&
z_+59JaH0Z)_$5ebBFYMUM?m~VC%n>t%;`kGR|)S)$OIyT;B+KN?l4Zq4pq3%S)rFS
zJSRAjAPt0ZIt6h7u~^!UU)|XH2=SC!(d~v#gHn~BARZA+Ogf<Q6hpQ^EWp1dm@dFp
zD_9Y17I?I>Y!Ss{oi^*TP#D4$fm+*2^U8&6a9RW(R=^7602V;|hV3~b=)vWKzO&Po
z3r`kgD?~kFO5n9Z&ViVNDJ#0o(B*;^!ls8%0<Hk`$jdf8@$1E6Kl1+_&#ybze*tt2
z3c5Mj(b88*gFexg=ePEs2-z5wL%-VgT6w4<b>u$x+1?t}i+TK_;hC!+ho{t!AA8~a
z{?n2AEm@bMFQit+1dL@}?z9~EXk(DCc8B3UZPo6telQry%7Zp1X{+8m`e$-`cuc}w
zC@&4SrXJd0`@AnvNh9{q4+HDZCxdVq(Lv_#tJ<F&yYpqh!KjIVE!(satifSN=?AYr
zn=eFFs_Qh_4ULnP`+6e+HaY5A3Y9+wy((NDKgCo-&9NH>*`A^&ZYIq}HS`Kz^(Zey
zUOCj#_5Mj^exxhcU>-!g!LxF4#cb}cX%30MQMmZtwfdCI=EmOc{#&i4=p)$^Z}lux
x-F^Fu!yVr){jBIRqz`tyvSzzysJz}D-FSR$arIM^vLG<@;wk&}ojiwK{{cdui?sj%

diff --git a/images/remove.gif b/images/remove.gif
deleted file mode 100644
index 12e81f86fd2dcc29472549b0e56c7fd07c2cebda..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1457
zcmd5*U2NM_7`<-NIH6sV`nug1G^A;>rjs4Vt3r}hVN04giEH0G_Q0x+2S6L+r<yc`
zST&)^tf-GXAz)0vG!4ogn8wzDG=9V=5>x^tgb*#8pxYB$`JFTbXlnUBI|3CD&m8~U
zt9#D*zH_hTTEoZ}?v0Z;`DGgc$bvA`>azmA5gHzmqN6S3;b80Dj<yNEaG)bJ(IuN>
zW83c<9o=>ReS035@(Gjc<!5~4T&wVIXZyLeo$qW|_wtbbmcDVaQh16D&)C&7_l%vt
z$^TwQ;QjP%=e5LaQTxa!e8`69w%0$|dFSUl?9cAr@x|_)pFX(jgZ+Ew4o)mQw0q%^
z@r#e`yEMM%(!TM_`yZWm4lGSRe*MXV&mKPV^h-ydpFVMH`qg8vojQ5?%&FIBzM4Ap
z&5MWUPaMDe!gJTB4qZ8V<jV9b-_D%8`o`Ix-+uGL!iDc=XRm!cw><yl;>E?4#jlsH
zF8%b~kH7wK?Z)pnR#sMU&;J|r{N@(~vW+O12)_vtmmQq{CoJ2>``-WrAuh>)X50ge
zD7S3}1MoP77tghan`qB*3zpz^pa4|Ci|71pK^LMJ^m__$Vgpq2Ld)O?Yk0WhW??fl
z!DQffs{_rerBZoIsaV`}>)HU>%uTep0ZpE3*~-hAv<esitdm3#Sj!HWu|iXzru0lk
zNu`p#aY{Xjw*}9yYGqx|q!lHZOjDUkKA`{`fet+Dhwh42;&pASt|}X9T&0wTT7*0|
z2{C!NqfynhG8|A{)zkfTn53vIIie703WC;_w3<Ppo>p+E#4y5Ne50&pbPbtm6<)Qt
z+~i>c1`h{ae$W-5CXtBIZt#b>fdPRpshXM|)X>Z;eSJMKy6kuz{;V*NQdC{_6uk+z
zLkJQ;1?V6)ioh%VH7tMuI$?{e5*7!N$&{PD!Wl0EO(pbuM}YnWxEZhY_2N)=Ru^ci
zY)s0Vuymp?5g!IcKc&&27r?Z#t?O^;l4(D$#Cv+WVzi936!HsM1`P~@-7d6AP*IU(
z8iT`$DM$xM&Nf`$U<DFDK{q@I*ggn+LLLMd(X<RJM?{c-qT9WICV6I<1{?vr48txE
zF%*?NM5Cbqx)G4TM1_?nJ~1TX4FEUNW-|c_Sxx{DcpRK$J-gQcbfB=aYa;NcM8c8C
qnk+*PuDbz%Mg?b#S&ZQyhIF$5_D6O#+*Mfq$4@Rfe_win4E+HhTRboT

diff --git a/images/save.gif b/images/save.gif
deleted file mode 100644
index bfbcf8058684519e1c10c716bff4ec5d9d50f9aa..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1259
zcmZwGYfMvT7zgmD^c0S>$UzEHMx+=-L`GR<SmTDLwF;Oj3<pYaa>}Jpa7b&J<C0A-
zs{;jTDTN-cEs%jOrkJX1DhH~dVGF@;vJab1>~0yhkqiXn?6~aXe&60VdH;F-Px2<E
z91`zQ3P1o$@P?nCpTEC9pU)2q3ro=JaU7RQrLyMc!$#v#lj%dV`52?UJtHIII7yx$
z$*im_g+h^?oqejOhad<gqpvSFH#d)AwHDZHDwV3Ru&}78NX-}=EG{lCDJdys3=Nf)
zm1#5@tyX)MarLTBrz<ZnuV7reR#{nDRaI4Ow|{Z{dTniOU0q#0<HilWUf<Br(Ae16
z)YR0>xPANb<;yKCEe3<Z$hdpAwYAk`GPSj}na$>ojt+~(LXu=>XJ=PeS9f=JZ*Om3
zU!T=#wb^X_{rv+214BbY!^6W@u3WL(?IR;2H*em&b?esX=;+wk7)4PN6BACS^WMFC
zGcz-Db8{}2YiVie)vH%4D=Vw3t7~g(uV24jUtgzb+G|fS@O%J8?U$DSd?Dn}Je)$s
zNL?ZT4{#K`K_b>CCPpJXoH;?H1HlXc7yu5i?1&H=fQJE!gB#iG2A}~bfbeAzE(Dav
zq(axMBk9Egm!?sQg0%#YY5?(Jg7cR{J;EHC7eJwCnwYge5dwJtr)xkw!X*CGb3aMC
zfFLxEQz<<kcu5Oz#K#*)3CFC*aFWIXC?>%R^Bh^E5t_oOG{^n@22UVHqc{aS^8l&=
zH~{FDWl^mVe0Y%Nfc*vt#DIk`voHQP=?Vs&ix5Tw^J+exRpEhX8ma(@e;aZ^!b@el
zKLxq~C@uVb4!+hxm>O^$$P2(Npy>jN`s_lrdX+f)7N!GKJy2CzLlgog0IrG!YNiiB
z0e~R0dIv%X0m}ze`Hox%ZY9t{pd^4C1CI>o9H7p@{28dwLUai*yQ|IvA|Eg%pt>h=
zA;+Btv=S($*Z+%obN%Ko0J;t=G&I@XH&MrV6m{}pSvbT($l?58&9;NFTLlW!3C*(u
zu_w7Pzm59)rZfu<^z7YzeZbkeMG}Iu=Py~z+m&N-H7@7J@5}tfG4!&3I(S#jkD=3s
zesfHgD!N{x=aM1Pa`H3^-?#1Bof0E|*Tp4+vUV7ev@aX8hXp}KdxMHicE;`V>JS}l
z{5dN9uV^HEO8#+tOll?q&l_4F93kTjl}f9Az;8j+gyJd2h~(yAaqj(O|6wdBJvi^Z
z8Y#Pyh4DqQuUK&ecg_8@|G^KteS>1{D=Sk?lke-IDBsBB($J|h$r*NRqJ%H{ZvM%g
z?LIHrQO-xt65LMC#-Q|#cTQQQx9cW1aqBmnC8oHWUjF(vu5s3H7%mEqbD|5A2?4>f
z$K$M0t}9ZChF25go4s|jk{X^Ro)eN&=AOJD_*{9O#U}_GC-9^5N_~j!>Ey+@O69mV
zKrWCV{@*;zy_miD_}QiD-w9yL{us@>m>Ho4U%6;+1pCm|IUCP=lo)JFJ+yOB?`?WA
z*q-HbpE_F1pPg)1HmA5`EOD8NRDAO7EO-KUs=BnXvhx<j=I2J(gf#9r9&3q6vsP?V
Iq~oyfA0Ec*3;+NC

diff --git a/images/save_20.gif b/images/save_20.gif
deleted file mode 100644
index b900e48013de1c544c8228d51b88c3a8ee6d6d66..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 585
zcmZ?wbhEHb6k!ly_-f0*$;l}rBcrOSs;Q}IY-D6>Yikw}VHFeO;^N}v<rNYV5(5Ou
z$w^5`8EI*`xw*wfMb*{S^$iUzEiD}_EnQt*Q>ILrF>TtM*|VoFSu$_=@`ZEfE(C)0
z>sKsazIyfQjqBHM-n@DHjvYI<Z{M?L&+=2JR-HSy?!tu)=gw`te0le^Yln^;IeGHr
z;oG-Q-MxF|(xqFsZk>Jn_{!tQx1Kz?_wL=(r%xZhef#Fko2Q>YzyAFB<<FmAzJC4w
z^XK2ce;L3)@h1zI)&Y?qKQXZVb7)+AF~dZx-=%>e(nPA)4aoaau{Uo0_Sp$iE8^B)
zfBm&;uUr51$6xchFS-^pd@b86*MB|xOT(VH_16?)-7MxXUU>{u61VvH>$_5mdfBEI
zKjm2{2h?X;wI`Z+%gHY>n_{`s8Nf=yvl$FkI&_Z)pDfUvt9a%0SE;>6pFTbqkh6TA
z*EZ*ie2e7t8CS;WO!7`mGX5B(7nN<Q@Rez*SBof<lbYbEGgq{4#`XFqwa#d<;PcGA
z(qUm|D9OU)<x<7qoG(=QwqibuOJk#fTc`W_=QljEly7jeZ7~q(bzz=T@dfAyf2W<j
zVS*gbCT27U^#eVY{iSNMXZqWRGh9Tt0uEgS`!oOgW)-Cg?o2BfRONtS0QN*&{-O;c
zVhfHe1_wnPkos6P{a^~vP*5;|l<bY`zrI_6ZGzkeh?4ae!6Cw5{1W7AP!KjTSOWlo
CT`XAu

diff --git a/images/save_file.gif b/images/save_file.gif
deleted file mode 100644
index dde653798fa9c4a739a960b7b29a80b2a92cdb90..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1335
zcmd^+{ZATq0EbWg)`4TInwzzPt02xd)~dNU<A?!jeMP4ZN#h8sl#-1Q(<(7*cX6mE
zMtf083AXgYEZ)!>=9nSIG{jkkwvtPuj`WA-vQ>96uDLYMuxb{vj{P0`eEx#xdFmVM
zsy^z@f%wo71c77#4@Ge&2^45;T`ka&dMn|3%7|SI(rUsNDbO1P9k^<NMeH>AKt(^N
zrEWH`6abZaSVxE_qOclQxhP=tp-T+z4I&;|tVThXgMG!S3?{SHfrnUdRuhQOaE)Fv
zPEmU?sh0&cX3&HoYRqIbA#R#n3@RtW5+{R(VmvL0+6lNqDHx&%7ePIxB)uf>24z}_
zBTkyxV?d=@GC_l$Am}3Ff5&;P7^202A%GYOVkHXHgk~|WbVUOzVb~bJmT1JonkPt5
zVG4)BQZ<I&KoJ+~@c7_56EOHx_At_cB02}LMDhB=K<&`Xv(gEQ*=59bTA;--PXP1;
zfR>28j_La`)JlK~y)uE&iVLh%^jQ>Cn|%vDfsv#l6l{$;oDQYV40OK0GgjiFrA-=%
z!J+rkzLjwFRovWbCOxEMAt=^jcqA5l7>pgpqT5mSCF`5_1y_Qqg*YhDz+DdXA&Yn!
zqzmU6e5jL@*co)3q<^KQ9v0MLaDPxb&+-N+pe5t~#NnO*;$;N`L398`+^p1|2p0w&
z4p3<lCtMi=po>r$%;0PyK@`-Wpb1CZ4B})!Pr^w8Btd~f|G!E8L(WMY;=F;9ID!WM
zw0w9fvqba6zoUTO{S9_C-130Ybbb>QBQMu;G0xh9t+JU=C3egarC?X4<yv>}^hWxs
z{r#$zmccyvrXo*vqS4|X9p;a`rJd{eYj@3&`w3biKEJo^s%qyP-(^HXUVTfhxx!X}
zCs&*6G>7T?nKt=|n!Z0M7S0NtlJ@1knPzV8m_oD6T-hve+FsilQoL*5HS~$!@n5Z%
zTicYqae3c{b;_xinp}Tgmfdbv?(y!(Lal_j`9M(4a5#DVVUWZ{s@EUXO_mGOLh{zr
z9cK<N?&vO9@tTU*etFVRTK+GS`N&u8qn{p~B0{ronhHiPT+cdJGPl+Ha_jeVN$TtN
zpCXUZGd1&6vF+&b+=t(ikE~x8VwtO`Tf7`eJ|6RYuAQkp=BC*F*Pe3^gv+Z#=W!jD
zF-LYcVxO5F$#POGiWjH8FWMhF`F0o@7V*E>jbzQ3p-VZ%ysN34DZ&B?xS=O6if(8B
zc%no~w(nwJHVi3>4H`vbQEkp2!`U;u)l|Neu&-l<O&#6EGDC8GfxC>8pWhii*eDcw
zuZbp8rlE+iQ)C&=IxZKbrgp+*>)*6_ZA1E5|4vva%eo&H^j)m5W+ZtB$M2>P+xt_~
zRqf-PlPazywf55BNcv5`2$bHG2{KCIlFhWW_m;J{EYQ>)X7h*aq*7Dk&sq1{WYEX|
E0>q>0Z2$lO

diff --git a/images/save_menu.gif b/images/save_menu.gif
deleted file mode 100644
index 17fb7640b899e88a1d60d86264d970566c4b2ce5..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1299
zcmdUs3s;f{06@R*1LP$N^l2|J40ADcJyxWaJGjUPmUA1PSz{}~)K>14!)0k_Oc2qU
zBH|<QMPm4vsc4F(#jGQu=}<YAI$x1Z(wxiG+-9x)jNN~5?+pwN@b-!H1Yw{H0N%cR
z%jfgg*Vk9q)?O_yOC*vam8!nKe|~<R$K$oOv=rp!m1woTJYGXx-OS8PdwcuL(h@Tw
zqD8M)7Z=YhE%l6#H#Rm}t=8)5>U@pHVzu5M8}sq;`7S=*`r^gyp`km&!?v|G003%g
zYAUr_nM88SY@S$H2oDdZQmICxF+M(?!{KCPWGI!&A4H<+{{GGf4;TzaW@aXZLP<(W
zGMP-HPoHXgdMty3Y&Ki1R>ww1_gSr2ESAgV@&tmoxVX{LQ9nPwcRK$sJl}Nz{(AwS
z--rMZLO@aoiLwLHX_*j^t;kX4=H(Y$FH{v3mz0*NZ<K2)Zsv$nQ!!W^-k#tflOj^J
zE&A5BJMA5J+p46FAQ6uvk;uB#w0r%(ng<4l3=#+fZh-AcPR>r`jl)vQ%(LHTZ7o%g
z9pEjH^HwCqr0aXW_SfJ442kdC(Gp(+e3pzrV#zRRj)2sD^X{PDGW>nqXRueT+?ye|
zmqpt};M9xQScLmWMa1)R0|*n`i5W^Yf~^_|5T5WCd@hAG!V{e7JqoitD7$%aivJ6K
z6P^dZCJF!yey4YL0SJ;sB+4F20+TuPNh#|hse;R_`tr(~{XBjB1#3zg#5_7BbEO&4
zR4}MX9%&JDl`^<I<W}r^R~@JkVV!1IR49-|-&%3JRm`?$kV6*AZp0kxE%itXJ*zAi
z^ipwI-88s?57fAZ{5*c$IyPzf9@ZPm(bv#8=<%$7r_s*Qh0;_%o!cer-bk;Ku@SKu
zC7YTX<=0fnnm>R<zI*A%x_+lIFp4<2LG+|?@yxe@j%6?1UQL?<T|()a9o*#cY3H%a
zCo48iCL50I>~kbzMEMR#ds+dSwQ0(qh-;!DeH2lK6dc}HQjpC0>CrYkvB9t<n%Qbs
zp3btd#m9&?p(P;Z1%?zy7~E$){z1SSU1u^(O|TqRPv>Pv5O5>qk_g`+1j@+REpa&9
zPDB#HCZb44R|p2&_E(!ruBobM*|-DI@?y7fr`VIlR|mYKa+EX^oqa^top*)Yw@Up{
zF$+(qCN4cKFFhQ6hOtX*I0Zqt?o(p49VXJfFw?vHnc_@w_(aNC(-)JMMr@XE?EGw+
z_hVO12O_GazDP@4L7)ElrOmZ7QFSr5wNdAqZ>G*wm~RP!K-3``+c6OYB|^VL@h(|C
zoq--1>6u~MPVX}V=iK%_nGHkRCfvD6`-8hQZ7&nflR65==c3D==|W&UUH_<3Y`vV9
z()xVm_}3i+0lXd>G6}QTDRhyw)ylFGb6wbXy@Eew??Z9+?$~}&s0$34Sd&MN-1ao}
z=EEqG{AF&Xo7$t!GyNeMP37!%hW>;|Sovs!`RD=aG_46!Z?H#6ytjW?;aj^ryw0L7
kLkAyD`5Orr;#Nj1#49ooFaL@;dfw=@`kU|o6$2dn7l1vnT>t<8

diff --git a/images/sin.gif b/images/sin.gif
deleted file mode 100644
index 27f1b4ff1c2ba5d9553c5ddb4b5e8750ffce6692..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 960
zcmbVLYinFp6kRj-WbV-p^b}5Xgg~w!=F!LX(aa<Y?U@Ru%@<=E+bN8CYiANJNkf?l
zO=^a4J3gvXg_hJ0br=lT&<|Cn_Cg6Qo=D0VieN5P3T-VG1EO{wezAiFk9X`paDUu;
zt<74Cz592)*c&@wz`%Vp%YmcRqEae29N%Hjv`-O{7KLo2W|q|+$b-0_iS*g!@L|vi
zoXAG%em@IoBR9Uq+H#nUj8l8W9fsEdwjBbP*Ga{JP9Oxd!cXX}f-6v&nAM|Ihbuy*
zNRkslL;()axAj7wQwbAAg$#bLBkF(?P~qxBo)cq)3qh|K=W4Nv;!09^<2-3pp(_Z9
z6%y~k3<seQi8!J&52#2+v|t&ZYY7Wc22Mez6R@I`afB%vYqR_GGyxN!_vROCm9)~C
z6!b^U-3Tw#?28q5wa2S)rV{#%sMFv?r$Qu$R)Vn}a1KgKx803ZFQk}|!XnuAz>FVg
z+$hgfyNMFMK3oJkfe;{ELOg&04c9QesvxKX3h+UXHy}*V=2jF8^w$t>1_rF@$U7j-
zDBVV9-Ha*DC8f*O-L7pNtvp1jKq`ga*qtJd=bOM?MH0-zARq!ISF3wa3{t=iqW&Qi
z!Lr-d<;7YVr-GuZ2;>3sXhX}J(78XQT^?E)0tEOAxCONP+JRLB7JzPG8nFJe5k@@q
zl2$tcm4?jV&q@4}LAwnxfkGNJL#ecsQ`d6#-&S@#n*g;xn;t)Ajer>l4+mfaV?Y`(
z_U1Z>WrFz^c5rjy5NatX8`8wkB<eQQ7~~jMZ7du>_aTTZawe=vung$vV!_mWygcFu
z|37L|lhgbK(6b28ww<L<zkaKwr8Bu)np--)E+7g!PtINaAkzNYx63czEr0ab_Wb$I
z%<9!sf!*_4f4wj_aC%cqF1hfOv*S$L$jyoOo_%7e_;mW8+wY$|SDssc?5nLezkG5q
zF!1_w{Gsb-fB0n2(P!Ry^YrKovjdmMH}@I$zB{x3!kyH$M=JX+4c;ogGP>c5{g>^Z
k_rALB;&0ba#eWaIOQ%YMOGh{K53POt#~a@~tOfD>zxS#otpET3

diff --git a/images/sinh.gif b/images/sinh.gif
deleted file mode 100644
index 207e16b23986c5b4ba34567441c03684832e50e7..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 977
zcmbVL{cBrg6n<{<F84(PyFa{f5i8A5Y3-a>VNEgu+nb@Tm4eh3cNy1l%bKJYvr&?)
z-PTa=qO~G^pqo=h%~~u)v5dkl(}L_y9bxFiUT~W-EM2Mifob9ob3q5MH}gO6{B$_a
zb3UHKIlH@`ipK^bh~O5QX2BBTf)K)&PVO?tTMrYS;(9(@Q!Anm&VJm=g}0lP;4o+b
zJ^66mi|65(C?1?PhEmF-3e8a4TyWDVrA^^G=^P?e3o-%c@K3EG?gzUMlxzKRblqZt
zP>1W}bswSt1L(`LzTK(@iJ;u^eJ>+wfe~BocdeL080WMY8LGvqf(eH@*RrIs4q1RZ
zSR%<zj581lt|bv2_wu|x(8GRT1!w%4fv5sTLB}*0QK~QmDH^wD_Q*Oxh=b10_13D-
z3pwO)`jg+@0yhAKgD=^$oo<yWLX)pWtp+19<y`My^2IvA7${wsvbU_e0ike*8w5N4
zQ{rtixl$Rgb`Zh6`EV9!2Lb@M32_0%^MHoEqPkTO)Byo_yv+@86|`9xQ!CiLj-UYb
zfZH-k7D#iIg@#sF<AO1VQp>oJ*wGeZ0qUt8Jb`$8xs1tDlh|D*z7nPhd>q5JzwSaP
za5xG+)Zc(0u<VwE8w)9na*Bt`KnWn58vc?BnRzHpxCkkb2=F&>9caBX^cti^paU2K
zj5|t#NIv+CzcvDqf|J8vqxdC<RujAcJ%w6Ch?L_T{;p{LW8^m~1yFN#?BFpY10{lB
zT7n5=ffNvVuGmHl6^eIZ`?u-`P#c6W;izDvsGAU3I9aTjSR6pd0q{JEDvVJmInWol
zu1X%?8NvUuN19p9UjRJ^iMDhtOn>pxh7IldrG=TVChwN`RM*=x7hemv?z?=c_eSN-
zdv})R+jDCd-;$mUJoe-8hflwI|DJRG@4qv1`4mg-o&C7)!nfs($L{*<)#K@&$tNZP
zhuXK9OJ()NFI&Fu8|po+ezICPa^=@uAH<xaws~bvJDuqrnK*jSA3sd^ub#U3`^gS%
zI{Vw)li!>j*mwTWL(4Z4o9fFS4Gyf%FKX%TlU<vSpXmAX@t;3^?}5zK&yQp>uSgOS
F{{nezEhhi~

diff --git a/images/spk2scr.py b/images/spk2scr.py
deleted file mode 100755
index 03fa37519..000000000
--- a/images/spk2scr.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/usr/bin/env python
-import vcs,os
-
-version = '1.0'
-general_description = """
-    Reads in and converts Ferret (spk) colormap file to vcs colormap
-    If method is set to 'blend':
-      colors will be set using the ferret % value, and blending will be used in between
-      0% in ferret corresponds to index_start
-      100% in ferret corresponds to index_end
-    If method is set to 'contiguous':
-      colors will be set starting at index_start and assigned in order as found in the ferret (spk) file, no blending between colors
-    """
-
-def spk2vcs(file,cname=None,x=None,index_start=16,index_end=239,method='blend',verbose=False):
-    """ %s
-    Usage:
-    cmap, ncolors = spk2vcs(file,cname=None,x=None)
-    Input:
-    file                     : Ferret (spk) colormap file
-    cname                    : VCS output colormap name, if None, uses ferret file name
-    x                        : vcs canvas, if None then a vcs canvas instance will be created
-    index_start              : 0%% of ferret %% index, default is 16
-    index_end                : 100%% of ferret %% index, defalut is 239
-    method                   : 'blend' or 'adjacent', defalut is 'blend'
-    Output:
-    cmap                     : vcs colormap object, with conitguous color set from index_Start if method='contiguous'
-                               or spread from index_start to index_end if method is 'blend'
-    """ 
-
-    f=open(file)
-    ln=f.readlines()
-    # Treat colormap name
-    if cname is None:
-        cname = '.'.join(os.path.split(op.file)[-1].split('.')[:-1])
-        if verbose: print 'Colormap name:',cname
-
-    if x is None:
-        x=vcs.init()
-    cmap=x.createcolormap(cname)
-    x.setcolormap(cmap.name)
-    ncolors = 0
-    last_index = index_start
-    if verbose: print 'Method:',method
-    for l in ln:
-        sp=l.split()
-        if len(sp)!=4: # Is it a line with 4 values (p,r,g,b)?
-            continue
-        p,r,g,b=sp
-        try: # Are the 4 values float?
-            p=float(p)
-            r=float(r)
-            g=float(g)
-            b=float(b)
-        except:
-            continue
-        if method == 'contiguous':
-            x.setcolorcell(index_start + ncolors, int(r), int(g), int(b))
-            if verbose: print 'Setting cell %s to: %s, %s, %s' % (index_start + ncolors, int(r), int(g), int(b))
-            cmap=x.getcolormap(cmap.name)
-            ncolors+=1
-        else:
-            index = index_start + int(p*(index_end-index_start)/100.)
-            x.setcolorcell( index, int(r), int(g), int(b))
-            cmap=x.getcolormap(cmap.name)
-            if verbose: print 'Setting cell %s to: %s, %s, %s' % (index, int(r), int(g), int(b))
-            dr = cmap.index[index][0] - cmap.index[last_index][0]
-            dg = cmap.index[index][1] - cmap.index[last_index][1]
-            db = cmap.index[index][2] - cmap.index[last_index][2]
-            for indx in range(last_index+1,index):
-                p = float(indx-last_index)/float(index-last_index)
-                r = cmap.index[last_index][0]+int(p*dr)
-                g = cmap.index[last_index][1]+int(p*dg)
-                b = cmap.index[last_index][2]+int(p*db)
-                x.setcolorcell(indx , r, g, b)
-                if verbose: print '\t Sub-setting cell %s to: %s, %s, %s' % (indx , r, g, b)
-                cmap=x.getcolormap(cmap.name)
-            last_index = index
-    return cmap
-setattr(spk2vcs,'__doc__',spk2vcs.__doc__ %  general_description)
-
-if __name__=='__main__':
-    import optparse
-    op=optparse.OptionParser(usage="%%prog [options]\n%s" % general_description,version="%%prog %s" % version)
-    op.add_option("--file",dest='file',help="Ferret (spk) colormap file to convert, [default: %default]",default="pal1.spk")
-    op.add_option("--name",dest="name",help="Name of the returned vcs colormap, [default: uses ferret (spk) file name]",default='default')
-    op.add_option("--out",dest="out",help="Name of the returned vcs script file, [default: file.scr]",default='default')
-    op.add_option("--index_start",dest="index_start",type='int',help='start index for mapping of ferret colors into vcs colormap, [default: %default]',default=16)
-    op.add_option("--index_end",dest="index_end",type='int',help='end index for mapping of ferret colors into vcs colormap, [default: %default]',default=239)
-    op.add_option("--method",dest="method",help='method for mapping of ferret colors into vcs colormap (blend or contiguous), [default: %default]',default='blend')
-    op.add_option("--blend",dest="blend",action='store_true',help='end index for mapping of ferret colors into vcs colormap, overrides --method option',default=True)
-    op.add_option("--contiguous",dest="blend",action='store_false',help='end index for mapping of ferret colors into vcs colormap, overrides --method option',default=True)
-    op.add_option("--verbose",dest="verbose",action='store_true',help='Enable verbose screen output while converting colorcells, [default: %default]',default=False)
-    
-    op,args = op.parse_args()
-
-    if op.method in [ 'contiguous','blend']:
-        method = op.method
-    else:
-        op.error("options method can ONLY be either blend or contiguous")
-
-    if op.blend is True:
-        method = 'blend'
-    else:
-        method = 'contiguous'
-
-    if op.name == 'default':
-        cname = None
-        
-    cmap  = spk2vcs(op.file,index_start=op.index_start,index_end=op.index_end,method=method,cname=cname,verbose=op.verbose)
-
-    if op.out == 'default':
-        oname = '.'.join(os.path.split(op.file)[-1].split('.')[:-1])+'.scr'
-    cmap.script(oname)
-    print 'Done, colormap converted to VCS using "%s" method from index %s to index %s\nStored in file: %s' % (method,op.index_start,op.index_end,oname)
-    
-      
diff --git a/images/splash.gif b/images/splash.gif
deleted file mode 100755
index 3fb4ad9ae6145b376ee7e4bd0340eb2ff2a02675..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 25144
zcmW(*2UJr{)4oYa0TKwkLqJL>Lg-yX6%df#(a=K=RhpOr0g*0ZRM5~A5j7MM>l=!I
zG=l=7A_i27q9CGT3qRjKcX#j1?3_J!?m4?NXP!Oe>}X~lwhk%=egXg=0Kj1U5(uQ0
zpor`~K_!f;>H(yh$v$fo*cAof1*P0k;`n_ccB<Mg*06)t%F<ra+ODGdUTA$+Mg1VO
z%O2yUcF<izGf3GyMDZX&(LPAmJXX;$3X4xrJ5VBV6eSRjl8;9yCnJ?3r1+1bHA8jf
zL*1m)q=8%+M6R+xu94Jv5p<s*s%*blmyuLdko@s*F$xh`8jlFhQt>ZWE>1=lBr4RN
zkZmoMzEvrCr%LqBMbR4<)P}DM4P8^#H9hEPcfi!yPv0x!VC-=NVvS~0ol|0sZg7)z
ze4S=|i+*ymW?GA8?rq1kI>+NzjvllP5Aq1ha*9Yw&Pwn)o}PXpE9TtsAg{Dauk1RP
z+#4>G+mR<O7E#Me>7{jrMPV0=60eyicNk~iH&1W$q7S>Cs60h)IoWb9uJIh>W`F4Q
z2PNk(H(k3_-+Yb9>c8Y${vfOF9=&s-Wb|{#%^SD=yL#*XlN)cg?%(Nna_8oQksIt+
ztcMQ=`kwYb7@U0cdX?+GZ#m<@;$76MyPB`ZWH-hI=lV=O4dLGBAKR`by%>m}c}$(|
zIRCkW`FyN)VYqAgg~7KsM!!}~cYm6GU9<o9&uQsP<j(KV7u?eCUun;mn%0(^Ca3RD
zuH2n|dw1>qoymtolMg2+hlVDHCih-@eR6Vka&~C;;n3{R9yc`m|MKkY(Bjz0*U5qZ
zP5w{yKYF$>I=e7CxiU2Qb9Uj?=)$Yn*_EN$&yy>!M!&roS@<-%{%vG(c42a1VRm-m
z*~0Sd!p7{%p0Kb-D|@yJvr9{__SluZwlcf1FuSobyYN3@VRo-?y<XXvU0;8_^7GZo
z?kmpf_xaD?Cco{z`o6no{b}XX+WPv+_nnQ6g^i7sp9`};S5|fxW_MQ>er_!7ZmfL$
zHNWw5<>y}8UD?>9pL@^7&%JSXV|Qa?cTf1A>VJy=(e9plZ}_>pH}OA<|FL_*9_avk
z^uN6TAb7wozq}KR*+)Z&X@!ii8U~8c`&{arIvO9JmNtx=AL(csuE5xp$U7S-TSg7n
ze6W=%yY#Rc7cpAr+;wH*qFI_&_sT@@HKXV&_NP+000<0I$Ift+2j~sn?Y$J^BUfjx
z2Xw`~8I@>w7%)8dYoBzPxTeL?Nd!vNVFU)@6ACfQG17dE_5C=#_U+b@3)r+Z6=O7o
z1>$$L&f+lp?8=11kmyTH3Jj&`Ig!;cbGvKhDbw!c4?4U;PTOy?F8iIjn@@vRv;9j}
zA2m_1N==C@_@W>7(#z+1kBZs|{f$$V3lpbID?pZtnf}1jAn%YIx#24ce<v<k=RDvY
zf9Bwp?wYgSUzis2=;5EAIA_#tJQF5u5V^2@O=8@o0P)gcH~>CawFuLAo+VbU7@f|w
zfh%w}8L(YXNd<UG6nQaRNcYNI`g6@85L8L4CqO2S&{<@>F_-q#dXo})#K>11&nI$-
z-3L?1kNoCUiic^c+HRB1T+(4pdyQ#!Z}&M$c!dl#z$062hGhII`ws0-stbe3c;Q=Z
zVB+aGX1ZIH$5Oa*332ISvWegMQ_+sSL;HEFhRsfI(F1NqV}HYn<WpFEbcF<08+(c9
zIpxE;^Q3nZmh0noM-Jvjw_S4_-tL2IPR%_pk_%nldS|m<*-t0MWG&KVcs@>x!3kd6
zI~VkRzBl(UI}y;)$0wJLz9KH`ind9E<g7N-snHSPb)-km11I7+)U_-7K14h<$m{K+
zOI>(81(kOfSLPG(cBME-g^)a38^*frgnPE5U0mX0(nZcnWvHGhl{@D~!o&I{O%tVK
z)~hO<djl5h_=eo^EwT~Zn8$L@A2hAl{9uV)GhHue=jtar*#J;!{P2}iiH@JbGIS4V
zx9fhM{xf9|P11*=ZIM&o(0Jjh0x@^_szDJ4`OrZTcPr09^r1oJB89Q>_-`}IF9#;B
zw}LbzG<w@R6j<Gklw&gSt9@nF!`Rg_0~VujSO%TRvVn<lJfe6qZ7ubaw?$qKJQ3$3
zutJKSYK^sb>S)6ZGiIH$WMuA7mm1ss=zb?u3Y!pH)6Xa9KSo3U&VPIEfYvvfx-RC{
z{rpg*$nZKFIWU16w$F(hR=VHx>)JWEUbXeGYEB)fJ4xyyDHb?RmyKVadSWzPCpid}
zw72;vLl{*r5O2*;l9P!d$F$0&Z;O=M^XAfqK)cF*8eRK`|1Q%N2?$4-3v<t&=zinY
zZKA7TT1<+_rAny6Ji92ObjddaLq!*`yOhP1W1R!)r|NLsGZ~e%TkrZMyIF<%NgzQx
zyp-ge2iDNX#`9zNIhp3@l4R9pak|B2g&p=lAZec*k1U7ri}tyY3{>p1x@IT)qrb?a
z4rV%51)!`5?fpP8GPifg^2^t=%WgITag7v-l0{S?E6h}k37o$<Kp75T+KSwJCfEBF
zoy!dqkhHBEtGlf}WLyrD#n9AKqa<>rzbZ5|@>{x#K;c{hx`YXlEXO;m@a@l(a+Q5n
zI%Fh#?`Fp3^m=TC-%S-O8^ItaTTb<lCmL*!P+>n!rV-Dl{eUDOHs10y^VB{a0!6Si
z6{7jK>U2PjisTJOp=mjG@D%a7!gs<Uqc|J5&V-`@9|G`FVKY$DE4jXA1EN1gPX|o*
zJI&*r_xpL4cwwiIez*adR8PSmHdV5VM;5mXx6v{jRCk-8pfhD5u+<E(?DGPs&KH=1
z5TVLp-VD+KOP2{cAM1Lm!5u~1q+`_IE6g-YpvJq~YiBAdraO=_?Rc7Dx^2}|fP>;Q
z0#pkJfRH3Me|ip8j%g#&Tm0CiajGm6CzdAn2d)_M_>>=aN&5B(RFgxdDdAN_Yd8fO
z+Zbar`Lb%l2UQJAvsn-77VVOF)bmT51+yJzZ@4=}ah7F`X6m<UTE{<0R+DV*-Q1#j
z{-J3`h6w~9XT^uP5QR-PeCXL?N9O7Pu0Xjo0zs3jy?)L*a`D1V2W103EYf86o6<^U
zPf7EKXqc8b^jIPnEXL%WjzpyC6dOB&VZC_qM@|=~(HbU-B!NEm9lCpZqmUoEHc4)f
zgLDc*O?{%-e|MxC`v}Q$m~aJx#Km>&m3#L$9n9L7&pw|P=sgwe*DdQ!!}(k#@aNKw
zCse(xnzNJ1BHGE_#1z&K;bCShkPxhn2HL>G%`0t?;UuuMG#W(=fGBR^OGxwQgpM&G
zx_qMLU3sAX*Z@1ZM+B%*YXjW87=$FCp<3GjioVmQtfGI)CBRlJt<o-d_@1+uP$Er!
zy*cP$>#XvCF+b@8H~2Ekuh-z$jg*P&HCJ7uM5|c^Y6aJylG#zZe_Rm`BwNW&50@+E
zAfoj}Tb}$nP})W~t&one1$YtC13pfgw|!j(%OA(I@cj&^?g(eb1WQ}V9d<*@rH4&2
zON?9TNH@=tv`-$$oNkD)7uHogJdFZm6HX?Z4L}b(gc+w(E5YZ7AHG7Xn`f{Er(V<c
z&vl5c4Z)CuztPSw#?16<8eqaMZc6P_<rm7J>g(vo5=;OJ)01Xf7WW9HV?N7oa^cXP
zK57h^CVrVM=tqR;El}WE7_vY%N9<hlCb`Z;8ivG>A(@*H#pa!tCY(Oud_2P0sd4Bm
z>#_W+obFq-5(VuL1BA`pO?qxB@sPT#*5&cY{WzH?61O^su3GhLM`Ra#*wW_k`u~0v
zeY6#8a~Le&YNDE%^)Aix16^W0A;#|KujO375<X0{!-2QvWs$bCX*ZwCU$QH8-F22(
zfLW-cK$c*dxnw4+ZD^719Plf9;4plvJZRd+_(pWec_NyQV~$9!;e9o!1Yk9t43R37
ze5d2>!3%viXWnthtbOl%pU1$-mwRsK5;njq_|4~6est%uHWbml@Css~B8f?NXs3wy
zknqUfhY&VA0A^tJ_x8_+8}Y|pqt811Q_D|*n<q37p?YTPg*v$f7jKW;3jTzB8JF_H
z{N>W|iJhUMic!&zx8IK(Ik0u?$zT6bcj>pRn%wG%*H6DJ--Z)}1`~Lf(k>lg|1mo`
z`Lq7pzpr+#f4vT#oVvPDcWqzv%3Q4bpYxBlf5pHpp<UFBvNjFksxoTny*9;BCR$X!
z!Y{PGQV491p7(&u0T|)_LuW-`;b>SN1s20pxz84Ey&|^2g#Y^h2w@0_%q(jH|M^P(
z(<CU4lL)WPnx2KwxdNA1h!O&=gag42Wx<!T%~=pDbb(DkvRy#IcgGwnfd4Fu|1wiB
ztun%LD5CRFWWzM6iO*JQCD*y9&|@~zo$fz)Cq?0BisIG0=w9fp0(xvKJq~M}x(V-A
zmo=mwa98!L4kMaN<-hUI-@KFd_NRQd<jL1X3dI!D>?<JN${oiDgZ#sX)V=*PfTdy?
z+<rnhENL@Cim4Ds5d6V8)r@fuwH4>X#6nbbF0q6@Glc|+XdWBoMUpNPQz3Z7X?rF7
zU<(KJ31#D<!T@B~fm(Dt>mXZjm4j@<Lp3;w2N5(&4&?V0SR5LmK~J<`L*TFiTWrCh
z;<AJIJwJ1}W{B#_oLUyb90vF7HR3-M8EYKbQ$!NEV-dJhc=$)8*l=!GR@7rtddysf
zIvk!*92IL;IW7fH!y%brhkQ51tHK=SkEdOi&>yNUKGAam@Y12f^2aG@a}AE-r1Y{Z
zof`dow?pY_*Fc{oU^5w-yDZ~a4%E|w2fyus%&p3V>PtqW`7*wS<V8TcNj|Nel~RRf
zMjkM%0qVKOG5;#T8-1rWHWMgC1WN(}ZX_5FguRl4n2@q&h-K561Z#BS?A#txK!q3B
zx0YGWl{LZ35A8sf%=l9^NfvZJxouyYSqtL#@|c!L*CC60!+u+4DQ;&f=uaw$#q>a+
zlGmG8`h-4FL^n6FwC<Pa$C-K)<a_mr*}X+4{POdp^%tb7vbw6W_T_&(birHd)cZph
zig$`RHpO#aWBWQ_TQl&wcS6k^corUhn<I9WeKGWzYy$bf6dNAP#@<ea#d^aRSS9>Z
zLPs%B(Jq6t6cc`;PXMpap>Y%Hg@<bYI6W$zWiW!c!9*%KLd^g|h;yP9w{&9XY-o1b
zm$kEaZ0(zo+Jop?M`BJg|GCt_i1a6X(UOtb{0xDj+?;EXK?ufoX~uR|1^rsekg2U?
z2|cPeFBJm^viaH|hsS?bO#E~?VrG1jg$kU~Jb@|cGM2e~D(&19$mwfQ1AqPp|6md4
zDr2o;s*{Y!HBel-Lh;uVk5f<1d3yA4ke`{t3Jf%_5|*q?6yt#S(@;1Sw`u%Q^psFE
zSzZJWl4TvNC!s?6#7ula7w(+xD<-JzgkRni{=tN2uwcP2X(gtKFS5$Q%oAV03Zl#l
zCVDRu&0Bfg)v_dWL?!0jSrY#p96V(+2YQ_Jx%b@qYPr;%$a_U8w`dhv8UA8gQ4%E$
zWxrC=NbuuK)H5#JVgUM*-JtAJNy5O7vrvCn!vDy}{cPZCsUT~OYL#qIp9-`W2D{cE
zu*eoVv1xlhL*6hbn9OiIouw0(uJd?HUNJDA@Vn@}srpika;JGlbpRv|hw3dcUnB`k
zo6ETBQ{qXeoN!?l0UFDe5>pnQ;v%zIm3|)ZpnDR20K^1OrJ7#yibce+5kGOl3m6nK
z9C~^a(aRPrsicKv6Rg-*ZKW^2G;2L^;WF__)|D5vG&FxT*|(Myq4qoHaKyQTS9s+c
ztxUIXo%dFl$0hua5xdYoujTmW{lQl5``ra-!jH6~^uFGlN5jpw_&RgonVaxo95#&`
z9onFgwW)erA0&m>j^XkzcnIFd9|BkTm_9)!Y-+2looKq2e`=)o>dWG-pYq(Gd=BUi
z_oMt%W0XjWKr~q#q=J!W*ClPj2CG0_bGP)DP#qY7QMMvn-`9r(@#jF!v-pY3Y*RAi
z^l@4T9=(l5C6n)3V{`IGuZF~ykzbYWoGObm&v{$iT7&0juwXia4QcGW!yZQA#)ZDC
zBtHbh4U&>L6Nz?7Vf$AQvKYfFDI~mYrv>sY8`XzHvOEO(IHwkP1c8&457|J^erI?{
zrA;vAI-4cDyeKq<MvbUJb$u}9%YvI|vjwiwTvZHattc+{!ef;T`bbm3&I#W;#SS0k
zAuZ|=?79<dA$%Y@q(5Ei_DvzHjLUwoZf4DwFyVAOREq0ej_H9^na{N#dkNX6xiCo2
zSql!Nl!N4}67(TK(Y0AWWoY5oXy)izlXlv6?LhqbffI2Bq(J`j9DX&~#$fb=x&l&j
z`ngA^bA?>Gk}|ps1*7V79)!)MC?1a*I~|qAM19CPv&;3|#Rz({jgBzs9Tv!SV90;!
zR1UoVCdN~a3k9;&#$!>UePYx6&5hZ_^0}CCN)>M=C_PVJKj*K!9h}`$s1sqUMj*j5
zNJ!%|Y_JG2fdER<z<{5zOFIr;#rJ(q5%w4b3^<U}%ptxo;R!O#ngttmBxtN7X7K3i
zB+&k181C<-t?WdN(gA(xyDxh$|9&!{XqiI;2Jdec#4}vHikWs|(Y{XCdd?LZUhEQI
zIX63h%_2VPPj7`v^W)d-KA|uHPyA5Orb`e8ZXG^a!iO5>sAh5CIZQZn4%UT#w2MaF
zMdNpYNmQR$G!Se7zwq%z{yTrpwU_@XICHFnG9c@jb2gCc5?8J(AIssur#6Kki>PLD
zNUzAnDgA?XcW)}-VRbCQlpVz&J09Gh1aW5Yf8+}B%cGJ=V1E{@9M@4o;y=GBxXP@r
zqQa3R@Ih?iv0v2Ol6w=_f&)OS;>FAHzb@M$249fiw`y!;2A#F<k2^%<BK;$`cJBXo
zxZ*<4g9rW<FUPz5{8CHT;B%WOn;O;$wgBA{y^Rs{-86J&((Qt;hw6xRaNuMG!7hTp
z6i(RTlL(8WMihZcGJE1b6}@`VV^zc{M^r1BXS|sDZ~j$2@`k4|K`D|cIOq)>{HpCd
zbw{cGl@L;SIy@v3Df@;~-}<%Jp9#Y`@}DO2CtpC{<%AOnG+Zq|gPi^71T>Od$}zdu
zba+~?xGX%jLq@y6k|*EU>_2`UH#2tFxhlx7x@7RqopWvvu3gOO+74hOXUs~rRmjLb
zl8b|9vhexjp<^D<6adA)Zhn^y6OBx5xrchqocz8dJBmidvfQNE^7}L|*qAnxvrbOu
zG;t_8m54>Wh5Bx8bykx&fyuwd6u!FYSgoJ#bI#!U33Zi+H~$PAT;se5w1I0cpEl$`
zF5ra9$>CLKemWB|!NP@hhYR5Wn=(kfY2q7c7VhS~XFsSmh|8PK1qaDj-Q1E(r9QMD
z&hg85htCr{zS;W0);TaV_u%Z_>V98ap@QckDKd`<PLBso=_EE3c-a;BH)VA$qW1lQ
zTn@Z`WQq4jUMLHQCYoK}ed9Wc7a+O79lo}_*SOIZ%i%kbKV!j;<-21qdokm6Pg%g7
zur`=`_08sDPiE4R^8RP%s+Q{6EAg&yFB0V3pV!GO*t^(78WUE|5r_pK5-dVcEW{AV
z^5m!C#^()mF54><{9HRbRDN|{x;EhnWGR5Z1}IbSfgLe}SAP8P3YDA9AL)Z&aK@RD
zx{ulM3qP|KR_h-$U0}Gg_)=M@lW6!MF?c6ISm);P1wr^pJQ6?cs!+PxQfP3d{y!11
z#nMS})w@Bv!5Qz03~b_pAv59x7xidcb?moTKY%Z-N8AUlfEB|uEwB|?b#06Mm#28b
z>m-;k9_mAYW&<cGB!O`e;+H`q62M2EBr;BCsRi6i82yHzukTe(?7v(sICoj$bw_-5
z;^RDi1{%JKJulxjewg<8wVTV~`TICIf6R35g`h6e+*$FQyi@1DsHwtVafDMihW2Q9
zcn7*44Nq`G0!Q_N_r>7RI(ZMO1eK~paOt^{BSrF4lOoMiBGn2q&4f{D8y7F{ZJg+>
zSITcd88Of<5MOl|G>0tM*T5g4g=T;Ibyfbig3j+tM}CvyezVhr$xXs{S+H||!mWWa
zS|#Mw(51fBPn0{QMK4OR%b)ZR>m@~H9_3fB1pgfpYW=V7gSjJiNNRbGck@m6ac02o
z=lj>M-PK5;WLMm~+7*FlOTYAmgoD2%PyHj=rjk)V#y8LnaKBApi|>FO>%^&%ro54S
zy8OxIBGc1Z7Y@X&xq{F#>ckNb786F5F_Tv<#?kBw6jWWRI-{TJ=JNjjd$%K8drnAp
zq&4u{(2IxV3ZGlv_j*OSoZBBZnHT7NW#Z)T--TjlhpL(!N~SR%Df@<}nk*`pJ?{Bj
zE5GV-{tG$itk(2(>+UZ*9W;2w4U4S5t2RCHFFFq9mm2$MKEGi};u2H5OrKP#94epx
zaR0@|!aetDzMjK#n_LZQy;YdJne)5Hr{xSUwHJ0sj-NM9UVacY7V?xA++Cm^`pT%z
zJdO4Fwdr_;0^x1I<dx9ZKN9EfW^Bw(XpGQNo)!s}xQ3mtRI%#A0p%f>G}@>k{wfKb
z)AtTxtW(3?4C~*3i8<C_ry%EzXAPGB4o^(Lz%WE4;B7Z9TO<E1aWMP&?tcuRdF?+y
zP=52W_asZDL@b)!*V<B<qovta*V%ThEq$&5jylOMDN^XYH+uaR2i&23YjkV$`fb?x
z_o8lIry)Wbo6kPodhlf{9(Nz`E*<Wp`n0n~RPA&+p42f<8;ly<FEzwAAyHjV9SuLb
zJoC6uyM6M3zM7CZYq7%AV9e;-xZxWF2F$NgSq^?@KuQCfl<^{N_HHzvey(hC{kl67
zp-9!G7PLJlgOQ5iL{EY4v~5q6txA(v&2JX9)S_{*LeXR#_YS%J`^hb(6MQvK)O?P)
z%uF-<f--zT#~x~4KDpJfuLOZ?CD5Bl43B<<@nD<gl>&*w0|JHh0&Ta31Ul?Q>K(@H
zZ>yZ=J7ei}CW8Or)|Q{%qu*OUhLtI*UQVf>!q-QX&)FO3XGon+JvwFeT<5S(N6lE*
z!QX2mO37gRGy+vnx2p_`WTb^rH%rs+QPicew2$+H-tMQ2Jti%S)cuT}QV?-9OZMjF
zH4#XaXE!1GPAU+jEYZx)x_<k~@N)j|uQ3NN9WVc`<i&IT_j1Pjq~e8{CU3QzwUe1Y
z4k>vm;uiVjRJvcq-B1bfY?C*cMi1URtU5Ko`tQ}p>k>0sDL0ga8dSv+y)H%wDtua)
zava`Le-WZA@=m`)HOt4VL&G4qoivVq;rcb(5;D;>{B6SjaAMEPt7<1vvw3Ng!CuiN
zA#36f&EB~bG44Kd78bwvPJPFFX?0<6l6ml8PpD@RGSLYl8&^q%l~^E}N{wep5f!Ij
zh2^wdiqH7H@-JWV!rEGk-r3)uC^rMyAFm%b7KR~7%#YW{HWsTKIIV5#-w*L!j~6-*
z)wgF2J!^oAxFytT-^Nt&CEZQz<9CpzqRE&7WpN6ESD4%XCbdTQk;p0Bau70n$-(T2
z>X3m`s@FRULs>0n_kzndWhOfv!Y5za$z&Eo^jD|MrH3j_^Uwe&^~05tyEoJ2o=c}1
zvE>V#O5L8MoPmLj72dcVij)<Ud2FwKY2Ladd|Pus<~LB5$r%vjV}Lcj*a+v$H!1yC
zJ#@ezd?@#QL}{#ilXZG{MP9Lfd4$os$@RV=%QpQo8A?;@Hqr4QEVUxvYM%@|yCP|o
zB9w!EtUs3`oXesK@!`o3#dPDFdCDk1JRT`uKtEy?A>u(|qbdy=zwiGPFXtk_%jDda
zpg3uVu2Ej*;}A{{u#Rb!w2(`<0<bs+fbz`PpaL>fl!w5bcrACggxoibxROkcAi!p#
zWD-=|FP>_WI%@#AD-z8wRCynAaEPz_K>DKNAud=ca>rf9v05P-hrhTyT~F;@GnejS
zq<(R}dcwUZJcT-!T5Wj$1Ql+zq(5>z>o_cTUZgy{hUl;_&eP!Nc4Or<@sR$jp<?QE
z(>}FWXT4W+v1TLKqs~5lX*K{K1Yia%if2BT-}ik3Dsf~$f1#?vJ75uJ_qb0O*C%-F
zJqYE|%jPEl%Gb$m0bft6@$fa0on$ASzkevo7*GDg1-Frrn0U!zfG<px3&^hmsD3=%
zkQ`e_?eILdo^uDYKh3GYhF2ZIm5@<?a+)%)n()ra@WRL`oY3*!z3L&eY^<XIWrqRF
zFOdRgB84tp+#jGh&MidkeO|gs=qLAc7Sl)DnXUI3YZ!IQkK^tZsCpF}A^_9I79*2c
zP)pa}!CaVs?*R0sTq6c@!9WdeM&i&CUO|kkROEkT9m=qLRqj;`7WQL;kxVk(VFxVL
z!L^e};%Y0*i)xuMWxYdUC4|n7T(86p3MUb0e@a>sYkCXRrakET1vc!kO)^x7i4&Rz
zkOZuq%rpybKG@f8$4n3rxXOqTQXhcm_!mL^Z)ya17&unDnz%if5Po(2=%P-UJSO*I
z#7>{^vz;NHN#|GF$z=v1uh>@H*7DWt2Ubywolgqa=jx_C{xVq>Aq1Dum<@wN@YD5~
zb*&)Hnm*J}+JNSj!y&k$z;VJOiPR^@ox|7IXzqjq=}knTSh^SLBIAqxJv>zYf#^Ax
z4hrbfZ<m`ls^M$K1*)o`Q;LK57PB1zRji2L;TmP6Ak0oWt`aom!iId8xpYz)30Luu
zrZ|j%<-h!P{f{I~$_q3m*viZN7lj+leaR&xh6W6tf)yx7rVvBgU%r~#N3c*g=?nLS
zf(K)|fx<UwUK;3VxB$~dy|qr2boG-@Ff&KsX;0H%v}|RV@Mp%b)vKz<$2kKE+pOVx
zycchha;ELFzcYs|S3@h(U%yuTRvu};@>M8zP>%bJsoVE9aU=Vw<(nUsrURcolX72N
zT$rxk>C;HsAmufH^__GJbhB#|+ZScF9nA(__X$NYDFUZGU~JIQ(}o_40=Axfva2NM
z(eHRvh0`JHxDMxwY2gy-2(b92-?#rAT@sCxhM?^K0MdgPSd_kf^Zrn!_FVj@hu$P<
z-XFS77I33aM_q)4+vlCud(*s4n{X+RA$&F7=}ef{jUyp*$y>5@54vfJzwvO507;?1
zENaX$6ZM8f+k;NQl7G2n;fL}~YwtB&)H!x<%x-;ayo0gqEtvRx@rjv3jeE^4o?&N&
zZ?o7MaroX#<>a|8yEh+eA6mZh6waJ~^y7WzKg;E?KPR8k6dox(y8GdC`N<cLetevL
z_~pdLpV^DA_iP`14L*{){O*$Cx*1(6+{JrE_l19)X92cR#QCG;NyS*aonoVMUUK9)
zF(RI($XG0QO8R+?K4)({XH%j#cg<)A{KbL{C*BHf<bxJ;8vqbt4sheS1+>Ws-VPGp
zte5q`p&sCGSwd9UWH1ImFae=WKvtS&TuHktQtGjf2^IxQ+cT9tMDG<dqc5o6Cx|LM
zFdpitPVSV_K2J+z6-Bf{30P<>p(unw6`(<eM`82Ds)h(E76Ufl;nCBot@gX}6U`un
zYgd;zjQq|AAJGb95uK|T>k1o%4`d;Iacn85;uC2A7eEW%gk_G{S<l-YShqX)*A9=g
zw~@EEeLtWl4NMu3-IvbFuWGr?(cFt+Hrs`M9?&GtfU$I&F{UtSZXlD}A-76#<rW-b
z)AX_chNu$UjKWwtYs2KT18{&KOP0BZVVxIzfEOHR{~p1IKEP{oAL|1UAsm1h0d9J;
zVA>EJG!+a`JH)OK1jvz=WK1P>DC<hU2udj8c5krpXyt8%2c>;m>e_d!$Ckh*1c)U<
zZ0{FDF(HPlR82GReljh;4`spy>$1U8WBvPi^R}6rRDd}&X6c0YRCq64KNT?&KW25J
z#Az;U|N4lDsZO6UmQGr=w_10~+;pn^>r^G~Y$fhouIikLfY(G8o!NG(v2w0Se^M*|
zq_X`<1rkp5pqGh5wah?TPBarv!BK9Ze^$Z8CNLHUf_b>K*aNTtKr{>FNG$Ltz-rfF
z<w$r%DgE+ch&2~xg@pUgK@OSGw3+`I?@+P5VB>%S(^WF1k{vSt_}I2Xa65Z}-!T+9
zbkwNt;d=*tmXQz}IHl~WMgu+sAjCp&nq(W5EZ4hCk-gNB{k3Ye`F@J(p$A=}y3$uU
z(i(%5y{a?p<&Km!j;5Ri=9%|G$O*y+NVG0ds!o7gKEmnsI>HoV2_c`sywiBDZ+9ZR
zCth19)==E?u1RsMbZu*q(JI)Y6<&^`dotWJxd?9}EveV-b90&3V)0eyQ-50}e$3T<
zsVmc&hHu-S(SM03;~+K?j+`8k=MkS?iIpWkIPzm2v90s8#_X!s*CSi+i?_!6XaB-2
z0I*7cPTGaC2!@V;Hh7vr1HYM+AE03oR5%7OEa=G}y*ho$X)Nu`6m`UeqA*n@X6nqZ
zhoy*0Q_ulq43nmbprED6U{=wd67508GC41qG$>~ZQ%E&IGs4B7(5xvf=2<F*DvG1%
z0X=VsJcvdi_&D~A7Zwo1f;8TEY1{xCg%x3lWKkkuCCv!Ll3@X4EVe0_Le*-aMIfk+
zG;eGx*oyG1mO-^B1V`_{^;W$t3!lS?fY|EPYeJze7c6ysq8ta+Yo&UOfkmqYXNq-{
z_M80Fq5ot6m<Xr1E=I2Gz^P`FTv@xN8nPfm=${i!QUMg8Kmj4Bnq*(O6Gd9A8DM0(
zhDq};n?V;-*Qaewif3RrFT+?M;2+rf0}6wth+sicSTRpD`B>SE2#Xs10hNgbpd?QV
z{Fyuk7=#9it<v*W!SRKD0Vt3Knhaw5NwrYJ{uL!*0T9_&gh)HdV^Dz|zib5x6b(fE
z1J{}z`Tch!3P&5Z0{5MfGvA@*z4v|6Btdkdp`-^2V@J%)N6__RT&od!L64U?-9L&9
zk|xWds3B)(<UYf+_i{mmzw{E#2rF%Y07;>q1vpH<AwWkA8MO)&7#BwKoD~7Q4X;r_
zT+dZiF<CD%G=Q90ACzc)CJ7o~QTgmm)(Z^uxd{SjmIxpfz972=sawOk3VpGwFHX*Q
zCcN<kVaXc*o|o@Hu<a<B8&u`ZL&N=a%K&<D;~aUuIFC?tppuH}71t!3iMO2cP?&k<
zDQzL*drH$Y?u^*zhhii03=~1us%!=m0)zLHdB*{?RjKkB1>m@>^s$U7=|b>R1?U?N
z+`@ybJ2z{13QPzsY$}}5HWSNTr3JEs4zMA5Sm3GkQ9p!ta-pv&UJO(G0y;8@ktR!`
zDEPt`$9)$>JOGdfc|9@IYUf3OFObQFm=Gc7Pt0hMAQo)@sEIe_44NeimC2<S)=wmG
zgZgJ|gT59ep$oB<)V=jNA`1Jw_5xC8R8vUYHy^m-{?W$<+Bk@ID^(LK@$&2pfGEmg
z6<s2}1euZFo^GWJR%WjvPGr4$Np5~gfWM4-Beh&sR~RZ{0-F1NtacTKMSxoNGvZqy
zk!oVi(B~?Xs78G;Pb^R$#Uqy+h$Uw&h>#!+%4d8LZ$(x;Bc#=PzQZ$%!I-QUSqdPn
zR&Yn5ul@HI9iil<55)k>kL0`0$j=YTdVMm$ErC_{YO^Q=S9p*;y?fj3a&}Qv0NSt>
zqK)u1Y!4Z;BqRJ7A_~Q*Rp3iwF@`}FtfU%}AV(g(Jjr||f(9^7&e2?0Gz%P@2qZhv
z6i<UqR$+QsQCO^x-YQHPJFV60Rn92V#rj~0q2;S!q5^<rfZzTrGU7x@Jt})qIF-$X
zH7fv3i517_V7+2~eF+NTA9dE6u0<s8ZG{B_v@Hq<c=MsS5#r_oG3oWDgwYh60?YA1
z+D=eC-VRs~7j)c-1`>zHKk`1h4AI+MIsCV1s|4&(ZhHU=5PbokA-m&*BF|!<19N`4
zM4BcMa&$5%;KS;+8*T4wkG_e*jJ(*}NO_xJ^Vx+{lLMZNVB=^yfjX1+51yAe<^LFx
z$ylZ4`I_KAY<)kfXBK*rS(nHvBXQ}8+(jGp9$-N@7fUznB_A+@pnA#gm?i@B_na^S
z#EgYl1>7?s2e`BYX7N@`ngu@IbCq_72agyz3PR6D^nQxqh6Y*#(XC*j1Q~;f2#A{U
zEuVoB$r3{l4ePhqR`^MhZ-NAne}n3eqF`F1V`c-#qUjIlzA$NMW<WHLP)Lna45Hki
z=B!dh2`|vhxAOs>jZv?wkziSgV?7_D9^Xs|yAi{UqQRJ+w0i2^;dxvAdEY`YCICna
zr6(1G`-MPQPAG!~w_+cqJdRJo7U~zy5>C9zVSo)jEQ^|XR|ru|xO5DOn3P1$X(@|9
zkPVXdu=yA363`@eysTD)7LI;qHZb;cxH)W2ZxfcZN;kU_^zBjnUWMQA3;paIbw+;T
z>Lt?Uj(sMr?+xc5Nm#ljdOvr2dP;68>LR&h72-jTKY)gIyMOKS``VSM;m7=>)f*pS
z28&>VgROxGkElTCo`VFbe>3Q;84!bdk`rHl+t(D$qj$7SfpGC(W&@#!@YlUzV~;-7
z`T_?SP_o-rvv0KWF|w9Z;kw%vy48KYw@9cb32H_^j06)BUtD}l)2;B$|5von9f~2(
zzz9Wv6l|kqN)#)B;RIswFcWNq-MOT*j3PRH7Ft;th^BiomjZ56!+O4`CZ|;>g8Ut0
zC-UFD;zGWmKvrBjmbezg`g(tecDoGBOImCAwj{E-fMtI&%bzjALj78|0pe8J_x*ov
z$RDaF^_Rt+B=1XFg_v?9NyT&v26d-3WXe<8>4%T=CBOjpNowPV)G*}sf%p?_$b=A}
z$4K{cIxc@a(2x5y@OxOW1UX|j<RQ-wz>L7P&>;5YmKLK(kFmTL)jSd4BuTOm49sgT
zlcRfP6s22X3q=FS;7zJf((5f-Qf&*(6AOZo!6tZ8PET15K4?WUz&r|OW}4mHLe1|D
z)s;vzydsh7_-p+;)u0@Fp!fx0KN-X=wyp4sSOwR|yuz^Iubnb8*^naMf-kl((217%
z@8~C85Rkoq#>_?4FD0WWM%-UnO5})Ml*vv8MWHZgBvwiS=7**S5(>XWz4k$X+m%3`
ztKe^EeF^@#z#)<z7W$u*dK3zDkmaJ>ek5Y|t-!gsTrSLdmF)Zy62ygQn~`JT@fIRC
zAp7DqooGEn5Z=?>JdW@Qt4;U;>~THfZ?6ETH|LY6dtrtWfZ@`GB+1mC8R<e_s2N#F
ziEPAzn`DKzM4hmzgYxMCqC=<?MA|xvdi2(e2m!FbLKTf+xx|++E+PrPsa34%fh8NQ
z!ZQ(nb)B+cGqDy$GrEjI^M6Y<8=e+Kazb>GwGs%sDV1vi)58Z{Uy1#c1VjP|z1?*X
zzkZ{AyAjl}l)N;2`r%1Y4fpPt3I#<FQmXW$q1GrsKPq4(EkYGmC_~W-<V9M7K~NQn
zP68u(RKLVx*H^?Ne30rjEFg}PZWMp!)$Y|(0M;r#awe~lFM#BMe<GR{&>?`%DBzv*
zY;=@BU<~={Bj$OLe};60E+&f{e|97Or|!aooS$27Wc1%Y%#*zPwJQ9>gj{g*Gb;>%
zU|WEIsmNDivQyzWuX*m8D_ppq(|G7KuV^I(0rGWeSu$)Bm!Lxgn%h(?i}kTcX!Ear
z4;!@Ue}b&j&!Pg)Ufw6}dhMLvXi`#vr*@@zI9osW=lU}ng-CJi>eyOM9I)QB1>0E9
z(3WX*G3}2<*x7_WUJCU*=*F3PQ+Zb32FF)ut#;Q(Si$#wnrkX#cA%W%Cmm5r@hc<P
zn*KvH)Jc4s{^pK8c-BKe35q>OxBkL0Ja_C1$MX!hso0^~Ih`-1R>~T+%5(7Xz=%D!
zIkvgFApS03u+cq~kH7&pBE0cF%Eg@}0WHlwV&YkVh?-I`uN&}F&#-aP1o4qCNabyU
zTml6PIdt6cUmK*+7)hka^!4}K8kbM}{uQ%#LGk(1s&#fMUEJ$I%D8@lQy9>^mt1H@
zRl0-?$s|?wTq9n1wm5XKpQ(-)SM}3eNN5_&R6q~!Xpp5Ig-Y?s!8A!^{rU8(^9ZDC
zpIgd(g)`%^NSRy(g>~*Bf4O7%!$l_{bA2T`p~WK}5VmZ@%*E14gTvx49u1dwX10WJ
zY({NtT*#2r2!i<f;EIDEWGDtB5#mO9MSfqB3fL)UDTms?t9Yr%hptODNq5u3e8`B0
zRDT!H{7q552bj;T)%-&Mzod@Mv=LZ=ZPM;7?y1j)YkM>d(F4M#MDX50`M!-Vf~(^m
z;>;RNa^X>KLo_;C-^ZcuUrj$uE^zRx#<b^Qh3_Fh5Pq$e^BnNu{iU=0j^Z`$ewxj2
zkCeSTv27D{xg^%Vp4TaYKFkDgjX^Ee`-$W0Hi!{s{CWouW-mMNp?~NcWUfTa@K)+$
zc-$i|8wYfr{Bx9L&PV$qPn()&bPALl?m`oko&$`ASyb7&yVCoPLSlUTkjqY9stX-b
z<<AUy<`Hvf!zrTxaBZn0!$osjG1$Rq7}xaIhQ-5qllb9Jsb9%)Gw$cC^IFCCDT&`N
zDnift>KD3rq)$a?;RXI?Gxix_1dLuc<*9otHYF1z@`_bTG;bO)rBcRGMq_WtdCL+;
z*PO*24BzUMeyzwR4anv1*d4fXr4N<L(Syj0RO#C4COZ{*4wDAN)m!?X*pC99EULtu
z<Q<a-6M~+51xIx>)R0q&674{FfVp(vO=Sdtp8@r!$$9e$6+^PvcWb@LA*<@fM_In&
zj*D$?+Ns4~K!A2Zn83W*Mx^*+oiiCPfYD{Ay#bAi6HHG>R8oTzxg}Ehjey07uH^Vg
z`DfK|Cc1zCYF!+7466~$l4(L2@HB1uWjh6;xu1+1v7?Twgwl)eRRq4Q+3L35aWt*L
z5^O=Uo!DpAuCPLh>i{+P8NesM)~bBF035gjFVJd#ia&~n8I45Uzm&%>e4StixSumE
zWZu$r=@b&bLg*&TjPVug9Om#2)dIaw1g}>z?f*{orBtWI`HIyzEa(eoX6-vLPpH8?
zAOXW6R}#wg*|25~rCWbLDq%B5s*%g!C$f>TW<*)OLU;rUgZLv00Cgj#FM%)#-vSA=
z1i+_<JS6UFLC%uKi>i-aLHUvfHei+NSOi9*-HdK9KUGn0G^Pq|6}FSc7x{_{VVRpx
z-Tn>V(Bk$9dJ}j~=uJXB))gc(yy<vg(7DQYL`XPxrx^lQMM7+Bq)`{%t~C+_@;buB
zy%#^$w2{0`y5A=^mQul!E0?rzqjFymkw1eP$-12?0s(Cd=S?=fwT27qKV|!8+@B1>
zQ?OQs^q`u;W-V?qh_`QSqz%*AD}cYHZ{s~RA3=u<J~a;R6i^+u)o&^!6!&8_5+Xib
zEe?1erxF>H=XJa^us00y(}*oVi3bF;*tp&87}*D;Guhx5xLDj?tZb+lk`W9Q0Pt^x
z&?S#RHWEC3yhJ;$SKGx?T%u&oM%)ANY#*%zuiDCbM2v_c2;x^p9vk$FI2kRMo8L--
z@Zz$3sW9XOUd;n9PL>8AHD+F~YFRr6j1<-KDNFnp&sKr^l;}!JhpLgm+M8!fI#_)c
zegWPR;vQhTt#<M0izOw2@>HV-HFDRQN7T+2)cE!)yM&xTm656HCKDMQ+J5ejF!4ht
zn;SaQ(PKQYAN6u`yZY*q%%pt*ymfHv?ALEpY>iiKS7nSur?ugQjnqlBL21E;m7{KS
zim4tS1>j>$Co7!+2}BYgDSZ7P0o)O=Jsv`*+>{cvMkfPbMwO+3te>?4KA=5s(Kc~y
z<TiMv|HCXG{GLf;s7D3&VUDHWiNhiq_gyknjBn^d8rhv*rAx<cCp8s$L&SE`?Q#5;
zos>A{v`alO#W#k1>>UXaxLF&6zs{!J3Y<rOGf+KtlfTMox_LI@Ym7K*PPp{$KiM!J
zT;(p-b5Odn##5dN@;{0XaGV;8P?Hc=`gAITHADA(P^0jL!>?2wTW!*N#h!da*Eh)P
zP4f@46IqqfZk>pNHioL(^<Gt;E2hb7wcud_;CR7KDEQT@A3a~TYwI?K;2egsfDNKM
zV%?Pp8F{dr`NOdBA@IiG5KY*Z+z-G=WUb<*6%QgF?R0z_c9JgwH6kL8ljPI0S6GW{
zq=E~C{Qv8)c`ZO7znv${Y8e6RM@~-2-~XCi&sF~Dd+=JkACFRVIY0Dy;6{JI>K9Wz
zMn9?}9*`SdAlDnjJ#oBD)&7Mm#K_kR=_cbPWt8{=PbLA#tb5$&_*&)>P_|m9go59f
zZ;X%wsGIuh2_8!?ZrLesw>_%6`%r2drrOf3`RDiZA`_$AOOL)YDPEkj_XFlnhUr`(
zKh&z|bE|s4KRI6kpTVr$!$TF!JQ)X+vW))DYHO<J%RR1T-g<_MX+D%4@pxGP$7jB7
z_x{UUOri}@B&U~FuWGcY<C=2kKr6pdI)Nq?Kp6{UgMhyGP|(C1@9_`b;s_gb0@-eD
zz=1^5@AcB$d@b*W@W))d(8_;`Yzx0BPee|Po}lmY$W$e!J(kHQheTzKAQV>L2GkLV
zm0WD|o9gd0Q+Ye6*C}i}7dxWhRUj;o>Ug&FF=w0#U7pZs1uOM}uzbqeI<&H1k1adE
zr<i<-JCwjgdF}UQf+|pLF6hQ0MdThGYrhD+0xHZP$d=d=3#b7*Ab}EFt&vddGzNXs
z_SM>e>so<z7Q}FzI?u1H4GcWX8;Yr9Uf4OKVxYn=G;y%1dY4uD?E%fb7vhF1kg#uv
zXZR8~`*a4M82Df`$%}|gZp<7drl3)9Qu&7RvvGo#1i47l!`_J1hi3OFY};LbsDzuG
z#WSBLW<&UmD{##r3&jLtx7$Ir7reB!c@&g9J~iLWnU8R#+p{31q(b2@?$URw18<yK
z^$j=wacuPzhM%e|dg+}3UFK{Zv$UAGJ6;tEeP$^`t2<>Y@cSuL-9hJG7<{zeRHaZ<
z)Dxmv7&KSzkiG}Jy2aFCWMPYDTOiGmH}Qi_7qS|6*%7qyfN$p|yow9s$^8M0v-AJB
zR&BF~Gwed=Af|YTq(GRIngjm@%)W(4i$;5M+s62D5f;#hEjZ<!zNoQWHotdWq}^7F
z;k)NHf7?5)k|9prdyOH5sKP---&5{$eY)WuIC|7&K9?uA1M_e$b;{7)H_e8>&qx7;
z#%voYodxDh2zcQ+$ccw>J)nA{%t85r?YFb<cGEG6%y4tMqN0y#*iy5S(J}m11VO0G
zHoW{8Yal5$(ig1iSaLMHZ5A3ta8r42p?J&)r)pZRVr#-NXpG!d7FASvZ1AIbcT=wC
zEU^2*4K1mH-fD7=ZVtQ=&A6><NqQ<FsbZ58!T-DY%l^g$6AhJz;)aAJd$fRQ!qP<_
zXXPbzE7*I4WAg>q4+zH(yAQ%-@6DYr03^nn&U@Nxx`L!eY&4Mrq!P6gAEAjIFA--s
zNt#Wo#!au~#Y5C((=%)&>W;`m`^H|r3{@oCKNwK*CFp?B39xARC_?)85rLemmL^JC
z^abQ4)f<nI&b&;5<~Z22Rl&}p8AoW4(}HR<`n4Ef$)m0De<MzXFP49ASv_TY*q|A+
z#S~U=E_?)o(`6FMRk6g?C{VO_Z*|<0)zZv{QIWMbmCb=2RO^Ma5(WdXQS5((o{|Pj
zML(3Z=>*8_=;+x1t-wKjqd@0vS1&^3f~rIOwi0m;6xOJzh;O+NuoiX7_ROyW;|A4X
zPd;lBO%vZDzXLKM3|LKTH0N+m1Qq<(aV9^1jMwn)Dhq^CJ4Y5EO03H3`BG%WTk$h>
zP^Uq@Hlb446{jzg$fL|Um1oGtMa%HW@%L^PkLb|RxH>H;qIQ+Vf;0e2|M(&Ct<F+q
zDm;Hup=PD6tw2MYalG46rre`x+mAGNM5BTHK*><4SGuR;P`KC9YINHbr%v$86qU0#
z`%B6jd7l~AdHc@FUp1Jk&<ecr(dfFDdK0dc5w}e2leiKv&eYoekUUG&wUTzI9Mdm$
z<u3qa?rAfULfo{LM^6xMJj0$(4?D}a>YBb#*QgX43DMtaJoq=P`DT0Ds8Y-F%Ehpc
zW<BA-kD@|8)`gU7QACLpKTw(j8}?V(QeU9f>+}>ag7+_?OTAF^ne6$}X6gRbd;23w
zlL%e1pHHV2>W~V*A>}Zg14=hq(OQcIjLHK?=Z_rSPhS7@Y@tj#?rGV-0ihusNjh)J
z?&yuWsvDxIk9Gbo-=d44i{B1vJ^m!}*(N-(Yx&r$fVSa#{-2R${0_QJ_|?5y<g9wq
z2d`_=%SvZCL7z2WmbHu+MB2L=kx$%c?RvFZa3ON%Z^YSgs!n>tu~BOGl4lGE?(iib
zlmV6_*vQXO6e1yGHxvFYUTqm_ud8poa&O`wh+rhJI$0gJ=j1g<5ZaXuX>My9J$&b5
zgc&`8udwRCh5h8p&)qr6(jWRuWNH7=uWQ<Ku;sU2=8u(RYMa{AOp!5|sxeFbcs_jr
zBa+=F`^@Rds`nQvajOvqnVxm_l6FT6>>i!h*I9or%lbI7k>raz78VQE(=p@~BuamM
z{kwmf)_F_RgmtI?7TEHL(dw;Zd`n8k!iIb-zRNg)+`~}1r@<Z&y|MqJdK2X2enVEW
za%v*9sA;ahm6zgRFr@cax8?N5eSbbSf%Iq+c0{iSlA#am_F#vNp*|&}xIZYp6Yd|!
z6!k{mxY2)KmP>EDI@BVAZf$kcdpcAF4cUOH2FNS0-@8$i;2;PZfS9o%R&0JNHb!`h
z-wfCXH-Jc!A$0)K)#hpW$L*sA>M@;a-`3TC>O4)|(#TC!E9z7`XYkYn&rt@zII`CD
zRIn-=%<9xK#Dhn+wD$ozU0d3EcyN2F-ZulicLqAbhWdhCxNinLefT&14Ff|BLwyZH
z{`pkx0R#R@wtz<;Ho^cKW1xAw3kwG^K@>E)?^npy<WP!Kdfx<?DBJCZGC?aa`hJI^
zC6@Ip=O}2ljr<76+WxFPG5**%742#xPu4e??~^{0f|E>cU?<34mk&0$6@1{<<dc+Z
zUdfvA7odE7eB8?Kqw=vL_<a@K5M-+JnQdoF0NHLN@t|@$ynFJ|w%WNkjYAu)N7E@w
z>DUOPr_Z+C$BZZ{s}Lkv`(3Aw)NP~TRJ|9SI3;6)7hBrT4YZ$k>K)G5uWh^^Zn)p>
zwwK*)|77EUl~kkWTLHp`dS|{H4|l8m=??nTZL-pB?$6s2FHNKDK_VUo--EWV9yoXK
z3CHpqXt#wT_4bse4|;cKI&<){eQUzeIhp}(;=mY1ejH@0ogQ!@MgNjoQw>$8xIgIk
z+ue+|iT;j|Wv9udbosGvrC`dHPymf41MQv8Pfhk+HA;Musd^|C$p%cGcWi&`cK_Dx
zdfzzZt&xY+9n!mWimu|^cLUu!2EO;Vy#8!yTbg>&ZsUZ%`RRT4j=$r7@{T{GE3o2t
zhPR;)4WFd}@H6nqZ@Wo{^ip?C^zPm!MHqM@jjmlun|!R6>efhH$7(h+Wn3v6uRtd6
zH$oMjz`sNAf1pM?piO~WZI{q;1uzj+iu`<^DKEeT$16Noqn7mWWm4ajdXtEQ<{SC@
zg5<}dq?l=O{@ZzCtKuFu5JV>gZ{Tv#DCu2K`HLS(18kTT8)8cl->i0!%Z~f6x4J0q
z^jOO2qPA*F^O}8k&s@#4DAKAO)^|VD=X2zzulIMb&$k>qki=#?y4e6+XKa{kY;gLx
zpLd4QKEwS@$Nev77@YwESMKJ#%l2t9z4Z3i<uiad>&KZDry7l4Rbx@tI?ZaOEE<Em
z&(OZ<uHCVC*Pbdf9o*A(wc!5(n;~T09spWoiGqe33;_}FqBPjUJSccXXe(o@YA_JN
z5om!H%)vHDL<UJ!KiI)O&%>&7c$#50!*JuJ#saN<cK)Yy>TzBA&3>vn4=D6PuZ&N@
z1?YH>Pdg!}^p5ZNBG>i_NHPqtc4<Rbs4Dxif4hEHbhDegr8E1Ao4bmWHoGgjCpCZx
zKmol+0TuXh6eQU6QhJUcC?K1F2{ZsyGXQ#9wN(!@pA)=#XEjvcxq552pFezAlR%g|
z{GFpU6<mQjS9;D3Xt^spy03V<v-rrbyOw`^vR`_CyR$3GGY|{`7ubO;978Dtfe<`F
zhGRnrE%-k)xIMfB9ZUfc6oD4N0US{FH|)ATWDu`+_&<C&F&5+fKJ+rutV9E#^QOGk
zcRU=!kQG3|wTHdfkA2v~fDG(*6nv_?Te{k7{=M7pH`~K~y1Tulmoup*d!@HMbZL1U
zolnYSH!qU_2rvK%2=kwB^`EPEm{&KR55AvY{KEVB;`4dq?>Ty#zzSHw+Uxzz%6;c2
zyXSK~-6vXd9T(HI&8BxC8@&7xaDnB_LKR?v7EE?F97$9~Rf9KZG7!PbCxI4h!4}+s
zgf~6Qk{qSl!^0946{2O*dbXzeXY;z{KN%GDkAAZI{RNQy*gwGbi~R}gHWgUE_>VvN
zm%sUwf6Fz1E0gsrQ#|EMe0Aq}HCOze6TaeC^ZhS8pyU6auYd~x#ELIo2o^MW5Me@v
z3m2v_lg101G-uLCwCJV{9gTGO;NkQBk<Sz%LWU4I;)hL<Cw8#ebLlUizkf0RvBN{h
z43Hs3v~2OY<xjtVLx~nOdK76=rAwJMb^0`^y`e7c1u0M^fPer20AwvNWmTzCqCno%
z=mssDHWt%VZ0n`%#ENj~LUb7A0RaLG7&!3z7jWPcg8dFIe0Z>72Qh9~c?ef-U5bz&
zN47lKZp6Bq*<g0~mJM9YbVEy|-1c#3)NeCG4Bcf40|W?2kVrs+f$iG2Z!@^PKsRm>
zyldOOT|0Mh+{Jg-22NrHl`8}@tM<%Uvfa_EPit-snPO?mY%jW@W1geOKBC0(F+;Kl
z5iW7!z?p-Mp1pih?Xd&KhYkKSh$yKDm41R_i~IJeX_;`|amOBSzWGKds4Tn?!wjL~
z?;n?d0H6SNyzz#cE)FQ5tF5-`3Wy*GV&gnI*l=UUiYy|}tsC7k124Y%3Jl1@08<Px
zArm_c1{kb>3&$pNe9}oaB!be$xSXU?%C<_wQp@nTtf-0y6qt<!;C@>U%`%Zov$*7n
zdyY9bm8;E!=)TbMNhl-AGs+yj#1TqBbtDfhv^GlbBT@P&#gtk~p@Rq@geW43C*WY|
zC8_=arVAn{{i&w|<&a~JJN7}z3_9$XLk>IspfIU>=DA11T5Y|xC{3vXVygn&35P^(
zj>)ADH@ql7tE^PqO8%=B(@2ZPwFa%yA|3_!aYzS_JhI4bPf+YzZN=Rd1}bi-0E#H6
znBt)<-Nka+cje7d$|%XZ_gQ-brMAi{WJBOM<^(=Y&f*dX_+Wt*UbwauZm@D+cWacm
z$$0g}w_a*##Hi3i>A=GdkN8+mtXXC`=9rG$&?yNg<j}^Ym|_a250XOt=_gX>(54@I
z?)Zb9T!8t57+8<WCmJt?$YPE^aD5u;4Lj7wo&!{LXBlY1;pUWM&fs7TGGrlzlqsU%
zYS|RC0%D3b3Vpj!w>O^RfgdXv*MSNQ+ZOL|xlPPlZNDA-1RG+AAxXqfpu!7iH+FnR
z$T==Fa>qH2{v313<(B;M$}7KIayF(AAc5hIBUm{Tm?Mr(hFxFSg)6>Lt8>dEUyE|I
z<jy>IxOwj#?%)kY&*P3j5{11|WSJ!wl)2%?m?}KAR6+e@+Ru$Cc*6OoowfN!m`<}8
z^$u;eSuiPYvWG*CJBVV3j*-|{ChGd3ma6A`kj<){4?d`&3?Bdk7^2`m8i+*{e&_=q
zG;%f-2_Qvj@R)fh7$fso;R1gFT;KY}x4{XHa3K`ow_-p-7#xl<Rv1XWD%e5`p5=uu
zlwk~ENJAIK=!G_{AzG@?0N(`cbwL!O>sH``*j-_VNxUI?W~jp@Ix&YH^BBlPmZ(0A
zLU|_sbKdig!3L5%fmC<E#)K+l4?1w+2siTy9N=K8ee}Z*Q#gWs6!gZU*n=K<ECm~~
z5QjMc!w-DOo)Cz@K0DYU7|P&={TO*6spz90`mkD8-oXL?{f`D4utN<#5CE~1f)8XN
zOGoe^k+XG0f>KnajI1!OyPZI9fD0iA2}jEj-tv|uw80Gy$IBbQ-~<n-LPl0uOk*As
znaM;Z8=z1(fvHZ2DkzRLsmYvbUXujWyulT&Kul$R6P)3c;!QddGLVFlWTi0K$&T^7
z=aGmFL?8n7N_7W*48<Sp@WXpXA`+6ck0^8~KoQ^&2Ybq-DD>dNNJ=6HVU&W9TY&z9
z5P(nsok$c8MUYewgg}NhoT8B-U5Zp-(vKi8iyl+pWDRHl1Tt`e1_7wTEJ(2kvGjop
z=QJK2xZ)8iQn8$PpduacBDh%M5~)cgTnSIWRHu?~sV6w04RC<C8{psyXn5Ple##NA
z3N>V473x>RI#dRVm7H7+>sPzVK)13rWNZ~G8={aLX`&#T)2vN38)nVCViT|{ctI+K
zT3ES~wXktLD_Yz7v9LaNtY=-TWPJ)*<B6<_BU4oJ)TzZSa)TYAbw??tK!hZ0;SS~F
zhd!1v3_EzEDBQ3E8{%MxI0U612tC?7H1&^T;A9F@C`nI-P=EpuzyK1c!TuP^poVL}
z<0#%ZMICA(()#g7A5%b^J=(#^|B>MXu{Z!9W>JeD_}~XUz(Ns*fCY4Z1P@#(%Q<}t
z-;dDEmP(CkQ=vMz68@LJPYrN@0em<D$3O;D#VS@MXgvDP*S<QfVrC;OVVzdE!V|tQ
zNQerszvgwXA65Z2vpHgFo*2c@X#*o1{NN31_`)!DF=k_2<N9v+oSQ}7Cugy|VI5-`
z(cnfmyb)S<w4)yN0FWQ=(6m2fV=1hS?IHa@3_1Yg8~f0$h0Hhw;~v)l5y$`-%1~xZ
zVB!sX=)@_`Pz5ubp$^foZq#(u1{Fo<lK=ZaE?9xGWANa-J`k=E{)S)#BKScRq~JqW
z7Py{|ERZY<1W6yPuz~#Y7t<yjaDfNh=}jA$(**`~QxPWv9ONJeIKTlKIHFTYk8IKx
ze)X%*N$W@BnAWq7HI6@G1qJL?1tti#G)W+AIt4og#1^)RL2P1Ymv{w?@CYbejq6>@
zn%BDCu&rSo%5Fy*oyc1cDMmqxPs)N7%oB$5pz+Qkv%?<l$d5gsQHN2DY7WHsM}?Xa
zA!1}BAIfmYtz=<_0tDc=7|;L>z`)!xmthQJ*uxqOK!6RLK^gRzGo3r+1psKp9_h&Q
z2QI*e=Pd#aI2gnaelP?i7~$nZP{I&8vXw-S;)<jY_b9~vjXY6|q778=i&ORXQ~`$?
zs-s5Xf0YhuraOJ<Fp$CFfN6ssr~wZ;A9v4vzMh`zoa<jtwAjJEb**>Z=Q>uq**B~6
zG(_Pz#72R$>5f;r-yQGHb^#*jAa=2DUF~fjyVnH|_Q129>^r|A@^nsaotL{5Sp1|Z
z)_Jm8IIo@F=!TKyO%<2SgAQ>p0~W9_p8*{q4z0?@Jor5nTIsg88Zsjhj7tCrc%TL~
zAj8AU+zCzGf>x}R^2YBs6bB069?-aC4G7Q$lRr-ogBS!4dcXuD5c=iU=L0FW@=5Q1
zAN+i<Lf!hZY4anU>7-VF`j_5*_rHJXGMIV{){y=O@ZTT*`Pcvc`M-Zrd|?1?1Mf=g
z#8yBBlFh_YfB^H(#P%)^III66Py#1V|6susN&yyFVR_am7uF4Bn4#T<Y(zq<KRmBP
z{J|dT!5+B56#T#xWQ4Y8OAh=18`z;Vq{I&VKvnF34`d-4glri8;K`sup<u)ereFd#
zU<0IJ4WNM?_&^Gzpa#G|4Qzq<LP{!1VJqCgC6<p4x&c_c!4P@?1_VJ6h=2){014L6
z2%=yJ*gy*dpz=(H^45v+GH@0$5Ee?o3Jkyiw1xfFkNuwR{YVXk<`0EV2>uQc5gXC!
z^57KmFx@gx7FJ<-)X5ZLVHPM)ohXqNVE$niFmV=QArmRD5<{`aERhvr0o^i?6*X`b
zB=HnmG2JTB7E#d>OCb#cZ~;dQIueis5b*9?V8n#60UOW-0D%!GFB3PB6jRX|X;BuQ
zu^BP(6{GPJKhYUYQ59d25@P`tr?C!AW&>++4s$`>{LIhXi5o_c1mVr|>;aPWK_;XO
z_FTdp-pC%70cDn989+-K+R+=>pbDmd3+zA|jK)G>!W)hO8rrEHWWpYbfuA~n3T>es
zuCVwz#2&hW5AMMpYJdYE00`V+^NLInfS?Cr;0%l4<&fYAkbn!=02pRLD~>_kn5Xj4
zu>*Bd7|M$dtiTGOpf3m!g$(fo{t&V03`{8(@%`pcDd+D7dY}i;AnRs97|>BCJCG-H
z5*=}pC$n-Zf08S`Qr)z&EXA@X!*VN4rYzGE9r0iaR$u`QaKwml#O%_<3=jeFk{JI|
z1zaEn{?Y|rAP^7%5k#Ra*K#boaxK-;E!9#o9TO|V5-lN<1J?;Gv$7V9K^^@J8km6{
z+tJ9_q4P{oDrkZzVh=-5f(^J~%Z!35nj#;#VH(0gDcFG&k^wh0vVPcM3-^Nz2w)V9
z;T=M&$XcKU-cHYufC<<T2-c1J1mH8P(>nc(AFUt)7UL+5vOAsfaGJ9Hpc28#(>&$R
z2Byvis0t0%fDtb5I^$FRKIfA@>$5&#!404w0gF)u>@o!m&;X4nFb5Pc3v@6A6G2q~
z5D;?}?9)LX6hg0)AJ?&+MpN_N?H%KhLpsDa{~;Q7bKsVu8pwbK$RIX5w2^d$E9jsf
z?%@<RKt$>R$u>_pU*HP7s|Jvu2xhJTLSY$fG61+CG#?TKgNz@AB{Ut98I<%P>%a;K
zzyTByJQuMkp^`kO4ok5#JmoLJ){`+}00-3I5n`bhqG1`7;Y^LRoiH!RfDBFFG)>Xe
zOyLwxqXEd~)JW+xP3<&K_w-0HZ%x;f(w6Nm2Q=9VR0UW-FbnlBS>OO&Kv4rTQXw^V
z5EB>tv`+aT4<7yj8SXSrKNU~)v`+akRQJ?PjWkL7)J#{E8-#2$MUco!vjpRjLwk!g
z8;Kphfg9ey46J|*)Zh%(Kv>@(BZ-0*&HxNb^h8k<3mHu+=D`B&Kn~{39onEqTR<fJ
z#|WNt3gmzexFHs@B3NFPT+0;?qTpM^b4pQ&mzJ(w<u5!LBTL~`{&<N?r|znHU=c*2
zT>I5u{}o^ZR$$$!4Wa;G2J;vNlo(UsP?2#k57dYdG(jmfFb^{ka-m#v0S~w-5L{tk
zKNe)kbzIxA9f`~vfTc9y?KEd|Lu2n&Yo$Nh0VKPj3N|1JJb>XMj=A<g&7dowmJkfc
zU>DMXS^notDzbtP=CK~4023g@B@jV5m6HZ)paVVt=GG7ks-PSAKwL?dM84K*!<Iyd
ztQ}{84y3>kpYjmhk6z>NUhnk=>NN(^lb6<$Zo5<l(7+MImT$q<Z~K;H$@Xu-mTbEL
za0z#C0rwcJ05}TOaRoFm{W4;Ws4pp%QV%s^71d&qfpEEjEb?;&009tQA#fAdbOBdz
z5BG41jCDtF^THuzOK{$(A|^OQHD}^!@Q5DTzzlv>SfgMAA^-!XPz)lj4A#IJXvHYf
zfB>ej3@~92e4%LP>_eiUXF3E84&f2r!6jTkIa#0uVBiC^X9=31K#+kM*a0g7Kt$N#
z{v=WMB;8kjPqxU43=F$L723cG04H9_vplslOA(A-6YOx(^KEx2ZVmVbZoqD3;06}K
z7s4TaH}8Gzw|yyCMBw*==U0O*xPsf4e&<(yKX`&UxX7Y`52%0y=FV~z^>QaSa|LrS
zT{u!Bmr@ngg<l{M8sQO~p?yig4XEG&e24^+F%et=ggbbIGuVPXn29%-grWF}iwt&C
zHf865d$CwU?ExlW;vc*ipK!$<a={IxzzT3y49Y+Zz`z5*U<$#Yc*P)%)j$)}OnLL6
z56+;C2dK>6V0uL=AI|0gX2KrsK@lQB5J0jYK%oV|_XS$u0NfxBfPf2%>)odQ07R<T
ziZ_{)OEVmnA&9AfFPaKp<Mw~cbAjy@Uk#^}8N-(tm`f2jJ#AnH5I6@g;TJktmv@<$
zd)b$D`53+c3J}&%BbQMb_F-+<g(sJWC-()OnGuYkG*K7|XyZ^{U_ogCLw}i@yLp#O
zP>Z+NB|1cAuIGz~A|Gz$T!EJc_BagQ8J^{M4C2^$d9M?!!5gGt%yd=^$iR>PIHY)`
z0q&t^?%@y$fe_T7AL!v5KmdG|^9tgj9u9yQkU$vV@!$ji-Zr`fH(K7@0m(L6Mn9S!
zK)N18M392`TTTdH+m=gJ`K1Ziff;y#9e9`QmZt4ir*Qz6Px>9^p&kA{TBL&-i#PhG
zKRTp;`Z)VTMn}4+`{SrfdZ?Xxso5c_P1>rTnjM;&9il;)F}FY|*N7aIFl~6OEf=j>
z;F%AT5l~?q*a3^@;TZBW81Iq=7Bm_{>#CDltDBmrtNN;qx~Toy9pYN44I8oT;ip|<
zvBi0ut%pPLDV@W(R=Ob#lu!yjfZ^adp66MP=^5f2ZVmQ852T=ti#Ost3ZO%(3Iw14
zisGQna2)c19%kVSqIRI|p$q7t2*N=cN}5?*B0nHxxbaA_dFvhMp&pW31(A%qn)|p{
zFpm`xUgLIv-?pX`EW35OOBq<Fzng(^U=f;udzV|elRE{`yZ)r{h`8UO9`Zv*Q?MP_
zo4$wJzN4GIotqxq`@S1ni?R5=JC7Y!feI2}hRymwA=ZTtw3!t)ho2b`5&;>;0l?!e
zKjylK&>9dL0UkSCxQRQyXB8f`_`H?-x$&s6J8!+28^0U7u^k&Vb!94K^H$!W3Oc}@
z<N2M%APhcx40e2;gPgfiTb}v34E8{ISG#_^K>)l!w>;sX34sdqA)OmRIbEO)sD=+}
zp$~?E4_u-v3ShoroXp9b9ay0Oxbtn_)|IV$rq>fa;e0(8c+TrwmUG|-*1#3YoX`8*
z&;Ojv=|F`Kv>4Mmb0ziAM{Jo3GY}e~5y%0?xq%O^{(!-;IT6GG&@)}rGrd-4{C02m
zDIkfv*a28bfvyf<%)o%NcihK|_nm`Wo?BgKVZF@MKn<qhWtCh*xFG;Y0ipDx5PYBz
zfIuqPAs>`;0LC1T;(!axfVTjkD>mKFxgiSFT!3p@fa~?84fxI9{JXz>JymTDoI%>l
z-Q52?9OyuG5S^K0cyi5p(IGX$3E8oqFbEX=1sMGm&Yj;ky;eD7LnE6gCYveRVHO<X
z45Xj~Bp?IAEcXT|)_1(ef&8<>z_Vc;)=^u?d2bJNJ!%A?6wY}l3ZW1LfeujcAG$#~
zqW~sAG9R!&2$rG9dPP6_eT%ul3X%j%-Mn7@Q`vzNtV?G(=w}(+zZ(Y>VHT9#=9iw|
zqk#^fAQ;^phL^dSmANpDSR8s`92{E|svw(;2%8&y>zSVHSr*_UJK$L}*Nfr_qaX-^
zU;}XcjR%PCS6z-JzQ=!@;$J<EV?E+DUdWjXdNpAhhR@?OBmjnj9%teZ2%!R)K_BKp
z9~hyLsUV$&A|Kem4uHW4v=9IeAd*6V9!1|CBB}JL#`IGk^;f?i^1&ObKuTvCZn4UM
z;e6<CKit7RynBEVpur_n|MY_&_=kV>g}?ZT-}sXs`IjG(@_`+a0qV`V-Q%5x@A9ng
zIuTys70ThU`6DL8uZY!JbUQX2Vg~*NYZv{`U;V9z{nNkw*FXKsS^m3t{_DSsaaXs}
zd0CUf=}`o^0K!8JFkr$MJXlbS!7&QMG-P-%%$P9?#fV|Z@Zv;?Wi*Z{Q$~%NHM{iv
zJBc!-%9Sizx_k+frM;8=^34=fNXSWh{gAa_@xqSHd-UeXI<;n5000H@`OEk3>C>oA
zt4jT6)hbr0Rk=lR07Hh17&gY<5PMcE+Ouh8*f?wBZQQvya9ly7CXe1ctf2ktXL6?C
ze>3&<BaFE4;lYX%BR1@~G2?%bCs%fSd9Y!9Q4*j~;ev&W(4$L-20dDI>d`Mszd+PW
z+_=qv0Y4@6B?aq1ffB{r8vYsa)XBpg7ax3FIMkWNn^%nr94mC`{!l`V`A?6O4ju#&
zj0iq_M2ik7HoS-)BYKS%C%Oj-64Ny7^y}NduaZ!YQP6F+Fmc5=jP>yW7<epIz)Xnk
zbkb$CL3YnpTFvBCI99+ALkw!UwIPQxq*c~jBFgpF4LS6{h$qju1Jz+%T}5GamMw-E
zjhfl`7><(F7+hvKI!46;DxjtsX-2MQT5B$(@IsS;Fv5;K_Dp44i{ap5f(lJGf{Zgx
zA*NMUSBd#0nbnyoVSj3#N!3YHE$F6n(_L2|8wLneLl^~xmr!`;Ep$+O6}3kwM($x$
z#-U|s(oS{wJqoFP{tEsh2oTGiln)C)Fmq`?zkC4d0SKmK5={;MMH#9l9R?MfXP)WJ
zg(rA;16yL1r53Iq)_Ox(wT_5}Blnc)Ds{%TdhD9XzMAS`t2PVmv(h@NYPHl}3#}PY
zpl~D$s38fiXh69TNF(v|QyXQ60k^6<$tV+!KY)R$EV8X$_vTc^+PfycbfUy&N&y4>
zZ=3k}6N?uU&`?932JN|LLk@wb(0LX;v|dIU6Y7yiiI#!ME$~E2a>*?zn9m_}l!{mo
zctkZR86VW}!BPMmkP@{h>1-;?aTfefI9_0=mWH{a<za|!`HDlYOP8faFQ>*SXU{6}
z{If~}TfKAssyTbzwa-{r`*o>wP{HkKtQiTKYORSj<Y*}%@`^kxeMzdLU{7Zq-<bN1
zZ%W7t?zgdqGuXGIbSiE#9aeY%#z1?D=jWdiNqjj)<)zo&MCp0{UKwj#v#!aen|_ne
zC$Ym48DV^|MgX!$QTj_*V;yjH7Gg*i(!VDCyVD%@`s-Y3e6slBe^age;cyz>eDr=#
z3N6^{vV1+ND9t2_6$$8OT4_bP&>HwgHc2;~)FYk}_3o>Lw{?0OJSV||&tLWFB$IN6
z<daAAKgvbKJmhgmp*W`=LtP|M8d(E9RHwfPVuue0h{7!XVTkq-W_>3)1`s}Q3joM2
z2r&N19@)5NwQf*>cVpR=T0oSvr_JSQIT*wb(7=cYX;5GVdz1Z4RlUD4j(GqR;to$`
zLhWG>iQ5xJ6c~U21n5RIFDMe+NYk1ji3Sjj*xT*AxVQJ+4}SCW;SlK;9SN2!8fzE`
z0L23a%t2&g1)O67Ikuk98R{crAj2B)r^Z0KgdGA1Kmq8$!6W|T9bou}2mwGlL9WgU
zCkTtKZnYMt`ATVV=~akupoSiNLWhzpr5d>b1qLjD1acE!l3Lj$KqR6ZrmUqc@#Bp(
zc;I1g)RO>zX}~~j?vBKq2z>T1pIhG2AG0Wc2n*namrP0&bdVhYNLWgCeBlOR$^PV8
zlvXPafwOoyP(&=65e{XlGg9`zLmR%}neZJ6i-6!wBgO%=b?VcW$LQbW0!TT6fWZSA
z$Us6Qb}`MV2Z8K4)EL&#30qjhKC|e9D4_9<ecp#11t_HK5~-3Ph%f*_0AvL15e+nS
zvn(3YmC?irQ>EeH2a8z6KS-(3oWdj@q<{hmrievXzMx$Az(+oHiqr@Kqkp_ah%fa7
zhV4wj3w`hgH|SvxZloa!Kq!bE2^FY3Zft>XD26{EQ=eRrq7P@-LpbVCi=48f6xfM^
z?3N+`y0)YM)+~T2^T7=?I3PnW#bgUHxk<RVWu|&~Mp7Tk5_h~q6eb`6{%D94z83(3
zZuo$#XC-I{Gq};Iz61jq$Z&?#nt=|v_(qie(GEPAAqtS2m;o;qs5^d6bjW-ZH*(R6
zGrWNlYoJ9=yAS{Y00DvmV}}<;n%pStV<pyPN-h2I1qPhp1TSTaV%d_|xGW6~o>0eU
z&+86$h~fbZAc1ODumTgnmkM8K#4F-))br|>KHk7X6ea*Uf^-12R!D7X-vAAGn1s3i
zNQWB;AcfO*B)1GD<{E5~4wGUB4{SJt6e3)PxaO3vONr!5xB(3Ss941Th$c#G7J%iN
zGFA+z0Cz7bmckD2E@)W8BG!-xPwaQU^}quico7aMDY+i_1?iCfpB$5ROko33ID;GB
z(2jc8;~wwmGJ{#d4L2D0v=}+aLNkJ%GVCH9C8L8Gtk3~4ydk4H<qUKI5RDT4BM5gD
zfB;JR4;bUDl=dLS6)F$|?jE*xzWR`1%c6)pLwU$wp0uSSr4J3)hZ}WBgBjjThKONq
zJ!B}uGUzc~lG%eD(C|ep$nXZp`r{s6u%;oo0TX{5L!>OOLZI(DXh~{Q3WYUV48r-s
zqm=~>o5=5_pAGG33-YY(l!8G4RWUt9cn#^mDWtfehcld(411u4exI9M5ym1W&V8gm
z`p{QXvO`uKV(GD!#%OUKuhP=)x4-{=AA8Ut3I;f+Ft`3g20Un*qHd5vLCD~XCs)&E
z0GNW4PZH*(c!AFWz;!9_Q41?*m%D`BkcP<emL8zt3RY+X8e9SJn9F?T^^t`ex=;l(
zG&~AbE=4;en1&71U=6T<#l@3q4?a})W_lf@C`8)gTc*JQ+dcVMjJ+W=EaDvz;y2B|
z4t9XwLonfRhZ^b8hsPmU+ACghuKy7WHvoWjwRA%is{T?&+W-ezcSJ0l9rnNr{@GzZ
z?HWL$hHrTNKJ9=8G`tOMAhaC-7lbSxtbmsu5ccF6sud2_@QJ~Te)Q5V1rRO(1~p7$
zA5$Vd$=J~Ls3$cKXsG=aa*TBi;lL5-aJ}z;fBs%k9AE=x=meXAPqHdZ-o-if1|O&p
z1sD*G^Et)=EzFMm>}y}0>di$s9=QETxnU{Rbc0S~5rhZe{p2_x#2^f@h-d&l{PeHC
z{r6c4TLYjAkhf(l?p{e&7XvvE36Wq4y^sz^cYh1WfDNcdZ6^R`Fn<yh4tl@`X|Q=2
zf&+sv2!~(@J>UoqD1swMf~Yfoj|2cRc1!PY3P`~_goOh>@B=?E2tAMoi&ug<sDmYF
z56{J7By|k7L=UxKa{*X#8rTDm@N+xJgiYv#_JD#*u?)48Y4%VHXRra4balGt2<ha6
zUkHXgC;*Q{2kMj$X+Q<DvUxbL2-To~d|?QOap-#YFmd`+fs}+HwbBEMun3=keR2qh
zfhdSE0S#^d1u$>}RB!`5U<fn?4ih+tktm5ws1Me#RZtKGQD7oHV1vBCb@4Nap(u(v
ys1NTD4ay)2X;6E3ks{!b4of(Su_%ipSSt4L4({-Vv#5)^$cw$`i{}*(002AU`ZLe~

diff --git a/images/sqrt.gif b/images/sqrt.gif
deleted file mode 100644
index 5bad811d7db4aae7ab61a37385409ba5a4b55ec1..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1014
zcmchW?`vCC7{|Z&<}~Njg?cwmW>%C%$6VJc0kO%g$~b}UM>UfTDn^^(wHBS&7u#@T
z&`_?iTSZqvWN%6-8+1Pq8P(=Z6?R;8*RE2?-KMTo$7BH=nwtwn4P|h=>HY_wH|NE9
zp7VXaAHHXI&n`3ddK_{5i$-ah77m9grOnOFEiEmrt*tg}rBn@$0wjXASQnfKz<}py
z%(opyAO#ABuuRS1U<@TQ<9eQ<l#ucqovYd&B}xbxy7?+hfH#J--8AXhToEQkjvUjG
zyniu5GhKEyLKG8Hzd@0-!3mfsQQ_JtgK#0(U_4t(RTWo~$)9ti?tv-@i84udVwi(4
zh-@0k;rpY=#h^R5n1z+Z5O!KjH-I|rd)d|rfe@JJMa_ibkPN>Tv<?qph4NIT)0^e0
zJxfWjMXvXPp6Umufy3#Or4G*xD?><O5$sr79qF<&Tx}--rx~CP2m_)-h#MS%T&bLJ
ztK3jNK@E7c4v_%km?&enhlm0T;NBxB0;Dl8P%rrjS8+o!A*|o>JGMo6m@$FVb(jfo
zerpn=Mf6PG(~HI1gfs<RrI7DJ85q#vj|JK+@1~*#XM$n-B-WmOs2(aNKsoX*q6V};
zYZX`qHUkxCvp_onMzDZ@Q3Qh=eho@NN*w$L{_jwmF_58tEl35CQp5{~e9Ke@myGQn
zqD9N524Rr^>gRcwQE&zpFECghjE2d0X3c*9Dez0u@Q1M$AfpmnnRQg4UWmbRkcpw@
zG!pqg))Nh_#$NzihQ=P=GkyB<`%UZHa`V$?z8zhsiHSX*ocZoU)^hNd`7iD`AMsRw
z?iecFV_SO0M?QG!?A)mb`@R}k+BfwAd3w{r#4Crl>7me;@Rg+%d#7$TJ=S(;|JyGf
zPab~2_>8A<e&5(nH&(Cp|2VZ_>#nU=PhGg8ZG7#>yMJ8&vv(9HKYnD{f9JrL&-^gA
z*nH%z<>UHa=N1=^p1-Pfq{jz09c{aL`OTT<H@4nBd*_?=D+^EdMa6sHlg!2LyPw8i
n3gtW1-mY<Paem<MODFEGKE8Kp=8a1yz8<PvzqKx;<JtcJ#l$R(

diff --git a/images/std.gif b/images/std.gif
deleted file mode 100644
index 61e29c4ad7cfc08d5387d5087da8491b854af16a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 151
zcmZ?wbhEHbRA5kGc+3C-jEs!T%*-q-EUc`oY;0^C92}gSoLpR7yu7?ZLP7@^4*W*~
zIv^b&y$mcq6Ha=r-fQvt?C$>w8ht65^D?q@b6)RypuJz-cB5*{!{q9pN)Nikqf<&u
rA5VOd$T!vVg|W&Uv85s@U!LcLN#4@_a(nX119vx9oy}XU#9$2op`kg&

diff --git a/images/subtract.gif b/images/subtract.gif
deleted file mode 100644
index ece53af6fa0268426160ec0b381484f94e5d1f26..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 978
zcmcJO-D@0G7{;I7IlFTbCvpg9?Sxpe-bk`3tYG$|bgd^~n-ncrS?wZA8H-CO2C0_1
zLY6SumR6<pGvG~H(hG}1ln|y!KzdplryqK;Q;P;lODe%$$j%P5*i8|Rv-ZY+!1way
z{rWvT@7S)L`P>^xB=G>9h(scqrV&D7u~<AFr<8Veb#-@l8-~%-)6?7AYnrAFTPa0>
zExH-lt|E{EHO8!bg>mK#a?h_Y#e}41^`)lm>qH45wbkE|*TDNhMQKFO`ZiaDN>L{7
zN09|MK#%H$t#;EOiV7LMp(AU96Gs=GonwRxLCeYUR<5bIl2rco5^4L;6@)~CjAk&+
zK`2CN6xr#pFWiH&iKvUE609s32c?+8d5i96&<)VGesQl|AS7|3c&C*wD4vj%-kO{r
z_M35~gp`fheHlN-Rewo0vUY_NM#CG5JesS3vvAyE%^CK+cn4Nk1l#|`!05N`^o<nh
z6G8y%z%q~mghLiRG#QkDY&+{|Z<(_c6=IE`1t>rQPqiV^pd51-Fy_NhpkelJpzeaC
zAVjf^APvPKLsY9&-R>v2qG8dzhWsNHar?A$yzUXgcAZAF?yiwkOo*4AM&M-><5JLK
zv=N+aDxzd~w-c{=euC*3(l)hA^|Y^ZC7fv0LxLeffQP_MU?3z2X`INihZMt90ILb6
zV8}Ar6tp&!(xnuozZ${aj2hs@f*!O&pAc3%7+Co|Q+nO9bw;QWin35eJ^@ZaleKn8
zm@NE9g8QRrP2-WP*3Em%t_sDgnXp_ipkSiqcAWY@#Foy?&M$y|j|hEod~yEbk=W+J
z@|DFimySOc5p(08omu(F9N72sl|6UdkNcjlFAToBzVi9gnX~t)J+tG(etqeMtJh~P
zeDKUfG@qQek6<Kyd!g&l%$P<eAAfGB`1S5%bncbe17FyekG+`4u3kI**6vRt-<_Xa
zJ@@C{ulB$5-if|@2VVU&_f6{LwvpkLqt9+0H0=j(u8=40{&DK{s#u#nyyLnt)UYr8
eIQ+}b*S2oFUHR$Dfr(##*f=<G^^_LHwtoO{$tySj

diff --git a/images/tan.gif b/images/tan.gif
deleted file mode 100644
index bba5910b4c51ec47b7b3bd8f96ec6c0f9c71d662..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 959
zcmbVLYinFp6kRiOI`?3Q_Mj&^VkK7#=27kKm${QDG-v!^TUsowsh!d&H)JN^(j<hL
zm{bzub(Ew?eL=+vL$D={wxl-b1&IVsO3RoJE#yi<p*}E>J`m?2s2#O<ykq}?`{Ukg
zZPr@sy<^vNE%8nrI_{uW4jiQxWDkPfiD&Jx`lCdoMLruX8wK?OWEbvaqOEozG5|V=
z<JoA%?+-$DVzhs&wRM2?^i#XX?S|I?)&>DgYozEvBM<_i{7+~pfh$m%JfWE-hbuy*
z=pe^~FaZwGx3qk#Q;ZNrg$yohFdc9LDqOAIbK;C}A?UpRSvg)*TuCag&5&vdnu3s6
zB8eErI0%JECSZ=;qoN}k3Knoal(b+na0)t-gk@635uvEwJ#kP=6EFe#%k)CIm{uB-
zg8mS4x4;Y6`eMmli+M%PR8qTUI#o_IDnv*7QZOC^=b&`<dv{C83o9n1un4x_Gvf!E
z&<kUwW}<|z4_^X}Ko}4%As)bhs;e7bNf1;31^A%Hst^WfZ7cFRc9apR1qR&JkaIw4
zQM%Q}iV;_wOG+25)u!zYbv#U|Kq`fn_>YsA$kl+mCP^>{oq!0;x}k~(#UKUTAS&-b
z5iGi!n!I=&<5W;IO#(T9JX8(k3~1b+(k2ge3;_cC1$+n8``Upu1m=KdU<9!Kvk^fe
zwKr5Af=WYX@W(K2Wl(QJ3?iRKSyw78<<Tpn_TN@^BO3&@r$+kUuzJ9BM7jg8fnFdD
z=r4{o5X%7bFRbgSd>_gwC>zp1`!FgtR4-&N)@;mmqPY(ui%|pCFjxlk$b8-i`FJ_x
z2me3n$r`8j3!qa7&`mpMKe_nkx^;~mi?gRLO{@=y{LT+fU4A=S|MHc^eXE6cA8yOd
zG-lQ=zZcklE@FMV`O=5Yhwl?--#@eGqxF5)=MU{)c>Y*h?CCupXVc!v4ZHtYo_k<=
zX*_Yny!z*FH+E0O&OY(zxs^{>W)5E%dg=Pg<I7*9E}Fdcz|RLSbbj;TYmdBgbKmK0
kzuy1*p|5UTJ@)$E@c8Zcz^a^hb;IXhM+dsb0|7kqFJovWPyhe`

diff --git a/images/tanh.gif b/images/tanh.gif
deleted file mode 100644
index 60753ac6abb4f1a6eb81dc3356a6d7aad547e6ee..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 972
zcmb7D-)kII6#jPhCUe0B@55c((!#E&Nla`PmF%Wk+PfCI4Mtj<p!+bCiJM6}xCwQ$
zHf^(A$5m()TL>tkjUj<hXi3Uer;$EQ#j+OkAp;>HijqjszU&WQ?6wVDC+%P0c{-f$
zJ3qd|ImzxrUGe@1BDjZbvtUVi%Nc?)n0U=BgijM;2yHY}i<V_CoHXubLwn6~;3Q}c
zJ)@zz7tg`zM}A~RKVh&vj1E!TEV+XQYh~g9y^Mv5wXBjILJ0rVpXl;~?*tX8J~dXg
zxFpmO8u?5?4B!C$Sk?Agl>m`cILfk$m<3L3wa>NUjBp`nK60WKuShN(>a1o+qY70*
zI9MWyP82u@g-9h3D|mTcA7~-JQpPoZN=J-=Q_%4g^cZCv0gA@f+S}?mf}jAMzx+w9
zQjluU5%hPz-2t~5Oh7EzbDeI5GnrCXV^)I`l?tKtEh+I%a1KgKQ+7wyZI(<pLMPb&
zSc$jI#Co|<X(v*6^Wi(74QK|0O^6FH&jT9vdep6epbkjD<2~Jgh=Ohx#?(5JRRkoc
z2W+V*S|9{M<Qi@DXk2pcP&%P+B=@%lc{BCY4h*0x{>L=V7atSRJx!D%W(X94leWL^
zLNYi4IR*6#kOUL<uB00eGEN1>^V2{PAWt>?#VAznp)~0t$lxQu1K>9x{Alh?_-+F2
zz&M~k5)(k;*(3hiDM$lO7XOan?<~S5L=KvPT0~02aZWGg&4>EvlPm{n&W(?Js1Jcf
z5E%5q1TugDMBd7`5<Lp$9a(y}HjLT;qzNYqK8CspnSqnRriq*VXdi|cMLr6B3@i)!
zuF#@BkMEqq|F;*mQ?`Er^g4WW*WuF44<9u(wP|-svp=4H!Y8H<PtM-{G!#Dm%bhnj
z$`^MWD9*QKH*a6?9lIL1HEA6DyrXw9H*swC=Zo=Y_T;{LIeo<U+y^gh-S3_2xzsUv
zweRfm-J^3oXIh$mJ$l!@v~TR}{42RDZ!9RKk#i4jWKyMfu2g%*S28Q#hSwIqdAK;N
zmHO9K-aoUtaQypp`mbRA3uEJYOLt2-mHp@SpZLy$hxXln&-nVw7l#Hz46ptNHgYOX

diff --git a/images/templator b/images/templator
deleted file mode 100755
index a8c9f2e9b..000000000
--- a/images/templator
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh -f
-unset PYTHONHOME
-opt=$1
-bindir=`dirname $0`
-if (test "--help" = "$1") then
-   echo 'Usage: templator [template_name]'; exit 0
-fi
-set path="${bindir}:$path" #in case of respawns, get our python
-exec ${bindir}/python -O -c "import vcs, gui_support;vcs.templateeditorgui.create(template_name='${opt}');gui_support.root().mainloop()"
-
diff --git a/images/tg2_20.gif b/images/tg2_20.gif
deleted file mode 100644
index c2390d374da6e88429f164dd27b3dadeef2aa47c..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 825
zcmZ?wbhEHb6k!ly_~yd^1d>uxs;a71R#v=DQS$y7(!m9~&K|}d{?XCVu^B08v4QC+
zaYY%Cd3kw>Ma9KAiS;E(6=enX6?5!s7uvV2bm&;^(7(}P^5%rzg(;Jk=g-<$HG5U*
zoK02hc2})E*jba&*;G2YC2wM9<%+45-5rfndK;%r>X<xv(t^2@mMxpJV{z-sl`EHQ
z-m!80>aCkrZr!%6f75|UJ5H`Wb#v>P8;4eRo!mb2%HCPW_O3jAV9oJkJFgyEa_#Ky
zdzbb+I<xZO`Atu+?mThg#F-0cPh7ro<?_ipckbMJ{P5|`gRdW+ee?Y0`*)8%ynFor
z|9_A_2PIJa$->CU;KQH;avdm67})<cV0EbvCSdS!)YZ|^(+6S$0}xAt&w|0xz{Cj#
zoSmJVG#EJ<3=9l~g+)cgL`6l#B_t%|444=g4D@A%fr3yVA+HY-)A9CnYjF04109f{
zmZzJsl!2$G5D2JxYB4e}=xTZx$+Ggh@PM7F=%N7>)6?{@W#Le_vQSr3R#nz_R|o3V
zQTK4S(YJ7ScXx1bw{>w>2ddLicW`jAa)5|AI5?<*#1tJ|OeD4J9Rzu&H}LZED%vQ6
zMePk`*|@B^!L|sRDS*Tj>?~Q>mCdx3loS*cw9P?c8oZ{aDoQFqLK_53d4cLR7#L9i
d$Or>wut7)!CxefH1}bRKaAd#?YM(|1YXDs9$anw%

diff --git a/images/tg_20.gif b/images/tg_20.gif
deleted file mode 100644
index b26ab878afa72193d8a6031962d64e61348cbde6..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 743
zcmZ?wbhEHblwgox_-e!e0<xOw8XBf%W(?M$QUSU44h|lko)M9e85tS1wYB__jneUb
z(uw`j8B?VSdt{2{%2X|pZCn-Ey)1V2u7p|J=k}GYoL;kfPV3rb)3+_{+Pr$s_Kk~o
zUwLqT+r+E8rybb2>i&^AH;*p6d1l+)OM4z3oA>C<%4b)1p1*PT_O%m-Za#f}`^e*a
z7hXR+`{wzL4{slQ{P66{ukT=I5k)BeWMO1vFk;XFsRsFpf$g8eOdqw$CsT|_q#BSk
z1pA0^wYn}0&=8S0>as8(f`MVC&$5oANgE?{#JXXW2txx<kdeEm?eN0_2Zae9DoPV=
zlDHTg)PfI4AOwXbx+H)Ez4s&-Nbs;V9|Tj(hZ5GaFepq`jawhy{;29;-1_UUzgF#y
z0}Cp~=uORX^4J*z34;hdkV?g<b*Gjv@Dv0=1T7TAm>4FU6j*z$OW;IKkoGx{pUswt
z{R4^et-ac{<z`RVYSB&?tBFCYuQGBloJeWEnz>bJZrJLpS*1~H!&YB@#Q+p+yZmy=
zq8`8HmtU1w&GlP-`6WoO`SQx<SpqK0g$2NtHM}rkU`W|?@MOqL9VQ83hy!}uK!Qe%
zCsmYKbi^lm6z`1D6F+lY1thp&L5NnRp1A+YFF$MS=AU04BE-O8B<Y|az`?@Q2&aJN
i7)e45gOCnT&P*RRWMMEHDR$5!7dhtGK$4LH4AuZ(u?;!^

diff --git a/images/tg_menu.gif b/images/tg_menu.gif
deleted file mode 100644
index 7e08f4ad83538d6e03d87e4929a60061395b8ca8..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1774
zcmd6ki96c~0ziL>NTQA)q7p@n&9_)OraA~FLQn=3B4`~`SF6*J(b&31ElEgH1c{&~
zB}z3BN2qhuP6Uz6P^MJTQfI4QcU5V-`&o3m|HZ!l;JtS)(BBQ8-~ciKQ~>bl(<cA`
z)M|C}g8`{j+SMuR?UwfT^$7%g8jYqJmiA6AOwLTHXXnruOiWBndAGtVTjW(N^}5+e
z<j9EST|~iy+>Rj}4!5zf(c0QNF)?vN)ScTtg2&^jR4ScL-`&~x$E(GqjkT5*QTuO;
z$^84-ndv2*LJJEE9*-x!!+*0j#UIdEg{CIZGv;PBSCZqoj2kPDAL{#DbPfou=5r=g
zgH>gPmE4@I#gX}`p{*x_m&zK~rn~?4*F<{i1#?PzTx#+YW%K2@Firn$ZeHs8r1W=1
zn<FV;ps7f}qRkE0DP^}L!W?&Z_s_onS2%pG5B%>106Q)K7)(}CF`L7^RZ?2UE3c^J
z3#x?GHMI=T&!Ri^^|u@EHZ|XCxi1lbq;2gTo${_O8CVMf(T4VG!(e>_aGev~W8<m`
zbt@RAtM_PnT3`PW+yJ3jSX_G61{yXrGBQS*7{5fBzCH;vlQBOJMKjRm)#wlVObqa$
z=$N}YZZ7FGCPp3VrE#LJlDxfjkZJOB0V{VoWd`<2PQV84N2nVMd8MNnuWO;#^X!#%
zd=P*kM1w2Wv{clubhq@(5WYbr)tQ)Z@35+&EK6c4xsuUj^K%jJivvTk_!e+pTLZxg
zE6ILwh?_^*1w^=!4Tcr{d!J*W@VM@Miega650)^MK5avlKaziC?qVF@dyjhzmqNTk
z5~3^EHSFis{%8Bbx4vBo5IZKU7r>4F?8ElAX0Vt|(f=BGzq>}7%_j)JWb2I6UQ2+B
zS!8g()tmX=3#56Cs|&mx8!O8eC9beH?Ram4kKM9PAU>9w-`xQXvbPy|gc|q$3Q`9~
zJ|9u<F?cOsVKtr6A4_O_wS1E_g+0s!Zi9$f&wo%(jasGO5W;|KM{VnmlHR_C#nO%(
zw;Pi7BFvruf056CVW%GgB)xhyK>GSzoY2JY^%T<_Ue;UXeZr{_h6({ZUCFsl%c!Pc
zy9MW@u>Z_yD+{K#j<Ziu_8*!Mle}Lxj}&~l%-Jk34AXgWed}t)3n~iKqKUMEb1BsM
zt=a~GNijQxhdJwNo^}XXEWg7?N3KA>JJ>e+ZUIyQF2e_2M$n3m>?DOUNxJCT$2Pu%
zRtzB%+0HX$roOA~B`pKIxPl3CUjUvGu(3}!;$u;Rw)wGm_H*8_1xk>18jp(z|7Ll7
z`aJnwDkV;L7>eqofFm99bOJGz5k9>iFGdA@D7KCtQ3fI^T}B2jh3s7tUXyM1+1D~V
z48TdjQ#gCHtk1x_7Me%zrDYL7C=|#=E*>?2+KNXNp1a~vJ*TSNr9ReS54Eww-Y=<Q
z`8cdKEncP?Djay{Io68(Lq8c2q1Z!qR@s?Errh~<oRloGca~|!w_0)<G=*KNq@J&E
zRqctc`*<6X`CGRalg<q>J5lD_w(tJf^m%x4n&GnC)Bhk$I~q7N)f?4^B7aC9@m!V5
zr+AL<<-vvE^v&;=Z>|<5V98_8S~Gvq7ysg$pZ6|&sd^IS_vWBgIGGJ1yj?rlxtB?c
z8}<+VUXpO?751M8rTZJE&X&tfZtRnGkK5ht)lejICkn7sP%H*_HmkDeB&O%y^Vr^c
z8^46>j#>yOhZ!v$!tSn?p<2s&)BrDPzw8;f!`cg{FX>W%kDKQ*WV6mVLCrDcaVm#=
zC{@g<f8}4L2T7GS6gmYe`4b_dAryXyRhK`tcWzxxiE<AAjoz(&=2H&vlhZzbt!Laz
zHO7rOR9hWHA_*a^4(d_KY-ip2H%d-uzC1|?^;oE@_0;<bZ)|nk=MSL&4S-L<8HU9~
zNa&#$?w=k{b63gp$-Y>Y8^ZIvD`#H<#S-&iB!e|0%a<ym2|kos4L7XOH?llPPxRaV
z+SkV4x8&u8FMO+P%eiC8)?srKIe(XQ&Z2Ft{e#yH67FI++d({*B++zYo0QAB&i>LP
zF3gmV>Oil-1H|6ObTS0zkf7twX*RB{7}caFIn|VorJRY@1cqP%ct|P(S-r|4yL;<M
zd{dAGa_;$hCBH+D;&4TSf<P3YfysNPZ)kKb9_-a9f{R@1ce#o5b<9X`%b6N9mwE~S
W&6KvBz0U<ePWyEWlb5jo;PGFcihuq9

diff --git a/images/tiltedpin_red.gif b/images/tiltedpin_red.gif
deleted file mode 100644
index db91095014e706c3169ad1c981ab32a3b039b239..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1067
zcmZ?wbhEHblw^=(cs`XO$;ocFtl~Zyl^t?=I~5fUORFA`Q$8xIep*iIvXtrxS(5`Q
zs>hVHk1OgOQPJA3u5naV=b)O&F%89&s=7B76wj)up4X7NsH%BJUFWi@${9_KiyCU{
ztW1yUo1N4*xT>vwQ&aP%meyWVhg;g3n{BOkSlOL5usv&Naa%|Cq>06OLyHr}jwemc
z=DPczF*QA8X??-aai@dLWiy>~=4Q)1eP;T{oU*k&Y2$s<+<JC!$O${|oo>DdTs)sx
zn4fWSd1_%XGbV0pK-i3!^!bT-OA<5Zq-L*)Pq^h7dCfO`T~gNKjQoc_$#;T6wkH<O
z&&yk#mG#s=<9=AggOIfKd1cq);~H9;Yg^kJTAN>lr=7?sIF*y%-QC?gdE&yp-t$#;
zr>i=r&X_i3){LpMW?X1)oi%^XrLL~c^JZ<FzvyP)#LWv9EnBv1-@N7f7A-ioeDSVz
zt8Oe_vT^&?O*^)oUbkw?&TX4^?cTa;=eAwjckS7JYWtRBhYlS)dgT6*Q+H1sJ9GKU
zm8(}@T{!vr%Jp0KZa;o>>-(*%KW|-pb^qayTQ|PkdHUntttZc)Jb(WD`=k4xo;-g2
z{Q0X_uf9Ef`sVH1?=PQ!e*5nIhY!!cfB!!UC<_6_f1=Jssfi`2DGKG8B^e4K8L0~H
zp1uJLia%KxxfuR4=r8~QD6=qdyl42&DYGMe!NFz@{zo@AxBDMzXTQGVpmXyv`3`~e
zYa)f4nLEuTgl8ERvvG@Yn>#7(Ieg5AUDl{TBV$p~0wvdSr4k1NM$SoWaamj;Q<fiX
zW@Zixn8=jabD_Rcic!mG0Ry*YfXtMi;V&ixc=>Y(1Z+$>*`yP^tfunSk%k6gcb}RE
zjLt5TmOHbsTxeiC+##vNWBXVA2~!uZ?V=kBCO_h5$!a(mbSf?XxL?oG=*I`f1^x?;
z#WqZ-xY))w!=aOnTgfTqqP=z5iHJIp%m;HUl62&InA^ManhPJr{IuQBH&6En>#vWC
Q)qdK$pIfZX!@*z;07|c>rT_o{

diff --git a/images/toggle_menu.gif b/images/toggle_menu.gif
deleted file mode 100644
index 3a2e664df08f4a931d7a7e049acb822391220b18..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1096
zcmV-O1h@M~Nk%w1VJra@0OJ4v|Ns9mFfafB0ORB1$H&L2s;d9osB&_0IXO91RaK6T
zjvXBx|NsC0|NsC0|NsC0A^sIZa%Ew3Wn>_CX>@2HRA^-&M@dak04x9i001lj6aWAO
z{t(DXtGzhu&Ab0#D2`-lo@lDBZ0o*oEYEap-*~PEyfFE?pm0bmq5={iK&crFMudWN
zS}JS?1S247l@OktFAGZ<bSH%D<t$-S#2l&mLI%NCNQ07@Ie<f42nSLnd=>=>913$U
z34;m<PYG@rlPri71p*2a0|JQ`K%j;amQ4r>1qYyqtQ(5}rYi!pdr7s1t|PJ}q?x)G
zye$O?6AHi^!Y7y$1q;OzkjP6($&?idoDvAv%pA%wzHizRJOSbsvIGbT+!6-}f{6p{
zf}B9a?SBCI2@75D1Z{byY1=#>^xAdF7l~iHc>A1zA!ErAf&~ZO#ak82N<w%3WfJ()
z7SEqYb|PINS^x%BB2$)pDELH;z$%j+0eo~L63vsH4mAqg!tNh|X9O&*Q@E|@H4M0D
zRa;Waf*%S<sCkgZQm2n~?|dB4wjj~h1#Qqa6Y%6HfCCw0WweAK%L25R7GxQq*DXK1
zYC%pw@JRufiWxl-4VzQ1PRA1x>;)nq%duD_RXXBC4--r)l&fiKi`J53zKo8RJ#$(w
zCb*?Cc6D3PN>{~8EKmD@u2h9Qlzl|Swvg2%Qw5dul*vsB6K~uEN#ST|?8*VzJWb&S
zX^IbK%FHC#l+%<C<mU@$mm;{?%&C}^cykC=L8_NsVov?eirb|1#8Cc103><=*jysg
zsd8L1<w;>2Xl?A~L2y-}LC!5wZB<-kh%6*rWM;k993jm-h|v^TDCLMM5)mN46zahP
zV^BHy)Jb|Z;>b^WF2cA^JClSKlzf^2z*1eF<p-fw{Bbx*k5fcM!7)%uA)b+g$T44o
z-snf82j<wK!2yhrU<zLo=%qkh@6}}xHOyd_iCRWtw-6`hMM4ZTN{|_6BH*zBn*?aq
zR?V8x;MgZ4V&ansWE_?F)&Y3ox6P8FIg_THTD`>xWB?$#Xp(I#LZ_ww(dn3@AY|2O
zg@dIai%5uovOp>eoK?U~A<9KeTy1>vt4^pW;Fu$ls3kyL0q0U0+pvL_abkcCPIap@
zI5tbC1lJOl1*^{NTGlUHEE^Q1s{F$wgw9ODPEOiZ;7lrzJP|J@^w@XoadY`m(Q%_h
z*eU>e{IEfpB;2t@0gQM@D8oIS5e32vH>}>nKdEu?!)#1Us2U2lLE;u|fpLbzaF{`f
z%2;I4MI9<zP{+k}c-)2;R^%)h!(L1@av5>}{qh`EqQSFH>cL?%KOcjDi@`+@^Uh0T
zJ*9;SQokkP)dF)}LvJ=v3K6jv0qeC2VmEX)23xLeLR3P!E%)4X*KPORc;~J6-hB7%
O_uqgAF1Qd80029py5K4R

diff --git a/images/trashcan_empty.gif b/images/trashcan_empty.gif
deleted file mode 100644
index cbd89f784853939528324baf11f1fd12c9080723..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1500
zcmV<21ta=LNk%v~VIlw`0QUd@000020s;gC1PTfY3=9ks5)v#dEHE%IGBPqWG&D6e
zH8wUjH#avpI5;^uIXXHzJ3Bi(JUl%;JwHD`KtMo2K|w-7LPJACL_|bIMMXwNMn^|S
zNJvOYNl8jdN=!^lO-)TsPEJoxPf$=$QBhG+Qc_b>Q&dz`RaI42S65hASXo(FTU%RP
zTwGpWUSMEgVPRooV`F4wWMyS#W@ct*XJ=?=XlZF_Yinz4Y;13DZ*XvMadB~Sa&mKX
zb98icc6N4mcXxPrczJnwdU|?$dwYC*e0_a=etv#`e}900fPsO5f`WpBgM);GgocKO
zhlhuVh=_@aiHeGfi;IhljEs$qjgF3vkB^U#kdTp)k&=><larH_l$4c~m6n#4mzS5A
zn3$THnwy)OoSdAUot>VZo}i$hqN1XsqobvzrKYB)sHmu^si~{0tF5iAuCA`HudlGM
zu(7eRva+(Xv$M3cw6(Rhwzjsnx3{>sxVgExy1Kf%ySu!+yuQA^zrVl0z`(-7!o$PE
z#KgqK#l^_T$ji&i%*@Qq&CSox&(P4&($dn@)YR40)!5kB+uPgS-QC{a-rwKf;o;%q
z<KyJy<mKh%>FMd~>+9|9?e6aG@$vEU^78cb^!E1l_xJbs`1twx`TF|$`}_O;{{H{}
z{{}z+V6*`7kpTY?0KY&0WC92AO+fzy1iwH)(6lU<kvA0(6bL{F04dY}0PX+){{R5L
z7x)B%F8~J=00;;G(6m99kv<g=1PDMt0R#X5`m+F==l~xD06;(h<OBhE001fg06+i$
zdmI2@?*I`001UrD6i);2b^!ln0Kako_ICiMasVG`06=sAOl1J@ApiemzrT2id};e&
za|#f200?QMdv1YXX8;f&01RdU0BHaKYybdd001CXd?y58F8~l^00?0%OmqP7VF3Rx
z0KY*11XBR<L;(L^0Kazt<Zl3ZasVo806;GQd}9D$WB?Fh00?xTOfLoSPyqj70Ka1Z
z0000000000EC2ui03rY)000R7009UbNU)&6g9lA1T*#0i!G;hQ3XCX`U?fis22!jT
zabO)lhz>+DaKMuQLwp`WoahjM88>+dEsCV#OaTQ301y~61<DpLEICU2f+kKMLyahD
zYG6PF86|ZFv@yaY3z;rg0JL<t<|Cp<mpEy9GzUx>B?6fkLBeIsmMvO^DvUc5(<e-k
z6!~f6ron*|OPpjmvxQBXxN{Mk0stq`!b^`30ji@$%#|-<z)*>kM-Gk)ii0k8W2es`
zM+!S-x+LilqeF%M_;KRG1PIVu9ilLuXi%RzZqkh5LZwL&A2?*NFmXWfV&?z=p+Moo
zH*n#^jr-6s0>J?c(4qcU+GHtDi4!G6eDJUVgM|qZ9GpMVA$&ssmG0yz6{-`aNsJI;
zM;c*RK|~NZyx>56_lft!6IP6(M-u=Nasz=@j4%TZ6EL8FeGXOf1QvUY5=9b8M8d-i
zFVuko2p(_<0fV_oR7n+Jlo$mRYN&`q5-yey0}k)eDA!y!o>AfzSo8=44?wPv!jKkN
zU}R$VRU$<jONQZO5K*=;Bn28=`COJx&`}~4XE<pDkSV}0<OCj=DcWL{EYU}aP_WVC
z4@>}Q0u4h-5Q3fJ9pOibOw7^a4ky+@LJmVha43j24gpAsQ1zI@6+nWp!;lYL+F+JI
z1X5xVcl3CJ7Q;Y#@WYS}m|7!}Jq&`!C_?1XV+~yZ$$=0<GGObGc<LaC8>9Td#~G2x
z00xj69Pz>p3ml7Oh%p#4hA2GrQAQ*$j6p;l8Z6O*4GDxG0j6_7Vu%ockRrn%R7B!}
z89~&M0Tv?=SplN=^`QwUrI_-D4PgL)LK`&Hk-`Nq)bPRy6u>)|AP9@1i68(lvc?Hj
zd_e^fJtSCxb7w*n0{{VZVaR`s3}Ocw1tJ#!05NpLa!Ux^dFn<)7yVq&NGCl|Kma@7
C4Q-bI

diff --git a/images/trashcan_empty_20.gif b/images/trashcan_empty_20.gif
deleted file mode 100644
index ecb0c3ed762ed2fea827a0bdadcc58c8d63511aa..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 444
zcmZ?wbhEHb6k!ly_#(gn1k%#d8X6kr=H^~rUJ(%y85tQB6&3C6?bD}EU$$)7_U+q`
zA3uKm`t|3}pa1>)_y7O@AU#9_#h)yUj0^${Iv^b&I~iF2Ehvy^Ym{+gWH?$V)0QaF
z^wFmOu!>t^gGZymna?)olWqEgL^z8Djur^8EVMb#EamU4;qzIA%}B&^lEB%|68^<D
zXD^#@cL=aA(s21~bL_H+_hSp*%{tyqB2Axd7JRhnXw>kYq;MjThw+n5KahJ#gR`ii
zEl{Mj0c1G;Bo*Gn1|CTgu0Src)cIx=&t{dbi2`hb3g<sdfVhiwI4e|I9XU<_4gajr
zc}c~)2+9Q-?y<<^<Rlr-L;)tS;hc+1PEBgyNRogW-lXBXNy8&Zq6KC+=VSxUCJ8o0
zsNv@(Tgk9>D6}TBfK9h>-(=$6q{6N!(GPZd|78WXA_=#NCg(rf^nb7clIK4+fJvM4
I!b}X-0OwkthX4Qo

diff --git a/images/trashcan_full.gif b/images/trashcan_full.gif
deleted file mode 100644
index 39acb09dc13cb93f6d1907820d2cbc999eb5a29a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 993
zcmZ?wbh9u|RA5kG_{PA%!o;F7k7Z4p&y#Z|E>1E>jvZODY>A2*%bRCczI^-g<k*rk
zt3tMPg*<uoL`O~J%d<NoLOecRJ~>%AbE<s4ynAxx+LbNaw&<9dOqn$0$)hJVQ6^8$
zt=Tfk=gXrrGEy=nWhH+;J>lVEX=!S?b85|>cU%7a`;(HCGH2eLEt6u_tXrd^r1Iz6
zmov*sbo6xI+}aZo6!PWMmp%LT*jU-z+12yr(vqI8o;{OdT%26aoICU8-J373?udwq
zloXZ3M8%ZcO1X3I&YW3uROCcz>S}oSc>cUQa%WQw3mecoA!kmV+4F8oPLRo(RcpRH
zyYl4Jnw~tDH?Q94Xz7Rui5!`i;^XI&lb7@5-J6ngF?Vj=*|TfUm3?!jOq=rL(3F}u
z6B#*~nyQ*Bm#!>Xv}DVsEk_O=xw5L{&800%>U>(-T2j(dLc&69>}-1adR*LGV&Y={
ze0syf#q;IW6BRX;KfnGk5Dj!dDnW6<!2Yj+p{cp0wXMBXwX3^}p{IYs#J&j(%5wcp
z6DLhkP_<Hab7K(q^lWm@o7Fo*SyfwBu}hmV%#I;YJ9p;fo;|7zBC4SrdJO)&q87&)
zqJ8!DTd8Wz=$>k&?IX*@ASoK}VVK0gV{9z##xJ4}yZ0&sm#nOwF+;earY4&=gBOzy
zi;0zu#>Zu`iygVrJQxEKI%jJHD)1;cF&&zr8)^1pK|!Xdie_>ELqwBf;2ahiF`fd0
zHdisDk34p}0^7VrnA0BF{0vk)KAFc&?S{strw#h?-RqVFGBGrF$a@?~@Z5N0;bi6H
z2P_NOUILw?ui~*Ioq<O{H>xKigE93i-@1SO>r6ZX9(A^wn15Ke^W!6)sm6j)d#0Xz
z;nk?GTBM+D@U=zE*hythBh%94tsdcLA~Y0TI#^ZM&Rt<hdgj)Da_U2|u+2N&cy(A+
z0t6Q=J;rOYf<69S7o#`lb+LjIX3kZ<{K5r6!hWmRyyr-rop?el@ODYwRZqh$X}WXR
za|Mr`KOoo>dvWUZ{ackSKRdb<-kG9X-IIN6yQJ=X)h+6^(R)7K{gRv3R(7biAU^ZB
zs``EX8CS!ZO6Sjg$XdGe;j)BE)jtX?`A#d;6<ae^e%!BVt=t&desJan`KI(046S)8
zYF}BC<QX=y2<22b#_$?F*yhH)QfK)Ie#rv?O#D#{9*xfYaTn6sc$O~|Y!s9za-Lq0
zByyoyz;ssOu|j<phfXamg~Bu9JxdFiGUEaaS%VDEu^f;RRY-J{NXSrZRLIU?TCUaL
JB*ejB4FC<#n>+vj

diff --git a/images/trashcan_full_20.gif b/images/trashcan_full_20.gif
deleted file mode 100644
index 97ed55ba861960dda5ff3962222823ca8c076fac..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 825
zcmZ?wbhEHb6k!ly_?F8csUxl;BciIJs;RD_t)s1_Yp8FeZ>p<mVq{`tZDW^a>||r?
zWoO{;Zs_T5ALVD48mgZhX;&JiTbpR>?Cj$1?&;y?;p^+?AK)Jn=ocIi6dM~G7ncBp
zY4M>6@d*iu@j00>$;rt%S*bZWIr)W!1^ER<g+*1>Rdtn(^^Nrnb+rrX{o5NWrZuHa
z>&#fz9I<(x@AjG58>g4H-z`5dCHDEH>gML=-rk-`lO|7}JY&YR8FS{$nK^gmlKC?i
z&Rw!?S>MtnOO~%%zGC@`HEY*w-MV%A&h0yPZrinM*WK-H*SAl+ePr>?3+taA=zn}*
z!r>!_PaHdO_QavnXU?3vaPHjQ6Xz~mynF5R<tvx3-?)DB`pw&SZeM+U<=%t)_a8of
zaP|0$>qlQdyZ`Ium7gzf{{MdO(c?!?AH8_~^7)%LufKkJ_y70vFJHfW|M~s@zi)s4
z{be8vDE?#t>(c>|pg3V*|JT5*q9m=(EvlxjrlO*zA}y^10=e0_DcW}F;i>9DX$iSt
zpp=^%A(s)RsL5r*z@cH`5tN&it&|(3otEREksA^%=9MfNXc3W{EuE{%6Q5|T5@co1
zZD6FIl;ED5EuZTms_dsD<E*4&XeQw4o#~#C3$i0iuR%!MBR3<?hu73s&@mTiV0Kon
z6{D1ehohq{yRaljb}m?9Zcuo-i;9Y}tg<vKXD(P>cCLRwxKp^jg{6`b6HpAI&dtqV
z%U?yqQd}A#=HU@w5ul=FAq7&G4N(UY^H(#U-e4gCRtd5>H`f&;s%~xx5d*2r&UJP3
z2Z>ogOoXU&as`U1!o(mZMn<_>c&M0Lipp}ZKwXn=lN)a7p`vA}q%2|%wK*%r-r6O=
g$=Y0;!#p>)0qX4BENi!nl&BzUC!ptYvl|(#0V~TfDgXcg

diff --git a/images/trashcan_menu.gif b/images/trashcan_menu.gif
deleted file mode 100644
index 1b8b1af82d7a3475df0699284426825fad13f4d5..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1757
zcmbV}jX%=~1HgZq&4yk^n4NjeQbr;|RPt`#=4H&wFf+o!me;&Qvf_?oHZLdP8VVJ4
zwjod6&Cr$dGB4$#aJVZWU7F{ff8qK51>ev2>*(xYZb8-uDFTB4K%>zp6bgw%V$c~j
zR;L{uo#W%;_xJa|OTK^k`sLl?yR6I%Zgs`_`r514uin2~85td2UR_@M@OEl?Dk6li
zz4_Pt<OnS)C^I$te8lOS^T{ZEq=~V~)wHVx1wVCkbZl*J&Cbp4OLlnmh0o`O6Kk)X
z9Gs^3#rt30_;|XH347z{blmw<D^mmitiEn7ODroXudU+N=Mec7F{OEzzOIS|?Y!m(
z&9rFhVs}x^HILxn(0dK{p4@R;9#5kZ4CndG+5Wrx-<BI28;6F578e&wuSX>M8o0T+
zFH8)HUp(?~H0oxP-go~@@jqY3<pKaeAP{uWir#pyimLmj2AB-;xx&K2;Lu<_10yR-
zD>XGW3kwSdgVERDCma+uG&ZnUB^VtHzny<A`&v#;jtAbOt*uQwCC2JwZ{NOcYin0t
zTK;db{}26tg@FIP0AP|V00fqTK&7Rm0U#d?1{Hx|C2US<S$V}TmA9+zRM*u0dY4;Q
z-*B%KE(4d9J8<yOVJU>X0$k93zw>F=f1Y*s^ga_DQB+b!s-S*QMXRZ6AmExZeN)ro
znc2C1EjfAZqiS#+%rV{LC-l^hG|hkfwE21KDIBYBaMBP!oHBxhkJfnFUN1=AR?rI)
zwy0iLg~l(vXw7j1fOF}4fP&qvf<O-Q7TV^9Lb3K|W(`en-^tn4o{289_%r-OS;Zx<
zUX<wAsj6OI&(jRZFbtx^I_}@g%`4G`>s~~mFe2`8>-NOF$6hMM*r!b5z;94}MK7XD
z-dMt!B_duDWkyOxHEy&JG!flJ#+aEBU7tpxb5FL?bzIV6JCKXvUoCB&njttV5#3P4
zO7oZ4p~~?P1lmN<3Wq79!H+NXPz$`mv;!4K(7hO0B-@i0yK20O+n9yr@70TQ<&bYU
z`HB}ZfiuVsSo-W>26ZH`)${rI9rg5G&`Iju{rcGN-Hav67{4~XIo>E1-^?>rr^?Y6
zh5XJ2yEL!jC-y!{>Pg-OZ5+DS>Y!@)vfE>|cs7@#nzCZbAGq=(Cs`ADs4y8&l--Fy
z$4_7OWD$=hafbDpQ$FQNg_<LBU}S6U!DxYI#IS$V#q9AQ4dV!Hd9*|1MA%riWCF@)
zCyYZWLBeoUahF<qZoaK6wKk_!>+6K8uV)lD%<q?>@LZ?NNKRf)t9Y^!Ln2SG{62l4
z#0PZj9F6m2Z@qO5cxVb?cuHI3<;A?bUtV>=MprBDny)swBFEiprOMg{G+7-sj+!mE
zO%GT(R$XAkM24q;tYKtLt?q&j0q-siwq9RCVw_F#rR(|6<T?eet}GN=ui}~lwF`fX
zVPhXqX+cAvZ<9Co=%yT}GF+WO`P=VjkNd{?mnv5t>K)o>AIsG{@^B#ud-=h$U~Mfo
z_E?~d{GYvPP0jRAir$3f*ree|jr5PXun@qS3G;>kSZuGm4K+X{q1V=trZ-~3)Bnkb
zs15exI{uD0;9%j!L0eiHyYa4~TZwi-SOr{MY^U#tj&lTR)P9D^NutO^SH-RQraj^7
zK>dR5_&Ga2!t6XZNvKr{88yV)6Zu?1kxX+w8LF|K3!<ik4mO$dcpHRJvc27l&91E_
zC1Fqfw2Rfa34~HA8q>umxXe*~i!LfJ%+EYj+AXCIv?XJfq+|S$m<d+{e&O%iQ4jpH
zh2<pc!)dgi86mZd_P8I_YhT5fojdCm+m~U{mY+#5pQ5Cah9e_Y*0YgPZ23WP%ZHU2
zr?&QB<;!ah;sK6-Wo&gS5sIBtnz+~yn<yZ6?QKbPZhpgF3Cys3b>-#qtRZr`WaRyu
z_eT4BywXRB89VvemR;QB$>@%OT^8wU2k`4*ab)HWMMF|*rZ+WE7fga9=#75X(#mdu
ztE#Mw#w(Q%snk%J&;gYDcIQHBH(|hg*44oVSuI$%H3Er7UA0+7TOtJ`le~O%f^GFW
z<m_%V`Jmbs3v~wOV&?iB@%@Qimxb^`f>BsU&F8+V(VyVDsWKi`%14fk-_XUbi{uhh
z`gQeQV#0Ksk$yxw$;XD9ram$r=>qM(tUnF}@6OALm^Ns&dc`Voo+ixMDK=gDpq9C<
z3848D9GWc^bxY|4*k6bX*km;za8j=wvjEtQKcr-v<PS~arE5bs+4o#AXKsMO(EhB0
wX1idOX}q*fd<~oBQ+G0Xcm$u${DG!o3sppb;Vqy8&3QP(vf<GPX)s{<Ux#W)hyVZp

diff --git a/images/unlock.gif b/images/unlock.gif
deleted file mode 100644
index da0b2f547f6573fe644ea0fbe233fd7e37bc89c0..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1048
zcmV+z1n2ulNk%w1VHN-s0O#}o6dETQA}|^wG%i0{F+*ZBLt8XMVKqZwG(=%EMPW2W
zVl_u)H%DYQM{71pXgf`9JWp~rQFS{|a5+(OA6|PsP<294W;#=PI8=N;QglF4cRo~n
zKU8`_SAIfQd`ntuMO%bIVvJE@axZR?Qe}r#W`jj*lvHVZMRb{1ZI&&7rXh)-TW*ve
zj-n-trC4&I9*wbEbEQdys9|}eOM|N=mabiWs$hPnWPPS0owQwoswSVdGn%z6pSVtt
zuP>mvVv4XdpS>@lyD*}<XNa&cqP#Doyf34@Fr>ULrM)nuy=;iKD6PaYslqs`#xSqT
zTb;XclDjjo$9I#vY?;0{vdL<jyl|MjH?+<&yVP`^y+O9rNwm|0n#DZ6)orT7LcZ2V
zyw^v&-G8ISik`ze!rNx9)l9tAh@;3_x!7H~&~38KjHAg+!PknV%1pxBV!6;U&F)~k
z(u%0ZcCgb{z}I!L)N8icW4hghuE>3{%!{kXlBmg%sLYb7%!aVcZNAb~$livs&~v-j
zlCR8vxYBpM)sVE$f4tL-w$YNb&t}Nno3zuGxX_@q(R;?(i@w~1#NMa1(1gd@P2T;O
zz}A1v-j2rFm%`Yjzte%w;H1CUcGTy8(dS*?`HsusebnHl!P%$5*`~tUkImkU&)=-U
z)rHgHqsZ8)#NLw8;EdGbsmI!k)#0bf+LzJasLI@u)9Iql;F;6mk=Ez8$J(sS<*Cr&
zxXRwM&EBBa<E7N&oZ051*5t9!;H1~&y3pRI+2yC%=(E-2uG!|g)a10-=BwT4so&|X
z-|DvA=fu|MxZUZ$+v&36?7-aWx8dr$-|NHO>B!sX#NO({;_J`d=)&ad#^UbI;OoTZ
z?$Y7x)8g*U<?+tt@5Sox(dF>T>G93y@z3Y((&zBd==9O-@!aY0*X;Aw?(^8~^xp3D
z=I`?3@b%#H^ycvN>hkvL_4n@g`0V)l|NsC0|NsC0|NsC0|NsC0|NsC0|NsC0|NsC0
z|NsC0|NsC0A^8LW3IP8AEC2ui02Tlg000Q}0RIUbNN}JUHHGx(u~M)h!AG-n4TA+s
zi$jOOm<_r^N01;<el8MJ6U`6|fp+YK3Mo>c9ARrzptQ58PfM3DW&TT9(_xDioM<Ie
zhRDZ=oBvki$N`F1ICNiR&_L0$#ttS-R(+c%4^<v2CR+AL0tU=6cj~<PI;0~)OCe3(
zym6MU+O0o8Fc@g51WTAO+TxKbrijP|1uc<OsZ$5qI%=-Q*w}EOr4cD-&Rlz!E!UhI
z6%e#6;v`KPW9OEQn)1Q}gVCi=r(WIqwMZ91xNv#K&RQ)pGdK*m$wQ2oX6?A)>Jp=Y
S0Hgy?i1<(dy7htq0RTG?6>7Ht

diff --git a/images/vcs2py.py b/images/vcs2py.py
deleted file mode 100755
index bb06fc09f..000000000
--- a/images/vcs2py.py
+++ /dev/null
@@ -1,367 +0,0 @@
-#!/usr/bin/env python
-
-""" This script converts old vcs scripts to python scripts
-This is version 0. graphic method and template won't be converted
-Therefore old script still needed around in order to load all graphic methods needed
-
-Not implemented yet, to do:
-xname, xunits, etc....
-Transform, Logical mask
-"""
-version='0.2'
-import sys
-import vcs
-import cdms
-
-## initialize some stats
-warnings=0
-vcscmd=0
-arrays=0
-arrays_from_file=0
-plots=0
-active_plots=0
-unimplemented=0
-vcs_objects=0
-## Continents overlaying (default none)
-overlay_continents=''
-
-## Determine input script
-src=sys.argv[1]
-
-## Generate output name
-outnm=src[:-4]+'.py'
-
-## Generate output name for graphic methods, templates, etc...
-outnm_vcs=src[:-4]+'_vcs.scr'
-f2=open(outnm_vcs,'w')
-
-# open input script file
-fi=open(src,'r')
-
-## Opens output script file
-f=open(outnm,'w')
-f.write('#/usr/bin/env python\nimport vcs\nimport cdms\nimport MV\nx=vcs.init()\n\n')
-f.write('"""Python script autogenerated using vcs2py version '+version+'\n')
-f.write('Input VCS script: '+src+'\n"""\n')
-f.write('## First load all the necessary template and graphic methods from the old script\nx.scriptrun(\''+outnm_vcs+'\')\n')
-f.write("## Individual python code for individual vcs object can be generated by loading the object and saving it to a file\n## e.g: t=x.getboxfill('default')\n## x.scriptobject(t,'myfile.py')\n\n")
-
-## Opens file for graphic methods rewriting
-
-## Ok now let's loop through all lines and figure out commands
-ln=fi.readlines()
-n=len(ln)
-
-def extract(instring,beg,end=','):
-    """ Extract part of a string between 2 characters def, returns None if not existing
-    Usage: val = extract(instring,beg,end=',')
-    """
-    try:
-        sp=instring.split(beg)[1]
-        sp=sp.split(end)[0]
-        if sp[-1]==instring[-1]:
-            sp=sp[:-1]
-    except:
-        sp=None
-    return sp
-for i in range(n):
-    l=ln[i]
-    #print l
-    iprint=0
-    if l[:4]=='Page':
-        vcscmd+=1
-        val=l[5:].split(')')[0]
-        f.write('x.'+val+'()\n\n')
-    elif l.split('_')[0] in ['L','Tt','To','Tl','Tf','Tm','Th','C','P',
-                                    'Gi','Gfb','Gfi','Gfo','Go','GSp','Gv','GXY','GXy','GYx']:
-        # First reconstruct the full name
-        nbracket=l.count('(')
-        vcs_objects+=1
-        j=1
-        f2.write(ln[i])
-        nbracket-=l.count(')')
-        while nbracket>0:
-            f2.write(ln[i+j])
-            nbracket+=ln[i+j].count('(')
-            nbracket-=ln[i+j].count(')')
-            j+=1
-            
-    elif l[:5]=='Sleep':
-        vcscmd+=1
-        val=l[6:].split(')')[0]
-        f.write('import time\ntime.sleep('+val+')\n\n')
-    elif l[:4]=='Over':
-        vcscmd+=1
-        overlay_continents=',continents='
-        n=l[19:].split(')')[0]
-        overlay_continents+=n
-    elif l[:3].lower()=='cgm':
-        vcscmd+=1
-        args=l[4:].split(')')[0] # get the arguments
-        sp=args.split(',')
-        cgmfnm=sp[0]
-        if len(sp)>1:
-            app=sp[1][0]
-        else:
-            app="'a'"
-        f.write("x.cgm('"+cgmfnm+"',"+app+")\n\n")
-    elif l[:3].lower()=='run':
-        vcscmd+=1
-        args=l[4:].split(')')[0] # get the arguments
-        sp=args.split(',')
-        scrfnm=sp[0]
-        f.write("## Warning the following will only load the templates/graphic methods\n")
-        f.write("## To excute commands convert script to file and uncoment the following line\n")
-        warnings+=1
-        print 'Warning: Run script, will not execute any command, you need to convert it first and uncoment the line in the python script'
-        pyfnm=scrfnm.replace('.scr','.py')
-        f.write("## execfile('"+pyfnm+"')\n")
-        f.write("x.scriptrun('"+scrfnm+"')\n\n")
-    elif l[:6].lower()=='raster':
-        vcscmd+=1
-        args=l[7:].split(')')[0] # get the arguments
-        sp=args.split(',')
-        cgmfnm=sp[0]
-        if len(sp)>1:
-            app=sp[1][0]
-        else:
-            app="'a'"
-        f.write("x.raster('"+cgmfnm+"',"+app+")\n\n")
-    elif l[:3].lower() in['drs','hdf']:
-        vcscmd+=1
-        warnings+=1
-        args=l[4:].split(')')[0] # get the arguments
-        sp=args.split(',')
-        ncfnm=sp[0]
-        ncfnm=ncfnm.replace('.dic','.nc')
-        ncfnm=ncfnm.replace('.hdf','.nc')
-        if len(sp)>2:
-            app=sp[2][0]
-            if app=='r':app="'w'"
-            if app=='a':app="'r+'"
-        else:
-            app="'w'"
-        array=sp[1]
-        print 'WARNING: Output file converted from '+l[:3]+' to NetCDF'
-        f.write("f=cdms.open('"+ncfnm+"',"+app+")\n")
-        f.write("f.write("+array+","+app+")\n")
-        f.write('f.close()\n\n')
-    elif l[:6].lower()=='netcdf':
-        vcscmd+=1
-        args=l[7:].split(')')[0] # get the arguments
-        sp=args.split(',')
-        ncfnm=sp[0]
-        if len(sp)>2:
-            app=sp[2][0]
-            if app=='r':app="'w'"
-            if app=='a':app="'r+'"
-        else:
-            app="'w'"
-        array=sp[1]
-        f.write("f=cdms.open('"+ncfnm+"',"+app+")\n")
-        f.write("f.write("+array+","+app+")\n")
-        f.write('f.close()\n\n')
-    elif l[:5].lower()=='clear':
-        vcscmd+=1
-        f.write('x.clear()\n\n')
-    elif l[:5].lower()=='color':
-        vcscmd+=1
-        cmap=l[6:].split(')')[0]
-        f.write("x.setcolormap('"+cmap+"')\n\n")
-    elif l[:6].lower()=='canvas':
-        vcscmd+=1
-        if l[7:-1]=='open':
-            f.write('x.open()\n\n')
-        elif l[7:-1]=='close':
-            f.write('x.close()\n\n')
-    elif l[:2]=='A_':
-        arrays+=1
-        # Acquiring Array data
-        # First reconstruct the full name
-        j=1
-        while l[-2]!=')' and l[-1]!=')':
-            l=l[:-1]+ln[i+j]
-            j+=1
-        l=l.replace('\n','')
-        nm=extract(l,'A_','(')
-        pnm=nm.replace('.','_') # . are not acceptable in python names
-        if pnm!=nm:
-            # Now replace in every over possible lines !
-            for j in range(i,n):
-                ln[j]=ln[j].replace(nm,pnm)
-        fnm=extract(l,'File=')
-        src=extract(l,'Source=')
-        vr=extract(l,'Name=')
-        tit=extract(l,'Title=')
-        units=extract(l,'Units=')
-        xnm=extract(l,'XName=')
-        xfirst=extract(l,'xfirst=')
-        xlast=extract(l,'xlast=')
-        ynm=extract(l,'YName=')
-        yfirst=extract(l,'yfirst=')
-        ylast=extract(l,'ylast=')
-        znm=extract(l,'ZName=')
-        zfirst=extract(l,'zfirst=')
-        zlast=extract(l,'zlast=')
-        tnm=extract(l,'TName=')
-        tfirst=extract(l,'tfirst=')
-        tlast=extract(l,'tlast=')
-        func=extract(l,'Function="','"')
-        cmd=''
-        
-        if not fnm is None:
-            arrays_from_file+=1
-            cmd+='f = cdms.open('+fnm+')\n'
-            cmd+=pnm+' = f('+vr
-            if fnm[-5:-1]=='.dic':
-                if not tnm is None: tnm=tnm[:-1]+'_'+vr[1:]
-                if not znm is None: znm=znm[:-1]+'_'+vr[1:]
-                if not ynm is None: ynm=ynm[:-1]+'_'+vr[1:]
-                if not xnm is None: xnm=xnm[:-1]+'_'+vr[1:]
-
-        elif not func is None:
-            # First of all treats the special commands (mean and sqrt)
-            # Mean ?
-##             if func[:-1]!=')':
-##                 func=func+')'
-            imean=func.find('mean(')
-            while imean!=-1 :
-                tmp=func[imean:]
-                tmp=tmp.replace('mean(','cdutil.averager(',1)
-                tmp=tmp.split(',')
-                tmp2=tmp[1]
-                fpar=tmp2.find('\'')
-                lpar=tmp2[fpar+1].find('\'')
-                tmp3=tmp2[fpar+1:lpar].lower()
-                if tmp3=='time':
-                    tmp3="axis='t')"
-                elif tmp3=='longitude':
-                    tmp3="axis='x')"
-                elif tmp3=='latitude':
-                    tmp3="axis='y')"
-                elif tmp3=='level':
-                    tmp3="axis='z')"
-                else:
-                    tmp3="axis='("+tmp2[fpar+1:lpar-1]+")'"+tmp2[lpar:]
-                tmp[1]=tmp3
-                tmp=','.join(tmp)
-                func=func[:imean]+tmp
-                imean=func.find('mean(')
-            isqrt=func.find('sqrt(')
-            while isqrt!=-1:
-                warnings+=1
-                print 'WARNING FOR ARRAY:'+pnm+'\nsqrt FUNCTION FOUND YOU NEED TO REPLACE AXIS NAME WITH CORRECT VALUE !'
-                tmp=func[isqrt:]
-                tmp=tmp.replace('sqrt(','MV.xxxx(',1)
-                tmp=tmp.split(',')
-                if len(tmp)>1:
-                    tmp2=tmp[1]
-                    fpar=tmp2.find('\'')
-                    lpar=tmp2[fpar+1].find('\'')
-                    tmp3="axis='("+tmp2[fpar+1:lpar-1].lower()+")'"
-                    tmp[1]=tmp3
-                else:
-                    tmp[0]+=')'
-                tmp=','.join(tmp)
-                func=func[:isqrt]+tmp
-                isqrt=func.find('sqrt(')
-            func=func.replace('MV.xxxx','MV.sqrt')
-            cmd+=pnm+' = '+func+'\n'+pnm+' = '+pnm+'('
-        else:
-            raise 'Error array'+nm+' is coming neither from file nor function !'
-        # Now does the dimensions needed
-        order=''
-        if not tnm is None:
-            order+='('+tnm[1:-1]+')'
-            if not tfirst is None:
-                tcmd=tnm[1:-1]+'=('+tfirst+','+tlast+')'
-                if cmd[-1]!='(':
-                    cmd+=','+tcmd
-                else:
-                    cmd+=tcmd
-        if not znm is None:
-            order+='('+znm[1:-1]+')'
-            if not zfirst is None:
-                zcmd=znm[1:-1]+'=('+zfirst+','+zlast+')'
-                if cmd[-1]!='(':
-                    cmd+=','+zcmd
-                else:
-                    cmd+=zcmd
-        if not ynm is None:
-            order+='('+ynm[1:-1]+')'
-            if not yfirst is None:
-                ycmd=ynm[1:-1]+'=('+yfirst+','+ylast+')'
-                if cmd[-1]!='(':
-                    cmd+=','+ycmd
-                else:
-                    cmd+=ycmd
-        if not xnm is None:
-            order+='('+xnm[1:-1]+')'
-            if not xfirst is None:
-                xcmd=xnm[1:-1]+'=('+xfirst+','+xlast+')'
-                if cmd[-1]!='(':
-                    cmd+=','+xcmd
-                else:
-                    cmd+=xcmd
-        if order!='':
-            cmd+=",order='..."+order+"'"
-        cmd+=')\n'
-        if not fnm is None:
-            cmd+='f.close()\n'
-        if not src is None:
-            cmd+=pnm+'.source = '+src+'\n'
-        if not tit is None:
-            cmd+=pnm+'.title = '+tit+'\n'
-        if not units is None:
-            cmd+=pnm+'.units = '+units+'\n'
-        
-        # Now does the attributes that are overwrittable
-        for att in ['source','name','units','crdate','crtime',
-                    'comment#1','comment#2','comment#3','comment#4']:
-            val=extract(l,att+'="','"')
-            Att=att.replace('#','')
-            if not val is None:
-                cmd+=pnm+'.'+Att+' = "'+val+'"\n'
-        cmd+='\n'
-        cmd=cmd.replace('"',"'")
-        cmd=cmd.replace('(,',"(")
-        f.write(cmd)
-    elif l[:2]=='D_':
-        plots+=1
-        # Plotting data
-        # First reconstruct the full string
-        j=1
-        while l[-2]!=')' and l[-1]!=')':
-            l=l[:-1]+ln[i+j]
-            j+=1
-        l=l.replace('\n','')
-        off=extract(l,'off=',',')
-        if int(off)==0: # Ok it's not off, let's draw it
-            cmd=''
-            active_plots+=1
-        else:
-            cmd='## Next line commented, display was "off"\n## '
-        type=extract(l,'type='    ,',')
-        if type is None: type = 'boxfill'
-        tmpl=extract(l,'template=',',')
-        if tmpl is None: tmpl='default'
-        mthd=extract(l,'graph='   ,',')
-        if mthd is None: mthd='default'
-        a   =extract(l,'a='       ,',')
-        b   =extract(l,'b='       ,',')
-        cmd+='x.plot('+a+', '
-        if not b is None:
-            cmd+=b+' ,'
-        cmd+="'"+tmpl+"', '"+type+"', '"+mthd+"'"+overlay_continents+")\n\n"
-        f.write(cmd)
-f.close()
-print 'Successfully converted:',src
-print 'Processed:'
-print 'VCS Commands:',vcscmd
-
-print 'Arrays:',arrays,':',arrays_from_file,'from file and',arrays-arrays_from_file,'computed'
-print 'Plots:',plots
-print 'Active plots:',active_plots
-print 'Warnings:',warnings
-print 'VCS OBJECTS (templates, graphic methods, etc..):',vcs_objects
-
diff --git a/images/vcs_icon.xbm b/images/vcs_icon.xbm
deleted file mode 100644
index 912510e77..000000000
--- a/images/vcs_icon.xbm
+++ /dev/null
@@ -1,566 +0,0 @@
-#define splash_width 492
-#define splash_height 136
-static char splash_bits[] = {
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf4,0xff,0x3f,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xfe,0xff,0x0f,0xfc,0xff,0xbf,
- 0xee,0x17,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x14,0xfd,0xfb,0xff,0x52,0xff,
- 0x7f,0xeb,0x7b,0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x57,0xb8,0xaf,0x00,
- 0xef,0xff,0xd7,0x7f,0xdf,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xe0,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x6f,
- 0x08,0xfd,0x7f,0x6b,0xfd,0xda,0x7b,0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x28,0x16,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48,
- 0xf9,0x12,0x80,0xaf,0xdb,0xff,0xb7,0xff,0xee,0x17,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0xe9,0xab,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2a,0xfc,0x3f,
- 0x00,0xf0,0x4f,0x02,0x00,0x00,0xfc,0xd6,0xfe,0xad,0xbf,0x1d,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xfe,0x5f,0xbd,0x07,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x3a,
- 0x74,0x15,0x00,0xa0,0x1f,0x01,0x00,0x00,0xf8,0xff,0x6b,0xff,0xea,0x0b,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x7f,0xeb,
- 0xd6,0x12,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0xf0,0xd7,0xa9,0x80,0x8f,0xbe,0x2a,0x04,0x00,0x00,0x78,0x6d,0xff,0xd5,0x7f,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x00,0xf4,
- 0xd7,0xb6,0xb5,0x5a,0x2f,0xad,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xf8,0xfe,0x01,0x7a,0xf8,0xff,0x5f,0x00,0x00,0xdc,0xfb,0xad,
- 0x7f,0xdb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,
- 0xf2,0xdf,0x6a,0x5b,0xdb,0xed,0xd5,0xb7,0x8b,0xfb,0xff,0x2d,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0xd0,0xff,
- 0xbf,0x00,0x00,0x14,0x00,0xec,0x7f,0x01,0x3e,0xa0,0xd5,0xbf,0x01,0x00,0x68,
- 0x5f,0xff,0xed,0x57,0x00,0x00,0x00,0x00,0x00,0x80,0xfe,0x07,0x00,0x00,0x00,
- 0x20,0x40,0x01,0xa8,0xbd,0xed,0xad,0xb6,0xda,0xda,0xfa,0x6e,0xad,0xf6,0xff,
- 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x60,
- 0xff,0xff,0xff,0xff,0xfe,0xff,0xff,0x57,0x5b,0xf8,0x7b,0x70,0x00,0xf5,0x01,
- 0x00,0xac,0xf7,0x6b,0x77,0x01,0x00,0x00,0x00,0x00,0x00,0x70,0xab,0xf4,0x13,
- 0x20,0x20,0x91,0xbe,0x00,0xfc,0xd6,0xb6,0xf6,0x5b,0x6f,0xb7,0xae,0xb5,0x6a,
- 0x55,0x95,0xfe,0xff,0xff,0x0f,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,
- 0x00,0xf0,0xff,0xbf,0xfb,0xff,0xff,0xff,0xff,0x23,0x3f,0xf0,0xde,0x1d,0x00,
- 0xdc,0x00,0x00,0xd8,0xdd,0xfe,0xd3,0x00,0x00,0x00,0x00,0x00,0x00,0xdf,0x95,
- 0x52,0xed,0x60,0xf0,0x7f,0x55,0x02,0x54,0xbb,0xad,0x2d,0xed,0xb5,0xad,0x75,
- 0x5b,0x57,0x5b,0xd5,0xea,0x5f,0x8a,0x3d,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,
- 0x00,0x00,0x00,0xe0,0xbf,0xff,0xef,0xff,0xff,0xf7,0xbe,0xff,0xf7,0xbf,0x7f,
- 0x5d,0x00,0xf5,0x0f,0x00,0x7e,0xff,0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x56,0xa8,0xaa,0x92,0x22,0x5c,0xaa,0x90,0x02,0xea,0xd6,0xf6,0xf6,0xaa,0x5a,
- 0xf5,0xaa,0xd6,0xb9,0x6d,0x5b,0x35,0x69,0x6b,0xa5,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xf0,0x00,0x00,0xe0,0x7f,0xfb,0xf6,0xff,0xb6,0xef,0xdf,0xf7,0xfe,0xde,
- 0xff,0xf6,0x02,0xe4,0x5f,0x08,0x00,0xee,0x2d,0x05,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x80,0x5f,0x83,0x95,0x02,0x94,0x52,0x55,0x2a,0x01,0x5c,0x7b,0x5b,0x5b,
- 0xbf,0xed,0x56,0xb7,0x6b,0xae,0xaa,0xaa,0xd6,0x56,0xad,0xaa,0x05,0x00,0x00,
- 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xfe,0xff,0xff,0xfe,0xff,0xfd,0xfe,0xff,
- 0xdb,0x7f,0xff,0x8a,0x07,0xd0,0xff,0x00,0x00,0xbc,0x3f,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xe0,0xa5,0xc0,0x52,0x49,0xa9,0x2a,0x11,0x85,0x04,0xe8,0xad,
- 0xad,0xed,0xd2,0x56,0xbb,0x5a,0xb5,0xab,0xd5,0xd6,0xaa,0xaa,0xaa,0xaa,0x2a,
- 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0xfc,0xff,0xff,0xdf,0xbb,0xff,0xff,
- 0xff,0xee,0x7f,0xf7,0x0b,0xa0,0x00,0x80,0x0f,0x00,0x00,0xf6,0x05,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xbc,0x2a,0x68,0xab,0xaa,0x94,0x8a,0xaa,0x28,0x09,
- 0xb4,0xd6,0xf6,0x56,0x6f,0xfb,0xd5,0xad,0xad,0xd4,0x6e,0x5b,0x55,0xdb,0xaa,
- 0x56,0x95,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0xfd,0xbb,0xb7,0xff,0xff,
- 0xbb,0xb7,0xdb,0x7f,0xff,0xfd,0x4d,0x00,0x00,0x05,0x44,0x00,0x00,0xbc,0x0b,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x6d,0x95,0x54,0x49,0x2a,0x55,0x52,0x44,
- 0xa5,0x02,0x58,0x7b,0x5b,0xbb,0xb5,0xaa,0xae,0xf6,0xd6,0x6e,0xb5,0x6a,0x6b,
- 0x55,0x7a,0x59,0x15,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0xc0,0xff,0xff,0xfb,
- 0xfb,0xef,0xff,0xfe,0xff,0xfb,0xed,0x5f,0x00,0x00,0xf0,0x3f,0x00,0x00,0x00,
- 0xf8,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x97,0x2a,0xa0,0xaa,0x52,0x49,
- 0xa9,0x2a,0x12,0x04,0xe8,0xad,0xad,0x6d,0xdd,0x57,0xb5,0x95,0x6a,0x55,0x55,
- 0x55,0xad,0x2a,0x20,0xa6,0x0a,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0xd0,0xff,
- 0x57,0x41,0x7d,0xff,0xfe,0xdf,0x77,0xdf,0xbf,0x3b,0x00,0x00,0xe0,0x2f,0x10,
- 0x00,0x00,0x80,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0xaa,0x2a,0x80,0x40,
- 0x4a,0x25,0x45,0x52,0xa9,0x10,0xb5,0xd6,0xf6,0xb6,0x6b,0xed,0xdb,0x76,0xb7,
- 0xb6,0xad,0xad,0xaa,0x2a,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,
- 0x00,0x5a,0x00,0x00,0xe0,0xfd,0xef,0xff,0xfe,0xff,0xfb,0x3f,0x00,0x00,0xfc,
- 0x7d,0x39,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x87,0x25,
- 0x00,0xaa,0x2a,0x95,0x2a,0x25,0x05,0x44,0xdd,0x7b,0x5b,0xdb,0xb6,0x36,0xad,
- 0xda,0x5a,0xdb,0xb6,0xd6,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x00,0x00,0x00,
- 0xf0,0x00,0x80,0x95,0x00,0x00,0xc0,0xb7,0xfd,0xf6,0xdf,0x76,0xef,0xb6,0x00,
- 0x00,0x78,0xb7,0x3f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0e,0x00,
- 0x80,0x1a,0xe0,0x4a,0xa9,0x54,0x92,0x94,0x08,0x58,0x6b,0xad,0x6d,0x6d,0x5b,
- 0xdb,0x76,0x57,0xab,0x55,0x55,0x55,0x00,0x00,0x80,0x55,0x00,0x00,0x00,0x00,
- 0x00,0x00,0xf0,0x00,0x40,0x2b,0x00,0x00,0xe0,0xff,0xff,0xff,0xfe,0xff,0xff,
- 0x7f,0x01,0x00,0xd8,0xff,0x5e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x1e,0x00,0x20,0x2b,0xa0,0x2a,0xa5,0x52,0x55,0x4a,0x05,0xd4,0xb6,0xed,0xd6,
- 0xb6,0xed,0x6d,0xdb,0x6a,0xb5,0xaa,0xda,0xda,0x00,0x00,0x80,0xae,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xf0,0x00,0x08,0x00,0x00,0x00,0x40,0xff,0xb7,0xdf,0xf7,
- 0xdf,0x7d,0xb7,0x0b,0x00,0x7e,0xdb,0x37,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x02,0x00,0x30,0x00,0xe0,0x54,0x95,0x4a,0x22,0x21,0x12,0xb4,0xdd,
- 0x56,0x7b,0xdb,0xb6,0xaa,0x55,0x5d,0x5b,0xbb,0x6b,0xab,0x00,0x00,0x00,0xbb,
- 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x90,0xdd,0xfe,
- 0xfb,0x7f,0xfb,0xef,0xfd,0x77,0xd1,0xff,0xff,0x7d,0x01,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xa0,0x09,0x00,0x30,0xc2,0x6b,0xa5,0x54,0x29,0x55,0x55,0x09,
- 0x6c,0x6b,0xfb,0xaa,0x6d,0xd5,0xde,0xb6,0xab,0xad,0x55,0x55,0xad,0x02,0x18,
- 0x00,0x60,0x01,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xc8,
- 0xff,0xff,0xfe,0xee,0x6f,0xff,0xbf,0xff,0xf0,0xd7,0x6b,0xef,0x05,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xf8,0x20,0x00,0x7f,0x55,0x95,0x2a,0x25,0x95,0x24,
- 0x8a,0x00,0xda,0xdd,0x96,0xdd,0xb6,0x5e,0x6b,0x6d,0x75,0xd5,0xda,0x5a,0xb5,
- 0xfa,0x03,0x00,0xc0,0x03,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,
- 0x00,0x80,0xf7,0xdb,0xdf,0xff,0xfe,0x75,0xef,0xbd,0xf0,0xfd,0xfe,0x7d,0x03,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x50,0xa8,0xc1,0xab,0x52,0x55,0x52,0x95,
- 0xaa,0x52,0x15,0x4a,0x6c,0x6b,0x7b,0x6b,0xdb,0xb5,0xd5,0xb6,0xad,0x6d,0x6d,
- 0xab,0x55,0xad,0x86,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,
- 0x00,0x00,0x00,0x80,0xbf,0xff,0x7b,0xf7,0xfb,0xff,0xfb,0xf7,0x7e,0xdf,0xb7,
- 0xdf,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x5c,0x5c,0x64,0x95,0x2a,0xa5,
- 0x4a,0xa9,0x44,0x8a,0x08,0x28,0xb7,0xbd,0xd5,0xb6,0x6d,0xdb,0x6e,0xab,0x56,
- 0xb5,0x56,0xb5,0xd6,0xaa,0x8a,0x03,0x00,0x02,0x00,0x00,0x00,0x00,0x00,0xf0,
- 0x00,0x00,0x00,0x00,0x00,0xe0,0xfd,0xfe,0xff,0xbf,0x6f,0xbf,0xdf,0xde,0xff,
- 0x7b,0x4f,0x3d,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x50,0xf1,0xaa,
- 0xaa,0xaa,0x2a,0x55,0x32,0x55,0x25,0xa4,0xda,0xd6,0x6e,0xdb,0xb6,0x6d,0xb5,
- 0xdd,0xda,0x56,0x6b,0x5b,0x55,0x6b,0x0b,0x06,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0xea,0xd7,0xde,0xfb,0xff,0xed,0xfd,
- 0xff,0x6e,0xef,0x05,0x80,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x04,
- 0x2c,0xaa,0xaa,0x54,0xaa,0x24,0x95,0x48,0x12,0xd8,0x6f,0xbb,0xb5,0xad,0xda,
- 0xb6,0xdb,0xaa,0x6d,0xb5,0xad,0xd5,0x6a,0xad,0x0a,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0xff,0xff,0x7b,0xdf,0xfd,
- 0xbf,0x6f,0xf7,0xfb,0x7f,0x0f,0xe0,0x1b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xf0,0xa5,0x55,0xaa,0xaa,0x92,0xaa,0x4a,0x25,0x09,0x62,0xf5,0xd6,0xde,
- 0xf6,0x6b,0xd5,0x56,0xdd,0x56,0x5b,0xb5,0xaa,0x56,0x55,0x15,0x10,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0x77,0xff,0xff,
- 0x7d,0xb7,0xfb,0xfe,0xbd,0xbf,0xed,0x05,0x01,0x0a,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xc0,0xb7,0x54,0x49,0x95,0xaa,0x4a,0xa9,0x94,0x2a,0xa8,0xae,
- 0x7b,0x6b,0x9b,0xb6,0xbb,0x7a,0x6b,0xb5,0xd5,0xd6,0xb6,0xaa,0x6a,0x2b,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xc0,0xff,
- 0x5b,0xef,0xff,0xff,0xff,0xf7,0xf7,0xee,0xbf,0x4f,0x03,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x40,0x93,0xaa,0xa6,0x52,0x55,0x51,0x94,0x52,0x12,
- 0xc0,0x75,0xad,0xb5,0x6d,0xdb,0xd6,0xad,0x55,0x5b,0xad,0xaa,0xaa,0x6d,0x5b,
- 0x15,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,
- 0xe0,0xde,0xff,0xff,0xef,0xdf,0xde,0x5f,0xdf,0x7b,0xdb,0x15,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0xab,0xaa,0x42,0x4a,0x29,0x2d,0x4b,
- 0x29,0x25,0x60,0xbf,0xed,0xde,0xb6,0x6d,0xbb,0xd6,0xbe,0xd5,0xb6,0x6d,0x5b,
- 0xb5,0xaa,0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,
- 0x00,0x00,0xf0,0x7f,0xff,0xbb,0xbe,0xfb,0xf7,0xfd,0xff,0xef,0x05,0x06,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x55,0xc5,0x8d,0x2b,0xa5,
- 0xc2,0x28,0x95,0x04,0xa8,0xff,0x5a,0x6b,0xdb,0xb6,0x6d,0x6b,0xd3,0x6e,0xd5,
- 0x56,0xad,0xaa,0xd5,0x0a,0x80,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,
- 0x00,0x00,0x00,0x00,0xf8,0xfb,0xb7,0xff,0xfb,0x7f,0xbf,0xff,0xed,0xfe,0x01,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf8,0xff,0x0b,0x00,0x0b,
- 0x7c,0x55,0x35,0xa5,0xa4,0x02,0xd4,0xbf,0xa2,0xbb,0x6d,0xdb,0xb6,0xbd,0x6d,
- 0xb5,0xb6,0xda,0x6a,0xad,0xb6,0x00,0xe0,0x04,0x00,0x00,0x00,0x00,0x00,0x00,
- 0xf0,0x00,0x00,0x00,0x00,0x00,0xfe,0xff,0xff,0xfe,0xef,0xed,0xfb,0x6e,0xbf,
- 0x17,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x68,0xac,0x08,
- 0x00,0xf0,0xa0,0xa2,0x95,0x2a,0x70,0x09,0xd5,0x57,0xe8,0x6d,0xb7,0x6d,0xdb,
- 0xd6,0xb6,0x56,0x5b,0x6b,0xab,0xb5,0xaa,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x7d,0xb7,0xfd,0xb7,0xbf,0xff,0xff,
- 0xff,0xfb,0x85,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb8,
- 0x55,0x05,0x00,0xe8,0xc0,0xd2,0x55,0x95,0xd0,0x05,0xaa,0xaf,0xa0,0xab,0xdd,
- 0xb6,0x6d,0x6b,0xdb,0xeb,0xaa,0x55,0xb5,0xaa,0xd5,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x80,0xff,0xff,0xef,0xff,0xfe,
- 0x7f,0xb7,0xdb,0xef,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xac,0xaa,0x00,0x40,0x00,0x91,0x02,0xc2,0x25,0xf4,0x96,0xb6,0x5b,0xfd,
- 0xaf,0xb6,0xdb,0xb6,0xba,0x55,0x5d,0xdb,0xba,0x56,0x81,0x5a,0x00,0x00,0x06,
- 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xdf,0xff,0x7f,
- 0xff,0xfb,0xed,0xff,0x7e,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x78,0x55,0x00,0x20,0x80,0x00,0x03,0xe9,0x0b,0xe8,0x6f,0x5b,
- 0x35,0xfe,0xc3,0xfd,0x6f,0xdb,0xd7,0xee,0xaa,0xad,0x56,0xab,0x01,0x70,0x01,
- 0x00,0x0e,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x7f,
- 0xdb,0xfd,0xed,0xbf,0xff,0xfd,0xfb,0x3d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xf8,0x0b,0x00,0x00,0x60,0x00,0x45,0xff,0x85,0x52,
- 0xb5,0xed,0xde,0x5e,0xd5,0xff,0xbf,0x56,0x6d,0x5b,0xdb,0x76,0xab,0xb5,0x2a,
- 0x80,0x06,0x00,0x0a,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,
- 0x80,0xff,0xff,0xf7,0xbf,0xff,0xbe,0xdf,0xef,0x37,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x05,0xfa,0xff,0x00,0x00,0x00,0x10,
- 0xc6,0x7f,0x6f,0xb7,0xb5,0x76,0xd5,0xff,0xae,0xbb,0xb6,0xb5,0xad,0xaa,0x6d,
- 0xd5,0x5a,0x00,0x06,0x00,0x0f,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,
- 0x00,0x00,0x00,0xf7,0xff,0xdf,0xff,0xf7,0xf7,0x06,0x04,0x80,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x06,0x80,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,
- 0x00,0x00,0x00,0x00,0x00,0xde,0x6d,0xff,0xfe,0xde,0xff,0x07,0xbf,0xcf,0x03,
- 0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x00,0x00,0x80,0x07,0x00,
- 0x00,0xf0,0x00,0x00,0x70,0x00,0x00,0xfc,0x0f,0x00,0x00,0x1c,0x00,0x00,0xf0,
- 0xf8,0xfc,0x01,0x1e,0xff,0x1f,0x00,0x04,0x70,0x15,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xff,0xff,0xf7,0xff,0xff,0x87,0x7f,
- 0xef,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x03,0x00,0x00,0x00,0x00,0x80,
- 0x07,0x00,0x00,0x70,0x00,0x00,0x78,0x00,0x00,0xfc,0x07,0x00,0x00,0x1e,0x00,
- 0x00,0x78,0xfc,0xfd,0x07,0x9f,0xff,0x1f,0x00,0x00,0xac,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0xff,0xfb,0xff,0x7f,0xdb,
- 0xc6,0xff,0x8f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x07,0x00,0x00,0x00,
- 0x00,0xc0,0x07,0x00,0x00,0xf0,0x00,0x00,0x30,0x00,0x00,0xfc,0x0f,0x00,0x00,
- 0x1e,0x00,0x00,0x78,0xfe,0xfd,0x0f,0x3f,0xdf,0x3f,0x00,0x80,0x07,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0xbc,0x6f,0xdf,
- 0xfb,0xff,0xc7,0x63,0x0f,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xbc,0x07,0x00,
- 0x08,0x00,0x00,0x80,0x07,0x00,0x00,0x70,0x00,0x00,0x00,0x00,0x00,0xe0,0x01,
- 0x00,0x00,0x1e,0x00,0x00,0x3c,0xdf,0xdd,0x0f,0x3f,0x7c,0x7c,0x00,0x00,0x0a,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0xf6,
- 0xff,0x7f,0xff,0x5f,0xe2,0x83,0x0f,0x00,0x00,0x00,0x80,0x03,0x00,0x00,0x3c,
- 0x0f,0x00,0x1c,0x00,0x00,0xc0,0x07,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,
- 0xe0,0x01,0x00,0x00,0x1e,0x00,0x00,0x1c,0x9f,0x7d,0x9f,0x3f,0x7c,0x78,0x08,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,
- 0x40,0xfd,0xfe,0xfd,0x5f,0x3d,0xe1,0x03,0xcf,0xf1,0xf3,0xc0,0x81,0x0f,0x07,
- 0x00,0x3c,0x0f,0x06,0x1e,0x1c,0x00,0xc0,0x0f,0x32,0x60,0xf0,0x04,0xc1,0x20,
- 0x10,0x00,0xe0,0x01,0x02,0x08,0x1e,0x04,0x00,0x3e,0x0f,0x7c,0x9f,0x3f,0xfc,
- 0x78,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,
- 0x00,0x00,0x80,0xfc,0xff,0xff,0x05,0x00,0xe0,0x83,0xef,0xfb,0xff,0xf1,0xc7,
- 0xcf,0x0f,0x00,0x34,0x8e,0x1f,0x7e,0x3e,0x00,0xc0,0x8f,0x7f,0xf8,0x71,0xce,
- 0xf3,0x79,0x7e,0x00,0xe0,0xc1,0x0f,0x3f,0xbe,0x1f,0x00,0x9e,0x0f,0x3c,0x9b,
- 0x7f,0x7c,0xf8,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,
- 0x00,0x00,0x00,0x00,0xe0,0xe0,0xef,0xff,0x01,0x80,0xe0,0x01,0xcf,0xf3,0xff,
- 0xfb,0xe7,0xcf,0x1f,0x00,0x3c,0xcf,0x3f,0x7f,0x7f,0x00,0xe0,0x8f,0x7f,0xfc,
- 0xf1,0xde,0xfb,0x79,0x7e,0x00,0xe0,0xe1,0x1f,0x7f,0xde,0x1f,0x00,0x9e,0x0f,
- 0x7c,0x9f,0x7f,0xfc,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0xc3,0x7e,0xf7,0x22,0x09,0xe2,0x83,0xcf,
- 0xfb,0xde,0x73,0xcf,0xef,0x1d,0x00,0x3c,0xce,0x3d,0x1f,0x77,0x00,0xe0,0x8e,
- 0xf7,0xdc,0xf3,0xde,0xfb,0x79,0x6f,0x00,0xe0,0xe1,0x9f,0x7f,0xde,0x1f,0x00,
- 0x9e,0x0f,0x7c,0x9f,0x7d,0x7c,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0xa1,0xff,0xbf,0x08,0x20,0xe0,
- 0x01,0xef,0xfb,0xde,0x1b,0xcf,0xe3,0x1f,0x00,0x3c,0xcf,0x38,0x1e,0xf1,0x00,
- 0xe0,0x8e,0xf7,0xc4,0x71,0xfc,0x79,0x78,0x1e,0x00,0xe0,0xf1,0x9e,0xff,0xde,
- 0x17,0x00,0x9f,0x0f,0x7c,0xdf,0x7f,0xfc,0xf8,0x80,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x83,0xfb,0x7f,0x81,
- 0x04,0xe1,0x83,0xcf,0x73,0xde,0xdb,0x87,0xe3,0x1f,0x00,0x3c,0x4f,0x3e,0x0e,
- 0x78,0x00,0xe0,0x9e,0xf3,0xc4,0xf3,0xfc,0xf9,0x78,0x3e,0x00,0xe0,0xf1,0xde,
- 0xf7,0xbe,0x0f,0x00,0x9e,0x0f,0x5c,0xdf,0xff,0x7c,0xf8,0x80,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x0c,0xff,
- 0xbf,0x50,0x50,0xe4,0x8b,0xef,0x7b,0xdf,0xe3,0xcf,0xe3,0x1f,0x00,0x3c,0x1f,
- 0x3f,0x1e,0xfe,0x00,0xe0,0x1f,0xd7,0xf0,0xf3,0xfc,0xf1,0x78,0x3c,0x00,0xe0,
- 0xf1,0xbe,0xf7,0x9e,0x0f,0x00,0x9f,0x9f,0x7c,0xdf,0xdf,0x7c,0xf9,0x81,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xbe,0x3b,0x05,0x05,0xe1,0x03,0xcf,0xfb,0xde,0xf3,0xcf,0xe3,0x15,0x00,
- 0x3c,0xcf,0x3f,0x0e,0x7f,0x00,0xf0,0x9f,0xf7,0xfc,0x73,0xf8,0xe0,0x79,0x7c,
- 0x00,0xe0,0xf1,0xde,0xf7,0x3e,0x1f,0x00,0x1e,0x1f,0x7c,0xdf,0xff,0xfc,0xf8,
- 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xfc,0xdf,0xa8,0xfc,0xc2,0xe3,0xef,0x7b,0xde,0x7b,0x8f,0xe3,
- 0x11,0x00,0x3c,0xcf,0x3b,0x0e,0xf7,0x00,0xf0,0x9f,0xf7,0xdc,0xf1,0xf8,0xc8,
- 0x79,0x79,0x00,0xe0,0xf1,0xbe,0xbf,0xde,0x3e,0x00,0x1f,0x1f,0x7f,0xff,0xff,
- 0x7c,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xff,0xbf,0x12,0x7f,0xc0,0x7f,0xcf,0xf3,0xdf,0xfb,
- 0xdf,0xcb,0x1b,0x00,0xfc,0xc7,0x7d,0xbe,0xf7,0x01,0x70,0x9c,0x77,0xde,0xf7,
- 0xf0,0xdc,0x7b,0xf7,0x00,0xe0,0xe1,0x9f,0xff,0xfe,0x3f,0x00,0x1e,0xff,0xfd,
- 0xef,0xfb,0xfc,0xf8,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfe,0xff,0xa5,0xbe,0xc6,0xff,0xef,0x7b,
- 0xde,0xfb,0x9f,0xdf,0x1f,0x00,0xfc,0xc7,0x7f,0x7e,0xff,0x01,0x70,0xbc,0xf6,
- 0xfc,0xd7,0xf0,0xf8,0x7b,0x7e,0x00,0xe0,0xe1,0x9f,0x7f,0xde,0x1f,0x00,0x3e,
- 0xfe,0xff,0xef,0xfb,0x7c,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0xbf,0xfb,0x57,0x81,0x7f,
- 0xcf,0xfb,0xff,0x73,0xdf,0x9f,0x1f,0x00,0xfc,0xc3,0x7f,0x7e,0xbf,0x01,0x70,
- 0xbc,0xf3,0xfc,0x77,0x7a,0xfc,0x79,0x7f,0x00,0xe0,0xe1,0x0f,0x7d,0xde,0x1f,
- 0x00,0x1e,0xfe,0xff,0xe7,0xf3,0xfd,0xf8,0x00,0x02,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x2f,
- 0x05,0xbe,0xef,0x73,0xde,0xe3,0x0e,0x0f,0x1f,0x00,0x6c,0x00,0x33,0x38,0xee,
- 0x00,0x78,0xb8,0xd7,0xb8,0xf3,0x78,0xf1,0x78,0x3e,0x00,0xe0,0x83,0x17,0x3f,
- 0xbe,0x1f,0x00,0x3e,0xf8,0x7c,0xe1,0xfb,0xfd,0xf8,0x00,0x06,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x44,0xf4,
- 0xff,0xff,0x07,0x00,0x00,0x00,0x00,0x08,0x00,0x40,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x00,0x00,0xf8,0x00,0x06,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x09,0xf8,0xff,0x07,0x00,0x00,0x88,0x00,0x80,0x24,0x00,0x00,0x00,0x00,
- 0x08,0x48,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3c,0x08,0x00,0x00,0x00,
- 0x00,0x00,0x90,0xa0,0x00,0x00,0x00,0x3c,0x00,0x00,0x09,0x00,0x00,0x7c,0x00,
- 0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x20,0xa4,0xf5,0xff,0x07,0x00,0x00,0x00,0x48,0x22,0x00,0x01,0x24,
- 0x00,0x00,0x41,0x00,0x02,0x90,0x00,0x00,0x10,0x40,0x00,0x24,0xbc,0x00,0x00,
- 0x40,0x00,0x00,0x08,0x04,0x04,0x92,0x24,0x00,0x3c,0x02,0x00,0x20,0xa9,0x00,
- 0x7c,0x02,0x06,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x89,0x52,0xaa,0xfe,0x07,0x00,0x48,0x21,0x02,0x00,0x12,
- 0x20,0x00,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x02,0x09,0x00,0x01,0x3c,
- 0x80,0x44,0x08,0x00,0x00,0x00,0x40,0x20,0x00,0x00,0x00,0x78,0x20,0x40,0x02,
- 0x00,0x20,0x3c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x50,0x55,0xe5,0x07,0x00,0x00,0x00,0x80,
- 0x00,0x00,0x00,0x08,0x00,0x00,0x02,0x01,0x00,0x40,0x00,0x00,0x00,0x00,0x11,
- 0x00,0x3c,0x00,0x00,0x00,0x00,0x00,0x20,0x08,0x80,0x00,0x00,0x00,0x78,0x80,
- 0x00,0x00,0x04,0x02,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xaa,0xaa,0xda,0x07,0x10,0x00,
- 0x00,0x00,0x08,0x08,0x00,0x00,0x00,0x00,0x00,0x04,0x20,0x00,0x00,0x00,0x08,
- 0x00,0x00,0x20,0x1e,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x88,0x08,0x00,
- 0xf0,0x00,0x88,0x48,0x20,0x00,0x1f,0x00,0x60,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x24,0x84,0x54,0xd5,0x06,
- 0x80,0x00,0x00,0x08,0x40,0x40,0x04,0x00,0x00,0x00,0x00,0x80,0x80,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x80,0xb2,0xaa,
- 0xaa,0xff,0xff,0xff,0xff,0xff,0x57,0xab,0xf5,0x77,0xab,0xfd,0xde,0xba,0x5e,
- 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xdf,0xfb,
- 0xef,0xb6,0xed,0xfe,0xaa,0x5b,0x25,0x81,0x01,0x16,0x00,0x00,0x60,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,
- 0x88,0xaa,0xaa,0xea,0xff,0xff,0xff,0xbf,0xaa,0x56,0x95,0x54,0x55,0xa5,0x52,
- 0xd5,0x4a,0xfd,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
- 0x53,0x95,0x98,0xaa,0xaa,0x6a,0xb7,0xa4,0x12,0xa4,0x05,0x00,0x00,0x00,0x00,
- 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x08,0xa5,0x54,0x55,0xad,0xfa,0xff,0xff,0xff,0x5f,0x55,0x55,0x55,0x55,
- 0x95,0xaa,0xaa,0xb2,0xf6,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
- 0xff,0xff,0x2a,0x55,0x67,0x55,0x55,0x55,0xaf,0xaa,0x94,0x00,0x0a,0x00,0x00,
- 0x00,0x40,0x06,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x40,0x92,0xaa,0xaa,0x52,0xf6,0xff,0xff,0xff,0xf7,0xaf,0xaa,
- 0xaa,0xaa,0x6a,0x55,0x55,0x55,0xd5,0xff,0xff,0xef,0xff,0xff,0xff,0xff,0xff,
- 0xff,0xff,0xff,0xff,0x55,0x55,0xa9,0xaa,0xaa,0xaa,0x5e,0x55,0x42,0x08,0x38,
- 0x00,0x00,0x78,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x02,0x28,0x91,0xaa,0xaa,0xfa,0xff,0xff,0xdb,0xfe,
- 0xbb,0xaa,0xaa,0xaa,0x2a,0x55,0x95,0xaa,0x54,0xfe,0xb5,0x92,0xff,0xff,0xff,
- 0xff,0xff,0xff,0xff,0xff,0x7f,0x55,0x55,0x95,0xaa,0xaa,0xaa,0xaa,0x92,0x14,
- 0xfc,0x38,0x00,0x00,0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x28,0x85,0x54,0x49,0xaa,0x6a,0xf7,0xbf,
- 0xff,0xdf,0x6f,0x55,0x4a,0x55,0xa9,0x92,0x52,0x4a,0xa5,0x2a,0x55,0x55,0xda,
- 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x3f,0x55,0x49,0xaa,0x54,0x95,0x54,0x49,
- 0x4a,0x42,0x71,0x28,0x00,0x00,0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0x01,0x00,0x50,0xa5,0x54,0x49,0xfd,
- 0x7f,0xfb,0xff,0xff,0x7e,0x49,0xa9,0x24,0xa5,0xaa,0x54,0xa9,0x2a,0x95,0x94,
- 0x54,0xa5,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x5f,0x29,0x55,0xa9,0x4a,0x52,
- 0x25,0xa5,0x24,0x11,0xc0,0x67,0x00,0x80,0x57,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xff,0x00,0x09,0x12,0x92,
- 0x2a,0xff,0xff,0xff,0xbd,0xfb,0x5b,0x25,0x25,0x92,0x14,0x49,0x12,0x15,0x91,
- 0x44,0x4a,0x8a,0x52,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x42,0x45,0x2a,0x25,
- 0x22,0x89,0x92,0x54,0x92,0x08,0x80,0x4b,0x00,0xf8,0x5a,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x70,0xf8,0xff,0xa2,
- 0x48,0x49,0xa0,0xff,0xee,0xdf,0xf7,0xdf,0xff,0x92,0x48,0x49,0xa2,0x24,0xa5,
- 0x40,0x4a,0x29,0x21,0x51,0xc8,0xff,0xff,0xff,0xff,0xff,0xff,0x7f,0x29,0x28,
- 0x41,0x92,0x94,0x54,0x48,0x09,0x25,0x42,0x02,0x5e,0x00,0x56,0x15,0x10,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,
- 0xfe,0xff,0x22,0x92,0xca,0xef,0x7f,0x7b,0xdf,0x7d,0x5f,0x5f,0x92,0xa4,0x14,
- 0x92,0x48,0xaa,0x24,0x92,0x94,0x24,0xa5,0xff,0xff,0xff,0xff,0xff,0xff,0x2e,
- 0x45,0x45,0x2a,0x49,0x49,0x22,0x25,0x52,0x88,0x10,0x00,0x78,0x00,0xde,0x0a,
- 0x00,0x00,0x60,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,
- 0xf0,0x00,0xc5,0xff,0x9f,0x44,0xe4,0xbf,0xfb,0xff,0xff,0xf7,0x77,0xeb,0x45,
- 0x12,0xa2,0x48,0x12,0x11,0x90,0x44,0x22,0x89,0xc8,0x7f,0xdf,0xfd,0xff,0xff,
- 0xf7,0xaf,0x90,0x10,0x81,0x10,0x22,0x89,0x88,0x04,0x21,0x42,0x00,0xa8,0x00,
- 0x54,0x0d,0x2c,0x00,0x20,0xc0,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,
- 0x00,0x00,0xf0,0xee,0x0f,0xf0,0x5f,0x10,0xa1,0xfb,0xdf,0xef,0x7b,0xdf,0xfd,
- 0x7f,0x29,0x80,0x08,0x02,0x40,0x44,0x05,0x20,0x08,0x20,0x82,0xff,0xff,0x77,
- 0xef,0xdf,0xff,0x17,0x0a,0x44,0x28,0x84,0x08,0x20,0x22,0x50,0x04,0x00,0x08,
- 0x70,0x00,0xa8,0x05,0x06,0x00,0xc0,0xe0,0x01,0x00,0x00,0x00,0x00,0xf0,0x00,
- 0x00,0x00,0x00,0x00,0xf0,0xb6,0x0c,0x00,0x3e,0x42,0xc8,0xfe,0x7e,0xbd,0xde,
- 0x7d,0xdf,0xda,0xd6,0x2b,0xa0,0x50,0x15,0x11,0xa0,0x8a,0x42,0x85,0x50,0xfe,
- 0xff,0xff,0xbf,0x7d,0xdf,0x89,0xa0,0x12,0x82,0x22,0xa2,0x8a,0x88,0x04,0x21,
- 0x09,0x00,0xa0,0x01,0x78,0x0f,0x16,0x80,0xc7,0x7e,0xfe,0x00,0x00,0x00,0x00,
- 0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xda,0x0e,0x8c,0x98,0x08,0xe2,0xaf,0xeb,
- 0xf7,0xfb,0xf7,0xf7,0xf7,0xff,0x45,0x05,0x04,0x40,0x00,0x09,0x00,0x10,0x10,
- 0x04,0xf0,0xfb,0xff,0xff,0xff,0xff,0x00,0x04,0x80,0x10,0x08,0x00,0x00,0x00,
- 0x10,0x04,0x20,0x00,0xc0,0x02,0x00,0x00,0x08,0x00,0x00,0xb4,0x7f,0x01,0x00,
- 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0xb8,0x42,0x1b,0x18,0x20,0xc0,
- 0xfb,0xbf,0xdf,0xdf,0xbe,0xba,0x5e,0x55,0xb7,0x10,0x00,0x00,0x44,0x00,0x20,
- 0x02,0x40,0x10,0xe1,0xef,0xff,0xfd,0xff,0x7f,0x48,0x10,0x10,0x42,0x20,0x11,
- 0x44,0x24,0x41,0x10,0x80,0x00,0x00,0x02,0x00,0x00,0x20,0x00,0x00,0x00,0xff,
- 0x04,0x14,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0xac,0xa0,0x7c,0x18,
- 0x00,0xe4,0x7e,0xfb,0x7a,0xed,0xeb,0xef,0xf7,0xff,0x56,0x41,0x49,0x12,0x00,
- 0x44,0x09,0x88,0x04,0x40,0xb4,0xff,0xbd,0xff,0x7d,0x7b,0x02,0x40,0x02,0x00,
- 0x01,0x80,0x00,0x01,0x00,0x80,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xbf,0x52,0x0e,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xf4,0x20,
- 0xee,0x1b,0x82,0x80,0xdb,0xde,0xdf,0x7f,0xdf,0xba,0xba,0xaa,0xbb,0x00,0x00,
- 0x00,0x11,0x00,0x00,0x00,0x00,0x01,0xe0,0xff,0xf7,0xdf,0xef,0xef,0x00,0x01,
- 0x40,0x00,0x00,0x04,0x08,0x00,0x04,0x00,0x00,0x00,0x00,0xd0,0x0e,0x00,0x00,
- 0x00,0x00,0x80,0xaf,0x04,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,
- 0x5c,0x00,0xbb,0x39,0x00,0x00,0xfe,0xeb,0xb6,0xd5,0xf6,0xef,0xdf,0xff,0xee,
- 0x02,0x00,0x00,0x40,0x00,0x40,0x00,0x00,0x00,0xc0,0xbd,0xff,0xfb,0xff,0x7f,
- 0x10,0x00,0x00,0x08,0x20,0x00,0x00,0x20,0x00,0x01,0x00,0x00,0x00,0x00,0x50,
- 0xa0,0x00,0x00,0x00,0x00,0x7a,0x78,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,
- 0x00,0x60,0x34,0x70,0xed,0x19,0x00,0x00,0xde,0xbe,0x7d,0xbf,0xbb,0xba,0xea,
- 0xaa,0x5b,0x00,0x00,0x00,0x00,0x01,0x00,0x20,0x00,0x00,0xc0,0xf7,0x7f,0xff,
- 0xfd,0x7d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x40,0x03,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xf0,0x00,0x00,
- 0x00,0x00,0x00,0x60,0x38,0x50,0x5f,0x18,0x00,0x00,0xf4,0xef,0xd7,0xeb,0xde,
- 0xdf,0x7f,0xf7,0x56,0x00,0x80,0x00,0x00,0x08,0x00,0x80,0x10,0x00,0xc0,0xff,
- 0xf6,0xb7,0xb7,0xf7,0x04,0x08,0x02,0x00,0x02,0x20,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,
- 0x00,0x00,0x00,0x00,0x00,0x60,0x0c,0xb8,0xf5,0x18,0x00,0x00,0xe0,0x7a,0x7d,
- 0xbd,0xab,0xea,0xaa,0xad,0x0d,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x04,
- 0x00,0xff,0xdf,0xff,0xff,0xff,0x02,0x00,0x00,0x00,0x00,0x00,0x40,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x00,0x00,
- 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0x08,0xa8,0xee,0x18,0x00,0x00,0xc0,
- 0xaf,0xd7,0x6f,0xfd,0x5d,0xf7,0xde,0x06,0x00,0x02,0x00,0x00,0x40,0x00,0x00,
- 0x00,0x00,0xc0,0xb7,0xff,0xdd,0xdd,0xdd,0x05,0x40,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x50,0x01,0x70,0x00,
- 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0x08,0xf8,0x7b,0x19,0x00,
- 0x00,0x40,0xf5,0xbd,0xda,0x57,0xb7,0xad,0x75,0x01,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xe0,0xff,0x7b,0xff,0xff,0xf7,0x00,0x40,0x01,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x5e,0x04,
- 0x18,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0x10,0xb8,0x5e,
- 0x19,0x00,0x00,0x80,0x5f,0xeb,0xbd,0xfa,0xed,0xf6,0xae,0x03,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0xfd,0xee,0xdb,0x6d,0xff,0x05,0x60,0x01,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x88,
- 0x57,0x01,0x58,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0x10,
- 0xec,0x6b,0x1c,0x00,0x00,0x00,0xfa,0x5e,0xeb,0x57,0xbb,0xad,0xf5,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xd7,0xff,0x7f,0xff,0xbd,0x00,
- 0xfe,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x80,0xaf,0x95,0x00,0x68,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,
- 0xe0,0x00,0xb8,0x69,0x1c,0x00,0x00,0x00,0xd0,0xeb,0x5e,0xdd,0xd6,0x76,0x5b,
- 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb8,0xff,0x7f,0xff,0xdd,
- 0x6f,0x80,0x5f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x70,0xa5,0x54,0x15,0x16,0x01,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,
- 0x00,0x00,0xe0,0x40,0xf8,0x28,0x1c,0x00,0x00,0x00,0x40,0xbf,0xeb,0xb6,0x6d,
- 0xab,0xad,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x7d,0xdb,
- 0xeb,0x7f,0x0b,0x80,0xbf,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xf0,0xb5,0xaa,0x24,0x5e,0x01,0x00,0x00,0x00,0xf0,0x00,
- 0x00,0x00,0x00,0x00,0xc0,0x00,0xe0,0x21,0x1c,0x00,0x00,0x00,0x00,0x6a,0xbd,
- 0x6d,0xbb,0xdd,0x76,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,
- 0xdf,0x7f,0x7f,0xf7,0x07,0x80,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x58,0x4a,0xa5,0x52,0xa1,0x00,0x00,0x00,0x00,
- 0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x20,0xe1,0x00,0x1c,0x00,0x00,0x00,0x00,
- 0xbe,0xd7,0xb6,0x6d,0x6b,0xab,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x60,0xfb,0xf6,0xf7,0x5f,0x00,0x00,0x1b,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xae,0xaa,0x54,0x2a,0x55,0x01,0x00,
- 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x00,0xe1,0x80,0x1c,0x00,0x00,
- 0x00,0x00,0xd6,0x7a,0xdb,0xb6,0xbd,0xdd,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xc0,0xef,0xff,0xbe,0x7b,0x00,0xc0,0x2f,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x57,0x55,0x55,0x55,0x95,
- 0x04,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x40,0xc0,0x60,0x1c,
- 0x00,0x00,0x00,0x00,0x7c,0xad,0x6d,0xdb,0xd6,0x6a,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x80,0x7f,0xdb,0xfb,0x3e,0x00,0xe0,0x16,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x57,0x55,0x55,0xa5,
- 0x24,0x49,0x05,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x80,0xe0,
- 0x00,0x1d,0x00,0x00,0x00,0x00,0xac,0xd7,0xb6,0x6d,0x6b,0x37,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xef,0xff,0xef,0x6f,0x00,0xc0,0x0b,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x55,0x55,
- 0x2a,0x95,0xaa,0x54,0x12,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x80,0xe0,0x00,0x1c,0x00,0x00,0x00,0x00,0xf6,0xba,0xdd,0xb6,0xb5,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xbd,0xdb,0xbe,0xbb,0x00,
- 0xe0,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x70,
- 0x55,0x55,0x55,0x55,0x55,0x25,0x49,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x40,0x60,0x80,0x1d,0x00,0x00,0x00,0x00,0x5c,0xef,0x76,0xdb,0x1e,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xff,0xfe,0x7b,
- 0x6f,0x00,0xc0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x5c,0x55,0x55,0x55,0x55,0x49,0x55,0x25,0x00,0x00,0x00,0xf0,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x40,0x60,0x40,0x1f,0x00,0x00,0x00,0x00,0xee,0x5a,0xab,
- 0x6d,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xef,
- 0xb7,0xff,0x07,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xb8,0xaa,0xaa,0xaa,0x54,0xaa,0xa4,0x94,0x00,0x00,0x00,0xf0,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0xc0,0x1f,0x00,0x00,0x00,0x00,0xb4,
- 0xed,0x7d,0xab,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xbe,0xff,0xdb,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x54,0x55,0x55,0xa5,0xaa,0x4a,0x95,0x52,0x00,0x00,
- 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xa0,0x1e,0x00,0x00,0x00,
- 0x00,0xdc,0x56,0xab,0xdd,0x0a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xfb,0xdd,0xfe,0x07,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xa8,0xaa,0xaa,0xaa,0x4a,0x29,0x55,0xaa,
- 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x06,0x00,0xc0,0x1e,0x00,
- 0x00,0x00,0x00,0x76,0xfb,0x76,0x6b,0x13,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x7e,0xff,0x6f,0x03,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x58,0x55,0x25,0x55,0xa9,0xaa,
- 0x52,0x85,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x00,
- 0x1e,0x00,0x00,0x00,0x00,0xdc,0xad,0xad,0xbd,0x09,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x75,0xfb,0x01,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x58,0x55,0x55,0x55,
- 0x55,0x55,0x4a,0x29,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,
- 0x00,0x00,0x1e,0x00,0x00,0x00,0x00,0xb4,0xd6,0x76,0x6b,0x01,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xff,0x1f,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xa8,0xaa,
- 0xaa,0xae,0x2a,0x49,0x29,0x05,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xc0,0x07,0x00,0x1f,0x00,0x00,0x00,0x00,0xd8,0xbb,0xad,0x5d,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xdf,0x0e,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x54,0x55,0x3d,0xe0,0xaa,0xaa,0xa6,0x04,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x3f,0x00,0x1f,0x00,0x00,0x00,0x00,0xb8,0x6e,0xf7,0xb6,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xf6,
- 0x17,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0xa8,0xaa,0x02,0x80,0xa9,0x2a,0x99,0x02,0x00,0x00,0x08,0xf0,0x00,
- 0x00,0x00,0x00,0x00,0x1e,0x00,0xf8,0x01,0x1f,0x00,0x00,0x00,0x00,0x78,0xdb,
- 0x5a,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0xe0,0x7f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xbe,0x25,0x00,0x80,0x55,0xa9,0x54,0x00,0x00,0x00,0x5a,
- 0xf0,0x00,0x00,0x00,0x00,0x00,0x7c,0x00,0xe0,0x07,0x1f,0x00,0x00,0x00,0x00,
- 0xec,0x75,0x6f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x24,0x55,0x25,0x00,0x00,
- 0x00,0x18,0xf0,0x00,0x00,0x00,0x00,0x00,0xf8,0x01,0x80,0x3f,0x1f,0x00,0x00,
- 0x00,0x00,0xb8,0xde,0xda,0x06,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xaa,0x4a,0x05,
- 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xc0,0x07,0x00,0x7e,0x1f,
- 0x00,0x00,0x00,0x00,0xdc,0xab,0x6d,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb0,
- 0xaa,0x02,0x00,0x00,0x00,0x07,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0x1f,0x00,
- 0x70,0x1e,0x00,0x00,0x00,0x00,0x6e,0x7d,0xdb,0x02,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x70,0x55,0x03,0x00,0x00,0x80,0x01,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,
- 0xfc,0x00,0xc0,0x0e,0x00,0x00,0x00,0x00,0xb8,0xd7,0x4d,0x01,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x40,0x19,0x00,0x00,0x00,0xc0,0x00,0xf0,0x00,0x00,0x00,0x00,
- 0x00,0x00,0xf0,0x03,0x00,0x07,0x00,0x00,0x00,0x00,0xf8,0x7a,0x07,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x60,0x00,0xf0,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xc0,0x0f,0x00,0x08,0x00,0x00,0x00,0x00,0x68,0xaf,0x01,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x00,0x00,0xf0,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x00,0x00,0x00,0x94,
- 0xda,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0x00,0x00,
- 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x00,0x00,
- 0x00,0xa0,0x77,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x07,0x00,0x00,
- 0x00,0x00,0x00,0xf0,0xdd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x0f,
- 0x00,0x00,0x00,0x00,0x00,0xd0,0x36,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x7e,0x00,0x00,0x00,0x00,0x00,0xf0,0x1b,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x00,0x00,0x80,0xb6,0x04,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0xe0,0x07,0x00,0x00,0x00,0x00,0x00,0x2f,0x01,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x1f,0x00,0x00,0x00,0x00,0x80,0xfb,
- 0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x00,
- 0x80,0x56,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3c,0x00,0x00,
- 0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x30,
- 0x00,0x00,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0x15,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0a,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
- 0x00,0xf0};
diff --git a/images/viewmag+.gif b/images/viewmag+.gif
deleted file mode 100644
index 6daba5233321e77718e8708feb51c4fc5fb51af0..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1069
zcmV+|1k(FQNk%w1VH5xq0O$Vz009631px{J0R#sE2MGlh1po>R2pa_eAO-{u5DXC$
z5GoA-6crOW3j-G!7eNaG932}!5CcUI07?x59v>YaARbW+0Z9`DPZ0qjBOqD~096kK
zA|@zK7Xw}q1zHpXCn+XU8VDvTEGsN3LnIh5GB8*s95XdCi5LbvJv%@_Ka?W?K|(-8
zMMJ9}1GF6os3iiCFA$_D07*+oNK8(lF9}ReOS&WjPf$)%R8p!p5veyG$}14ZF$7pz
zSI9CD%{L0qIRMK&3e!6SVq{@uW@MOL7u7xznqCmtKnS!=D`;tFeqmdiWE9y+4Qp?5
zk!UfXXAf?0ZQn;4q-q74X(yFyE#6HWh;U2dN+fP|eRFkks&4?BZ!2<kdEZYYsdE5x
zczUgI0*rN5czSo?S0HzMex!CJdVG1~St6-<6|Hv_lzC5me|(^NI<0&RiF<afeHxE{
zWPpNywtfP^ZBe0qP`ZHtgN1^5h>o>`2YHN`pNCJ7i)EpOT6v75fQy*Di2#_2VTOy2
zc#of-iCmqDV)14?osM5@mb`hCs`qL+kC2U|j$y)*0Qzh@Yn#WQl4+5XkeHQqaht)f
zlSqP@s?~mBl$w2=m~zRO0L7XIaiGnSnW2B5y3CsZmYbc*p8$=ZtDT{Lkf5@cpQp~E
z0ez;;ou8bWqpNeU)PSndo};Fwp`eJY&eW^`rKhBzsIY{w(Wj}Vhp^b%u>hg4x0AEa
z&7zZ-v&y2e#ND|7m$%ccvb3wTyrj3ok-OrSy4{?-%j3QPg~j8R!pWn%%(}I-qrb_t
zxx3`V0E^1n<;4Jw$l#d5>bbkPq{PRvzrdNt=)J$ZpU304z{u^)0Jp=$yurht%;2HU
z)vd|X!Nb4o(Eyjy>bl3r@6-UM)7r<$#k|bZuF~6+-TIH;|G?4L%g)M{;QgZA_OjRP
z``!Sl+x5xO+RxF>_~Hb#+3(HL)vMk2z1if^)6k>i|ElHv*xJ^@;`7_x+S%UW(&qBj
z=Ir3<@&Et-A^8LW3IP8AEC2ui02BZe000R70RIUbNU)$mf(aZ@sE|RyL4yoR<nZFf
zObQAQ2LgC92vn+AzN#@g@en`-Eu3h{2^575ty8&h9ArQPkSkqwlm)6H4cI9!APk;N
zgs9RQYnsIPu|*5e41*#R(b}|Y7`$%E#JGXx%Z$MhqKKVQb5s+TCXvYbBJ(vCF-(H0
zbPI$eQn-1(ia<4Utem+?bfO@-H;<p05e6yPNVd&eF+Q2d&GWa927rSg7|C;YF4{JF
zl)1GKkP%I*XX3aW!^7xUogF9+9E6Z#3&kxgQouR%<;Mou4+Q`q{g1?=MPFW6K#-9j
n03B%-U2!6$Il&M*$~YMk#QH!17itR#5hBBT!4DAx1_S^*YohCn

diff --git a/images/viewmag-.gif b/images/viewmag-.gif
deleted file mode 100644
index e2dc98fa2744235c6eaf67e880b70cc4d4deaf06..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1064
zcmV+@1lRjVNk%w1VH5xq0O$Sy009631px{J0R#sE2MGlh1po>R2pa_eAO-{u5DXC$
z5GoA-6crOW3j-G!7eNaG932}!5CcUI039D3N(}=Z9~~bc9#ISdNfQN65dk41AX*Fn
zRSyLsCMZr916~mYS`-5(DJD`H2qr2lD=aEQBp5I<FjyuWGc_`a7zR8&J3v7{lp_E^
zLO?`CL#rPHv>gbjB?6Hz5Tq#pNlQpbOirOM3A!W$O;1cuQBG4-QmQu*sW%?VD-g#q
z1Xx;E$TASkHww==0LwiJ(>nuVWMO4yWYs<r*gy!hO)F?=XMSN@*+~tUXC7^DY-?|F
zre*_~XdmB48k%V!Z*gv{YXII&9O6nOZgqWgb#mWNBy)FqmvS?BdUxShAa{IzdVG1~
zSt5OZe3E@pqkAridv>9FJAi_Jo_|BZZBc`Tf|P|;d5olhi<pLsj<<>s@n$?{m&1jT
zlJ;phjgO3Zl&bh^I**Z#`fNLEo5zupka3&Af|;trm;!H}%GG{hYoXF{q0Nz*p}?FN
zf1kRRo1My^0J@t(n4pPurO}O`tD~QP#h@OLpt7H!iq4_|eWuNwpPb8}A(E$>m!h!J
zr2w9yrJJLxfU41^rHZDZpmed-prxpqrn9A}n6RdYrKhBZu-AvN*r=+ep{=^Ps*;nl
z&xN?&&7zZ-v&xOO-lDO@#jA?Ju8ZEf0GGGZt+KSru7|9&yrj3omb%@I!^o1n<e0$0
zoV?8Az5s>A<FdE9n!>}nwX~MN=#0nT<ir4-!Pd39y}7%%q{PRX!|RgF*O|xYy}!Jj
z#_zbm$h*M8qQ~i`%G00B;Jw1ct;y4?%g({WznjtHqR;Wh$i>IW#**Fnn%elR)#IVq
z_q@>DmEZid)a1a?*UQezmf`)h)bXO-_ORLLv)Jy*(b}Ej`KsLZ&(Y4f*y)+%|IO0W
z(bLe+)Yqfq|EuBjsN?*+-0ZO8{n*;p!s7GT-Qe5Z+S=dZ$>sgg<?_|$?9=D;)a?G?
z>GA*n|NsC0A^8LW3IP8AEC2ui02BZe000R70RIUbNU)$mf(aZ<u%JP~L4yoT=-9I5
zi;4;l2LgEFrVyo7vv%o0@eqIoEPg^2W0ovgC`hSv9Ar?VjjC|@_|d~g%@(9BAq<|3
z!>JUjR-YVsa;B;c4TB^2gp%Y6nAKcW&=4ba2C5=mZwyr$TM3GpXtTB$Bys5%mnm@L
zF3Kj$$XYS5u5sIY%^W;&`L>WSIH5)_XwJB~Q|FG{zI!wP91P*6T)A!J$ffId?^z20
z8Nq0JR!v$jHFtWY;gRCNOdV9VJlWDhMHx6+fN*gAP=El*|4P`IqXotV1Q`he*b&Cg
i5GF&C6U4B?3lbzkqz^RU!N!mwGqeU51Q9`CKma>ak_p}b

diff --git a/images/viewmag-.png b/images/viewmag-.png
deleted file mode 100644
index 8108ecd7b08e2e49f9c421ac3dcdbe18c28e94ec..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1056
zcmV+*1mF9KP)<h;3K|Lk000e1NJLTq000yK000yS1^@s6jfou%00006VoOIv0RI60
z0RN!9r;`8x010qNS#tmY3ljhU3ljkVnw%H_000McNliru*8&C>GcZ<N0M!5h1FcC!
zK~y-)ozuH-TV))^@$b3VC(fxa$CuPi;v_XqLz6ZkiChX~0?`P;fDrr%h=BzO7}!`D
z7?6-ykQj=D7yuy^rK%VT6&DFq(UwMW(l()WZO2Z0;^XV_xx5S%scKx6@+|K=y!s5k
z_xDKbPY6f=I-mdoXahB%!9yb5a}NN=fYa%8`W0Q*XJlE9D2ieWA@Y?<WyLhjZ-C1{
z_hH}wupE!a-_B;Urxq3#1BXV(2@L3jLK--2@@s3})vH%mFI~Fyk!{;w0ek&G1(1PL
z$z<|_rKP3ZvoE}?9(e3&+}IIhV**znMubP1nVpqqCdUpot@>=KRJ!T9?grrZ0}V~n
z-pS>1%k$5@sx*yRQsa}PM}~<Sal+9UTGZf<CE*2Q%s)OJtrTw$7Yc=QfYlGoWHOl#
zbI-q~GrY+0Y=Wo86;332q{BUGeitFxq@n{fd>vH_OG<Zl%-pU1XqsmJeozHeJsgQg
zM~)H@RYtZxroDcJTYx7Z>VYnx)}1DG@?9Eoio;X01iwGG1bo{ML{>FI$t3#rc@F4j
zIXv6kH|z^v+`>!E5t(}fV`Laj(?b0|MFj}Rf>>BZ@<u3J4zl;x{)as~h!#s@7$J6U
zc5xhs2Z62@4AdfCGv1zgiORt&yNxn3AWNV~{|s7-qzR6lB&A8Lmu_QO)&oJS)9L)O
zcjM0&Gl?YiiD&2q2ayy7MFLfV00PZ-Iq0<!+t>N|r;Bd0+5Ej9C<A7xR64t{vC&96
zbtbKw3^hu`Y>TkdCgQY7xGg691}V43wJU$Hy1HuB>-DX>HQl#1UaQsG@jP#MXlUr@
z;)!WBDcKz8)(JZ;496nwTBvrNR>Q*F-NC!FrKWa^V|CjdY6}YYwnn*oTIF(ity--r
z`FwubFpOX*s3PnZ9m{0%#&uR!R<Jv5a)T~U6fY|idzEa;_n+MCiA2-q%D<>|U$0|^
zVH{5;lWz(k79GbqB1uwBlB8AJwl7R1BFpbBq;sbYLGi{NuIG!KuQ{DFru%u>6Ym2>
z^&U_FrfHfNP1C&CXWrVt-uCuyeqnxbaZJrjWk3kD&2lIhlwSXG*BQKRi?<a1dpy$-
zy`Cqgqh5F9(4eg7dI;e+IhK&w*z=~UZU4-pfB@A($6v2>{mH1;9gQ?C*~h_bI{dKX
znOj})>7#-IC>A>YwaunCH!ZoNJ<s9mvgiET5T6xYzLWTWk%6OGP5MNYku6WGm@a2*
apW@#O6Qh+ePzIF%0000<MNUMnLSTYvQ{%t@

diff --git a/installation/DAP.py b/installation/DAP.py
deleted file mode 100644
index ca1edb378..000000000
--- a/installation/DAP.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Edit this configuration file before building.
-# Always build with --force after changing a configuration.
-# You do not need to rebuild Python itself.
-#print 'DAP'
-
-import os
-current_dir = os.path.dirname(__file__)
-src_dir = os.path.join(current_dir, '..')
-installation_script_dir = os.path.join(src_dir, 'installation')
-
-sys.path.append(src_dir)
-sys.path.append(installation_script_dir)
-
-CDMS_INCLUDE_DAP='yes'
-CDMS_DAP_DIR=""
-try:
-    import cdat_info
-    externals = cdat_info.externals
-except:
-    externals = os.path.join(sys.prefix,"Externals")
-externals = os.environ.get("EXTERNALS",externals)
-
-for o in sys.argv[1:]:
-    pth = o.lower().split('with-opendap=')
-    if len(pth)>1:
-        CDMS_DAP_DIR=pth[1]
-        
-if CDMS_DAP_DIR is "":
-    CDMS_DAP_DIR=os.path.join(externals,'OpenDAP')
diff --git a/installation/HDF.py b/installation/HDF.py
deleted file mode 100644
index 23830d08e..000000000
--- a/installation/HDF.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# To build on Linux with HDF:
-# express_install /usr/local/cdat/somewhere --force --configuration installation/hdf.py
-import os
-current_dir = os.path.dirname(__file__)
-src_dir = os.path.join(current_dir, '..')
-installation_script_dir = os.path.join(src_dir, 'installation')
-
-sys.path.append(src_dir)
-sys.path.append(installation_script_dir)
-
-CDMS_INCLUDE_HDF='yes'
-CDMS_HDF_DIR=""
-try:
-    import cdat_info
-    externals = cdat_info.externals
-except:
-    externals = os.path.join(sys.prefix,"Externals")
-externals = os.environ.get("EXTERNALS",externals)
-
-for o in sys.argv[1:]:
-    pth = o.lower().split('with-hdf4=')
-    if len(pth)>1:
-        CDMS_HDF_DIR=pth[1]
-
-if CDMS_HDF_DIR is "":
-    CDMS_HDF_DIR=os.path.join(externals,'HDF')
diff --git a/installation/cdmsonly.py b/installation/cdmsonly.py
deleted file mode 100644
index 90ea11824..000000000
--- a/installation/cdmsonly.py
+++ /dev/null
@@ -1,16 +0,0 @@
-packages = [ 
-	    "Packages/AutoAPI",
-            "Packages/cdtime",
-            "Packages/regrid2",
-#            "Packages/regrid",
-            "Packages/Properties",
-	    "Packages/kinds",
-            "Packages/cdms2", 
-            "Packages/genutil", 
-            "Packages/cdutil", 
-            "Packages/unidata", 
-#            "Packages/cdms", 
-            "Packages/ncml", 
-            "Packages/esg",
-            "Packages/distarray",
-           ]
diff --git a/installation/contrib.py b/installation/contrib.py
deleted file mode 100644
index 606aaf5fd..000000000
--- a/installation/contrib.py
+++ /dev/null
@@ -1,73 +0,0 @@
-import os
-dostandard = force
-## try:
-##     import Numeric, cdms
-## except ImportError:
-##     dostandard = 1
-dostandard = 1
-import sys
-if not 'clist' in locals().keys():
-    clist=[]
-## Format is [path,description,licence_file]
-Clist = [
-#    ['contrib/Sphinx','sphinx documentation builder','GNU'],
-##     ['contrib/zope-interface','zope interface','GNU'],
-#    ['contrib/Twisted','network computing tools','GNU'],
-#    ['contrib/Foolscap','RPC protocol for Python+Twisted','GNU'],
-#    ['contrib/ipython','an Enhanced Python Shell','GNU'],
-#    ['contrib/scipy','Scientific tools for Python (core only)','GNU'],
-    ['contrib/SP','A collection of Python modules that are useful for scientific computing.','LICENSE'],
-    ['contrib/cssgrid','An interpolation package for random data on the surface of a sphere based on the work of Robert Renka. cssgrid uses cubic splines to calculate its interpolation function.',''],
-    ['contrib/lmoments','56 routines for statistical analysis using L-moments','UC'],
-    ['contrib/ort','Reads in Oort data files','UC'],
-#    ['contrib/spherepack','A collection of programs for computing certain common differential operators and performing related manipulations on a sphere.',''],
-    ['contrib/asciidata','Reads in ASCII files with the ability to specify tab or comma or space delimited fields','Lib/ASV.py'],
-    ['contrib/eof','Calculates Empirical Orthogonal Functions of either one variable or two variables jointly','UC'],
-    ['contrib/trends','Computes variance estimate taking auto-correlation into account.',''],
-    ['contrib/binaryio','Handles binary or unformatted data',''],
-    ['contrib/regridpack','A collection of programs for linear or cubic interpolation in one, two, three or four dimensions.',''],
-    ['contrib/shgrid','An interpolation package for random data in 3-space based on the work of Robert Renka. shgrid uses a modified Shepard\'s algorithm to calculate its interpolation function',''],
-    ['contrib/dsgrid','A three-dimensional random data interpolator based on a simple inverse distance weighting algorithm.',''],
-    ['contrib/pyclimate','Provides functions to perform some simple IO operations, operations with COARDS-compliant netCDF files, EOF analysis, SVD and CCA analysis of coupled data sets, some linear digital filters, kernel based probability density function estimation and access to DCDFLIB.C library from Python.','GNU'],
-    ['contrib/ComparisonStatistics','Calculates statistics (e.g., correlations and RMS differences) that quantify differences between two datasets. Allows for ellaborated masking and regridding operations','UC'],
-    ['contrib/IaGraph','Package for Quick Interactive Graphing','GNU'],
-    ['contrib/MSU','Package to compute Equivalent MSU Temperatures','UC'],
-    ['contrib/EzTemplate','Package to generate VCS templates easily','GNU'],
-    ['contrib/ZonalMeans','Package to compute zonal means on any grid (requires f90 compiler)','GNU'],
-    ['contrib/HDF5Tools','Package to read HDF5 files into CDAT (requires h5dump binary utility)','GNU'],
-# following is now built via externals
-#    ['contrib/eof2','',''],
-#    ['contrib/eofs','',''],
-#    ['contrib/windspharm','','GNU'],
-]
-
-# natgrid has illegal C comments but gcc lets them through...
-# we need to fix it.
-NCARG_ROOT = os.environ.get('NCARG_ROOT')
-NCARG_COLORMAP_PATH = os.environ.get('NCARG_COLORMAP_PATH')
-if NCARG_COLORMAP_PATH or NCARG_ROOT :
-    Clist.append(['contrib/pyncl','Generate NCL plots of cdms transient variables',''])
-
-
-if sys.platform == "linux2" or sys.platform == 'darwin':
-    Clist.append(['contrib/natgrid','A two-dimensional random data interpolation package based on Dave Watson\'s nngridr',''])
-
-if '--enable-R' in sys.argv or '--enable-r' in sys.argv:
-    Clist.append(['contrib/Rpy','Python Interface to the R library','GNU'])
-
-if '--enable-ioapi' in sys.argv :
-    Clist.append(['contrib/pyIoapi','Python Interface to the IoAPI library','GNU'])
-    Clist.append(['contrib/egenix',"Collection of  tools which enhance Python's usability in many important areas such as ODBC database connectivity, fast text processing, date/time processing and web site programming.",'LICENSE'])
-    Clist.append(['contrib/ioapiTools','ioapiTools developped by Alexis Zubrow form University of Chicago','GNU'])
-
-if '--enable-spanlib' in sys.argv :
-    Clist.append(['contrib/spanlib','Package to do Spectral analysis','GNU'],)
-    
-if not dostandard:
-    packages = []
-
-for c in Clist:
-    clist.append(c)
-    packages.append(c[0])
-
-    
diff --git a/installation/control.py b/installation/control.py
deleted file mode 100644
index 49ed5d9af..000000000
--- a/installation/control.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# This file is used to control the behavior of install.py.
-
-# The search path is used if the X11 directories aren't configured.
-x11search = ['/usr/X11R6', '/usr/X11R6.5.1',
-             '/usr/X11R6.4','/usr','/usr/openwin','/opt']
-# Here is where they are on OSF1 and perhaps similar systems
-x11OSF1lib = ['/usr/lib/X11', '/usr/lib']
-x11OSF1include = ['/usr/include/X11']
-
-# Controlling the install itself
-force=0    # Force a complete recompilation?
-norun=0   # Cause _install just to echo command?
-echo=0     # Echo installation commands before executing?
-log=1      # Make logs?
-silent = 0 # Report progress?
-
-import os,sys
-current_dir = os.path.dirname(__file__)
-build_dir = os.getcwd()
-sys.path.append(build_dir)
-src_dir = os.path.join(current_dir, '..')
-installation_script_dir = os.path.join(src_dir, 'installation')
-
-sys.path.append(src_dir)
-sys.path.append(installation_script_dir)
-
-# Configuration
-do_configure = 1
-if os.path.isfile(os.path.join(build_dir,'cdat_info.py')):
-    try:
-      import cdat_info
-      do_configure = 0
-    except:
-      pass
-
-finish="""
-******************************************************
-Success! CDAT has been installed in %s .
-Make sure all Packages built successfully
-******************************************************
-
-""" %(sys.prefix,)
-
-# Options used for building setup.py, install_script, make
-if os.environ.has_key('MAKE'):
-    make_code = os.environ['MAKE']
-else:
-    make_code = 'make'
-
-# List of packages to be built
-packages = [
-    "Packages/pydebug",
-    "Packages/cdtime",
-    "Packages/demo",
-    "Packages/help",
-    "Packages/regrid2",
-    "Packages/cdms2",
-    "Packages/esg",
-    "Packages/ncml",
-    "Packages/DV3D",
-    "Packages/vcs",
-    "Packages/vcsaddons",
-    "Packages/cdutil",
-    "Packages/unidata",
-    "Packages/xmgrace",
-    "Packages/genutil",
-    "Packages/Thermo",
-    "Packages/WK",
-    "Packages/gui_support",
-    "Packages/distarray",
-    "Packages/testing",
-    ]
diff --git a/installation/debug.py b/installation/debug.py
deleted file mode 100644
index 87fcd2bc9..000000000
--- a/installation/debug.py
+++ /dev/null
@@ -1,12 +0,0 @@
-## action['setup.py'] = sys.executable + ' setup.py build --debug install'
-## action['install_script'] = './install_script --debug ' + sys.exec_prefix
-## for k in ['makefile','Makefile','MAKEFILE']:
-##     action[k] = make_code + " PREFIX='%s' DEBUG=1 install " % sys.exec_prefix
-
-# matplotlib depends on pkg-config under install/bin
-action['setup.py'] = 'PATH=%s/bin:$PATH && %s setup.py build --debug install --prefix=%s ; ' \
-    % (sys.exec_prefix, sys.executable, target_prefix)
-action['install_script'] = './install_script  %s %s --debug ; ' % (target_prefix, sys.exec_prefix)
-for k in ['makefile','Makefile','MAKEFILE']:
-    action[k] = make_code + " PYPREFIX='%s' PREFIX='%s' DEBUG=1 install ; " % (sys.exec_prefix,target_prefix)
-action['autogen.sh'] = "autogen.sh ; ./configure --prefix=%s  --with-python=%s ; make ; make install ;" % (os.path.join(os.path.split(target_prefix)[0],'Externals'), os.path.join(sys.exec_prefix,'bin','python'))
diff --git a/installation/irix.py b/installation/irix.py
deleted file mode 100644
index 04e8318aa..000000000
--- a/installation/irix.py
+++ /dev/null
@@ -1,2 +0,0 @@
-x11include='/usr/include/X11'
-x11libdir='/usr/lib/X11'
diff --git a/installation/pcmdi.py b/installation/pcmdi.py
deleted file mode 100644
index 141884ef2..000000000
--- a/installation/pcmdi.py
+++ /dev/null
@@ -1,22 +0,0 @@
-# Edit this configuration file before building.
-# Always build with --force after changing a configuration.
-# You do not need to rebuild Python itself.
-CDMS_INCLUDE_DRS='yes'
-# if sys.platform=="linux2":
-#   COMPILER_EXTRA_LIBS=['pgftnrtl','pgc']
-# else:
-#   COMPILER_EXTRA_LIBS=[]
-COMPILER_EXTRA_LIBS=["gfortran",]
-#if sys.platform[0:3] == "aix":  # and probably other platforms...
-#    CMDS_INCLUDE_QL = 'no'
-#else:
-#    CDMS_INCLUDE_QL ='yes'
-
-# These don't actually get respected by the libcdms build yet.
-# drs_file = '/usr/local/lib/libdrs.a'
-
-# Add on additional packages
-#packages.append('Packages/psql')
-#packages.append('Packages/cu')
-#packages.append('Packages/pcmdi')
-
diff --git a/installation/pp.py b/installation/pp.py
deleted file mode 100644
index 6c5abf9c0..000000000
--- a/installation/pp.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# To build CDMS with support for the Met Office PP format:
-# express_install /usr/local/cdat/somewhere --force --configuration=installation/pp.py
-CDMS_INCLUDE_PP='yes'
diff --git a/installation/psql.py b/installation/psql.py
deleted file mode 100644
index d3b52b6eb..000000000
--- a/installation/psql.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# Add on additional packages
-CDMS_INCLUDE_QL ='yes'
-packages.append('Packages/psql')
diff --git a/installation/standard.py b/installation/standard.py
deleted file mode 100644
index b86f594dc..000000000
--- a/installation/standard.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# DO NOT EDIT THIS FILE
-# Instead, make your own configuration file to override these values 
-# and use the -c option to read it.
-
-# This is the standard configuration file. It is read first by install.py.
-# In your own configuration file you can use any Python statements to modify
-# these values. 
-
-# File pcmdi.txt is an example that shows the changes we use at PCMDI.
-
-# Append to packages to build additional packages, such as
-# packages.append('cu')
-
-#This file is executed as Python input so you can compute values depending on
-#platform, etc. Modules os, sys will be imported already.
-
-current_dir = os.path.dirname(__file__)
-src_dir = os.path.join(current_dir, '..')
-libcdms_dir = os.path.join(src_dir, 'libcdms')
-
-## This part figures out the target thing
-target_prefix = sys.prefix
-for i in range(len(sys.argv)):
-    a = sys.argv[i]
-    if a=='--prefix':
-        target_prefix=sys.argv[i+1]
-    sp = a.split("--prefix=")
-    if len(sp)==2:
-        target_prefix=sp[1]
-
-
-# This is where we build netcdf, if you let us
-#netcdf_directory = os.popen('%s --prefix' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0]
-#netcdf_include_directory = os.popen('%s --includedir' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0]
-#netcdf_include_directory= os.path.join(os.environ.get("EXTERNALS",os.path.join(sys.prefix,'Externals')),'include')
-
-#  Control of the CDMS build
-drs_file = '/usr/local/libdrs.a'  # only needed if next line is 'yes'
-CDMS_INCLUDE_DRS='no'    # Change to 'yes' to include DRS. If yes:
-                         # Assumes /usr/local/lib/libdrs.a exists.
-                         # Assumes you have a Fortran compiler.
-CDMS_INCLUDE_QL='no'     # Include QL in build?
-                         # Caution: if set to yes, CDMS library compile
-                         # may fail on certain platforms, including AIX.
-CDMS_INCLUDE_HDF='no'    # Don't set to yes, doesn't work.
-CDMS_INCLUDE_PP='no'     # Met Office PP format is built in to cdunif.
-#  Location of X11 library
-#     If you set x11libdir (that is two ones and an el) AND x11include to point
-#     to the lib and include directories, they will be used.
-#     Otherwise a search is made for common locations.
-if sys.platform in ['mac']:
-   x11libdir='/usr/X11R6/lib'
-   x11include='/usr/X11R6/include'
-else:
-   x11libdir=''
-   x11include=''
-
-#  List of math libraries
-#    We attempt to get the C math libraries right but if we don't fix it.
-mathlibs= ['m']  #i.e., libm.a
-if sys.platform in ['win32', 'mac', 'beos5']:
-    mathlibs = []
-
-# Build actions
-action = {}
-## Commenting out pyfort not used anylonger (it's been years)
-#if os.path.exists(os.path.join(target_prefix, 'bin', 'pyfort')):
-#    action['*.pfp'] = os.path.join(target_prefix, 'bin', 'pyfort') + " -i %(file)s ; "
-#elif os.path.exists(os.path.join(sys.exec_prefix, 'bin', 'pyfort')):
-#    action['*.pfp'] = os.path.join(sys.exec_prefix, 'bin', 'pyfort') + " -i %(file)s ; "
-#else:
-#    action['*.pfp'] = "pyfort  -i %(file)s ; "
-    
-# matplotlib depends on pkg-config
-action['setup.py'] = 'PATH=%s/bin:$PATH  %s setup.py install --prefix=%s ; ' \
-    % (sys.exec_prefix, sys.executable, target_prefix)
-install_script_path = os.path.join(libcdms_dir, 'install_script')
-action['install_script'] = install_script_path + ' %s %s ; ' % (target_prefix, sys.executable)
-for k in ['makefile','Makefile','MAKEFILE']:
-    action[k] = make_code + " PYPREFIX='%s' PREFIX='%s' install ; " % (sys.exec_prefix,target_prefix)
-action['autogen.sh'] = "autogen.sh ; ./configure --prefix=%s  --with-python=%s ; make -j1 ; make -j1 install ;" % (os.environ.get("EXTERNALS",os.path.join(sys.prefix,'Externals')), os.path.join(sys.exec_prefix,'bin','python'))
diff --git a/pysrc/README.txt b/pysrc/README.txt
deleted file mode 100644
index 040a48ab7..000000000
--- a/pysrc/README.txt
+++ /dev/null
@@ -1,36 +0,0 @@
-This directory contains all the sources for building a Python suitable for
-use with CDAT. 
-
-Changes from standard distributions:
-   a. readline
-      In file readline.c, change definition of RL_LIBRARY_VERSION to avoid
-      the error if this macro is already defined, by undefining it.
-   b. We use a private version of Python's setup.py to have it find
-      our own tcl/tk.
-
-To install:
-./install_script /whereyouwanttoputit 
-
-A subdirectory build will be created that contains the output.
-Some of these products can be tested by changing to their directory under 
-build and typing "make test".
-
-If you put in a new source file you need to remove the old one and run
-./clean_script before building again.
-
-
-OPTIONS:
-you can add: --enable-aqua to the build line to prevent the build of Tcl/Tk
-and use Aqua Native
-you can add: --disable-tkbuild to the build line to prevent the build of Tcl/Tk
-
-Log files are created in the build subdirectory.
-
-Each of the pieces may be built individually using the corresponding .sh 
-files in this directory. Some warning errors are usual from 
-many of the packages and vary from architecture to architecture.
-
-N.B.: The order in which the packages are built matters. 
-
-You can add an 'exit 0' at any appropriate point in install_script if you
-want to go up to that point and then stop.
diff --git a/pysrc/clean_script b/pysrc/clean_script
deleted file mode 100755
index 185cc2b0e..000000000
--- a/pysrc/clean_script
+++ /dev/null
@@ -1,2 +0,0 @@
-/bin/rm -fr build >/dev/null 2>&1
-find . -name 'config.cache' -print -exec rm {} \; 
diff --git a/pysrc/install_script.obsolete b/pysrc/install_script.obsolete
deleted file mode 100755
index a96a6fab4..000000000
--- a/pysrc/install_script.obsolete
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/bin/sh
-d=`pwd`
-if [ -n "$PYTHONPATH" ]; then
-    echo "PYTHONPATH environment variable should not be set!"
-    exit 1
-fi
-
-if [ -n "$PYTHONHOME" ]; then
-   echo "PYTHONHOME environment variable should not be set!"
-   exit 1
-fi
-
-echo "Building Zlib, Readline, Tcl, Tk, and Python."
-echo "Logs are in $d/build"
- 
-
-OS=`uname`
-AQUA=no
-TK=yes
-all=$*
-READLINE=yes
-ZLIB=yes
-OSver=`uname -r`
-OSMajor=`uname -r | cut -d. -f1`
-
-s=$1; shift;
-while [ "$#" -ne 0 ]
-do
-  # Translate $1 to lowercase
-  MYOPT=`echo $1 | tr 'A-Z' 'a-z'`
-  if [ "$MYOPT" = "--enable-aqua" ]; then
-      if ( test "${OS}" == "Darwin" ) then
-	  AQUA=yes
-      else
-	  echo "--enable-aqua is for Darwin systems only! Use --disable-tkbuild"
-	  exit 1
-      fi
-      if ( test "${OSMajor}" == "9" ) then
-	  echo "Detected Leopard 10.5, doing the posix thing";
-	  CPPFLAGS="-DSETPGRP_HAVE_ARG "${CFLAGS}
-      fi
-  fi
-  if [ "$MYOPT" = "--disable-tkbuild" ]; then
-      TK=no
-  fi
-  if [ "$MYOPT" = "--disable-externals-build" ]; then
-      TK=no
-      READLINE=no
-      ZLIB=no
-  fi
-  shift
-done
-./prolog.sh ${all}
-if [ $? -ne 0 ]; then
-    echo "Unpacking of tar files failed."
-    exit 1
-fi
-
-
-if [ "${ZLIB}" = "no" ]; then
-    echo "skipping build of zlib"
-else
-    echo "Building zlib"
-    ./zlib.sh $s  >build/zlib.LOG 2>&1
-    if [ $? -ne 0 ]; then
-	echo "Build of zlib failed"
-	exit 1
-    fi
-fi
-
-if [ "${READLINE}" = "no" ]; then
-    echo "skipping build of readline"
-else
-    echo "Building readline"
-    ./readline.sh $s >build/readline.LOG 2>&1
-    if [ $? -ne 0 ]; then
-	echo "Build of readline failed"
-	exit 1
-    fi
-fi
-if [ "${OS}" = "CYGWIN_NT-5.1" ]; then
-   echo "Tcl - Using the pre-built tcl library that is part of the standard Cygwin distribution"
-   echo "Tk - Using the pre-built tk library that is part of the standard Cygwin distribution"
-elif [ "${OS}" = "CYGWIN_NT-6.0" ]; then
-   echo "Tcl - Using the pre-built tcl library that is part of the standard Cygwin distribution"
-   echo "Tk - Using the pre-built tk library that is part of the standard Cygwin distribution"
-elif [ "${AQUA}" = "yes" ]; then
-    echo "Tcl - Using the pre-built tcl library that is part of the standard Darwin distribution (with Aqua support)"
-    echo "Tk - Using the pre-built tk library that is part of the standard Darwin distributioni (with Aqua support)"
-elif [ "${TK}" = "no" ]; then
-    echo "Tcl - Using the pre-built tcl library that is part of your system"
-    echo "Tk - Using the pre-built tk library that is part of your system"
-else
-   echo "Building tcl"
-   ./tcl.sh $s >build/tcl.LOG 2>&1
-   if [ $? -ne 0 ]; then
-       echo "Build of tcl failed."
-       exit 1
-   fi
-   
-   echo "Building tk"
-   ./tk.sh $s >build/tk.LOG 2>&1
-   if [ $? -ne 0 ]; then
-       echo "Build of tk failed."
-       exit 1
-   fi
-fi
-
-echo "Building python"
-./python.sh $s >build/python.LOG 2>&1
-if [ $? -ne 0 ]; then
-    echo "Build of Python failed."
-    exit 1
-fi
-
-echo "Python built successfully."
-
diff --git a/pysrc/prolog.sh b/pysrc/prolog.sh
deleted file mode 100755
index f98909593..000000000
--- a/pysrc/prolog.sh
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/bin/sh
-if (test "$1" = "--debug") then
-    D="--debug";
-    OPT=-g; 
-    shift
-else
-    D="";
-    OPT=${OPT:=-O}
-fi
-export OPT
-
-OS=`uname`
-
-if (test -z "$1") then
-    echo "Usage: $0 prefix";
-    exit 1
-fi
-version=`more ../version`
-
-if (test ! -d $1) then
-    echo -n "$1/${version} is not a directory; create it? (y/[n])";
-    y='n'
-    read y;
-    if (test ${y} = 'y') then
-        mkdir -p $1/${version}/bin; mkdir $1/${version}/lib; mkdir $1/${version}/include ; mkdir -p $1/Externals/bin ; mkdir $1/Externals/lib ; mkdir $1/Externals/share ; mkdir $1/Externals/include
-        if (test ! -d $1) then
-            echo "Could not create $1, installation aborted.";
-            exit 1
-        fi
-    else
-        echo 'Installation aborted.';
-        exit 1
-    fi
-fi
-echo "Created  $1/${version} and $1/Externals directories"
-echo "Python/CDAT built in $1/${version} and external dependencies binaries and libs are built to $1/Externals"
-
-prefix=`(cd $1;pwd)`
-
-if (test ! -d build) then
-    # Unpack everything into build
-    mkdir build
-    /bin/cp src/*gz build
-    cd build
-    OS=`uname`
-    if (test "${OS}" = "Linux" ) then
-         TAR=`which tar`
-    elif (test "${OS}" = "Darwin" ) then
-    	 TAR=`which tar`
-    elif (test "${OS}" = "CYGWIN_NT-5.1" ) then
-         TAR=`which tar`
-    elif (test "${OS}" = "CYGWIN_NT-6.0" ) then
-         TAR=`which tar`
-    elif (test "${OS}" = "AIX" ) then
-         TAR=`which tar`
-    else
-         echo "Building tar for non GNU OS to unpack Python, some error messages may be generated but can be ignored"
-	 chmod +w tar*gz
-         for x in tar*gz;
-         do 
-           gunzip -f $x;
-           tar xf `basename $x .gz`;
-	   (cd tar-* ; ./configure --prefix=$1/Externals ; make ; make install; cd .. )> LOG.prolog;
-           TAR=$1/Externals/bin/tar
-         done
-    fi
-    #rm tar*gz
-    chmod +w *.gz 
-    for x in *.gz; 
-    do 
-        echo "$x"; 
-	gunzip -f $x;
-        ${TAR} xf `basename $x .gz`;
-        /bin/rm -f `basename $x .gz`;
-    done
-#    for x in *.tgz; 
-#    do 
-#        echo "$x"; 
-#        ${TAR} xzf $x;
-#        /bin/rm -f $x
-#    done
-    cd ..
-fi
-cd build
-echo "Installation to ${prefix}"
diff --git a/pysrc/python.sh b/pysrc/python.sh
deleted file mode 100755
index 3e0d844b7..000000000
--- a/pysrc/python.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/bin/sh
-. ./prolog.sh
-# Python, idle
-# This needs to be set or Python's installer will conclude _tkinter cannot
-# be imported.
-CCTEMP=${CC-gcc}
-# Get command name WITHOUT the parameters
-CCTEMP=`echo $CCTEMP | awk '{print $1}'` 
-if (test "${CCTEMP}" = "gcc") then
-config_opt="--with-gcc --without-cxx"
-else
-   config_opt="--without-gcc --without-cxx"
-fi
-OS=`uname`
-if (test "${OS}" = "Darwin") then  # MacIntosh OSX
-   CPPFLAGS="-I${prefix}/Externals/include"; export CPPFLAGS
-   LDFLAGS="-L${prefix}/Externals/lib"; export LDFLAGS
-   config_opt=""
-   OPT=""; export OPT
-fi
-getaddrbug=""
-if (test "${OS}" = "OSF1") then
-    getaddrbug="--disable-ipv6"
-fi
-if (test "${OS}" = "AIX") then
-    getaddrbug="--disable-ipv6"
-fi
-cd Python*
-/bin/rm -f setup.py
-/bin/cp ../../src/setup.py setup.py
-CDAT_PREFIX=${prefix}/Externals; export CDAT_PREFIX
-if (test "${OS}" = "Linux") then  # Linux -- needed for readline
-   export LDFLAGS="-L${prefix}/Externals/lib -Wl,-R${prefix}/Externals/lib"
-   if (test "${CCTEMP}" = "icc") then  # zeus x86_64 with Intel compiler
-      if (test "${IC}" = "") then
-	  echo "Run 'use <compiler>' to set environment variable IC to the location of libimf.a, libirc.a"
-	  exit 1
-      fi
-      export LDFLAGS="${LDFLAGS} -L${IC}/lib -limf -lirc"
-   fi
-fi
-./configure ${config_opt} --prefix=${prefix}/${version} ${getaddrbug}
-if (test $? -ne 0) then
-    echo "Python configure failed."; exit 1;
-fi
-
-make 
-if (test $? -ne 0) then
-    echo "Python make failed."; exit 1;
-fi
-
-make install 
-if (test $? -ne 0) then
-    echo "Python install failed."; exit 1;
-fi
-
-#cd Tools/idle
-#${prefix}/bin/python setup.py install
-#if (test $? -ne 0) then
-#    echo "Python idle install failed."; exit 1;
-#fi
-mkdir -p ${prefix}/Externals/share
-if (test "${OS}" = "CYGWIN_NT-5.1" ) then
-    ln -s /usr/share/tcl* ${prefix}/Externals/share ;
-    ln -s /usr/share/tk* ${prefix}/Externals/share ;
-fi
-if (test "${OS}" = "CYGWIN_NT-6.0" ) then
-    ln -s /usr/share/tcl* ${prefix}/Externals/share ;
-    ln -s /usr/share/tk* ${prefix}/Externals/share ;
-fi
-
-${prefix}/${version}/bin/python -c "import Tkinter"
-if (test $? -ne 0) then
-    echo "Python Tkinter import failed."; exit 1;
-fi
-echo "Python built with Tkinter correctly." 
diff --git a/pysrc/readline.sh b/pysrc/readline.sh
deleted file mode 100755
index 40f2d97d2..000000000
--- a/pysrc/readline.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/sh
-. ./prolog.sh
-cd readline-*
-./configure --prefix=${prefix}/Externals
-if (test $? -ne 0) then
-    echo "readline configuration failed.";
-    echo "Some platforms don't support readline, this doesn't matter.";
-    echo "Ignoring this error.";
-    exit 0;
-fi
-make
-if (test $? -ne 0) then
-    echo "readline make failed.";
-    echo "Some platforms don't support readline, this doesn't matter.";
-    echo "Ignoring this error.";
-    exit 0;
-fi
-make install
-if (test $? -ne 0) then
-    echo "readline install failed.";
-    echo "This is unexpected since it built ok.";
-    exit 1;
-fi
diff --git a/pysrc/src/setup-2.7.1.py b/pysrc/src/setup-2.7.1.py
deleted file mode 100644
index c7d059069..000000000
--- a/pysrc/src/setup-2.7.1.py
+++ /dev/null
@@ -1,2067 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision: 86041 $"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/')
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if sys.platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if sys.platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-        platform = self.get_platform()
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if self.get_platform() == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # build with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if self.get_platform() == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def get_platform(self):
-        # Get value of sys.platform
-        for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']:
-            if sys.platform.startswith(platform):
-                return platform
-        return sys.platform
-
-    def detect_modules(self):
-	# PCMDI Change
-        # Ensure that place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals'))
-        mylibdir = os.path.join(libbase,'lib')
-        myincdir = os.path.join(libbase,'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # end PCMDI change
-
-        # Ensure that /usr/local is always used
-        add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-        add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the value were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up with
-                # substituting "--Long" to "-Long" and thus lead to "ong" being
-                # used for a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
-
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework
-            # (PYTHONFRAMEWORK is set) to avoid # linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        lib_dirs = self.compiler.library_dirs + [
-            '/lib64', '/usr/lib64',
-            '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu',
-            ]
-        inc_dirs = self.compiler.include_dirs + ['/usr/include']
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        platform = self.get_platform()
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        if platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
-        if platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entiry path.
-                # This way a staticly linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c'],
-                               depends = ['socketmodule.h']) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if sys.platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (4, 8)
-        min_db_ver = (4, 1)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 4:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                    db_incdir.replace("include", 'lib/x86_64-linux-gnu')
-                ]
-
-                if sys.platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguoius dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d in inc_dirs + sqlite_inc_paths:
-            f = os.path.join(d, "sqlite3.h")
-
-            if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                f = os.path.join(sysroot, d[1:], "sqlite3.h")
-
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if sys.platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if sys.platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entiry path.
-                # This way a staticly linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  runtime_library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  May BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if sys.platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others don't
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and platform != 'darwin':
-                # OSX has an old Berkeley curses, not good enough for
-                # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if sys.platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if sys.platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # surpress deprecation warnings for the Carbon extensions,
-                # these extensions wrap the Carbon APIs and even those
-                # parts that are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-        return missing
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), '/Library/Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # ok, F is now directory with both frameworks. Continure
-                # building
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # All existing framework builds of Tcl/Tk don't support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall('-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        platform = self.get_platform()
-        ## PCMDI changes look for AQUA_CDAT env variable to decide
-        if os.environ.get("AQUA_CDAT","no")=="yes" :
-            if (platform == 'darwin' and
-                self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-                return
-        ## End of pcmdi changes (we just added the if test
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in sys.platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if sys.platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = []
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if sys.platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif sys.platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't. The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif sys.platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if sys.platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files with get
-    # mode 644 unless they are a shared library in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/setup-2.7.2.py b/pysrc/src/setup-2.7.2.py
deleted file mode 100644
index 1f9c9b83d..000000000
--- a/pysrc/src/setup-2.7.2.py
+++ /dev/null
@@ -1,2090 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision$"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/')
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if sys.platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if sys.platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-        platform = self.get_platform()
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if self.get_platform() == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # build with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if self.get_platform() == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def get_platform(self):
-        # Get value of sys.platform
-        for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']:
-            if sys.platform.startswith(platform):
-                return platform
-        return sys.platform
-
-    def add_multiarch_paths(self):
-        # Debian/Ubuntu multiarch support.
-        # https://wiki.ubuntu.com/MultiarchSpec
-        if not find_executable('dpkg-architecture'):
-            return
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            'dpkg-architecture -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
-            tmpfile)
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-                add_dir_to_list(self.compiler.library_dirs,
-                                '/usr/lib/' + multiarch_path_component)
-                add_dir_to_list(self.compiler.include_dirs,
-                                '/usr/include/' + multiarch_path_component)
-        finally:
-            os.unlink(tmpfile)
-
-    def detect_modules(self):
-	# PCMDI Change
-        # Ensure that place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals'))
-        mylibdir = os.path.join(libbase,'lib')
-        myincdir = os.path.join(libbase,'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # end PCMDI change
-
-        # Ensure that /usr/local is always used
-        add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-        add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-        self.add_multiarch_paths()
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the value were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up with
-                # substituting "--Long" to "-Long" and thus lead to "ong" being
-                # used for a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
-
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework
-            # (PYTHONFRAMEWORK is set) to avoid # linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        lib_dirs = self.compiler.library_dirs + [
-            '/lib64', '/usr/lib64',
-            '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu',
-            ]
-        inc_dirs = self.compiler.include_dirs + ['/usr/include']
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        platform = self.get_platform()
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        if platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
-        if platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entiry path.
-                # This way a staticly linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c'],
-                               depends = ['socketmodule.h']) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if sys.platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (4, 8)
-        min_db_ver = (4, 1)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 4:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                    db_incdir.replace("include", 'lib/x86_64-linux-gnu')
-                ]
-
-                if sys.platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d in inc_dirs + sqlite_inc_paths:
-            f = os.path.join(d, "sqlite3.h")
-
-            if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                f = os.path.join(sysroot, d[1:], "sqlite3.h")
-
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if sys.platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if sys.platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  runtime_library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  May BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if sys.platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others don't
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and platform != 'darwin':
-                # OSX has an old Berkeley curses, not good enough for
-                # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if sys.platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if sys.platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # surpress deprecation warnings for the Carbon extensions,
-                # these extensions wrap the Carbon APIs and even those
-                # parts that are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-        return missing
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), '/Library/Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # ok, F is now directory with both frameworks. Continure
-                # building
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # All existing framework builds of Tcl/Tk don't support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall('-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        platform = self.get_platform()
-        ## PCMDI changes look for AQUA_CDAT env variable to decide
-        if os.environ.get("AQUA_CDAT","no")=="yes" :
-            if (platform == 'darwin' and
-                self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-                return
-        ## End of pcmdi changes (we just added the if test
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in sys.platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if sys.platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = []
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if sys.platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif sys.platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't. The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif sys.platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if sys.platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files with get
-    # mode 644 unless they are a shared library in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/setup-2.7.3.py b/pysrc/src/setup-2.7.3.py
deleted file mode 100644
index 4026128eb..000000000
--- a/pysrc/src/setup-2.7.3.py
+++ /dev/null
@@ -1,2094 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision$"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/')
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if sys.platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if sys.platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if sys.platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-        platform = self.get_platform()
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if self.get_platform() == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # build with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if self.get_platform() == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def get_platform(self):
-        # Get value of sys.platform
-        for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']:
-            if sys.platform.startswith(platform):
-                return platform
-        return sys.platform
-
-    def add_multiarch_paths(self):
-        # Debian/Ubuntu multiarch support.
-        # https://wiki.ubuntu.com/MultiarchSpec
-        if not find_executable('dpkg-architecture'):
-            return
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            'dpkg-architecture -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
-            tmpfile)
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-                add_dir_to_list(self.compiler.library_dirs,
-                                '/usr/lib/' + multiarch_path_component)
-                add_dir_to_list(self.compiler.include_dirs,
-                                '/usr/include/' + multiarch_path_component)
-        finally:
-            os.unlink(tmpfile)
-
-    def detect_modules(self):
-	# PCMDI Change
-        # Ensure that place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals'))
-        mylibdir = os.path.join(libbase,'lib')
-        myincdir = os.path.join(libbase,'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # end PCMDI change
-	# PCMDI Change
-        # Ensure that place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals'))
-        mylibdir = os.path.join(libbase,'lib')
-        myincdir = os.path.join(libbase,'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # end PCMDI change
-        # Ensure that /usr/local is always used
-        add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-        add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-        self.add_multiarch_paths()
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the value were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up with
-                # substituting "--Long" to "-Long" and thus lead to "ong" being
-                # used for a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
-
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework
-            # (PYTHONFRAMEWORK is set) to avoid # linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        lib_dirs = self.compiler.library_dirs + [
-            '/lib64', '/usr/lib64',
-            '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu',
-            ]
-        inc_dirs = self.compiler.include_dirs + ['/usr/include']
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        platform = self.get_platform()
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        if platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
-        if platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entiry path.
-                # This way a staticly linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c'],
-                               depends = ['socketmodule.h']) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if sys.platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (4, 8)
-        min_db_ver = (4, 1)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 4:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                    db_incdir.replace("include", 'lib/x86_64-linux-gnu')
-                ]
-
-                if sys.platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if sys.platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d in inc_dirs + sqlite_inc_paths:
-            f = os.path.join(d, "sqlite3.h")
-
-            if sys.platform == 'darwin' and is_macosx_sdk_path(d):
-                f = os.path.join(sysroot, d[1:], "sqlite3.h")
-
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if sys.platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if sys.platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  runtime_library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  May BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if sys.platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others don't
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and platform != 'darwin':
-                # OSX has an old Berkeley curses, not good enough for
-                # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if sys.platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if sys.platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # surpress deprecation warnings for the Carbon extensions,
-                # these extensions wrap the Carbon APIs and even those
-                # parts that are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-        return missing
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), '/Library/Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # ok, F is now directory with both frameworks. Continure
-                # building
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # All existing framework builds of Tcl/Tk don't support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall('-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        platform = self.get_platform()
-        if (platform == 'darwin' and
-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-            return
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in sys.platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if sys.platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = []
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if sys.platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif sys.platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't. The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif sys.platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if sys.platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files with get
-    # mode 644 unless they are a shared library in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/setup-2.7.4.py b/pysrc/src/setup-2.7.4.py
deleted file mode 100644
index ea8a5f51e..000000000
--- a/pysrc/src/setup-2.7.4.py
+++ /dev/null
@@ -1,2186 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision$"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ
-
-def get_platform():
-    # cross build
-    if "_PYTHON_HOST_PLATFORM" in os.environ:
-        return os.environ["_PYTHON_HOST_PLATFORM"]
-    # Get value of sys.platform
-    if sys.platform.startswith('osf1'):
-        return 'osf1'
-    return sys.platform
-host_platform = get_platform()
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS"))
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return ( (path.startswith('/usr/') and not path.startswith('/usr/local'))
-                or path.startswith('/System/')
-                or path.startswith('/Library/') )
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if host_platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if host_platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('include'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if host_platform == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # build with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if host_platform == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-
-        # Don't try to load extensions for cross builds
-        if cross_compiling:
-            return
-
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def add_multiarch_paths(self):
-        # Debian/Ubuntu multiarch support.
-        # https://wiki.ubuntu.com/MultiarchSpec
-        cc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile))
-        multiarch_path_component = ''
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-        finally:
-            os.unlink(tmpfile)
-
-        if multiarch_path_component != '':
-            add_dir_to_list(self.compiler.library_dirs,
-                            '/usr/lib/' + multiarch_path_component)
-            add_dir_to_list(self.compiler.include_dirs,
-                            '/usr/include/' + multiarch_path_component)
-            return
-
-        if not find_executable('dpkg-architecture'):
-            return
-        opt = ''
-        if cross_compiling:
-            opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
-            (opt, tmpfile))
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-                add_dir_to_list(self.compiler.library_dirs,
-                                '/usr/lib/' + multiarch_path_component)
-                add_dir_to_list(self.compiler.include_dirs,
-                                '/usr/include/' + multiarch_path_component)
-        finally:
-            os.unlink(tmpfile)
-
-    def add_gcc_paths(self):
-        gcc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'gccpaths')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system('%s -E -v - </dev/null 2>%s 1>/dev/null' % (gcc, tmpfile))
-        is_gcc = False
-        in_incdirs = False
-        inc_dirs = []
-        lib_dirs = []
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    for line in fp.readlines():
-                        if line.startswith("gcc version"):
-                            is_gcc = True
-                        elif line.startswith("#include <...>"):
-                            in_incdirs = True
-                        elif line.startswith("End of search list"):
-                            in_incdirs = False
-                        elif is_gcc and line.startswith("LIBRARY_PATH"):
-                            for d in line.strip().split("=")[1].split(":"):
-                                d = os.path.normpath(d)
-                                if '/gcc/' not in d:
-                                    add_dir_to_list(self.compiler.library_dirs,
-                                                    d)
-                        elif is_gcc and in_incdirs and '/gcc/' not in line:
-                            add_dir_to_list(self.compiler.include_dirs,
-                                            line.strip())
-        finally:
-            os.unlink(tmpfile)
-
-    def detect_modules(self):
-        # Ensure that /usr/local is always used
-        add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-        add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-        self.add_multiarch_paths()
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the value were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up with
-                # substituting "--Long" to "-Long" and thus lead to "ong" being
-                # used for a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
-
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework
-            # (PYTHONFRAMEWORK is set) to avoid # linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        inc_dirs = self.compiler.include_dirs[:]
-        lib_dirs = self.compiler.library_dirs[:]
-        if not cross_compiling:
-            for d in (
-                '/usr/include',
-                ):
-                add_dir_to_list(inc_dirs, d)
-            for d in (
-                '/lib64', '/usr/lib64',
-                '/lib', '/usr/lib',
-                ):
-                add_dir_to_list(lib_dirs, d)
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if host_platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if host_platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        # HP-UX11iv3 keeps files in lib/hpux folders.
-        if host_platform == 'hp-ux11':
-            lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32']
-
-        if host_platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if host_platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if host_platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if host_platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
-        if host_platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if host_platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entiry path.
-                # This way a staticly linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'],
-                               depends=['socketmodule.h'],
-                               libraries=math_libs) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if host_platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (5, 3)
-        min_db_ver = (4, 3)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 5:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((5, x)):
-                        yield x
-            elif major == 4:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        if cross_compiling:
-            db_inc_paths = []
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                ]
-
-                if host_platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        if cross_compiling:
-            sqlite_inc_paths = []
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d_ in inc_dirs + sqlite_inc_paths:
-            d = d_
-            if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                d = os.path.join(sysroot, d[1:])
-
-            f = os.path.join(d, "sqlite3.h")
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if host_platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if host_platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  runtime_library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  May BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if host_platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = host_platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if host_platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others have -lgdbm_compat,
-                        # others don't have either
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        elif self.compiler.find_library_file(lib_dirs,
-                                                             'gdbm_compat'):
-                            ndbm_libs = ['gdbm_compat']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if host_platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if host_platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and host_platform != 'darwin':
-                # OSX has an old Berkeley curses, not good enough for
-                # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if host_platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if host_platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-            expat_depends = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-            expat_depends = ['expat/ascii.h',
-                             'expat/asciitab.h',
-                             'expat/expat.h',
-                             'expat/expat_config.h',
-                             'expat/expat_external.h',
-                             'expat/internal.h',
-                             'expat/latin1tab.h',
-                             'expat/utf8tab.h',
-                             'expat/xmlrole.h',
-                             'expat/xmltok.h',
-                             'expat/xmltok_impl.h'
-                             ]
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources,
-                              depends = expat_depends,
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  depends = ['pyexpat.c'] + expat_sources +
-                                      expat_depends,
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (host_platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if host_platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif host_platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif host_platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if host_platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if host_platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or host_platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if host_platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if host_platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # surpress deprecation warnings for the Carbon extensions,
-                # these extensions wrap the Carbon APIs and even those
-                # parts that are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-        return missing
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), '/Library/Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # ok, F is now directory with both frameworks. Continure
-                # building
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # All existing framework builds of Tcl/Tk don't support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall('-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        if (host_platform == 'darwin' and
-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-            return
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in host_platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if host_platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if host_platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if host_platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if host_platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if host_platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split()
-                               if (('--host=' in arg) or ('--build=' in arg))]
-                if not self.verbose:
-                    config_args.append("-q")
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if host_platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif host_platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't. The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif host_platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if host_platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files with get
-    # mode 644 unless they are a shared library in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/setup-2.7.7.py b/pysrc/src/setup-2.7.7.py
deleted file mode 100644
index 9a92bc3a7..000000000
--- a/pysrc/src/setup-2.7.7.py
+++ /dev/null
@@ -1,2244 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision$"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ
-
-def get_platform():
-    # cross build
-    if "_PYTHON_HOST_PLATFORM" in os.environ:
-        return os.environ["_PYTHON_HOST_PLATFORM"]
-    # Get value of sys.platform
-    if sys.platform.startswith('osf1'):
-        return 'osf1'
-    return sys.platform
-host_platform = get_platform()
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS"))
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return ( (path.startswith('/usr/') and not path.startswith('/usr/local'))
-                or path.startswith('/System/')
-                or path.startswith('/Library/') )
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if host_platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if host_platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('include'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if host_platform == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # build with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if host_platform == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-
-        # Don't try to load extensions for cross builds
-        if cross_compiling:
-            return
-
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def add_multiarch_paths(self):
-        # Debian/Ubuntu multiarch support.
-        # https://wiki.ubuntu.com/MultiarchSpec
-        cc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile))
-        multiarch_path_component = ''
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-        finally:
-            os.unlink(tmpfile)
-
-        if multiarch_path_component != '':
-            add_dir_to_list(self.compiler.library_dirs,
-                            '/usr/lib/' + multiarch_path_component)
-            add_dir_to_list(self.compiler.include_dirs,
-                            '/usr/include/' + multiarch_path_component)
-            return
-
-        if not find_executable('dpkg-architecture'):
-            return
-        opt = ''
-        if cross_compiling:
-            opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
-            (opt, tmpfile))
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-                add_dir_to_list(self.compiler.library_dirs,
-                                '/usr/lib/' + multiarch_path_component)
-                add_dir_to_list(self.compiler.include_dirs,
-                                '/usr/include/' + multiarch_path_component)
-        finally:
-            os.unlink(tmpfile)
-
-    def add_gcc_paths(self):
-        gcc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'gccpaths')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system('%s -E -v - </dev/null 2>%s 1>/dev/null' % (gcc, tmpfile))
-        is_gcc = False
-        in_incdirs = False
-        inc_dirs = []
-        lib_dirs = []
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    for line in fp.readlines():
-                        if line.startswith("gcc version"):
-                            is_gcc = True
-                        elif line.startswith("#include <...>"):
-                            in_incdirs = True
-                        elif line.startswith("End of search list"):
-                            in_incdirs = False
-                        elif is_gcc and line.startswith("LIBRARY_PATH"):
-                            for d in line.strip().split("=")[1].split(":"):
-                                d = os.path.normpath(d)
-                                if '/gcc/' not in d:
-                                    add_dir_to_list(self.compiler.library_dirs,
-                                                    d)
-                        elif is_gcc and in_incdirs and '/gcc/' not in line:
-                            add_dir_to_list(self.compiler.include_dirs,
-                                            line.strip())
-        finally:
-            os.unlink(tmpfile)
-
-    def detect_modules(self):
-        # PCMDI Change
-        # Ensure that place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals'))
-        mylibdir = os.path.join(libbase,'lib')
-        myincdir = os.path.join(libbase,'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # End PCMDI Changes
-        # Ensure that /usr/local is always used
-        if not cross_compiling:
-            add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-            add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-        if cross_compiling:
-            self.add_gcc_paths()
-        self.add_multiarch_paths()
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the value were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up with
-                # substituting "--Long" to "-Long" and thus lead to "ong" being
-                # used for a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
-
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework
-            # (PYTHONFRAMEWORK is set) to avoid # linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        inc_dirs = self.compiler.include_dirs[:]
-        lib_dirs = self.compiler.library_dirs[:]
-        if not cross_compiling:
-            for d in (
-                '/usr/include',
-                ):
-                add_dir_to_list(inc_dirs, d)
-            for d in (
-                '/lib64', '/usr/lib64',
-                '/lib', '/usr/lib',
-                ):
-                add_dir_to_list(lib_dirs, d)
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if host_platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if host_platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        # HP-UX11iv3 keeps files in lib/hpux folders.
-        if host_platform == 'hp-ux11':
-            lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32']
-
-        if host_platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if host_platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if host_platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if host_platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
-        if host_platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if host_platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entiry path.
-                # This way a staticly linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'],
-                               depends=['socketmodule.h'],
-                               libraries=math_libs) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if host_platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (5, 3)
-        min_db_ver = (4, 3)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 5:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((5, x)):
-                        yield x
-            elif major == 4:
-                for x in range(9):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        if cross_compiling:
-            db_inc_paths = []
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                ]
-
-                if host_platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        if cross_compiling:
-            sqlite_inc_paths = []
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d_ in inc_dirs + sqlite_inc_paths:
-            d = d_
-            if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                d = os.path.join(sysroot, d[1:])
-
-            f = os.path.join(d, "sqlite3.h")
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if host_platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if host_platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  May BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if host_platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = host_platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if host_platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others have -lgdbm_compat,
-                        # others don't have either
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        elif self.compiler.find_library_file(lib_dirs,
-                                                             'gdbm_compat'):
-                            ndbm_libs = ['gdbm_compat']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if host_platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if host_platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        curses_incs = None
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            curses_incs = find_file('curses.h', inc_dirs,
-                                    [os.path.join(d, 'ncursesw') for d in inc_dirs])
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   include_dirs = curses_incs,
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and host_platform != 'darwin':
-                # OSX has an old Berkeley curses, not good enough for
-                # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   include_dirs = curses_incs,
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            if host_platform == 'darwin' and is_macosx_sdk_path(zlib_h):
-                zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:])
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if host_platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if host_platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-            expat_depends = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-            expat_depends = ['expat/ascii.h',
-                             'expat/asciitab.h',
-                             'expat/expat.h',
-                             'expat/expat_config.h',
-                             'expat/expat_external.h',
-                             'expat/internal.h',
-                             'expat/latin1tab.h',
-                             'expat/utf8tab.h',
-                             'expat/xmlrole.h',
-                             'expat/xmltok.h',
-                             'expat/xmltok_impl.h'
-                             ]
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources,
-                              depends = expat_depends,
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  depends = ['pyexpat.c'] + expat_sources +
-                                      expat_depends,
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (host_platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if host_platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif host_platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif host_platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if host_platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if host_platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or host_platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if host_platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if host_platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # surpress deprecation warnings for the Carbon extensions,
-                # these extensions wrap the Carbon APIs and even those
-                # parts that are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        return missing
-
-    def detect_tkinter_explicitly(self):
-        # Build _tkinter using explicit locations for Tcl/Tk.
-        #
-        # This is enabled when both arguments are given to ./configure:
-        #
-        #     --with-tcltk-includes="-I/path/to/tclincludes \
-        #                            -I/path/to/tkincludes"
-        #     --with-tcltk-libs="-L/path/to/tcllibs -ltclm.n \
-        #                        -L/path/to/tklibs -ltkm.n"
-        #
-        # These values can also be specified or overriden via make:
-        #    make TCLTK_INCLUDES="..." TCLTK_LIBS="..."
-        #
-        # This can be useful for building and testing tkinter with multiple
-        # versions of Tcl/Tk.  Note that a build of Tk depends on a particular
-        # build of Tcl so you need to specify both arguments and use care when
-        # overriding.
-
-        # The _TCLTK variables are created in the Makefile sharedmods target.
-        tcltk_includes = os.environ.get('_TCLTK_INCLUDES')
-        tcltk_libs = os.environ.get('_TCLTK_LIBS')
-        if not (tcltk_includes and tcltk_libs):
-            # Resume default configuration search.
-            return 0
-
-        extra_compile_args = tcltk_includes.split()
-        extra_link_args = tcltk_libs.split()
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        extra_compile_args = extra_compile_args,
-                        extra_link_args = extra_link_args,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), '/Library/Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # ok, F is now directory with both frameworks. Continure
-                # building
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # All existing framework builds of Tcl/Tk don't support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall('-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Check whether --with-tcltk-includes and --with-tcltk-libs were
-        # configured or passed into the make target.  If so, use these values
-        # to build tkinter and bypass the searches for Tcl and TK in standard
-        # locations.
-        if self.detect_tkinter_explicitly():
-            return
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        if (host_platform == 'darwin' and
-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-            return
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in host_platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if host_platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if host_platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if host_platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if host_platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if host_platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split()
-                               if (('--host=' in arg) or ('--build=' in arg))]
-                if not self.verbose:
-                    config_args.append("-q")
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if host_platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif host_platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't. The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif host_platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if host_platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files with get
-    # mode 644 unless they are a shared library in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/setup.py b/pysrc/src/setup.py
deleted file mode 100644
index 9a92bc3a7..000000000
--- a/pysrc/src/setup.py
+++ /dev/null
@@ -1,2244 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision$"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-import sysconfig
-
-from distutils import log
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-from distutils.spawn import find_executable
-
-cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ
-
-def get_platform():
-    # cross build
-    if "_PYTHON_HOST_PLATFORM" in os.environ:
-        return os.environ["_PYTHON_HOST_PLATFORM"]
-    # Get value of sys.platform
-    if sys.platform.startswith('osf1'):
-        return 'osf1'
-    return sys.platform
-host_platform = get_platform()
-
-# Were we compiled --with-pydebug or with #define Py_DEBUG?
-COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS"))
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def macosx_sdk_root():
-    """
-    Return the directory of the current OSX SDK,
-    or '/' if no SDK was specified.
-    """
-    cflags = sysconfig.get_config_var('CFLAGS')
-    m = re.search(r'-isysroot\s+(\S+)', cflags)
-    if m is None:
-        sysroot = '/'
-    else:
-        sysroot = m.group(1)
-    return sysroot
-
-def is_macosx_sdk_path(path):
-    """
-    Returns True if 'path' can be located in an OSX SDK
-    """
-    return ( (path.startswith('/usr/') and not path.startswith('/usr/local'))
-                or path.startswith('/System/')
-                or path.startswith('/Library/') )
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-    if host_platform == 'darwin':
-        # Honor the MacOSX SDK setting when one was specified.
-        # An SDK is a directory with the same structure as a real
-        # system, but with only header files and libraries.
-        sysroot = macosx_sdk_root()
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(dir):
-            f = os.path.join(sysroot, dir[1:], filename)
-
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    if host_platform == 'darwin':
-        sysroot = macosx_sdk_root()
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ ]
-
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-
-        if host_platform == 'darwin' and is_macosx_sdk_path(p):
-            if os.path.join(sysroot, p[1:]) == dirname:
-                return [ p ]
-
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-        srcdir = os.path.abspath(srcdir)
-        moddirlist = [os.path.join(srcdir, 'Modules')]
-
-        # Platform-dependent module source and include directories
-        incdirlist = []
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append(os.path.join(srcdir, 'Mac/Include'))
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = [sysconfig.get_config_h_filename()]
-        headers += glob(os.path.join(sysconfig.get_path('include'), "*.h"))
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, moddirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            # platform specific include directories
-            ext.include_dirs.extend(incdirlist)
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        # Parse Modules/Setup and Modules/Setup.local to figure out which
-        # modules are turned on in the file.
-        remove_modules = []
-        for filename in ('Modules/Setup', 'Modules/Setup.local'):
-            input = text_file.TextFile(filename, join_lines=1)
-            while 1:
-                line = input.readline()
-                if not line: break
-                line = line.split()
-                remove_modules.append(line[0])
-            input.close()
-
-        for ext in self.extensions[:]:
-            if ext.name in remove_modules:
-                self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print ("Python build finished, but the necessary bits to build "
-                   "these modules were not found:")
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if host_platform == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # build with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if host_platform == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-
-        # Don't try to load extensions for cross builds
-        if cross_compiling:
-            return
-
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def add_multiarch_paths(self):
-        # Debian/Ubuntu multiarch support.
-        # https://wiki.ubuntu.com/MultiarchSpec
-        cc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile))
-        multiarch_path_component = ''
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-        finally:
-            os.unlink(tmpfile)
-
-        if multiarch_path_component != '':
-            add_dir_to_list(self.compiler.library_dirs,
-                            '/usr/lib/' + multiarch_path_component)
-            add_dir_to_list(self.compiler.include_dirs,
-                            '/usr/include/' + multiarch_path_component)
-            return
-
-        if not find_executable('dpkg-architecture'):
-            return
-        opt = ''
-        if cross_compiling:
-            opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE')
-        tmpfile = os.path.join(self.build_temp, 'multiarch')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system(
-            'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' %
-            (opt, tmpfile))
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    multiarch_path_component = fp.readline().strip()
-                add_dir_to_list(self.compiler.library_dirs,
-                                '/usr/lib/' + multiarch_path_component)
-                add_dir_to_list(self.compiler.include_dirs,
-                                '/usr/include/' + multiarch_path_component)
-        finally:
-            os.unlink(tmpfile)
-
-    def add_gcc_paths(self):
-        gcc = sysconfig.get_config_var('CC')
-        tmpfile = os.path.join(self.build_temp, 'gccpaths')
-        if not os.path.exists(self.build_temp):
-            os.makedirs(self.build_temp)
-        ret = os.system('%s -E -v - </dev/null 2>%s 1>/dev/null' % (gcc, tmpfile))
-        is_gcc = False
-        in_incdirs = False
-        inc_dirs = []
-        lib_dirs = []
-        try:
-            if ret >> 8 == 0:
-                with open(tmpfile) as fp:
-                    for line in fp.readlines():
-                        if line.startswith("gcc version"):
-                            is_gcc = True
-                        elif line.startswith("#include <...>"):
-                            in_incdirs = True
-                        elif line.startswith("End of search list"):
-                            in_incdirs = False
-                        elif is_gcc and line.startswith("LIBRARY_PATH"):
-                            for d in line.strip().split("=")[1].split(":"):
-                                d = os.path.normpath(d)
-                                if '/gcc/' not in d:
-                                    add_dir_to_list(self.compiler.library_dirs,
-                                                    d)
-                        elif is_gcc and in_incdirs and '/gcc/' not in line:
-                            add_dir_to_list(self.compiler.include_dirs,
-                                            line.strip())
-        finally:
-            os.unlink(tmpfile)
-
-    def detect_modules(self):
-        # PCMDI Change
-        # Ensure that place we put tcl/tk/netcdf etc. is always used
-        libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals'))
-        mylibdir = os.path.join(libbase,'lib')
-        myincdir = os.path.join(libbase,'include')
-        add_dir_to_list(self.compiler.library_dirs, mylibdir)
-        add_dir_to_list(self.compiler.include_dirs, myincdir)
-        # End PCMDI Changes
-        # Ensure that /usr/local is always used
-        if not cross_compiling:
-            add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-            add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-        if cross_compiling:
-            self.add_gcc_paths()
-        self.add_multiarch_paths()
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the value were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up with
-                # substituting "--Long" to "-Long" and thus lead to "ong" being
-                # used for a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
-
-        if os.path.normpath(sys.prefix) != '/usr' \
-                and not sysconfig.get_config_var('PYTHONFRAMEWORK'):
-            # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework
-            # (PYTHONFRAMEWORK is set) to avoid # linking problems when
-            # building a framework with different architectures than
-            # the one that is currently installed (issue #7473)
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        inc_dirs = self.compiler.include_dirs[:]
-        lib_dirs = self.compiler.library_dirs[:]
-        if not cross_compiling:
-            for d in (
-                '/usr/include',
-                ):
-                add_dir_to_list(inc_dirs, d)
-            for d in (
-                '/lib64', '/usr/lib64',
-                '/lib', '/usr/lib',
-                ):
-                add_dir_to_list(lib_dirs, d)
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        srcdir = sysconfig.get_config_var('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if host_platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if host_platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        # HP-UX11iv3 keeps files in lib/hpux folders.
-        if host_platform == 'hp-ux11':
-            lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32']
-
-        if host_platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if host_platform in ['darwin', 'beos']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        #exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c', '_math.c'],
-                               depends=['_math.h'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.1 _io library
-        exts.append( Extension("_io",
-            ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c",
-             "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"],
-             depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"]))
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if host_platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        libs = []
-        if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)):
-            # May be necessary on AIX for flock function
-            libs = ['bsd']
-        exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) )
-        # pwd(3)
-        exts.append( Extension('pwd', ['pwdmodule.c']) )
-        # grp(3)
-        exts.append( Extension('grp', ['grpmodule.c']) )
-        # spwd, shadow passwords
-        if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                config_h_vars.get('HAVE_GETSPENT', False)):
-            exts.append( Extension('spwd', ['spwdmodule.c']) )
-        else:
-            missing.append('spwd')
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if host_platform not in ['atheos']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        # syslog daemon interface
-        exts.append( Extension('syslog', ['syslogmodule.c']) )
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        readline_termcap_library = ""
-        curses_library = ""
-        # Determine if readline is already linked against curses or tinfo.
-        if do_readline and find_executable('ldd'):
-            fp = os.popen("ldd %s" % do_readline)
-            ldd_output = fp.readlines()
-            ret = fp.close()
-            if ret is None or ret >> 8 == 0:
-                for ln in ldd_output:
-                    if 'curses' in ln:
-                        readline_termcap_library = re.sub(
-                            r'.*lib(n?cursesw?)\.so.*', r'\1', ln
-                        ).rstrip()
-                        break
-                    if 'tinfo' in ln: # termcap interface split out from ncurses
-                        readline_termcap_library = 'tinfo'
-                        break
-        # Issue 7384: If readline is already linked against curses,
-        # use the same library for the readline and curses modules.
-        if 'curses' in readline_termcap_library:
-            curses_library = readline_termcap_library
-        elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
-            curses_library = 'ncursesw'
-        elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
-            curses_library = 'ncurses'
-        elif self.compiler.find_library_file(lib_dirs, 'curses'):
-            curses_library = 'curses'
-
-        if host_platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if host_platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entiry path.
-                # This way a staticly linked custom readline gets picked up
-                # before the (possibly broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if readline_termcap_library:
-                pass # Issue 7384: Already linked against curses or tinfo.
-            elif curses_library:
-                readline_libs.append(curses_library)
-            elif self.compiler.find_library_file(lib_dirs +
-                                                     ['/usr/lib/termcap'],
-                                                     'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        # crypt module.
-
-        if self.compiler.find_library_file(lib_dirs, 'crypt'):
-            libs = ['crypt']
-        else:
-            libs = []
-        exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'],
-                               depends=['socketmodule.h'],
-                               libraries=math_libs) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-
-        # look for the openssl version header on the compiler search path.
-        opensslv_h = find_file('openssl/opensslv.h', [],
-                inc_dirs + search_for_ssl_incs_in)
-        if opensslv_h:
-            name = os.path.join(opensslv_h[0], 'openssl/opensslv.h')
-            if host_platform == 'darwin' and is_macosx_sdk_path(name):
-                name = os.path.join(macosx_sdk_root(), name[1:])
-            try:
-                incfile = open(name, 'r')
-                for line in incfile:
-                    m = openssl_ver_re.match(line)
-                    if m:
-                        openssl_ver = eval(m.group(1))
-            except IOError, msg:
-                print "IOError while reading opensshv.h:", msg
-                pass
-
-        min_openssl_ver = 0x00907000
-        have_any_openssl = ssl_incs is not None and ssl_libs is not None
-        have_usable_openssl = (have_any_openssl and
-                               openssl_ver >= min_openssl_ver)
-
-        if have_any_openssl:
-            if have_usable_openssl:
-                # The _hashlib module wraps optimized implementations
-                # of hash functions from the OpenSSL library.
-                exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                       include_dirs = ssl_incs,
-                                       library_dirs = ssl_libs,
-                                       libraries = ['ssl', 'crypto']) )
-            else:
-                print ("warning: openssl 0x%08x is too old for _hashlib" %
-                       openssl_ver)
-                missing.append('_hashlib')
-        if COMPILED_WITH_PYDEBUG or not have_usable_openssl:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-
-        min_sha2_openssl_ver = 0x00908000
-        if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver:
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (5, 3)
-        min_db_ver = (4, 3)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 5:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((5, x)):
-                        yield x
-            elif major == 4:
-                for x in range(9):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        if cross_compiling:
-            db_inc_paths = []
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-
-                if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                    f = os.path.join(sysroot, d[1:], "db.h")
-
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                ]
-
-                if host_platform != 'darwin':
-                    db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                else:
-                    # Same as other branch, but takes OSX SDK into account
-                    tmp = []
-                    for dn in db_dirs_to_check:
-                        if is_macosx_sdk_path(dn):
-                            if os.path.isdir(os.path.join(sysroot, dn[1:])):
-                                tmp.append(dn)
-                        else:
-                            if os.path.isdir(dn):
-                                tmp.append(dn)
-                    db_dirs_to_check = tmp
-
-                # Look for a version specific db-X.Y before an ambiguous dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        if cross_compiling:
-            sqlite_inc_paths = []
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        if host_platform == 'darwin':
-            sysroot = macosx_sdk_root()
-
-        for d_ in inc_dirs + sqlite_inc_paths:
-            d = d_
-            if host_platform == 'darwin' and is_macosx_sdk_path(d):
-                d = os.path.join(sysroot, d[1:])
-
-            f = os.path.join(d, "sqlite3.h")
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if host_platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-            # Comment this out if you want the sqlite3 module to be able to load extensions.
-            sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1"))
-
-            if host_platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entire path.
-                # This way a statically linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  May BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-
-        if host_platform == 'darwin':
-            if is_macosx_sdk_path(f):
-                sysroot = macosx_sdk_root()
-                f = os.path.join(sysroot, f[1:])
-
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = host_platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        dbm_order = ['gdbm']
-        # The standard Unix dbm module:
-        if host_platform not in ['cygwin']:
-            config_args = [arg.strip("'")
-                           for arg in sysconfig.get_config_var("CONFIG_ARGS").split()]
-            dbm_args = [arg for arg in config_args
-                        if arg.startswith('--with-dbmliborder=')]
-            if dbm_args:
-                dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":")
-            else:
-                dbm_order = "ndbm:gdbm:bdb".split(":")
-            dbmext = None
-            for cand in dbm_order:
-                if cand == "ndbm":
-                    if find_file("ndbm.h", inc_dirs, []) is not None:
-                        # Some systems have -lndbm, others have -lgdbm_compat,
-                        # others don't have either
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'ndbm'):
-                            ndbm_libs = ['ndbm']
-                        elif self.compiler.find_library_file(lib_dirs,
-                                                             'gdbm_compat'):
-                            ndbm_libs = ['gdbm_compat']
-                        else:
-                            ndbm_libs = []
-                        print "building dbm using ndbm"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           define_macros=[
-                                               ('HAVE_NDBM_H',None),
-                                               ],
-                                           libraries=ndbm_libs)
-                        break
-
-                elif cand == "gdbm":
-                    if self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                        gdbm_libs = ['gdbm']
-                        if self.compiler.find_library_file(lib_dirs,
-                                                               'gdbm_compat'):
-                            gdbm_libs.append('gdbm_compat')
-                        if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                        if find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                            print "building dbm using gdbm"
-                            dbmext = Extension(
-                                'dbm', ['dbmmodule.c'],
-                                define_macros=[
-                                    ('HAVE_GDBM_DASH_NDBM_H', None),
-                                    ],
-                                libraries = gdbm_libs)
-                            break
-                elif cand == "bdb":
-                    if db_incs is not None:
-                        print "building dbm using bdb"
-                        dbmext = Extension('dbm', ['dbmmodule.c'],
-                                           library_dirs=dblib_dir,
-                                           runtime_library_dirs=dblib_dir,
-                                           include_dirs=db_incs,
-                                           define_macros=[
-                                               ('HAVE_BERKDB_H', None),
-                                               ('DB_DBM_HSEARCH', None),
-                                               ],
-                                           libraries=dblibs)
-                        break
-            if dbmext is not None:
-                exts.append(dbmext)
-            else:
-                missing.append('dbm')
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if ('gdbm' in dbm_order and
-            self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if host_platform not in ['win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if host_platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        curses_incs = None
-        if curses_library.startswith('ncurses'):
-            if curses_library == 'ncursesw':
-                # Bug 1464056: If _curses.so links with ncursesw,
-                # _curses_panel.so must link with panelw.
-                panel_library = 'panelw'
-            curses_libs = [curses_library]
-            curses_incs = find_file('curses.h', inc_dirs,
-                                    [os.path.join(d, 'ncursesw') for d in inc_dirs])
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   include_dirs = curses_incs,
-                                   libraries = curses_libs) )
-        elif curses_library == 'curses' and host_platform != 'darwin':
-                # OSX has an old Berkeley curses, not good enough for
-                # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   include_dirs = curses_incs,
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            if host_platform == 'darwin' and is_macosx_sdk_path(zlib_h):
-                zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:])
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if host_platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if host_platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a group of
-        # developers on SourceForge; see www.libexpat.org for more information.
-        # The pyexpat module was written by Paul Prescod after a prototype by
-        # Jack Jansen.  The Expat source is included in Modules/expat/.  Usage
-        # of a system shared libexpat.so is possible with --with-system-expat
-        # configure option.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"):
-            expat_inc = []
-            define_macros = []
-            expat_lib = ['expat']
-            expat_sources = []
-            expat_depends = []
-        else:
-            expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')]
-            define_macros = [
-                ('HAVE_EXPAT_CONFIG_H', '1'),
-            ]
-            expat_lib = []
-            expat_sources = ['expat/xmlparse.c',
-                             'expat/xmlrole.c',
-                             'expat/xmltok.c']
-            expat_depends = ['expat/ascii.h',
-                             'expat/asciitab.h',
-                             'expat/expat.h',
-                             'expat/expat_config.h',
-                             'expat/expat_external.h',
-                             'expat/internal.h',
-                             'expat/latin1tab.h',
-                             'expat/utf8tab.h',
-                             'expat/xmlrole.h',
-                             'expat/xmltok.h',
-                             'expat/xmltok_impl.h'
-                             ]
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = expat_inc,
-                              libraries = expat_lib,
-                              sources = ['pyexpat.c'] + expat_sources,
-                              depends = expat_depends,
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = expat_inc,
-                                  libraries = expat_lib,
-                                  sources = ['_elementtree.c'],
-                                  depends = ['pyexpat.c'] + expat_sources +
-                                      expat_depends,
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (host_platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if host_platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif host_platform == 'darwin':          # Mac OSX
-            macros = dict()
-            libraries = []
-
-        elif host_platform == 'cygwin':          # Cygwin
-            macros = dict()
-            libraries = []
-
-        elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('openbsd'):
-            macros = dict()
-            libraries = []
-
-        elif host_platform.startswith('netbsd'):
-            macros = dict()
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict()
-            libraries = ['rt']
-
-        if host_platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-            if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not
-                sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if host_platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8')
-            or host_platform.startswith("gnukfreebsd")):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if host_platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if host_platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if host_platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # surpress deprecation warnings for the Carbon extensions,
-                # these extensions wrap the Carbon APIs and even those
-                # parts that are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        return missing
-
-    def detect_tkinter_explicitly(self):
-        # Build _tkinter using explicit locations for Tcl/Tk.
-        #
-        # This is enabled when both arguments are given to ./configure:
-        #
-        #     --with-tcltk-includes="-I/path/to/tclincludes \
-        #                            -I/path/to/tkincludes"
-        #     --with-tcltk-libs="-L/path/to/tcllibs -ltclm.n \
-        #                        -L/path/to/tklibs -ltkm.n"
-        #
-        # These values can also be specified or overriden via make:
-        #    make TCLTK_INCLUDES="..." TCLTK_LIBS="..."
-        #
-        # This can be useful for building and testing tkinter with multiple
-        # versions of Tcl/Tk.  Note that a build of Tk depends on a particular
-        # build of Tcl so you need to specify both arguments and use care when
-        # overriding.
-
-        # The _TCLTK variables are created in the Makefile sharedmods target.
-        tcltk_includes = os.environ.get('_TCLTK_INCLUDES')
-        tcltk_libs = os.environ.get('_TCLTK_LIBS')
-        if not (tcltk_includes and tcltk_libs):
-            # Resume default configuration search.
-            return 0
-
-        extra_compile_args = tcltk_includes.split()
-        extra_link_args = tcltk_libs.split()
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        extra_compile_args = extra_compile_args,
-                        extra_link_args = extra_link_args,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), '/Library/Frameworks')
-        ]
-
-        sysroot = macosx_sdk_root()
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-
-
-            for fw in 'Tcl', 'Tk':
-                if is_macosx_sdk_path(F):
-                    if not exists(join(sysroot, F[1:], fw + '.framework')):
-                        break
-                else:
-                    if not exists(join(F, fw + '.framework')):
-                        break
-            else:
-                # ok, F is now directory with both frameworks. Continure
-                # building
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # All existing framework builds of Tcl/Tk don't support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall('-arch\s+(\w+)', cflags)
-
-        if is_macosx_sdk_path(F):
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),))
-        else:
-            fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Check whether --with-tcltk-includes and --with-tcltk-libs were
-        # configured or passed into the make target.  If so, use these values
-        # to build tkinter and bypass the searches for Tcl and TK in standard
-        # locations.
-        if self.detect_tkinter_explicitly():
-            return
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        if (host_platform == 'darwin' and
-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-            return
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83',
-                        '8.2', '82', '8.1', '81', '8.0', '80']:
-            tklib = self.compiler.find_library_file(lib_dirs,
-                                                        'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs,
-                                                         'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in host_platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if host_platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if host_platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                               'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                                'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if host_platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if host_platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        srcdir = sysconfig.get_config_var('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if host_platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            srcdir = sysconfig.get_config_var('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split()
-                               if (('--host=' in arg) or ('--build=' in arg))]
-                if not self.verbose:
-                    config_args.append("-q")
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            with open(ffi_configfile) as f:
-                exec f in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if host_platform == 'darwin':
-            sources.append('_ctypes/malloc_closure.c')
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif host_platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't. The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif host_platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if host_platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")]
-        if not ffi_inc or ffi_inc[0] == '':
-            ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files with get
-    # mode 644 unless they are a shared library in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/pysrc/src/site-2.7.7.py b/pysrc/src/site-2.7.7.py
deleted file mode 100644
index c22c48ab5..000000000
--- a/pysrc/src/site-2.7.7.py
+++ /dev/null
@@ -1,602 +0,0 @@
-"""Append module search paths for third-party packages to sys.path.
-
-****************************************************************
-* This module is automatically imported during initialization. *
-****************************************************************
-
-In earlier versions of Python (up to 1.5a3), scripts or modules that
-needed to use site-specific modules would place ``import site''
-somewhere near the top of their code.  Because of the automatic
-import, this is no longer necessary (but code that does it still
-works).
-
-This will append site-specific paths to the module search path.  On
-Unix (including Mac OSX), it starts with sys.prefix and
-sys.exec_prefix (if different) and appends
-lib/python<version>/site-packages as well as lib/site-python.
-On other platforms (such as Windows), it tries each of the
-prefixes directly, as well as with lib/site-packages appended.  The
-resulting directories, if they exist, are appended to sys.path, and
-also inspected for path configuration files.
-
-A path configuration file is a file whose name has the form
-<package>.pth; its contents are additional directories (one per line)
-to be added to sys.path.  Non-existing directories (or
-non-directories) are never added to sys.path; no directory is added to
-sys.path more than once.  Blank lines and lines beginning with
-'#' are skipped. Lines starting with 'import' are executed.
-
-For example, suppose sys.prefix and sys.exec_prefix are set to
-/usr/local and there is a directory /usr/local/lib/python2.5/site-packages
-with three subdirectories, foo, bar and spam, and two path
-configuration files, foo.pth and bar.pth.  Assume foo.pth contains the
-following:
-
-  # foo package configuration
-  foo
-  bar
-  bletch
-
-and bar.pth contains:
-
-  # bar package configuration
-  bar
-
-Then the following directories are added to sys.path, in this order:
-
-  /usr/local/lib/python2.5/site-packages/bar
-  /usr/local/lib/python2.5/site-packages/foo
-
-Note that bletch is omitted because it doesn't exist; bar precedes foo
-because bar.pth comes alphabetically before foo.pth; and spam is
-omitted because it is not mentioned in either path configuration file.
-
-After these path manipulations, an attempt is made to import a module
-named sitecustomize, which can perform arbitrary additional
-site-specific customizations.  If this import fails with an
-ImportError exception, it is silently ignored.
-
-"""
-
-import sys
-import os
-import __builtin__
-import traceback
-
-# Prefixes for site-packages; add additional prefixes like /usr/local here
-PREFIXES = [sys.prefix, sys.exec_prefix]
-# Enable per user site-packages directory
-# set it to False to disable the feature or True to force the feature
-ENABLE_USER_SITE = None
-
-# for distutils.commands.install
-# These values are initialized by the getuserbase() and getusersitepackages()
-# functions, through the main() function when Python starts.
-USER_SITE = None
-USER_BASE = None
-
-
-def makepath(*paths):
-    dir = os.path.join(*paths)
-    try:
-        dir = os.path.abspath(dir)
-    except OSError:
-        pass
-    return dir, os.path.normcase(dir)
-
-
-def abs__file__():
-    """Set all module' __file__ attribute to an absolute path"""
-    for m in sys.modules.values():
-        if hasattr(m, '__loader__'):
-            continue   # don't mess with a PEP 302-supplied __file__
-        try:
-            m.__file__ = os.path.abspath(m.__file__)
-        except (AttributeError, OSError):
-            pass
-
-
-def removeduppaths():
-    """ Remove duplicate entries from sys.path along with making them
-    absolute"""
-    # This ensures that the initial path provided by the interpreter contains
-    # only absolute pathnames, even if we're running from the build directory.
-    L = []
-    known_paths = set()
-    for dir in sys.path:
-        # Filter out duplicate paths (on case-insensitive file systems also
-        # if they only differ in case); turn relative paths into absolute
-        # paths.
-        dir, dircase = makepath(dir)
-        if not dircase in known_paths:
-            L.append(dir)
-            known_paths.add(dircase)
-    sys.path[:] = L
-    return known_paths
-
-
-def _init_pathinfo():
-    """Return a set containing all existing directory entries from sys.path"""
-    d = set()
-    for dir in sys.path:
-        try:
-            if os.path.isdir(dir):
-                dir, dircase = makepath(dir)
-                d.add(dircase)
-        except TypeError:
-            continue
-    return d
-
-
-def addpackage(sitedir, name, known_paths):
-    """Process a .pth file within the site-packages directory:
-       For each line in the file, either combine it with sitedir to a path
-       and add that to known_paths, or execute it if it starts with 'import '.
-    """
-    if known_paths is None:
-        _init_pathinfo()
-        reset = 1
-    else:
-        reset = 0
-    fullname = os.path.join(sitedir, name)
-    try:
-        f = open(fullname, "rU")
-    except IOError:
-        return
-    with f:
-        for n, line in enumerate(f):
-            if line.startswith("#"):
-                continue
-            try:
-                if line.startswith(("import ", "import\t")):
-                    exec line
-                    continue
-                line = line.rstrip()
-                dir, dircase = makepath(sitedir, line)
-                if not dircase in known_paths and os.path.exists(dir):
-                    sys.path.append(dir)
-                    known_paths.add(dircase)
-            except Exception as err:
-                print >>sys.stderr, "Error processing line {:d} of {}:\n".format(
-                    n+1, fullname)
-                for record in traceback.format_exception(*sys.exc_info()):
-                    for line in record.splitlines():
-                        print >>sys.stderr, '  '+line
-                print >>sys.stderr, "\nRemainder of file ignored"
-                break
-    if reset:
-        known_paths = None
-    return known_paths
-
-
-def addsitedir(sitedir, known_paths=None):
-    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
-    'sitedir'"""
-    if known_paths is None:
-        known_paths = _init_pathinfo()
-        reset = 1
-    else:
-        reset = 0
-    sitedir, sitedircase = makepath(sitedir)
-    if not sitedircase in known_paths:
-        sys.path.append(sitedir)        # Add path component
-    try:
-        names = os.listdir(sitedir)
-    except os.error:
-        return
-    dotpth = os.extsep + "pth"
-    names = [name for name in names if name.endswith(dotpth)]
-    for name in sorted(names):
-        addpackage(sitedir, name, known_paths)
-    if reset:
-        known_paths = None
-    return known_paths
-
-
-def check_enableusersite():
-    """Check if user site directory is safe for inclusion
-
-    The function tests for the command line flag (including environment var),
-    process uid/gid equal to effective uid/gid.
-
-    None: Disabled for security reasons
-    False: Disabled by user (command line option)
-    True: Safe and enabled
-    """
-    if sys.flags.no_user_site:
-        return False
-
-    if hasattr(os, "getuid") and hasattr(os, "geteuid"):
-        # check process uid == effective uid
-        if os.geteuid() != os.getuid():
-            return None
-    if hasattr(os, "getgid") and hasattr(os, "getegid"):
-        # check process gid == effective gid
-        if os.getegid() != os.getgid():
-            return None
-
-    return True
-
-def getuserbase():
-    """Returns the `user base` directory path.
-
-    The `user base` directory can be used to store data. If the global
-    variable ``USER_BASE`` is not initialized yet, this function will also set
-    it.
-    """
-    global USER_BASE
-    if USER_BASE is not None:
-        return USER_BASE
-    from sysconfig import get_config_var
-    USER_BASE = get_config_var('userbase')
-    return USER_BASE
-
-def getusersitepackages():
-    """Returns the user-specific site-packages directory path.
-
-    If the global variable ``USER_SITE`` is not initialized yet, this
-    function will also set it.
-    """
-    global USER_SITE
-    user_base = getuserbase() # this will also set USER_BASE
-
-    if USER_SITE is not None:
-        return USER_SITE
-
-    from sysconfig import get_path
-    import os
-
-    if sys.platform == 'darwin':
-        from sysconfig import get_config_var
-        if get_config_var('PYTHONFRAMEWORK'):
-            USER_SITE = get_path('purelib', 'osx_framework_user')
-            return USER_SITE
-
-    USER_SITE = get_path('purelib', '%s_user' % os.name)
-    return USER_SITE
-
-def addusersitepackages(known_paths):
-    """Add a per user site-package to sys.path
-
-    Each user has its own python directory with site-packages in the
-    home directory.
-    """
-    # get the per user site-package path
-    # this call will also make sure USER_BASE and USER_SITE are set
-    user_site = getusersitepackages()
-
-    if ENABLE_USER_SITE and os.path.isdir(user_site):
-        addsitedir(user_site, known_paths)
-    return known_paths
-
-def getsitepackages():
-    """Returns a list containing all global site-packages directories
-    (and possibly site-python).
-
-    For each directory present in the global ``PREFIXES``, this function
-    will find its `site-packages` subdirectory depending on the system
-    environment, and will return a list of full paths.
-    """
-    sitepackages = []
-    seen = set()
-
-    for prefix in PREFIXES:
-        if not prefix or prefix in seen:
-            continue
-        seen.add(prefix)
-
-        if sys.platform in ('os2emx', 'riscos'):
-            sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
-        elif os.sep == '/':
-            sitepackages.append(os.path.join(prefix, "lib",
-                                        "python" + sys.version[:3],
-                                        "site-packages"))
-            sitepackages.append(os.path.join(prefix, "lib", "site-python"))
-        else:
-            sitepackages.append(prefix)
-            sitepackages.append(os.path.join(prefix, "lib", "site-packages"))
-        if sys.platform == "darwin":
-            # for framework builds *only* we add the standard Apple
-            # locations.
-            # DISABLED FOR UV-CDAT!
-            pass
-            #from sysconfig import get_config_var
-            #framework = get_config_var("PYTHONFRAMEWORK")
-            #if framework:
-            #    sitepackages.append(
-            #            os.path.join("/Library", framework,
-            #                sys.version[:3], "site-packages"))
-    return sitepackages
-
-def addsitepackages(known_paths):
-    """Add site-packages (and possibly site-python) to sys.path"""
-    for sitedir in getsitepackages():
-        if os.path.isdir(sitedir):
-            addsitedir(sitedir, known_paths)
-
-    return known_paths
-
-def setBEGINLIBPATH():
-    """The OS/2 EMX port has optional extension modules that do double duty
-    as DLLs (and must use the .DLL file extension) for other extensions.
-    The library search path needs to be amended so these will be found
-    during module import.  Use BEGINLIBPATH so that these are at the start
-    of the library search path.
-
-    """
-    dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
-    libpath = os.environ['BEGINLIBPATH'].split(';')
-    if libpath[-1]:
-        libpath.append(dllpath)
-    else:
-        libpath[-1] = dllpath
-    os.environ['BEGINLIBPATH'] = ';'.join(libpath)
-
-
-def setquit():
-    """Define new builtins 'quit' and 'exit'.
-
-    These are objects which make the interpreter exit when called.
-    The repr of each object contains a hint at how it works.
-
-    """
-    if os.sep == ':':
-        eof = 'Cmd-Q'
-    elif os.sep == '\\':
-        eof = 'Ctrl-Z plus Return'
-    else:
-        eof = 'Ctrl-D (i.e. EOF)'
-
-    class Quitter(object):
-        def __init__(self, name):
-            self.name = name
-        def __repr__(self):
-            return 'Use %s() or %s to exit' % (self.name, eof)
-        def __call__(self, code=None):
-            # Shells like IDLE catch the SystemExit, but listen when their
-            # stdin wrapper is closed.
-            try:
-                sys.stdin.close()
-            except:
-                pass
-            raise SystemExit(code)
-    __builtin__.quit = Quitter('quit')
-    __builtin__.exit = Quitter('exit')
-
-
-class _Printer(object):
-    """interactive prompt objects for printing the license text, a list of
-    contributors and the copyright notice."""
-
-    MAXLINES = 23
-
-    def __init__(self, name, data, files=(), dirs=()):
-        self.__name = name
-        self.__data = data
-        self.__files = files
-        self.__dirs = dirs
-        self.__lines = None
-
-    def __setup(self):
-        if self.__lines:
-            return
-        data = None
-        for dir in self.__dirs:
-            for filename in self.__files:
-                filename = os.path.join(dir, filename)
-                try:
-                    fp = file(filename, "rU")
-                    data = fp.read()
-                    fp.close()
-                    break
-                except IOError:
-                    pass
-            if data:
-                break
-        if not data:
-            data = self.__data
-        self.__lines = data.split('\n')
-        self.__linecnt = len(self.__lines)
-
-    def __repr__(self):
-        self.__setup()
-        if len(self.__lines) <= self.MAXLINES:
-            return "\n".join(self.__lines)
-        else:
-            return "Type %s() to see the full %s text" % ((self.__name,)*2)
-
-    def __call__(self):
-        self.__setup()
-        prompt = 'Hit Return for more, or q (and Return) to quit: '
-        lineno = 0
-        while 1:
-            try:
-                for i in range(lineno, lineno + self.MAXLINES):
-                    print self.__lines[i]
-            except IndexError:
-                break
-            else:
-                lineno += self.MAXLINES
-                key = None
-                while key is None:
-                    key = raw_input(prompt)
-                    if key not in ('', 'q'):
-                        key = None
-                if key == 'q':
-                    break
-
-def setcopyright():
-    """Set 'copyright' and 'credits' in __builtin__"""
-    __builtin__.copyright = _Printer("copyright", sys.copyright)
-    if sys.platform[:4] == 'java':
-        __builtin__.credits = _Printer(
-            "credits",
-            "Jython is maintained by the Jython developers (www.jython.org).")
-    else:
-        __builtin__.credits = _Printer("credits", """\
-    Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
-    for supporting Python development.  See www.python.org for more information.""")
-    here = os.path.dirname(os.__file__)
-    __builtin__.license = _Printer(
-        "license", "See http://www.python.org/%.3s/license.html" % sys.version,
-        ["LICENSE.txt", "LICENSE"],
-        [os.path.join(here, os.pardir), here, os.curdir])
-
-
-class _Helper(object):
-    """Define the builtin 'help'.
-    This is a wrapper around pydoc.help (with a twist).
-
-    """
-
-    def __repr__(self):
-        return "Type help() for interactive help, " \
-               "or help(object) for help about object."
-    def __call__(self, *args, **kwds):
-        import pydoc
-        return pydoc.help(*args, **kwds)
-
-def sethelper():
-    __builtin__.help = _Helper()
-
-def aliasmbcs():
-    """On Windows, some default encodings are not provided by Python,
-    while they are always available as "mbcs" in each locale. Make
-    them usable by aliasing to "mbcs" in such a case."""
-    if sys.platform == 'win32':
-        import locale, codecs
-        enc = locale.getdefaultlocale()[1]
-        if enc.startswith('cp'):            # "cp***" ?
-            try:
-                codecs.lookup(enc)
-            except LookupError:
-                import encodings
-                encodings._cache[enc] = encodings._unknown
-                encodings.aliases.aliases[enc] = 'mbcs'
-
-def setencoding():
-    """Set the string encoding used by the Unicode implementation.  The
-    default is 'ascii', but if you're willing to experiment, you can
-    change this."""
-    encoding = "ascii" # Default value set by _PyUnicode_Init()
-    if 0:
-        # Enable to support locale aware default string encodings.
-        import locale
-        loc = locale.getdefaultlocale()
-        if loc[1]:
-            encoding = loc[1]
-    if 0:
-        # Enable to switch off string to Unicode coercion and implicit
-        # Unicode to string conversion.
-        encoding = "undefined"
-    if encoding != "ascii":
-        # On Non-Unicode builds this will raise an AttributeError...
-        sys.setdefaultencoding(encoding) # Needs Python Unicode build !
-
-
-def execsitecustomize():
-    """Run custom site specific code, if available."""
-    try:
-        import sitecustomize
-    except ImportError:
-        pass
-    except Exception:
-        if sys.flags.verbose:
-            sys.excepthook(*sys.exc_info())
-        else:
-            print >>sys.stderr, \
-                "'import sitecustomize' failed; use -v for traceback"
-
-
-def execusercustomize():
-    """Run custom user specific code, if available."""
-    try:
-        import usercustomize
-    except ImportError:
-        pass
-    except Exception:
-        if sys.flags.verbose:
-            sys.excepthook(*sys.exc_info())
-        else:
-            print>>sys.stderr, \
-                "'import usercustomize' failed; use -v for traceback"
-
-
-def main():
-    global ENABLE_USER_SITE
-
-    abs__file__()
-    known_paths = removeduppaths()
-    if ENABLE_USER_SITE is None:
-        ENABLE_USER_SITE = check_enableusersite()
-    known_paths = addusersitepackages(known_paths)
-    known_paths = addsitepackages(known_paths)
-    if sys.platform == 'os2emx':
-        setBEGINLIBPATH()
-    setquit()
-    setcopyright()
-    sethelper()
-    aliasmbcs()
-    setencoding()
-    execsitecustomize()
-    if ENABLE_USER_SITE:
-        execusercustomize()
-    # Remove sys.setdefaultencoding() so that users cannot change the
-    # encoding after initialization.  The test for presence is needed when
-    # this module is run as a script, because this code is executed twice.
-    if hasattr(sys, "setdefaultencoding"):
-        del sys.setdefaultencoding
-
-main()
-
-def _script():
-    help = """\
-    %s [--user-base] [--user-site]
-
-    Without arguments print some useful information
-    With arguments print the value of USER_BASE and/or USER_SITE separated
-    by '%s'.
-
-    Exit codes with --user-base or --user-site:
-      0 - user site directory is enabled
-      1 - user site directory is disabled by user
-      2 - uses site directory is disabled by super user
-          or for security reasons
-     >2 - unknown error
-    """
-    args = sys.argv[1:]
-    if not args:
-        print "sys.path = ["
-        for dir in sys.path:
-            print "    %r," % (dir,)
-        print "]"
-        print "USER_BASE: %r (%s)" % (USER_BASE,
-            "exists" if os.path.isdir(USER_BASE) else "doesn't exist")
-        print "USER_SITE: %r (%s)" % (USER_SITE,
-            "exists" if os.path.isdir(USER_SITE) else "doesn't exist")
-        print "ENABLE_USER_SITE: %r" %  ENABLE_USER_SITE
-        sys.exit(0)
-
-    buffer = []
-    if '--user-base' in args:
-        buffer.append(USER_BASE)
-    if '--user-site' in args:
-        buffer.append(USER_SITE)
-
-    if buffer:
-        print os.pathsep.join(buffer)
-        if ENABLE_USER_SITE:
-            sys.exit(0)
-        elif ENABLE_USER_SITE is False:
-            sys.exit(1)
-        elif ENABLE_USER_SITE is None:
-            sys.exit(2)
-        else:
-            sys.exit(3)
-    else:
-        import textwrap
-        print textwrap.dedent(help % (sys.argv[0], os.pathsep))
-        sys.exit(10)
-
-if __name__ == '__main__':
-    _script()
diff --git a/pysrc/tcl.sh b/pysrc/tcl.sh
deleted file mode 100755
index d8c7fbf6c..000000000
--- a/pysrc/tcl.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-. ./prolog.sh
-# tcl
-cd tcl8*
-cd unix
-if (test "${OS}" = "Darwin") then  # MacIntosh OSX
-   ./configure --prefix=${prefix}/Externals
-else
-   ./configure --disable-shared --prefix=${prefix}/Externals
-fi
-
-if (test $? -ne 0) then
-    echo "tcl configuration failed.";
-    exit 1;
-fi
-make
-if (test $? -ne 0) then
-    echo "tcl make failed.";
-    exit 1;
-fi
-make install
-if (test $? -ne 0) then
-    echo "tcl install failed.";
-    exit 1;
-fi
diff --git a/pysrc/tk.sh b/pysrc/tk.sh
deleted file mode 100755
index 8878bbe75..000000000
--- a/pysrc/tk.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/sh
-. ./prolog.sh
-cd tk8*
-cd unix
-if (test "${OS}" = "Darwin") then  # MacIntosh OSX
-   ./configure --prefix=${prefix}/Externals
-else
-   ./configure --disable-shared --prefix=${prefix}/Externals
-fi
-
-if (test $? -ne 0) then
-    echo "tk configuration failed.";
-    exit 1;
-fi
-make
-if (test $? -ne 0) then
-    echo "tk make failed.";
-    exit 1;
-fi
-make install
-if (test $? -ne 0) then
-    echo "tk installation failed.";
-    exit 1;
-fi
diff --git a/pysrc/zlib.sh b/pysrc/zlib.sh
deleted file mode 100755
index c2497bc94..000000000
--- a/pysrc/zlib.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/sh
-. ./prolog.sh $*
-cd zlib-*
-CDMSARCH=`uname -m`
-if (test "${CDMSARCH}" = "ia64") then
-  export CC="gcc -fPIC"
-fi
-if (test "${CDMSARCH}" = "x86_64") then
-  export CC="gcc -fPIC"
-fi
-./configure --prefix=${prefix}/Externals
-if (test $? -ne 0) then
-    echo "zlib configuration failed.";
-    exit 1;
-fi
-make
-if (test $? -ne 0) then
-    echo "zlib make failed.";
-    exit 1;
-fi
-make install
-if (test $? -ne 0) then
-    echo "zlib installation failed.";
-    exit 1;
-fi
diff --git a/resources/uvcdat.icns b/resources/uvcdat.icns
deleted file mode 100644
index 3d1efc0aa54111774aafee3e215bf5348a602254..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 40131
zcmZ^L2YgfKz4q~DLjW&HmVuCj5E3$O)3nWK)26q#X`41FZ5bg=Q^+J>Tk-&ck0fur
zG(7;DWm}TBjltHiyp0zaFxX%lYj}e3wl(eZJx7qHx4-ZHAebP%&-;J>dz|-t`PsK2
z1aZ3l&ma796G0GuUvgf1l_2(=;=FO_Yl8UsvhV*NpAnSvS26LFvc6TPR;#XDxqPX#
zq^O`snLR=<+FP6I8=DK%Z4GrbI&D?OrOOq?`3j}7qMaHbHeYXRX>Pf8qyEhM`w~);
zlcM5sDlcCu$KryVW8eOm!D7AELy+wOt)168t~XU4dX@brQkX>YPRiAlSIWzZixpq)
ze0~$3NwB(zt?eCcoj0!Cyjh?2=U+U{U_Sqoj}i;3D%IM@UoDdU-PhZnMKWDk3}z>>
zw)5JxuIsnDZl3z)qnDXKdc}`q`fPsX<Hn|1o#x4c?2q4n*$*U;Ocz#QJMrSZ2u;_u
zTXzcbV&B-YiT!n9)^q+$Cc|%gWNl-uPOGlEQeIkIaPCy%`#ye5CW{B6t`Zxml;1!9
z$+fQLw4Zz$y#K)d=;k|}Z~O9@8(#S6^wm17s=QoYQe2=^$oBsTBoOp6??%GA;mbER
zKY#pg=?Aa)Z-3?0S6>pCy8FU>_{`-D|9veDbu^9SC9mRqx%_PMFTHt8cNR!t@d-~I
zH9s?Nvyk&17W=~NJXJ{eQl~%K&i38ka`i9%3?>V|eBY_KZ;(DEVESzMxUIRq213*3
z#^x0%<O*f#+rIt`lDUe-WH1LQi`hJHHrt~YJuPOd#b&1D#7c_)W&k$Z9R8^vgU4L%
z=g;(&4I8w3{dml^etT=$uQoN*=q?}qO~Bi-B2vCqapq53Hu*FC7)++$KOayOX(JYS
zl#9d4Hxm|;a<R_k67Ky+b~3T68)M5RV6N~x@{RZAvJr#nxDSuHZoTkMM_Y4KeNAQf
zcJGb<Q>3I(vl731#_z2_pa%S|nOY((>unaR&1wb3_F22tW?3}b?p+|<oBsI>g8>D>
ze=Gf->5SUQ{BiNf=-)Py%%^;Rr@eK(y|uZaR;NyV=haV3@&)n@1)Qz?Hz7X;<G;>O
z_SyN_DQYZ=^6H|79!^oi)Oo^9wQmz6p1I1;_v1;44{QCOOyeouBy;`7q`sTiui^Cd
zHPxzuTxCgtf+t^_C%cfE^XCn;qA=KOib{dG07Jbe5ExJUz9YYP|92bSeSgoygBvel
zSD&o@uAA38TAQI!8r7BZ;zAsDd49@^{~cR+=Ch}vA+8L6W(DC^1%No_GYo&<*KgPT
zg2@QnSM=z?qwm%;nH#10JGVNowYN0Y>$IvXmr9HBN%;z;=r^06`7&P-^&-UN!eEsX
zZati5{NtF<F#@)E`_%Odp7z`FyGIk_kNW=ll>bJdsr$B*j)vN5byY=Kae)GK<efhn
zxaqGMCGYzPz|c~{<673+CpIx+nA;e>Wbr%X_@f!p`?r_orzRgw-2CS06@sCAG%1iB
z6t?OzP6L)KQ)F-V3)mU<v2Qd}3~r<o9_`BazdgW+Wp3N}zJ9!aApCt!pDyyeb#8j{
z(L+PQ=L$n_H@MQ?((F7Dyn~V_U#`g6>C0ej`SyK3K1`aqIfZbe9A+wwaKEgPC-dT%
z&u{$l*5s7^3WFK={+X!(+rr$`<oJW3`vU`gcW>XKncPrQt#&eqFJG4T*UkREzR&%Y
zC7>O|){}$>HE*?2W{_7|tF3w8Cx*G*@5{c4shOpm^$g$VdzE|UZ40xYaCF!-(EsG1
zjbK-01z64pw=N!I{puyZL)#cp;8r~0wP3bcsKtfRcmn$WkcuWe%T(H0Ss){t`GW7e
z_Z~f(nxnEeF#Vqw?PRDa^Zd-j_}H*X-;dMZxYl+R*cw_C`B8GO+yl}Be>h36BpA`e
zigAm@M%gVE%1lKOe(hGXdBHLV)3q#E7UmYr)TMH@M)j^=40FfEU6bPzGxL-fn!?(`
zTpvVPaR}@)Xt@7)pBAUEoFb2sd*;f|UHs}P-zb_XY#!l>uUi%=Gih<PT4n^qN+WEJ
zq84pd+ag6SlGL&VTWdw7?mGswVu$ZrcPA!i=9ik`ap6w<cJ@;XbCAKv_nSdUY0YqM
z7AtpGMt}610Ft@dALhbh5X3wUZX+$E*%c?_6O37$(u`wL7K;^!v{>=;fx29E={4V2
zxJLHdF|!NvRJosk$@1M}T$-DD^mwnHt{YIFANGoqZ_LX%_hVnC|F&mfb$(Hl4Z35o
z2+S^4GoNssw^O8L;}Xy<^Okvv0uN`Wt1B*P1&joy7h_A9$!hu^U+ltQyr;EJJvk9}
zq0R1JCrXg7D?9}6_}`pops8<K><-EjWp-UMQ#@kX)WRZM)uL^VkAp3vsN9m$ii$V=
zPBJ|i{$W#pWk6||4DUbAOg)he4c>P3-@(`C=OfZYC{jLT1~6VQJ8ZTk3*YSGSOA`d
z_S{RB2UHy4B~+G{R7wIEiA?v6f9TojAH($b_s^Ys=+v!ycfQvxr<v!Ul&=zp{@B;=
zlLGn0KfL|)29k2K*sZh#>}Dgv%K}Zbg-Rei<%LBhS9bd*G2MNB)43U%$nf`n@c_(W
z$f)nX3%l=Vy-ItJO4<X(%e@u3ZzF8@{#+qf%KrZ82NXe2HoJ`$?L0W(VFR#jA%XBx
z<`orQl5X(_u<y?<u;I4@7=9a>k*SBL9@1Lc*K_-(6T1e%6WGcGxqtp=K7f8D2LUWQ
z`+Pn@&;h_~wpu3zgvY!Cb~gixFDexU6`|fKOxJ*?0n7G%?iG&K63g7xItDAd2P<<b
zNe#Rv?PC(;Zi;iS`I5|)p#=r9#Dd~H!fdtiK(K|PVeMwef|C)&^8CwRZcKH?m_VP;
zvUurZ<G)|$o%O(=r#%fE;MK;u8aTkRl0wBvxo;tt_Vn)-Wc~8Z9JzAR@kEoSqTq8D
z9FF-e_=@a2r98j*H$GBkp!eRxTjC$gwY~E0#vRv<v^Tif>5N*mt0^lfNS3eqz=!Gc
zp7Ota7*SP26lwObo6RtoXu^GgvYO}PX<^Cb#e9a085sD@mS^u;ZkLwdj}N(_?|n=k
zl$Jg<EvY2=s)AU5hR;t<Kj*ViIA;YKWm~XNGcknwlzGuXqvaLJ<%;)wfW`{+4-B~!
z^u@&BpkbhoR;I^dr%zjs(<VXgXJKdDWbYl>vk-}evd%*{saV2o#zrkbJ$sc3c|j7J
zkq$Zg`>|il7&YhtMW3>RR`|zaMl?;9Z&0S65d353?oL{`c4#*;Piq|E4uMz~qKM^`
z)i#2RsOW$<4aR(F{hJjgy}rNiZa1Vr#}7n$Ct*cOp?p=*iRaU%Y#?dL$PeH_9pjxy
z7K$WRQM9Y3?&Qc7IY04BW3m~(A)#;94T7-VyLY;1FobBDXO)*A0Y$2Ob*?g}-I;t?
zst&&Ub_%g!1{ny*v)XW6Yi*7^U%Hu*2C||Cnj4|Tz4v<Vblr5ai_RF-G{1`SQ{?OO
zaxV2QKqah8_v1czEg*W{MkNwnksxS(VbMXFz3dAmdGdldfU*5vY}fbq_uaeOedqSA
zn>1BOKb)BZ5^$Ph?m2}WQ?TCEi_`g^zWW@Lu_=Z!Td9*c>71Pcnwy=n-Y4a*g%Q3o
zrn}#|=h}M79-r<zT{LY@yoOr(m{7DrWs-bNj-p}I0`5C(&EfCA$|jkfEN^OR$~=`o
zxXxH%&MU2y-AbJ=h8Vu}mV(6Bjzr$LbNhBz*DV^cqs>Vogj!XBbU8m!z9u)X+GwV&
z)p7UapMUl=pSg_1gfuAg++-}_I&FhOlh)<a6eXuY{{c|<4V>PtTfCdzH#$3!a<#TJ
zH`aqnnuqkU6Xa`i^VI!wPRs}SAHVwog1QUKpN9;=JZFn0Hc$(Tkh^0EGA5|2d4LV_
zkucqS*QeeQbZ)$Uy&afHW9w?Z2Q5^_%h%=PsrqJV$v7rdu_xYU!9}~U7<`~2PDBwK
zU~^C=i`8yzpIy9~BbOH>u*6I+-}j=MuC;e`knJ1VT7l~%772V6T_qJNV`-7gHT_SJ
zt51F>JsN;y2pKCaC3r_3;q4@tT0kVSILwY3U@I=X<|kr$`TV7=wWX!Cm26pm^=fky
z&4-$5jSARhP^TFAhP)hQjehPi)w+|%lh4cF_Kjl_z5?2u;Uq|6tr_aJXq^Jj>6lcN
z3w0=X&qs`!BCD;biEQ53)YRBeUk7BG>PpD1q!5{;+&ed?=;{z15p0f!8epH3D=)t6
z2XqF|VU*MyNqAW2EoR#cNqEj#VQi|4Kv%r&D`vX+OPlNX_3P_uY2a#&T2*=F(qm}8
z+%GRDx8lY_S}kn$;flELlF!N&IP_0x+5@5p);tn{S%MfdTkU2#l)%``meI>F*z+&>
zi|9h4pf<W@jjoyouDViDURHu!nlImwo0DJDHw_g=SQ{veK9PD(j--?(DHhTU5D)=l
z@a)tAFz1$#Pa+GUp{G>C;CkL0;JN#TkF;71uX>$Uqd~P>RY{|l(nTCerxke>*G8T8
z3rBu|Bt_>H3clP;DccH}y0Dmt9CNhOnJ0;j3ybyzig2bfi^DNz20HQzKlK*E4Bu3!
ztD;msRlr6yOoM~r^1Pg!;)eUvFz4kq$3&y-JHG6qB3ka2FWLMU8}a!uBP=AXGL&PU
zKuTz~+v;<Z<J)`7;@ANx$S;^uRmJ7Yyem&%zFbl97`}+c&&esM>7BOG;mBbfD2@t?
zJtK#Dy5$Q4o%jKK!et)T=t3><P}qV=(+dtn8rzq0Wkyt4|CN{i>m+248pY=qC1hDt
zd0-h$dNB<SwrhJPoKe~4m~6{DdMpKuiKmI*ghGZuZs|2|hqxCPruYDd5kdG6gakF|
zh4{>yk?*nMk$MIA`Y|@8l##_zB>_M#Dl7nWVRiSo#ToD&=Khk{FhN?b5`or@wvl+|
z3W$=1gC@;I6C3dZG%pZEawSwsnsQ9~i;WBcbB&Lm?}mfIBRe<jxLA;1P!LtfC@9F6
z=jIjH-JNiPTO790I_c44NoN(x6uE2BN8Yi_mCh^`9-c;xC6>>E=*0yhI}bINm-6CA
ze~Q}A_T@3xF#b%2?@KKz&is{MG(s_{Ae9V-JU2&CadlwY$rX#;{;2(IWLR`MXcxi)
zfA13wS$S_z!<`~aTC|zt5aAXa7RwAl<Q4J|z=C(a^V@IU@L@20|5!v8E{A~>CHuUe
zPZ!85^Kx@?3$-^#=51&Om{;2E6P>w&Fj9P8p_G95ecl4*YQOc&5Aw}O5fGMba|y(9
zq>UDo3px3Dq<obk`?;5Q?EK4zTYQ;`1<~@gZ?cc>ONWvuue9EuwAhdtthPDqbM5B@
zVJA|u<!E8ZUGsm1d}*2ghK;X=HX$e@zAQRy6DJ9OBdv{Q%UF9EEL&c<H{j{#w!Qx6
zp8z>Z&MrK%>hl9hRZVvvE`VA%Si57vq`R`vl_Ncsqf}(bR~7zsqwo5S+dol^P>VQ@
z+aju+WP&wmp$+wpVS40RWqw}T_8<8MZhP*{Kb{3A@^Z<XKt)0HrNIfXfIgz#VI99!
zDm)$*(Z6J$xvQ4v<fX}1<mK=6{^>UbdIwFdhm~?z%tNWf&%p>YmBf?Qj+iHV+m8H}
zy>3GQd+V9<Jb7V-uKnJ~%;F*_g|By59^I+T;D;TJm0dVfi0Z^<9%(Dh%}J53&V#|`
zD$5%x8z(4~F=kgQY9tFmNr=~{(41+>O%=-99@@<Juf*;D?O(pF>NO5eqU5pBRgGiO
zJ~wo|JY5hL&QCp)t57Nw;vrDzWwp)pRx5Mz668L)x%uVQ9eoextc!~{pw+QtA8U;6
z$RK<WkCq&B4~8BtSZqtEY-~$*x-}v&ubmydTAm|35q4CNdg>xjd9aeq%MQMI8FYN?
zT6u0xUX0vF*)lW^cMVE#p2a2G_>Fx2r@!?(O{`2jQ+<ENzUXk+Z8p+QILO5fC=bBh
ziH@SwgCD;CapI}0oV?ta9Jh<YQEL=9YMUFVF9K$s{FcK?Gt6O`>eXhQ`0S6rdh4}q
z%+tj7SV6?m*i*$d9e4VWklwl8sHyE5n#Q5*mif5}<ISd<cdy>+>b%y{(!_6cYpU$E
z#hBggi{mvQ)jREY#q^ZvrY=7%mh-_c-+q1DThTjxfXb%i^7JJBv9Pf4@S{im^5KVn
z{QaJ<IfswQ)a^r4j}{z!%5`aRk#xA&?R=Yy9fg^f#csKKJ~~qoBj2dZIvyK)JS^;Z
zRPyORFqqyyI#cj7pDf}9MX^$;$UUEZDkJ@L@U|DWZQXY0pFe){cbwo8XL2%P6H2sK
zJ9`EON5>~;X6E<{0&vwa-<c5+DZLnn?DaxYlI+ZdJcY85$M*LLD@P@kMYt;SV6U`C
z2$X-g-jCtWLVdR5^jT4CTJf2Pu&~gu@QCA){OC9`krcV6+_J;{TOHF)!f-)m9*PtK
zD_s<HW2N{@pFhY8-uL#+CS3Bs@|Tr)dpG;|`1+w)mswV#Je^r`wXrDc^r>{II5jyj
z?nqorRMG=06lc7OcJ>Noy4>SrPX5{L-e(CHd48<iPm!O-eg9{#`Z5`R{Ng}N{)M!(
z;*J|CMV^AET&Fmhr^s!w+w4PSg7Cz%3Pq;eGhdOP5?DZG6Rz#@!Z`U#WnsQD>&=aR
z-W#8Kom4Q+OGO3O?$#IPAmz#vr6(yz9sTkXM^i5-6<KnRqA%W}tXS)!C`^<;oqvA+
zkDm4S+j$;Eo#MP$C{s0Ft5D=B@-JMh>^yUvl)^Kdm3!v>{5{ks6!>^vUXY0HQo+Ue
zSA3s8gMQTpMM1(_`#;RQeK(tX@UwzElAn>MJTLbu{LT+i-8_Mvl!eK1rlLsrtnb$-
zLlg?`cAs}&{n^Kd^R-cX^3I+@sVXO6YOc!PtfDIfoJ*Nom?CEso_LxOS|Gn5+w98>
z@C(@f#@qWZ+#G1DQ0C=H<*vnwyjXbfgn(SaHD9SnkuNU}-LZB5hd=&^jqc_ef9CUV
zy!~NJ^QgY5905is_sD4)(B|hxW6g)|d0>4}s@$vK%z<C}{NWk@C@f%XdF`$L`9^tv
z;(pyFWnK!pRXG!8LtQ~$9#WTl>2^8cnk&!8B4yEqu$*ITRO3%E{MkEx`m5csHKQ|z
z)++__Je*^VBE89Bvy5CV&dWpa`<s$*mFFYccqj^$N;)rG>(6-l)wh0oKyqVxe&l9#
zse;5wR$sh;Ca~2ye*KbM@!8sZ5Lk>DQW)l!z}&#_57_qlJ71?1-<mK_4z#ID6nS}3
za4NDscsx(5ZK@}06O|9X$qU7DmxB0YCez>dMPc}ddn0n14Re+W{q@?4LL@g(iL6U@
zzS+}apO`aK6lhb*3q*)jpZj1DWAkr!u(rMa>p$)b%W8eFV4Zt7aI0B&rMxif5&W8`
z6_pkeP6a_KQ?Yx_Pkd9*arN<sEwP??<!A4H@ZG6P9j0lk-L^0_ZkVz}o894!t<%^g
zw*cWBOWr^Qw4UM5*!0X-amOQg$>*-L^^VOTvj<7=-_`~C7`ge**#Egt26(k4rQl+g
zB!Pb{;=3rJ!NxaxpxCoor-}*JynJOcvcBvW{nMGgenAD$Eu~U^F*`juYle202xC@j
zIliaNPX;SL+L*?4^ZQkfLV&odP|EvkX8R(VPZO6RA!T6_ohSV~K#HV!dnU4aue?ih
zHoC2$B3v&hi|CIPyL}~0SMNU+#X_BKO?BUWs3qL8l*Nif^wo0qvyl|L`hQo5bgn=v
zIax!vDHZuJGY>_6*iJu23XosOg6DHn7N;DlC*0)9VkJH;%J}F-HX{ic-TMXj5L(jT
zNVuI>DvA>2p33~}sJ8;p$98A<zmSNwq+5Q0Lf!)CeA<FNlttP9d=AN-8~fGBA_2(2
z^2m0=y%69?&)k*8;ZOTVV|Bp((`cBw7vOVAC*hv2loz29<yLgSA0?KX-!Bx%rhtZo
zP1!{dh46Pp1#wuP^<Q*re<kCYSU?sQD$rW$L8)8_Q!Xq*%Hmp(_y$HMtT(<ZMd^r7
z?-MRm0bfq!lgO7=mz?<(o4qCQCm9%g<ewiV+-c_yU%UcAKI3=q{pL5{HDt6Aq>hN>
z6NHk~@z7kwPt;@46cL$55Rvik5WDgT_p&-|Z3Cb1QrBv3FH9^<qc4hv#Ka7W%UO%f
zCZJLY9?P9g5dZut_6wJ<KEo2NR@bOSqfT3|ZJU}lPoV|2f|{P6o0_8Nz=x2u=tw1&
z(QQ){!w0^KJ>c@y0j$$$b<NdSSKU~ndo;f=GtVcMPt4BQ>17lpSahN2Wej@CLl6hQ
zCcof)PJZF?wR35;u8H2Vx}jG6$TDp~p1oq)KD{vEpcd`+MFB-PFrC17z@5P$KKmLA
z50am|eEkKz>%T1>pP#l(^Kimx^W5};eaTKCf?8eZ(S{2Xvod#<Kk>y^<U!X1UwqCx
z=<?OUAJ#9&)|#%V`Gv`cB(Y**X4X3CpzKRFTlDvf-9UCEZ<qUlFTXf&nA}A~{_n-r
zm#3{WbJHZTeB3r|o<uj;frh`!N@#%#gNb^XUdJu|7r{Q*u==%G^Ryi~Gc|3Qn?#e8
zqSp}qpcK5N>v!j>LqDwYrdKu1Pt94TC}Mbe&H}gLpls<*zB16uW_`cae=ceuCM~nG
zW5g_$j8ltXY#Om*J(I;^_+k-*#ryy7b(xs5PFtsm8QTmvN}(l_MvzP|7Q+v#=)T4O
zqEN>tysPVLt1lCiv$NLeS<pF2*c{*_W*=C<M7QJLmNY<i&=?;rmNXF5q-Ab;W_B7y
z#H<a6qND`j8_y(v*aE|j#%dnno1v|%tyU7mjAdqedTw@R8vWWO&?Y5ZHnU>rQuc>M
z9!1)kI$aq__|;d}G-y#Y&&<qX7xPp*p`@i`Mbhik4$`0hZ|hd;wDr20T9r;+U0dC#
zC6P8vnaODv>m*4mvn@K7kOG=9Y;d8bP7>bBY0d|1*}ORbITDKnvw*Y@l@P0{8>?$;
z>T7CsHM#~Zh7{oD^z7^eNqAAnqnD7Zl8dem3T-cXl<UtYRyz6m1AtCfr>m{6tJmpj
zwRHfPn+5HYFm!hm1P)5D1hYE%n7Pmw0zusKUyI$T>PB4+S?yK>0?^1dPFrT}lhCSr
zR@jxm@%>VejE%Q~a3+H|_|+E&50w(jmDRO%TAdCJz$o-%FtVJYJ15w5kx#(`Bw#O)
z%-AX*R?s9f;R(L_=U0dunrbZ^Q!!e+L-X(%(Qs^3l#M_U=f1($m$?zWWF~(45-{OU
zcfk?im+)VcdQY_MhDU~n_=6sL)6nqP0Os@)4<^PQKI$2r9e?m(Y;0t3Xk=_qZ`2z{
z7rF+B_2Um8j*ZXEJ!qGltF3Qr)YfzjlS3;9jiw>Jt}KqjjmYRBo}HMQ7@wV*m>e6(
zj|!Dl*4L@jwReVxhD;`-X`o0zMubIhIblb;i0wm<CZ=Z|O;3+BpGuD4aKDSotm!ct
zhd>hjWy5uONDwEK8yXQB%E739a&lsJVrFKtMUl?uMDsZ$cVmPgdu)7c<ogBPIq6X$
zTr$)xA~Y<lop^P)%rK7~Zgp10MuvyVyIPNNxLi)icUOiU{;=ro?bfuQ5H2^A8^#R_
z$E;pgBjuePpBiq6JFh%_KC5zQYCJxK&)pE2+4jR;MuWaHH#UUidW3SrLqj7mtT)IL
zj-Du=?Uo8cPVfZ+@``zCrZ|Yt-N@l&KYZZa&@?z~GU)X^ZNeZP*F6*`3&EU8mytwD
z2@xsw5Q;@&;pswRrS575FFZJFWGI8f;c>k<At$@%7HYp0aD5KbSfe=dgS+*;=xZB{
z9m&BQFk>~&%oR%{Vi8beGG3Z%dRiJOc9Y1&LOH_9m4vWRE;pj&Y%rg@JUE0KRBN8^
zI~Bs?t_?n8dIV{Yju<YB!;-F>c=|PW&P0TVaD%y_p<$#XJ&lyQ04rlvnpA=vgp#z=
zS>m)@!fm=x5X$GegopFFD?;SCN5XHJXXi?AwsnVVr^g@CDrBlSapZ{9Xdv}#4Lz;Z
z$AS~X!RydtiBb@^UM7*rq~bJ@P$-h5rODDnGT{ZnZ7Aov(0J}jPQ-A<k<jQ)vn3;l
z<USQFx<B;@pis-9rljaxlR=>OFkLxTn-olP{X-*SQpMt=lS#rz{LYBPZ((G7k~li!
zJmETU?pOkMRY=fjTXhgONIGj((||{`a}$r8h@+!JeLej~0D9i&?dUsm1pFjJT{0pc
z59(bFXTuY?PX{NoJ$fWOaxyjDY8fG+GQrJrlM_!!8iCFl>0>T8w(+EmgWZaw&<<Cq
zZUy0L$cjkdZsmkTjrAtr^cfwf?MshvxkqXjCMTbe03{xT(v$iX#%t1u<Hbfp1K)`d
z#)YNNmK~4hKFi@A3ktqBUKSA&E<|_HGLe3mb40f=^@NJiCm1~a@_{~eSlGE+Lun9(
zYiJm^lyINxOuQT($9*=a^m;<^!o0a8DCLHOZZiz$M0{0Yo`!un&oVONB!sVDX6T6w
z;YL;ELStORLW7FYw7i>EaFG+s-F8GcWgdT&FO-jU7d6p+1&ev+`i1uS>3^q%He%3X
z=o18UxkroBoj76Pn9-k-Nt9`X$Gx$8b*j)<?(;`3j9|=ui^B~~tzCGyguVjknxCGX
znE_dkMfBusAZy@kL`ZPRiL6io$l^v~NS`5+N=}I|q#t}VJR%8-;cf`NI6)6xnh$e1
z-wj_)M<ax8Rc%<HLm16+rx88oI3L{V*Ko!B;L;NuoHR6?8&8lDcX66bk{VBVWDCV9
z@sZJl$I$5L!<Nt(?hC<*k03xhReJ>JRlJZciY6H2b{gHu09r{OqetmI`}1!oGn&Jq
zXhDQT6U(9Nl609^k|ByF+%hC02`PFChA$T3KVh2irU~Qf<Y4R*B%_~Zu~QnTN@xW4
zNDe{?&FO{txhMNP{MSCwde8g%-oD~PLD(mZdlXS7O@t$eC1RLtqJZ#Bf^VQ*f^@Ac
zJtOnfsmwHSa^u+W=#`LI?uy`q2k7+Jsa`Jp4U0pL;Yk0-PH}p7V?&lSl;p1Ez@J8N
z2|_FbG`8Z2UDL%%Y%RwBN+i;>bg@Xvmn=u8A$wqWSRWk{$6XehaM`wKr>+MJxXj>;
zhZq$7$B9NCb1q)LVW9tRd@z@D;#debjKdehMMxx3Vi$=RPXP#}X*e4GK`Ks96-p$@
zQempNYjki_6OsUr8J<72^e81bip$_6-Cp`Xwm7L@H(18u21~9S4}$ZjeGu?n)5M2(
z#44H4xq(b9J1yK5g^FTeXmHS&5R%As=O`D_I5AxRU<ut5UYPq{91!~x;!o<=8x83}
zAu2=r8E#k@M<kI+(j|PcOPU1za|!L3(?l7Oaj=1b5#vZ(GS@vwI(8JM<ri|*VOjY9
zsnSXPhU=B_A;GyOeP2f6iEoJ{nJir*A;qpTaT2=zBAOp!A!ghI!zSZUc2E-6Jvh}6
z#)$?KIFV<aY<6ns|FWofy|>|h61;M7ia~Ga%u$|3<`$VMm6BqY43U5!=)Xv5SZZLz
zFw`CaT0`QRAP{Z<hZDppsmirlXqB9qntJR(zV|~3`VA%pbT=frAINv_^%(#r0i3H;
zl8h0ESSFT=Xy87h!7zO3ND9{_G(x})2szGEcH1jLWCqK>Eb58D!lQt-hI=t^znm+k
z!5deNqb34M0M*M7MFLa^fr)nk)G($uj20b{aoNFHwZ+x53!=lpjaDa_a4F+I$SP64
z-c$(>6^!SdM2Yu@?q$GI#5@ThM*bv3gc3($7C$gx(i;qtU@12+DDz_Y<@qH8nRxib
z@H|ZMKhB+`Up;Uth#M@@OM*C8yK`}351Ck;A%quk7iLJrLX6K1!+QNtEl0);3_Blj
zY>XQ1yg6B=9GeB&e}sf2{VG!fqH)6Q?>NDy&kBKt-W?nshVe<Xmx?D`21ZG}izpaq
zp<$d*#ZCDI`}{oIC%}G?i8Dea=~o)=#szV%v~j{>g~&QQp+k`=;*%K03uPH1sASoF
zqu$hZip%7Lgr2B3Lk3UINGFBg3pZ8oc}rK9A9FmJC-#!aM5&QPMk>bek-|)=Fv{uJ
za!9@FP<~Jb97^PoxZCrykAa>Xk2V!2MMAy%P#q^q$P>FsPs0DuB{kuGN_s|;fg!w7
zCY7X%F~T<tB!r}KS8#&!^JDKX&`{*m+9v`0UsMV8ZpOi~1fJMKB0ll=gP|C~pOj>Z
z2sl4!8r*BENpI{7=cI9$b1u&q^w5TBz^r==^Z0O1no^)-rrRY_xMYbm;hVjmbE3qu
zNO}aHmWF>!laV-d)?I_%)Ce#yPUJ96&@}%)py-I6qIWm-YfsT)lL{j~`TMsd*Igi#
zh!MElcfpH6JC;i`ghfaU2h~9`7}w#WLo-oR>nHw2)%QF%B<cN3`lj=Aq>xJZ2lss$
zLg#dxj3kjH3A1-0ZCooQ8Su9~V^FX1BT~Rc<zIRvm?9>~M{CEON!jS=@DtW2>OG8x
z4uu4~caJ}`@3SL(?y{r71hFe#D9*;*T})?btE5uokVGF42bDomuIrJ)IdYN*ymi2%
zv%FE7KxfW=5`f0eLaB%+_LL>=|Kt#tP7os?RJTZxP!>(NN<@HM4aXHp5GSt=0<%0w
z0!MYYW_m*Kh#X(%L<FRhM)1!NPZN2gSOy|JB(n6goS<lD;0LA>Y=|OUL>YighlHgW
zB1to{VpB5>nOrqG5j9ReTuUdSG>sq<5-}R$kT@Ey714;Eu<x@qm0>Z6&fIVmY>}yF
z#E}@bBg;h;lVluBhCdNOZ}f(~m=F=yE$Gzvc=SW^0Xeqv2`2I!qajA`X)x(q^Q8b?
z2GDF(&j208T|+q`DCdOGLVC<DLO@QF#G-{H&J>HU8(|_wagdnn%ISC%^?)}<j;?q@
zCw+88b)%`bE=K}>u5g0h(Hr`s5V2i2N9iIF5nY7wIzqizD#UnQhHxmpKFHI%qyz&r
z<mz}dAYDdA$YC$~@HD7E?`1F<yHwd?pn{`mXRGe;^)89QKn;zEBA$}Ky__P51gO3U
z8E}S}*7n>km|!1|<^bpD-3QU5<cP~K%0EvU6sUZ?m(g^;wJ1YGqe^9I=QZ6?dY2>`
zHH;$wst}pJ5XLP;ssQ1qLTJlmIeBd+!@$X45!dBNX7mUjh{Paia-$(hq@X6lZOwU!
z@G)k3PR-pYSon=&2$U{iTmeEUG#<4Ru~7uO%s_a7>y~B}=jYcLhU7<tT(^+KyCdWf
zIV>1*8w9DOag_m<W-yuVH5O!uAS^g9S!RAyFPQCS?2LGfjBu(%FcnK^XiA(Z6A==~
zODb$KXbQ_J`ddSVNCVnN2M42uNYG0fd4^?1Q(tRIW-3jr7Z{LLe%)Y-g?!t?;%FWq
z@C(xsOVcG7oM(&9Amc++NKdZO54M)5D%xVAQn;=N!U-mxk)+Z3+sn@gQ-QWjB9*0I
z(A+T^<7pbhX&OT#Ff11#@k03|!>~M(R#S;sDvrFZH=25`XtnVv;qh=y+@KInc<azf
z<4S|UXfpM+muHDmMSSrxsZ4gdu({8Ki0Ve`b{tABhzBNxgUV9zgp61UZORZ4gL=qb
z->4BK9p^-GR|W+KAI|S67aZpG8WRl5jf2K}S4+=0L6?J>=@(SDAh%?_>tGgg@l|k9
zghZO|L=|B?4z7Z=#DIQ?gdO%oMJ62=a944%YE&27hlaXD!3`)ud4`p!ZB6>yb@`d#
z0bfibr=2Nj=`$IHdRJ2hkUc^<Cjm?+KvE&*;z=|=p|AtPJpIa{ONWnzk>SWtgHQ1W
zm($+0H7LzEh>B!|(PZrHsJbXirGZy~xoM}AwcxHn44N+@gZAWbkH?8J=-etpD#A=0
z#wrns<MrcE%2lX~BEAYbems=JX)#6X-4jErF5V(d%kJN*DLgHL`~~9Wvh=jfJk3qL
z$smIUh=S=Vo)gYX1b6A&57{y1;Yp%X(6N-<`feDce(+*w#JAss9uMcXnWFTohbj)0
zl{DRHC_O6$odVkaA(!+^UHEQ>e&yiVATEa!b|NlSg1p-;EnO;2CZ11`WZ`k@!jkMf
z?ali`-Mko1$hSwrBRgsLMN8js=X^oMIRFX;;<Y%qEG=7xy7^4Sbx@jyT5UvrBqA~{
zg~s%dWPr?!RAOtS1Q~u)adF-$S$1jj>C}i2PS7DvQy(5}yxmq^av?Jfrxu94oq(Aa
z${Ks_oDzvesp5+@Jtku+G7fA#aHl0J0f{>8bEU9Tky1kZNGO%2<)1xu_H2eIRVGRl
zoH%y;Sm7nb#nVVAz(^bz8t8=0I9phKz0YVem?~)d5~gMpw-1;M3Hp^r<NZ7B)y3!1
zP*~An8Q{r+?~*czWhq#R%8F(=De(f3FcqB!fq0!Hg%=~sOh0wLNZWB2{Df3pjW^Q;
z;$_g{bJ{!DCPwcw(5T49$((f1s3|cKU+w?o+tY;G*)uX}w3v|ar7jX=^=lB8M29~9
zGP(L@?;xs|Xp`$usW2K1oz%2~=6f-E_krtYX&J7RB%a`W@yYJLeSDC^0Zu`|*>t{a
z4Y1>)qsZgO<4z{0ijm6#l}ucIT7JF^H&<$E>iLZ>%@uKCcTq}q2koZ@uAY^W5@hDG
zm1#+z?*8Q9mr?aG9PVktpE3-zR25vv!fZM{=Hrk4`1{`-Ja*zApK>HJQsN0tN+7AI
zR6@v4B}4#}N^`E^sNSX<7oZE7Svf_jwp0|&@!h86pvW^sp#A}P(cgWuqvdMHg)hE6
zd^jjK^T7Tu(-Agg=_d|}NEspJNnDXyCDL9$L(y<APQSs}qf#|q@40U<4b_Enf~v+E
zBRE-vXFpnr&_mMb1x?HjLeGs1UCW8QbwgKLQJ#@};q2+mjC3;1O_mlV=A{wRD6toO
zb733g9Iqz~q{+ny^IbMnbR<J>M0JJPt$s8{ztu2sF(N1kO)XBu?Xgk)^=r49YOme7
z*^Z~>@py1WHvJqVxauVlCY~<qG#JzM9#{K~cLl+ia2p3>^}Yt<jVtMi(Wo^~l@vCb
zdakx!tJmE!4e?Fu3@v)YP=gT416!%=G8!}Wo+fnNB4Xn)2&M*x<MhwN0~)#$kAwsr
zITVlcbvHJ&R#&!yYQ27-<5sIF7ga>sc`a-qOYbpM%E=T*g7ksmME%po-rShuoDlv!
zR7l3|dQ3sHtGo0A22-E@Rz}(>MI-$4S=9C-Zfcr{9&n=sOQhpse{)puiEb$FM#ErZ
zXmW(~LRkTxgEHMYUqtVGPVY5X6TAzFZydI|Ka`C0WT@kKa0wW=?s8;E;x`9QL<r&|
z<NLZh^d^~}7)a2&8q&W+_AR8(r5{Pr`w!}lhE$sLy`AB~+_2D)FF*bC2tO&WzDG|x
zwch)NSiPI6mHSQ(I$U&~yKJb6cQp4*{FyL5+L7GDpMCOqXiR!xWA}hTgr-4bQOErl
zxbCdaFx?&)N!5Fqx(kFs>2yq46B-)y`6r)z^IhWULfxJF0P`|7rAm}n`;b_;8oN$l
zxZQ6U!WzR+pX$Cg6y^8oP|l%$?ECa^WKw2+&CUKK{W9Y<xDC<8x_dmF<?OE&STkr4
z>OBmD219ai9Ctl-`Qnqk{|GuMO3SHgzdK+s48-YI$st{#Fk5>^Z_NJ&gYAJ)3F1Ii
za02%!^zRNI*c*Ksb+tIFP<OreJ_%NVBvT!{kf$*9RFy6i(?8cS!!Uw}sfm-sWrlLU
ztCL3@jufY!Op<05Xj;4a1`t0{0-r&=@kE1Gl9myPp>wZcP=uWdgOX99A5G^U`R3r4
z2mbzfNHii|`nkf&`u3aM{Ux*`$v{w!i^PH8ND=<}6M|E@YlF})3k^B^<)?c;Nz6<`
zs*>t1mYq7A9V>R1WS)|WvBUjAxL;R1<t>A$tqJ)mqx`~|%=9$WRAQkhTI`CB2lA&R
z96&#4OmWhZ!Ck{Sc5SftM*Zdd3t5@z=~4m8*K~Md8AjRzCSx+vg}c1qbncqqa^wXF
zruus~uIlo^5O*}3q+;|d4TC1cf79upG_D&Ox5znH;fq6M@NUS{MH!fA4-5_^K`S~_
zxf}^{O7gZT2GYHqhV)pPfsyt-<7g7H9sT(b3D-5~G|Uv;UeWoo(->*@8paHXSbRS>
z6bY$oaP?rE-nAmD1{3Wr!ypXA!(dbiLO3Zja1R`un_;x-0w&sh&V?R>x3c(Q5S43C
z8g_LXF&X<C2@iONQ9UAxr_tC`pA^bTpjWq=1bTM^IzN3a1Zi{~p}zoy>QR2jiQ~G3
zMd=3l^#2c=2B~)R2=%5B1HR#I98w>Jj=F{B-Z7D|J;U&@(bUz6-i*mGGNgw;a~;eN
zj^?fkPK8r6=#2m#9P1*M(JA`q5R`>ZM|)yJqPQ!*YZ@|`hJkCMdWaP|cpp8Qv~Z<7
zgPY#ea#9ewEWY~TDD5?j_wjh(Fnq~?evnT3*Nlwal|&pn9ua?M*kBwO879^bpyt9~
z^#gj-tqjPc>S22~!FwkfRUi2d4<jU{K<64sC<T$kt~i2>)DZ6eY!(a8eR|=~SO5Ok
z{U1jWEB5dI%bs0?vaqPAgjeidQcfhNlOhiZa;6kC%ZMN{#4Vg4o{fKzcmdPfO#y5+
z4G<8>d}+^~y?Zdp`p53Qd-r@<Tv}dQ$}e%hT!s`;fN6#FG@3ZeLL)d4$VFdzDRzg;
zOFIA%7{Cq$z%q6qd(&U`?c2Q<W9!{}_U_+ZRZ@;Cp!`zz%2Gm{&X>?J2;~KjSRNS?
z5gHM)i`emU?29fh(^UK$2y@rRd-m<2LH6(2_tBNhr6nb${BrlI5<-&Bm!f%q5<d+W
zbiP823Jnb=cJN-}zsP&(e}l08w0F<`-8^E&uHF0g{w=Y*yrir|Q0#uWgrHlYULp~l
zJ*P&JgbOzuMgP%IV#iK$2l=AMju&6tvGd<3T8?9}-r2o(-=2Sv#L7K;_kR4*<truS
z#br8y(v>21@s(bYr%>6^%`?J<6M?}(IA<4%)g3Rrv?CBR>nA6paRP`vAMe?-57Y6#
z;b?m<Rg{#Kr4nn!>Ed*{_~L{)PsGLI7qR%c9~S#K7lVfVd-1(JAMgEJWJy_h$z?1?
zDU2$B2ttz(eYXfyGS0<Ye^~78T)cnJ-jDybi}(;f{#sg6dbw0ctV@Bj!&*)R%7Bot
z@Q_`Yr_wwB=i&{-?vM9Er9kukk7~+FDoe2V6k4)kA`-n>Y#biS4f;P8ZzUkdkM|LK
z_V3yK>&l9fEA(O^x)%}x9THlZBDfKu|L5XQ_Uzlg`)_-2cA~Vj<VvxScnbM`x|j$#
z3T|;Rgbe2%!l3(mx}na2Y)sPw!S3gXUEtO3y?ge2yqhR3F1=ig2R%sfnso7d;58`6
zABZA9tR~qm&cy*(jq&wvNDHR0doNL0TwY#$DHV;}ba93x2$Kevh~OXAd2C~2SAYD&
z7WywFv2HhZ-MeSs?t_G~<Z@|wu`Zcd1#Y8I3OdT=gm6Mh&I)cg=MX_WA$;{aEKtl2
zz~fMW2*9NMAT(u<%g4KcT6DSOQaM?)yr7_{s3M0zfFP4uNPeMqq9pXNxuGEu-=c=W
zOz;U{3E>$K2*YLt@NgzP*WmeA9A@v{zmSAadC8@c^3t-h^768>N*H&VREA$jT0PUm
z-*Q94p+hS}AQV`+leiLs2qf9OKpvae3ZReS9QN%-8{WO>N?A#{;Ig}>m=I_3(^jCv
zArqeoK8nEuAV3y<Kqo{g;Sv}a^_aUJyP!?`cSBJx7Xv_0>Rtu^kS35U7pF@lhoB7f
z=yhcTH#{utD82l}t)+z9b`~2%xWbw+i2v*UefxHyq1;teUQ&LE=X4I@48Cjy-D4n-
z5FHIU1V6?-8g`6JBfPM^obZH)1V7fI&-=T-?cVbm`dzw$LVl6^rAtI=8eilhm4VaT
z5Dc8rTRs#L8WKW~+ldz*kLE@H8qJf`8XF$2wpr+FcbH?%Q6sn>h)Xq#i_?n~Z*jH5
zf#)`y;~@ONX#OdD<XBv!9OKt+4~~qE4USCn?JI1KMaP)FNv+diFz-XJrtsS~f3HD%
zO-~1;;TgM~XLGk(?>E+ywQibfou-TMS#aPfqQ#{}>*$S^YPF`esdH!scS}h;khF60
zF0P4JYpd(3Yw9|QHF!n=-&|T8>$zU9X>8zWJai4W@E8gY-(5A|)7hZb;M=&fs%{63
z{kXfdxHK_tXspv}yC=qUq{dyXuF+cs`1<Pk*+-o!wML`W(oaKR5}!SIy{XH-WEpPi
z>bcQ%bAY~Yq#`vQAgOcQ!3X7@b~~OeY*ynRwc1+xaVo;w+}&DJ-@i1~QLnD4$BmWy
z)FO3H#n-s0)i)giFvU(!>#XySI#fK3ySBPkqs9Xp?`&ZOvNrJoJpwkfw=kb8BvwU^
z-fY&XZ(0_v<AMjN@lvasCzq(9N`c0u>Q<C}<>K7v0)T9`iB_Pmsm2BRS2off0j3oQ
zO?HKa!7es$#q@nz-=@Pw5pC}+HD9w_jeEF*)Y5dP8U(0r;yxYE?rG6?Xj>=kq;-vL
z?v@U0SJ8|}eU%X#=)z|60thyVzih(Po$77T@-;5i_+y2-`?jk35k$~S9|rgSXx4jK
z2WnMS*YIq%b**h~d`PEit;Xh>tY7-E$$<527Q#6^2&fDU*u-YD0=HfuTo<|<6ErK;
zx`qBKP2+>5rR(6do2p|DPe=3Z9uHcZdh9lVb)(&+8);F|LMr<E#sC(}m$lxTh2g0G
z20W<r8fNWtcj^)}tJJDa%AnFzwmT?2fLv9EB|a!~v(3+%NgL0)+A_T`Y3{0`#jVp^
zzCyTKx@!_N8`MpsR!f_zx%E1=G+&EPRU=qPs|Rj>*(U0)joD{=br6SZb&U#>b<1sC
zf@Y&y-LNp#1Xa5}y0k>~R%tX<gOmVt)5})aCa+c3^xLds4fHylPK*4M8mK*~S)o&@
zCKvkf+)?Wyx#VfRR;jKsV%?MNEapLN_3dd#Cs^xRtwVz0PEEDytK&4QRJ{+HdhtZj
zJypvCid^!d7H(HxfucPom1kXMo3B%AQAk2FuoS+_gPOb1dq*9sSzXlzm4=euo*%zE
zNb#0DDf`&1v8Bbw^zyCC%#A9Ix_Y3KUaQloQwUeqW_CI@U$l%E<1}lky5^}R>H&`1
zYQ)tcJeRa+=V5F5xL6LBsPV{67u3T|Tcf?4NVso?q*)k`<6$a$hbl(1PTggtz^P$u
zR5Lqt%|Y>(1Ww|4knu9}pr)fieZLmE>Z+|%#}jUwHn9W9K*9&(@W8E%z~{C@;;{0?
z2ih1Y$yMBzTv|yPt2Am|f1MgPGWkpVCvZ_#PfPD~?~U>5C?_W{|K1uHNXI^CU<~0F
z2n8eqT-Y#`6`NRrgaJ>!SzFaHP?>9B&PqxTX=-tqt?Mx=%_W{K8mQKJbAP1@s5P2e
z_<7(22J+c10sa_SvS=?J$QA_RN>QK}d!wAPEgHalS9Ozxhv&E^1aJp+b51%RBgR?X
zY!7Z-=V?~MI<@sGf}mr3V4wi;1oy$*w&3q1djmiN5btQ}1N`3|bJ#5n>Nw3ZZByS8
zWHP1(uBu}Jg!7&9<k{k_>+mE)GZ>BcOK7UA!~EERacm+G&;0<NWN$>U2?)fUXm9V0
zi~v^P^(C9bq)vdlsqV(Ltrm5(#<lq&`1?KJN$Xns0KCIBV~t8vbCJag42*sp^IT!c
zB-<Ud`EH2C<7;qpmKJp)Tx_?!gXXBZ9cuo)U`|?}vf1HphpZD_@cYGhgq`g?Egoxh
zc^n3r8zJv|Hp=E0PsX|0=b`%Usv&634<PHOOjp&a9y}O*qor9*uzYbG9vwpQoEs6~
z45nK#jK#Gno4rew1h%)%KxsVG>e`!&|5CaHtBa+T)GSxE;;D`4p8L0yBtdL}G$8<j
z&n5!s=`-E};F!=Q(?T4I27^?Zd-w0qkGnor{RFF*?H)W6L`+(j%`ceeK|lahf`CSN
zAn{=b_yJ}C_st#psuYcjRwsZ1)z#h^U+mX(%r5bsnBYmP>wFU(QjB){)VQ6b=$wLu
zjDYs+?30A2jlzo#`l@7bmz{>Ydb^{o5?rB8koFw3fQ+C+)nS|KXt7X}FEgP;0l2T|
zv7XlZ?HDHG0w)5;HI-DeR&}kby5B*~)Lm88&f_x5V>gg&U3EhRAzQDjG&9}1z{tlr
zK1RNvA%KYiGX1c-V?-^}tkrhubPLqN!*SdFZp*(QlB}!ja1yGkvo+N^*-scG+XD$F
z904%aKx^0-8(S&T>e8V~(=4ynXuBWWxwW+DWCMMUB&)X#imNtG)Rld<1qmZ0%Ow2R
zz8DlkRakL^CwI<awLj8k(ji$}JLuF=dT9#c&dl)W{dV!&&?+`_%kw^vCku`QSI?1K
zE#I=q_XD%d>gcV?)VSAHHH~9wl#_d@R@^6>Ec^UfdPDygk5(oV?sTwv4Wr=gw2k{j
z;@p<2>U52Vs{3x^4E1=Wz=oAeOZh)V0>);tf6Kd|NFh96Y;4*YEQj<NRS?@$HP)&7
z7w6_6uV_b<lh~*1)ZK5lIyZdztJFMgc%XkW;mJg)=`?yT*2{x-D|nd()vL9TOXOk<
zRtlUPq9(*YqR+_K@=?^OGObn>>=(#NBHXYsa@l2pK3_SkR{Nkzrtwr))|erkIOp0#
ztG|sh7yplw+<>2lN)#$22z6Rw4We@bEHHr2UIC2Nc&6J5eU-wD>+km45}vGHOF49V
zHZyTn|JS}qJX@jVX<QKB2)7N;F-&e5v@={mzdvJ!2Dk?Rx2k)KiwTZsP@Z62M=f6c
z{8_AL`MvxV?p<l<l%h@px5NzCjI<R69m!q|X$2C*>S7vVKqZ0oRt_%3J7NG42me7W
zwtoFQBLEmLe15LFq)JPBV=bP4b!D?RVJ=I%%++*l3Ww)g1Q7c;jnQhb#W`Yu5lds(
z8ot~P4zm2V|KpVU-as`R6<$Ro03meCVC`0{rkyQ8+YUnog>_wMRErQFIw6L5pu|`=
zIH>u`zi)F=vi;Ljs@qf6HiS7>t)>dD8~fvCztbt9YVrVHK+6(f?AKIceCOd}iaiPu
z#Tzj-=fkH#3e)eU1E*A7cud^fNc&AK?%@Ik`$LO2p+Z6B<O$9M1h!H<t4k{lqkgcM
zY>!6#-C(Ddx)a`krLh_QuYHrH?wX(zF_#vE9~Z3#<5fC(Y^LK25Z1!Y(VX`B;5OK^
zP>+MTsdclC#K%}m)O6KXFVi)JKkKc~oa#GMHf(_%fu++T&~2xy$v}wxF?FkHX0dkg
zcBv@Z_k&ASosN_C7!a`0K{>jG??3Gn`KI6U6}9(fXfRjXR88CyG`jl2*}S0k>@5T>
zz2%Je+LZ?~VIWmPP<C|=a*Mb5;NdrFr0}0Rkw>!e?yEhiB~AJ{n=Oh)Ly&N((Lx8d
z2LKPX97tn9dL(<9Z-L`(Q*^}G3<y&{a)@l+z?rBz{E8342FowtEzU)4=a|hA3*k&?
zom3$h(NWD=UV0#14Pd|*t_<9On+}85i^Ce*n^K?(ZPrwq4=SPQnuwn<yrDP@ziqot
zme%)7+U@a9qT)3Ko@XbVv5C!wb|L8U*(-5p-0qmZ-64!_sf|ZW(VzgSe&k59`Jx1x
z&>el7<&8HsG5rGH4mqc4Gt4_|iB?yqR(NXF8q}--0_Zk+XDl9j4b+^n!m(TRce76@
zYN9kNRk#`6JE3o=syBme{`MuXts>;7Od6Es$9UxvQCa<+aht=IY;|?qs*2G7)j%-W
zbkqunb>=Z}+5}~WzIr*P)WyjnRW(nuM%^`H>Yi{oW;;}4cB?hY=8wZ$@1FnXEB;Ps
zfBzRgidJenjPnk=5X`&*Yw)PA{~eI&kj#ejUhd3d(15T&hgUA@ORi)k7NAH`-Q=MN
z@pL>=b<%HUu`=<zKQ&$#_lIYFe4sn%9c=&LM2_le?-Xh;G05(&5@<ZD>$Io;M#~%E
zOw3DZJq%#4rto$N>nhuPy}GC<Gpn*%Gj5NzZfen*x@UnpcPsngk6GS+bYSuG-}2|!
z+{&izajU~7gUC8iHh5`k>x#)=2m0~YZcs@-j7&i_cJTSb*?~oB$!71?R+p6)WEE&9
z>`_*>!=xED4c(D*e)+VMSeC!vrZ+x4xaofcjkCB#kYQcrK)F;`cPVx64u5|r5>NvI
zafO(6^#l?8_0M)mJ7+0-bF;Rp?2=MGLT4>_pHZOIRFLxRyHERg`$JFt{a8ER50(|_
z%HHttfBh)3Ux&=P60bfosFbn0UuF717w9aE<u4>i_FLZ8o9Yv9J@e)f<>e-nqpC{v
z-7$ylQI9qw`1d;leS8=J0Zf0tfF1w+RbuXyy6Xee^C$e+OmFrd&X{oFEZOQ}qu;1B
z-&etX_c?z*Cgg{=nviT_yM!PE-rjmNIV(45-wVE5f3hoBcvi+2h4261$Imdlz5Q4L
zEL>#U@}oZ<NzS=k({^`s-tMq1Hor_u(a-0l!@7BDF~RDyv^3LSo%YSIxA}n-e8lu;
zZF?`|oeaWv6Kf00Z}Us9??is;=gZo}W;pMWf^X|_4f^G`_mZaztLoZ&MrLpY9IE1G
zpE>NuXD?%UGk-1V1M$&TU)#L=Z|`hlc>6$d0eCx5z;i$O=-a5%n$yIJLfL0;J;U(v
z^Yh2ed)&EW(y=h$g+Cv~XyTmq{<J-Ta;@LNieh{Edq01mYz&UxyYJ(TxYp$F=NGW;
zN59?|EJ%@_&Q)no6EC#3)|8z+$@%od-@N_PS9k7w{mozf_NP0xzz_HbY<~KMop1d2
z?t_O;#H2{Fv-1jg`EG@0-(keC-F?@;Q{#xX`d9`vUqAcYkKg}CM9O((dU{q)VeQye
zjSAP2afQotyS2WWeltr>Vl<y5lV!s2uMht7qt~~+5XkV|@YK3>Pd&AM{e}(R0v{K@
zfGD;Xu7duOr+!G2>v&j`eXi(Abwm3dQ`2RwrezL62V>P2=<6`Hh3RqnLCuZ=^jR-m
zZ@hTsd{wz3k)O=};@bm%{ouWyz5eQp+qQ1m!rvrdFAMP7{D;I#SMSfpA;L`GzCAED
zjTde#j^Uj#rlkk<8cYW9ego1<piSeB%JjC1)RNUKP3`1^d)Ml7h4SWF`kiEjO59t<
z12|G?=KKC65(K|zcgrf<hLE4f(`y|rc2wuf9Q{={toEDgD$EWoR19h(OLsM?dL9M2
zZWb?P85_HspW8J%Icyy0qu*fDaO2wb>YYqD$`?PCsIQON?CDmIhi3bHOZ8d86@8mn
zXu`CyvxP2gZ}#^+uul&Tcb%18L-&a1SZ9BPM~`!T82&#xlB4T-WVdHpJ@N9UiQ2y`
zr)QCtr8uh}oRM{|r3w!9N;7ijDZSyEjCXmKb}*|arXSh<=>7Jwg1WoY4qKMh-C;Vt
znROQV?P8+!Y1=|iQ;h~K0;|<#pBflwPW>vq+YHNIaNPQH{G}HCoWpk3>S5^=p`lCf
zXIn}{1#dS$Y(Pp*hfdq#NNJ5c?jND@Tm8L`F?G8MM>=QqvOiQ&a2{L0CF^2})x$AS
ztLn#sr)-8gO;dhuk>JDM9Fot@j9MKR;e~HrK?K@L@326nSQ+-A8nqFxq?kstM_aAR
zKX)$alXv!=>ql2fYIS`uk1*(Fo6_iV2v;0F1yV8A>w0cA-$bgmrdm^SK3ja~=WiUy
z*Du)N<UN-PzG#@Y(=pg_s~i(n%VH`jqS@Yd)s0$p6siPmMP61K`RARlAC&i^3QD#v
zTfFgf;J!<fHaiavccB(D)>#`?dD`&i1vEcmG*5x9qH`I^-~RI1S3Ws&a~f)rfb!<i
zUSA*9@A1q&V$b4jCB~8#yG>+u$Lj#vP!6xJ)>g>RNTWV`XX~~P;;IK15a431s~u<I
z=>2@(7WG>e@0ViEiu+*Tib<8A@zz$WOLH=n+1&S@3H<TF%;w<*D_jcS>gKrj7G1gc
zd%wzWRTt1oP9f+Z8BfwM(eE>~7i5W#|LFzhmY;o=TG>60R~<O)cFR$?ArGd{b6c4H
z7;WOs2O@B#PnDu!=sFv&6z5zxeUiKP=i3>~?Z5hG^hNDW{p9865Xn7QsO|%w!rXMh
z?m*l$x2RGz{zwVHhBC#u3`us%kw5-uv#$^H+1GyY{<8rw&H%ER{=UPWitk@Vp1&Rq
z&&r~P>-DO_)6rl5`R$!ZDWlQ6LqCw^B*vaX6Qj=5tf`r@&yL-{*;=nI&&&836Eq{Z
zArxWGitb7>#6MS$LT9a74{Jc%(lYbV(5KwutOk+A;{A=TW;?zI)vBV4lMYK1L~eI1
zhIrG-n@ivp%vmiCDv4I4HZ4j3b=?g7lKbMl=b4+yKulTjk~{iS>ut3JdDKnloX3*j
z+5McKV#>OJhI1l5H1}xf40qTbWA(7jpT00g=`d%7$<iO%Obv8qSUU$?HwSh5DyFRT
z^~FS3;moc2YSimhomlCHmvvys>NetB=WesjkF_S!1dT&w-4HBhac9_Ur$Iez_W9mg
zD3Y7DX~Zr7xwwsh>8d*l2nzMJ8(Ji~SbW+u3ulGv)_C*a7_JOM9jQfB@Ah7FP27+a
z!W2Dlj}n*ny3l~5V>w0ldFGm64=ZXOpb>!E-*|Ij58YfxtVeeoX@sl&VT(Y!tYs7_
z6<)|f-AC_%f@)i=yhW5vc<<oNhK9!4=9&N3-nBqAb*1YalJF4lfrGZi$0!O(AfTeC
z1@lgzc5JmB+S^fkt*v#{dOPDdj(6H=8==~1wH45+9qp~&b~$QWTdi1u0787#_f&zZ
z)mE`q1$=>myw3ghJ_#ftK!_)<xogeOS~>gdv-kh~``?fA@BQzyX}Sxm3m(<$upt|8
z@FGqZxqj^MvE%1Cu9{@1{oql}P~%&YA~Sgx&Qv%LAB<Nj6Vk~drC2Uk%HT$YCopCJ
z88_abnqXE*AJ*!KT5Hxxv^t#@9@)TM1-S)QLbBJqoh@4P0T?J_W8-4rNQDD2MkZIt
zFa?NSBo`~yYVBJZiB3a|hIGBepf{u{;6}yTb}@|9<*VM#!VZ39kBwEvBYPnAm&oM`
z`7n4uWpa5;v{9wiX|?GZiC#nW#zh9H(P&CB81P*&muD@oTDI6^U`&*4OcV;aLV;A#
zF>*zeMyJuJGc*zdc%-Bo%?3Y{!2nO}4xGPKSddTN$Bs{PmNXOFx>jdWX&e|)DP)jL
zrjX0VYP1>@$4{>#>4pHK-e5EsQuJ_5=M}?YV6~8}56qc?S=rbnk(IeJyNy#2b>t|h
zUZUYgRnmfK^ct;Nug*vl5t5M>qBa?gU}qwP%P+wKcX7d%Hy3S@EXR)N<>oEyOJkc<
zjMp&eNDf{>GL$b`hw^FR4mFqzCOsj!mLdx+SRPg%Gr?dT+@X${6Ht~I88TBQ32LoY
zlbT6_bS9JqnRi>v`F<8FX51`uR$Jy^T8eS;s2>?2X1TvYo~(sxVN4oADQ@1L0(c|}
zODtR99;3hxdBbVmXaYn<BthlGM5k&rdM&)6lk`TDfgG?D?as@!;B>H}4l)lTv@R6{
zC6-5`#QF)yoNAz%alWxNza-ykvF3G{8DhuCV!=R0G-@axu2h;?Pmrw;expK5asE#D
zz-)4xm7;hhK4qp25}s5UM2@B$NRmdQgV=p&W$BJvcEj+%k#^8_Nn2(~9GWw72f_`m
z97iCsS>6NfCz_b3wv%nzR-G)w(hAHWi^#gGEw@xgz30@Qnm_}X+DD}$yDWjELEG91
z>i0>2UY}wxjw-PtYd-3$2NfPWmwBCss$=6IcXX5j@bO^ammpV|<)Rq5ne<WXApZo_
z!^x`TWPLKx>a}M50F_RM)~B0o!5L}=d$F!<DJm@3ZDaTq5u-HNW*`PvC0aWz4*enM
z*XR&t4bho3fqFI3oAgG5-fRejZq!HMC4vJh7){BV+sY~goRo3#_*2Hkj=&^bjt-_!
zq%MM|OOIBpOV^8y8e+&u>483KG>j;+T1xPNcrY|wf-<rq!l4=$pP}>v4|vGN$z#x*
z(up`#rK5ul?d8S{Ly+EN)b6z46zg1MFR{??ugUwXR%T^w!xDfJ+a1KQ@yd8CpGMQM
z0_!npYOOj$E75C-!L-;IU?96JR%`xg_)_!t7Vpa=nQK;Mz6oFIaEw;5aq;*_E^7F2
z*%IQXPFAH)5GSii@>@D$gkJWb`{lhw{CDD-OO~3mN$W?>_9M#4zv=&_2E=`aj(&9B
zdmBJ&-k6poe<1Mw6{FY4-z{37f8^xZY8?53C5&4a%a2<>&zx%<<nMbSA|7A7{rI`7
zu*;mNx&Fq*vXYN7Ux=0ZP=;QncZyC|!6I@`%{ABYJ>#?pA4xE1_7~;VFsVAHnsWuO
z#rZ&jA@d6_u=?&C9#5OD9DaMGz!HSNTy&|+qPIy_b<{jeKnVgL+g8yHbSlTyl35`_
zNf7z|Y0os@rU2B!`Trfm1d#NKkdIM6GG)pXteGeWgmzad$>ahY&F2Nx9(i&6Q}f<k
z|8?=<f0llK=FFMX<tL6>zx-f<F`yeV^j%(!IfZw0<I1&Lm|bHnl~D^e3^g>>*HnHJ
z@38<;Iknzl+aPfq?cFOUayR^K{_|-Q4O*>X;#0H#x;k^5hbl-q+Tm#9nOJqj&l{By
z{la>N1O@m@B@(HBK#$O{zTut?lM{dFX0vZERORzhzxLY3bi)IByuw~}?fH5d(^56d
z10=UeCSCLtq|<cOGu?9UGE#Tp-b>E}t9rUiEH??_zZVFe<1T95@JCc<t>D2uUU&O;
zwb61is=WYVX>-NxW9=~kpSoBW?U;5aqIp+OJ7fMTP0aYV>Urllr@Q$$-A_Qe`;H^`
z!Lznsy47^9zhhir&%V7wTMfA6N&&$st6P)oU_BrCu<RNg)!wpKzjVZ`%&kQSjvm^Z
zzjgH+|Ed0UK%cz=!*kq;2dMy}_`h1O-ewK^I+hP81x?*vajh0>RM`E6^W$$;Up`xu
z*C054Q#Q`2n!e@2O?$iggl~Pyuj~fpjh{-7964Hk?w1?b(keXNoVlWmx7Zo)1`R9y
zxRcDZsNZd#GJ5y`IGYAW#*RNOJUTcYdcD+v?{4-6>~o0q^8DZQ3k?wS?Lp+1Ds0=g
zgVsMeWL9D8>vTFR{sLATYaeWF_=pY2x91S}`gc)KzsEkl&bXF`F?`m|_J*^t{bypU
zi~{55p~d+p!)*#$$}5`&Tb{Bbv#WRqjr0Z8j3&fX$}EcNQ9Nb|aDH(vev+?>&v9!-
zbFoL6(kh!xG6~@m&aZ*Lvjjf*x$=pvW!FIyjC!8pxN^tFM;^esy055(jVpJraXGCf
z*`GM>I(9j<(X)cR^*boV)&E$)O>V^t!^YL$w%#GIt%GbrMeiRgkrxNIAwJ&7;p2){
z8dW6}kb6G=)8CIsV_V?}k*whEJZ!rkH0!I&K34xr6sooM=}nlG+eK#s{wW_<z%%7z
z`kX>Klj6swxp)f~)zYmrzNYKx2~MSlD1Mr&S6?Z-bI&s!k3?^rI(??nMFQ~)Tpf>o
zNqD=*RjOyDVTpR@#Sw)KwIXBqLf7Pi-!CIX9BO*5cUv2XI>`!O^<Gqf=p(`LTp4~%
zfhD>xCfEfCDdL-5UZ99Pdc`%l;P?NOP+;+smtV07Fo%_X?X;>@(r2oC-vci7Ww?@+
zb<3C}0f%T2*Y=Q_&g`fbytM5Z)(+vz9eMpOvq6T?^_Bg7pprk`mmb(Wg2xx_<?hl6
zSUgnT{5S$5dBdo^!~)0I8(sm&oyQC;68}2y0(wKQ{l2ezRuU6m{*be<{9ePbe-IWB
z2Bam-5Er<t?$}M_@l$DS-$HmA)(3hKIa^~a_l@Q6BnFS~5A|~#zJVYj7<`jSHoZMR
z=|14|`d>`(xCK-ffuC~U_B~B+UzYpM_NDw8B~i?IthUdh+kqtMO*z=M##Qtje9M-|
zcb6tOCj#GL{OfH^fsmAP8nK%6_5Ju;^AH(Z%;1}6v(mF4CAEyxNZ|MEi7$UHjQId-
z!l1er+TckL<^e4#LU_r1iC;%+8uZg64)FK@9ea3I{#W#R{-N)GH{rtxM+(Z}xF<F1
zFeu#*I`V`gHVgOOg#V^NUcMjbhi3L@2M;s!m5LWbls+E-sy99AG~aZPByz=po%C(&
zOFHjtdOD!fVDu7Szl1lrAiCbZ;LE~;cUR7cp|<YnBQb6dcP+5Xy@<UReT2?&#V%7H
z7j)eEwg>*9J)ld>pc_6bJU+m&8NV)=PPdzVh`-qh$^A`=5AD!H_~5Q9lClYjPn1x{
zZD(_y&P8~J*+xDPpm|>Q`B(XM*UlseKI{W*%JP6eiI{;8dAg#feKQejWG`J09N+$h
zK2g9o9@IUrnhBVb2~IVMUFp5;`AXsYV6&44uno#>s6VsE1y#ZEFt<n@L<&9;yQS*@
zse|Zuh3x|^uRA*#IzVlU4zUQ@1e!n5^zgX(Gbx6h5w!Zb`W;hxb@}pdD?cG>9F!p>
zwbbWCIr8r^{1pD3wqVqF`^4MhgSy0YN-&F`DBima&GnUApXTTKP5@oqyd=nMpz-!G
zv$88F_s?hW&eD6(Ky&@&%}?MnaxZD}#d|Xb>JR_Lw#~px0mdD>2bWAs&1>D>?&m7!
zz|Xy26Kne6iEfU1uHf|Ee_^F}V{x5T;f`PT<C1VNMYo$^<1Y)mZ+0#+2LHain?13i
za)YCj(S=|4Z-*_v&<&fYp*r`mZjJRG2sCQ-`5MpqVtrNNvmqXE>~4~gOH0|lrB0hd
zQ_bbAX}&<G(hhiW+l3qT9k<2C+N%fO87DxW>rge4_&<Dc>gp|gH0eUk4Yk!jTbE7j
zmX+5bOlJ>7gD1WD*`c4VRNttny#u;kbMso|FDG|pKR43Bt=}030iN}a)=m54;uRmQ
z|M=t8%U*wRQo=*jg)SgTBoe*{i4khOftL(n3L|Qbw*?$)oloOO^V!Mqu?J%INK&aH
zSvsX^0!U>uokVdeRZ<WM>L(x1$48LSG@zO=YH)8mMwm*ag0(S(rJ%X-T<!s=Lz3tl
zck19N27OKI_)gt+UK^n&?%eslPwyFd7jN?MuaKiGaJ#x})kxdI=4`}ZZGUkcd&2*0
zbYSj5PzHZMHcOX>Rhvp2@xgDBQDX>NADrFR1?Q7Cm%+IdCZyjt=o`6WvedDQZ1n)t
zA&CsT^0L@{F229r0#0e{ZV7c({+=?%x8<OQKax^DKzrm_wz@Z%Zeh6@Mc&0Rj(4e~
zxHf#|z}$nNGybzE^*Xw6;g7YMie&6iYOE;U$!H2$L$5izj?i_pHB<r95#YGFWF~z8
zx89xHoSi#g^#If%i4*?0Vc3X$np!eSSi~C^g8Pv%fnr7?!e1p}3t*>I4n)u37tr#=
ze)P$HcshFH$~PYegwBIb_&)_~M2YDpoq6!m%qYN?ev_N*UqY^_g!KCvA77vX_?6au
z!1uxibH<lGgKB&OE`nY7up|*avhUojqWl*4nS@Ni<s{O-%tWO4E_;vNh~6&B89yij
zP0A`JgRwziYa9&Hf3(Yh2stVM-$btCxSFYG4CnzMLGVM#4k&aO`5n|ha}3J2+#P;@
zNLNgH{=jhugZHrXLgDu$vjIATd<u{CKrMoQbB7;}ezA|}&{tN3FGHfaG|@emKc7|L
zuQ3FD)*me^ha`dZYIpbpp!5Q=3Xe}&>7cZSHWDxKqfwV1G7xC;$50}Md+yw9oGCPi
z#9(OJ;cgXkG{(%0!4#RN8hCoW#izSM*kZNxI}``=<~c1mR{)h7O~No3*c{i+TA+3G
zY9d-^a}&<q#0^eZ_ZGh|RQ)aW(hY**tr$FH{9-_%l!$j?FnGHCVoG0h&PH82I{!Wk
zj;ez)N>Bk_>R)0-FM9H!Uk#d%f~<uc(%np{bi=?)7+nsU<dH*%M@2>TA%mkLhYlS&
zJZ>SX>{JBSjxWXZ>)$^T^-zOZx0m<?<=}pN@3jxnry8CCC|OAHYvU1$C`0Jyyv-ZG
z+_r7|WU_ku=8gZ@xb<6jE*t+6$HoAfp0>k3dyhW?6=0)-;gsJ<I{(<u#=Tjn6FdEx
zoV5qJ=Dp$njhi_%Ihh14p#r=uzi561Z9*98PyL4R3&K7b&dh=VOU|K~Jhi?5o3HIt
z1pux#mL&?mfC?Bv4q#V_7vnEpwR1`?wr~wUeEle6N64#Xr6+Am$o|td9WG^5_8j?!
z4}G_LsgevlR$7+Bbc6~hD=nK&HkXzbhuAoD^Aa(2-G-H^*qX%Cuf`@Ov>LgAL&qdi
q8v6i=V}ZoDen=KNHgPQf-os=C;UPqkm>6%j3+<f#ef^iw!2ba<E9Vpd

diff --git a/resources/uvcdat.jpg b/resources/uvcdat.jpg
deleted file mode 100644
index 16835ba2c63dc69a4f985214044de61314878456..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 94795
zcmd42cT`hB)Gr#O3y9J?Dpjh0NGCSBg475I5fEuor3;CG0#c<*jr2~WOO5mnQX`#&
z-a`+NgcrZ>yZ5ej|9S6y>#p_QoXk3NR_4r^J$q))-oL%)>hIMO;HI{^mO6lhgan{T
zyZ~1yz$Z0dhj#$Lix&V9002M*ASYn}kP=HI#0x;e0U-a6G60}W!udbT1|-7&Rp%N2
z5as|N`>#4?#QVQQZ28~Y|8>9i<J$kIK`QWr^naAEx&NzrH4AtOxJE+q?{|&(BPLRE
z(tnA9jEt0=l7f<wl7fPQl8T0!lIl7Y1qC%7^>rE=T3T94YPuVAv^R)FntzS_dzbV-
z-X*7@qM)Mrf1R#=18z}Wb0-xbCAkZ@c8i4c7Rgm7fR{)KIdQ!H4cUJsl53=7<is8k
z`$4Qwdz09GQqpV0&XSW62ZAJ!_&<Q`7CFNmsmBzI`c{;8-I=6=;<Bmuo>aCm8;oN3
zWvo4dsjsuJvaxdr2nq?`6L}!}P)=S!@#!-)bq!4|Z9^kt6VNMDGn==z@9gXy96xw^
zdHeYK`G<V|5*il%H6lJCF)2Cadum!vZeD&tVNr2ORdr2mUHz|y#`ccRuI`@RzW%ZC
ziODJC-{~0?dU<7aZGB^N>)`O{_~i5qdw%hcE)oFgf0Fe-Df{2(x<#bx8j%NNl>g`=
zx#mO6q_@b(??_QFJl3bQa%a3N9Yn?SBrdzMjhau!0K;tUF?yYa{{c$i;2&xKLD~N^
z!h-)>l>JY_{uf;{09sNKBJ)UZ0e}EJE+;}5@P96s7H=o}E4oZdScOfeCmeIXkYyEm
z`!z9<d1WWk2qaJv@Me6Mts<QX`CbX%1^l{xTmdz}wiM5=yvk!K@n^Q!J<kGa?-r(j
zicg)l@vo0c?bytKj{5^(EbrINTJ0-9r#HttXpN+E4j<O+ans?p<<lbHm@9yh(B+du
z^vijphS_t^5(-(Nd>_L;L>AP2%^1ylex|n%EyOWSx~huX*EJj#Bzq`n+`0lxSDJ#L
zT+T?K)z36NJ?TfVUXCMVw7%B68A=adc<L2kDF<3_1g?q=L5($&$VM;5)VgsjP-~<$
z2C$!32t4x7U{(`KNa!`7ho`u$xSuSC<+9#W-9PZ&Xv`9{=`wbEU~5^HXx${)W_eM)
zlMahU?&(d7i}UgB?gc=enRp?)=Doou(z6;n-(vCUvgfXsa#w)1cV3G$VpGtXZkXKH
zAMsE5&0;c{9-h4n!O>AsbA|j_rRq_t?m<H6uK>Z}0qh3_87VBP%A-4x5^Iq!CGGC=
zJ`vDQIPN{>2Q@@5&FPv-n4->8JTp?x@tZs>jL{D7;+ox+;nj|kxYq5gOSvw(z(f8C
ze)i*N)^l-_l%9*Z4obsj-n~{}k{<+iX%TR5>RKc)oIq(k-CkG!J_<_V_R%C3Cm9PE
zx@`{S#6D``BIg)t;J={H`gH}Uwou`wfzBc@H^Nf;D=Is|gau+Zy~lh9%=SyR<WGM=
z4~k-=WwWmUQqFo;fNNl!(}?nmOT7Sj%!lqQ!Gp!m?-%qFH70+|bs$NqWBv7RK*knt
zC_8!F#=v86cLF_@euj!$lHMaNo<?{BuTR<@>93zk%%6MWVti61T=bmbF3wPyQ7B3U
z$%r?BL+G13uK<dS=#=ns^Js&WD?l@I*p?`x-o&nKsjHHDV_j32KtM0QDI;G|f4qps
zqmCP*l2#fHhF}lbi>W(K9PyXaf7%Y}JL7n>&iQ$w$)KZZ7GbAfk>c22_2UI#L(Y46
zPTqWX>EAXh9``c+{~Y4P&I($$Ax~qRq6=#s0?%+GIyLx$R7%|QnokSm%-z5zh^e{<
z`!gC(8gGN2LLEOQuzB%@E8s=DRw7+R$9}|%9U_u!#3CsT?Q1s{w?FtEJhaj9E8I%i
zwAt8|LHC4tdE_8XYE4{%4XlA!EHlFO1NYn$9a9D)m#8Q<-cC^8Bv0-#GlnPnW2xm9
zgF~{vC%xAPw<>8WYc;W9?W0d5z%TAbsBo>BB#k2wuFRiFeku<ds|D0>9lipjo^&xE
z3I$yOT+0~^>Kp=84j9`nGHyl>S3BNR(EsEy{bm_nR5rpyoTaXiTLH>xI#32jx(bUY
zZUZRn%%(XW`Tp7bh0g0L6xV29cao6YN+!Xla&v4spg`ZC9F%}Fsb8oF%v1{yN5Qd@
z@b=6PT`!#cQ2T7=4$s@}w!Ye3{Z_f%I-#?+<laxMbioLI9>9RFRb?iO8jA5!1-NE!
z`G>rP+9?-I;cg)xR+Fd1eBSMHU;gz$X253TYe&CmOGQd4NN3Go{)S+GwStFLrBq^*
z<<N!DJAy{hs2VdmYeQd+3Gn}$Y~%%JFx|MdH&I}0Eq&8t_n5h78_K*V;gQDwWM1z8
zF?a<?d3=F<!6jgJ1-L%m(DSwDPy42V)3l2Z>&$14_ku5;*rOiL&$3sznqbRq9g@bD
z<2PIMdSP&2zFSHPlB0}Zdt5b4DAVe}8j@G@{9@i3>)-4!i`l`rv#*+^=|`s!*Jtn4
zWHYpQYntMND?5P=0!G5OUq!XrBqyxBQxIqnI*^sGT~D|$Z0&C?w~S&|)c#|l98q0r
zab)BJv2_)PHUdSIYY7{;l8T09I-2%_euBVcO^eDKi$mGTnJ0W;6R{4`h1NINo+|)(
zI93O<ux(I@fu15FYiaI|s?P*y*BNTcqfvitWpIvVH>9QS>*WUzRQg3VxJ~3ssud3H
z#LBRLxAMRqS&MY5!of`+wsab9*VH?>TFP5T*=GI~DLN5s;Qh+Wjibk;bR2F$Zz=2N
zY?{Q~akFiD%HK>HzD!mV9elrpT456$!&?L2O<mrfDK(luc>3x?{!^J34tcY}p~K@U
z_d`<Iu08Ujc}TrZ$-HWOaf802L0c+Wqy=Wo4DCXU@2gU>qiK!uxnyZ7VssaS!Nte1
zyH|iqh$RHcb2mg6<LMt7yEG&Q6B#q@-c^6m!^(K~6G`{AZ1MVpj7oTVQ4T`m_mchU
zgiTmBry3Llv0L|@bAI1mQ;JL?N~Tgtz75G~Asu{{tR&S!&!y=WMCd2-Z6m%jc~s|A
z^UZasn)Zsp&b>Zc0Nsxp)Ih5MX?zOQ<_f^&aPWBPGM3JXmGRRLJ+xE!-ilD+Uerdv
z-%bufVgt@6)myUsc3qFvfbQ7f43uMn8!?w0xwnMQHl)^d@Nxoe&0hidPI5be7){9#
zjzL#^YN1o3CBe8{)|9Pqe0XttQAPA~D^ZT$!Sd*YD!N7mfS4?jjq2mVRQ}#wkMf4c
zLhZn_XK1~h&C*vAU*U_61m0RNhK<J0-5(w9a@$0;n%$tj-4n&d!1k86k-$Qf9@Yfb
zfJaDi&wW-ir&gxq&15OBk6a$6-94o1$#P=3$}VrOQ}PF81VEH1JN*ptlMY5<UZv-S
zK4-5lc35P>KB!&JNEJ$qs2w^E>{kw(LECP%^5zQ7;!Ky>0lf0j<=-lM-gml>Of|M}
z5~!{K7$0@bFRO*&USLU=Wv}9@RClCLVllE+YB%B{W^9Z5$uMW8qJ4#U#Z4`y%KM&i
zn!UDGZDcneGhLjU9!NMCYFP^$dgIChmE}K(g>^gusz8-8>OZxzVLzbWMdC9tvrorr
zL>Xq&I62=2y?t-<!I|{K8~X1nms8C@mkaO02dEO_I{YWS4Z>*eI}LhSn5+u<tod9D
zXbE?jGEKW9^Fdj(0e$1YN)MlAB;iUZUK@9N%OtuO6s^W`m78S&C{}Q4Pc3dGohYpG
zL=8;s?!6nU2>mMd((=Lc+wn#calMac27d(q=3<bRR*Jx_;;RUJgwZsgFv(-rXm7X;
zDm`I!<U{Vvy~@T%zI1M>wkCw-L@{~M3DD;A;+hrD66Afc@U{u*`yKb#$w1YKYwiOj
zANmvRwi}NFRo8q|3tk`mT#44;eE}=k#`1ot0sVzyjAN$@wXx2n_Nt;-haVjU4l~)w
zvxb*kmKM!EfKv4?%Kk4CeSZJ}2ulPHXnWdQkv*A5KEhvUyf${g{jb83sCLwu#m6re
z^~k*+(YNxBa-*Ep5~;~-S_coqg8G*(Cq+an^Uf3uooqINpNOB+i-H0=Ut*!%ikYYB
z^z-Aj?)XxNn!vxvVXFr^69MtDh0g;=FBPF}fdS%Zgv7{cG-kd#NE*H`>wL2X9|IWB
z5@@)FCS&iRkyP0}WJu!bM&$~a>OQ6<<2Zjk8+@37-YaLyBTNJ$w+pTSN-)zz_g41=
zQ~ioL&~!?3i`eNEp#D8ndy%SD87sNG5P~nn*mYh=etrL&e5QTV)>r0__NTpQ#IzfI
ztye8>#Fr19Rg1Z_2s6TD<rX>k=h2#}Jk@LbsI+l7o6gw%SV6^S@3bp${89wk=7aNE
z`s!S>7(2y6P#K#bmy;r^nzCl&mI`+XyoTbpiGKkBW%`;&wF?<a!fIme@>=mQg3e0p
zl(Zf*#<#9`?EyXmk!sPihfNtsAA-S|Krw_tSba(d{mj%{4<@1WWv2E0=NIA^_!NVB
zzWsL;3T{eOEc)XLaBKKr<tuG0M@-D!8`2gW)j)%G8)2KxlE01-Zh?l4xN8x@6(911
zb0Pr$pX8#w{kuPja{jGmu5MWm0Mxx$w~k|x`@L9gHot+Ow@?P5d%s4Pi`(*8lpfN<
zB(8TqrW7Vs^%ZHDx6A@1EX$;o#4w~M;8;tic}$!$-yCX7)Jf+=;J`Waus2SYcWeIK
zSu7qb((M1Sn!X|W^?<UAru<Qjzx<_`M)dq!V61D$5xiY88dl%ar5Ce!%eh$#jbPp~
z@lo{p-7vmeCgAk(BL!t}IQ?LU{M_pI80sG83(_k98F8Nr4#I|=)=bC(>2NPb#X&}Y
zJWrYetp^h$He6q|_fhw1-9S$p)}Vft2Ekf;eEq`v?w7I}7tf->184hqhY8#=4O)fk
zhzXOJPylPc7?_%cn!#RIX??i!G4$QLm1?d8JsmI0T%iS+TEKFGN`(=(q6z4$vQIYl
zq0=iIxNOtRC?&7FalxX8<QE@16<2iUTRWcGB}6?vj>)zC3Q(n<^e`pw0nhD&!O^uv
zo#-R(EDZ*WpTN6kRI1*~dm+sb9h?Jp6%CtV?2*AnqNmDLON$2|-U|@(J_y!~;pY{(
z&5xn<(ADC{$I@fo2`R+ZmEbc~V`dO)vm<+|Pl3y{s(O+(S|291;5jFOxYnaHnPt{)
zq6!m0f^piu0<hMWzF>TAayX^+RZ{7jW{mIzOI)vsk`d!4y+=+*e1z-dL|x(81km2;
zR{#)mef>3N|MRX+WNPzb^_&x}CgA7O#PByLnt<7=-!}mlKBq`5HRfzNtZng&^Fb3j
z4@y5Z`{Ju7w$MIn;UF(me9Z5?@mCTfzx28ym)VAcSxGFvP>1NY@4mAta3f)}*Jp^|
z-HiDyR~<jA8Yt@UF=>V9D5}I%)tn7|5Tlzv-_j>vs^nl_vp6(;UYfzBp}Z)S`16-6
zvu-~DXOubSzo3<Wi2Q@S9)V}j!wTu`RHDB)@lxPiW{bN%&n@tezcb||{g!36&%0Nr
zP=sS9&JiGHG15yYT?;$lRK6LP(BL^QA((jMcdvxS<Fnb3AMnNi+vGMm<9=TB=ah|+
z@Ymd_FY>k2eF&yqgd%M9sUJQaXL|WE;8A1;jJk|`auh6$Mw{GowqbF5oA#W$bmWbe
zGC+^{g8ovPxY@Eat6<Iy5iR)a6CFMF^7-_cr-f~<0?jOg*ui#-%jw^}ugPcQ{p5Ql
zfz^6DO(?x-LsX|FCl>4^9O;+5b+}Y&0q%#!*u5mV?M8%m?vwjd*Bbsw!yTzCmzgMf
z<Oin+h{I1|nHK@Ef2vf;I^Gm_7f6eE`P5Hanrt5dMG#AQK+N+@rBfG-U^;sLll#_L
z8gs!LB+DMkE`Fj~IKuri_*;Dce9&}IA0Ijbzl5*yG#@fNeSuAK{gE^DK`s<N`7$bn
z*Dl}_uvy)GKLW=K|Fsmb(CQSD)~S>B(i>rt)kx6jIB3<^5b0L?6D#%(?(((#0j@nl
z7+$M-U&LIPW$RG5FirGrTzkzJ#l4$C<llwGgC>?3djJzgFDZ9ecT^5SP$|Kxl)jR`
zA)MG0)X+zh;Ke>Qr$rH`)V#Qi9wtNQ1d@!BJ1=LS9DtuHTZ|vVf77wPYQ*eSIuzMg
ztF-J!^|`x>;5YKrY0FgF0LWPafG7u+eN%MnV}M^<sYYw<$SBfvo8vp<wkxMz>izi?
zSbA7?6zpy}iEKmIugoUR=<Y7|@fNLQ*oB@t#z=Vx=jFbgsWLgT+gP7U=eK?wS^-@e
z=~7WzhPRi~cP%hStkH0L|9!2--R_($p<VTJ^IN~V`s41$w~^1(>1EH4Q6h$JX*CJe
zmiCu_wPoKFvijC~ZS+^E+q~xpeO28*dG=^Kv}Ekx<Ct_q6sq6Awyor-EjCrrhZ$mj
z)QfYZc<ttIGMW_Qr@kuD;>gbEa4P%Gx4CXPyJ*^nmSZETF=IUXSVopxk?zTqPM|hh
zh4`8QoU4eN%ZH4IszS~jpVrEajncx`FM+A%9yHAJIo$NiIJwjALK*V|8F<(3PDuls
zGSWh1ys(TPnOoUrl4zS34=L;iawC=!2-F8zaSw=Mz|!@)lL(C(<A^|4)GqQyh3UA8
zbQsM@@7#494x5VUd5r}wD-DsN1wJ==7r*`M3nRO)!p-K^uW=`g0`cCd7wQE2dj)uk
zFAWf2QY;I5j;#69qdSrNs9QP4CJ7pI*UkOgV|FqR&aFdTq0%pDg^>wKa+6!gE#z?%
zFm>pcf3o0>0uB)w3zn}}fCNJ7Ij7o3WET`GxZb7r#hX>GJkQ+eA}}W<K2Q*<5_~ig
z-)5^!n{ltpR~!-@+BMp(k@k5aB|(+()|c`xDdP6>i2FGaqCb9*jwu`Z<yy>+I6Wwp
z&84hQ<+op}vmKs_FuwCGSPQ^qm%(uvc1py%f){X4uuq&Hz$1fN`R!>wcdZ1|xlDRc
z@cmkKsmzL}m+IZhX{F0)E*lqUvwMI~mWYo(n)g>h_T^6XR}XsYfM%Aqej&tRCDXWZ
zU=DoR>2v}6;nK=paC>Q?iL3E5P2;{s^EEo{p30-!{PEI{7QQ4XyA1hHLZufN4^H!!
zhP+1%|G4an)ru>8PWGAdp>y`LUjmeK_4WP+e8`{;idsXcL2V?l65W+3-|`30nXSLr
zNVO+FuK?{jh3yQ4K|9Cv0jtiCxRhAR8TBEEWTzz?`o2$jvZ^ms6q}Me0}k0qz4W2O
z7KU}3h1xd=;RaBUQeT2-Y2`(++M56wY(8o+s+F$hfWE!OP7hgQzB*4gpON!S*`u)W
z=ZMJf`^q=zT#>6@_xckv1k0v2H%2dp6u*@+%Rd;OE;qa`ob%)7|2cT`{|+j(9UAoh
zrd+RE*&QM_=mt1@;6gWRqS}0P8iX&z@m|VerPs}{*=1%9Q*}5-zPHuOcbGw9dv0zK
zG+dthU}eZ6(N;0hqah5`ffR*8GQAM2iq=Y0U|u_ZTg%m+i@a_s{i8bdYazcNssSJI
zIm+gj+E}y&6unR?k?rYm`?Ia{&Nt%rBDGG{O#dM&UQhNy?>hEl_^^UI+dzBlvZU_7
z$PrKyy?6_A4t&T->M*r`YO_6m&Z|k7=f%9@WrvK_*K+{d72r4l)YHUwTFOnAsKt+W
z@cp;CJNdJGWJqfQT-BH_*UXbp9%&+>4A-SB8qez|VI$|s4iQ7!irB^&gTM=aZxzt#
zYkYLG>*Z_T7#+PYQ&h<;Q7mUhO*Y(_S$-YzWYsn~!s3g758$#ZKqYX;JR8yty^%le
zsfDPOo!3vc02$fi>Kbh+9q+E}M9V#-&>E0Z@x?Np68FyF0*y;yoO~4q91(E4u_7kr
z@U>QvnMJsZwH6zT;N8#K9T~Rce5A^tEm(-Mb1ruK_bCrQCUXaLp_=blUx$=hDeVM#
zyHD`G^dq~et_HE_@!c7R{WT7g;6`}Q9GN%aoC}AMz~@WvZ<q>y^fz3qNNEXFaGi#*
zB3D1&@;&QrbnTENb8NkPP~JbisVG}IKmUAg_Zho9ci#r3$D@646u8!P_iTA&#-|4)
z_hFkOCK9z_>$)>9*pJGNpsGGh^}e+>5M?{7oKSC%ZV@G1CnC}0Y7O|Z0F^5MRagi2
z!|kUSpN#5xm2vvot@mLbltuzJQTOR;SOk)E)uq69kbf|)G|F}9oy%`pN~e`njR~&s
zltGMd$JZ$<eXE~_kUp15q|6mS;2ihGM_R#w`khU@R-a&egy{ZAkc5-!9mE0GfexB;
zd*Iv|rDLCkewUQ{mDN!_|7&{@fSilucZsLI7Jt&WSq~p8^D6)i&b0`qhz-iy=3$xG
zJB}O>Em|=zbx0G6q98kLshsfSD}SS}?Ph#;Ss<57aI;TaDfdDqMp9AMmz$f*#f0)s
z(8lBJAMUe2ZCnG9ea{KAU?w<3($@zS!g2uq!p)^<@n+6`syFXEZdg;*j5~M0xr0mu
zpb4M2AUKXxIZjguZJoT$_wqzKomlXsL+p+Vo@R`WU=XkJSvi#+Tuxh#lpqOg`C?C=
zy-j^8N-O=dm+wym?g{Z*;e^3TLjM(@X00@P<M7FhYB}%ea0-_V@4LD*t!wQamFm%d
z{7Z`WgD&HZ<Q4!(^azf*^wMShbn_s=w?GM2Ju9APBG$WLFdLXA-9)|2ncCNLkh~8D
zp=$kMGtBq8O(MU{?7xe@tOzLl`rsb^Nu~y8pLXAAqx0fa&ybP68sslTm`Ps^z@%>h
z|4si%RREt45t#NhM+K%fv^FZ`CmsGYeMU9^U?et)6=<LdV0ff3S$h4Zmxz*@<Loq_
zwo##_W2UU13WJ5-m>!lNyvtk~V2{1?@rQ46e>)ZKT5_6m-f4d?@@9<->yUxwf^x+x
z8->j*y(wwIgMCZ8OD;$shsKT{rnp%Nb=g;|GcX(CSF||57Bx3$3|f7p+ez|>CLt;z
zUJGuJ#vLX)ehxaC^7`6=cAs=a85h?VZV_B|@Rvdz$c{G-2Axeq;!w3i6E*4keg(b3
zYHg(qf(JcjVcV~%Glx7y6cepEi;4ted7oYwpQDdhV{%Gw{dzNXkTh)Cla!b#ca5W@
z@n4nFu>$(BRwDWz90plvMDCQLTS%W^oeJ$!W@57Kkx1vFym&Ev)_ZKmG0zml;yrX#
zEwi$+%tK1QPZ<f9_7TETzoR$b2sfT&<{%6=Hxt}?ca7JcN5yuxO5?MjOFtFu!uP*~
z?Q-Y0Wh;lB`DcBP*K$<J`<xu77`Q5#amYlA{F_D@UbD|?UpE8$BI)cUu6RQB?e(m-
zK!mZ%WJ-v|_ql53WSx{-G~R){x$<rep`6mOD%6XOTdmI$4H|c??};Y<gcHT=Qa0@8
zGcd#PC4wt}qdJ23)=aKm@vHW9Xze$Tw~v6UzgCtBX3%DeeYkMNAcbm$g{eu-3HXCW
z>OfT6c#nZ+NjWcSEw=qa^qBILDENaNI?!!_=>6%1={J+pQD==JR@TkKIx2{_3CiCI
z+aIEMIG2=W@72uqIiIlQ*Z->jz*VW4&-0Ly+*I#&Fx743_Vhcar?UCG_f-5VN-y>x
zR^M$WtF|(dSTcDv@ZNWcddHajwY=Kq<aU&8WwNI2^IhM+rL<9Q8nqf~oL_eQ7k36O
zt$dNErU$Uo#kOeMW2ZhUIg|HtoBlu9+h5PFNcg?sb^|Fht(KlFpRy7=T<yzuTARGs
z(JFfrD~To{fmZF=(K#1$(aL75UwiR8m=CB*KhS(_5^HL*<MmbinbtkOJ6}7)Nt5hj
z%banP$Gv1BL14bl49+05u!2#nxMhMlj_8E&114zv#UMGBu`Z4eR)=)TcorpBAx>+l
zc;!)W+~UuD9Lqo_nA%fQS>aOSeBr^L9OKXd%VZ6mZHkRz|2B(!y$gF%AZ6<PCwK)k
zJ@(!eAiP%TZ9-cbMTj$b=SNhQPFRZ;6D(=jQ@G;SM#t@wO>o}=pUsA^g}m5)>Jy)p
zL@Nb#lg+^tkrYc)Sy3k^M<@sjW%o^>R?hAk=H;@LFV+}IK657=8(K&P(%0I<M}RjW
z3kXIpqKT_9wR^mxVldW!XA$T(nLb<Yu6PCL+2_P&#-6T620WGi*%k%Xj63d`GhoeD
zDLtTd9U~RYFW{szaUDq(y#i<_;OPI}Z>4*eB-;rp<kfWm_DOc)Rhn8u+epDLm_&3*
z;@X1)6)Id!vCB&%^mdo7U0K|9Ryg{iuI6I`_q2+J%gOo2k5JWog%tfX^RIfKxPi)b
z6}^cG<0E-^o{FDC=l~kgrH^P+2d3^@;h1v}vC*z$)DQ{1L!f4@-mDBNCT6D|B3|i{
z84&(1!>fnlfdWpoHmnHR_NbNJl=0Nr>|+0$3=8c3E^^)Y0<=EFyo5Zs3>R_G{lW&v
z3?c9mi$OaEm=A(sJtn1)N}Vf!CITHe8%W&cM6gf?!P`{_D!qN2IW^%olyFC+Lk5k+
zsg#ZtXT{S$J9rkI70W~=g?;XXx)2?L#VG5G&Uhm8ShuF<d8w<YW)9McKpdivZx@dB
zTl{<l*Z?1}5*}Xx&>L5PuuQBT;mZ}^7IcM|Akiw2Q%U6fbQk#Of)`5$?L`vqfaswk
z#n2@W)#XUSb>}5-wWDKK=29OT@+OjFRktCBO$0_C9tQe*2|7K=a)&AQa@oP>yf+t*
zhXNSYJPi*<zAo*}%Wcyi+P6a(-ZGkKJB@+YQZGacA@jU6wM6A4?)#&^{AE*eA{wf#
zmO!&DPx=|o->t&VjY5&0)US`#TQuj6Vwih{U@prOpfx(?9Teo7!XU>*H%nqliZXcl
z9z-KG9AhV%h9AD02Zo6ugnN#FW;^g76XnUyM1=Z|ds4jIjWU(+rHR^enS3XoM&{o1
zXHAXOshW2+1?>Oc`m?I3GUUU<gfDHq>-+GTqDG{w1H*kl%-&<~@5i0l2<?kO$}v`Q
z{ru?P!>Vrau%!v0%(_$-jrEW8cJ#93fm+X@2QGGOOB8(t2nAvc@J`QLG+V_sAI@Z^
z%MH0(E2oF|R54Z_Y$k0_wf@AqALu>8*8h;m$6OlLPdNX&aTBe{d0BPm{2Rvi_FE5W
z1^ARaIM%~bLRTTsbwL~@>FqQ;;x*q@(+JD7!||rMjq9=N%ze~y;zi^hdcWTw;*6J!
zYed}f$A3SICS&0TKI&LCRAh@@SAAWwn`!&_fIw%C{Ex?zD>m)$qYY8*2X_B9R__aT
z5L}Or(x9xIx#^+u3v{F;wS$Mn_6L6boaB{H=A=Tz%1X%99vqVsfkxcD$YKHe^z=>h
z*Z#fG?bqRZ5u|_0;dYSRVP*Oy%1|CD;uI$;bHPPU;F{Cyk0`_!Rylvy5U|wi*Eg*j
zrdWSjO$jSeUHn|V*yZI0?agtcR_<%d5BAGtZK<%rVJVA$aimc=_2O8Mw$-ZD!YK=I
zVG3-1E?s}ZMK<us#}+fcii(Onhk`UaN&ev;vB92X{&+3rct;Ll_*8sdI4-*B3zPmM
z@{hc2lC$GzpPds7(Wl!Era4)y8QstGGZw6iL6x&3=~q%os5pz+y7$zn`LV0ap+Yno
z?DTe)*Yy{MEG#%t2gW}WP7?)}#Vay1zw4_pH+=f%Fa`Z2^ek2iMIoGlEQ^~6TJogK
ziX^JEGkt`ej(I?a*vB}wRL*#&8xcKGpjglz5XUfpN`_&^Pq{G~rz&Y?bK6s4uUZrA
z>Al1TUnyl_WBc_9{o(;Iw)GQv=(UnjP_a-yylTFFJ?C<yY-uhSc5}HU$<ERJ_3&(q
zfzCeE{ao_W8q$>|hL31=F5Kj0<R9O+n%s>ZO2S7fna`2m)(r74rF;Uq9TQd9;zu4x
zgc-?Ku%p?&>*)0moDeFq&8HaqaACLhQOG@Kfst<p;`4K2O5r=)e#4KjA}u^z-DnxE
zx)>p*as%ys9WQV1BNM31Fj|>~S7NXf^|Rs_y;F-&^MVR_9RgoMmx2-G&gBqxUqny+
z%V%D6IxjiIYswz`E`4P%y>>zAXQ<k0aPi^n(p>Sx0Fr@R;%#%C89__#968o4QU;ip
z+9%&V_hJ`ArQkXIKGCG|DS*h0jT58p>t`(4oXVU6@C`MwvwEoKJ|4A6VNss7)a4HP
z3i$E2U(#;ReTM;JbjP&gHT!Udu^zNuTrZvvztGM|GqXG}(E<;QI9<R>F)V?f;Gu;{
zUGftVuI6UCXWmDL3-dc4t=N>Oz|meUd>xQm6L1{V-Nq@$%v?ETY=N0&lZw(W!H3g5
zBV?*k?$Po(07>*eV?aRqp<=>sRYk%u$h-cMu0t63yZ#Rg@R2n1IyRsx%KcZPyq`{z
zu732TMPA^rgUKr{?)T6522t$V^E(v~1)SqV4Y%DgYeWTsWwyL9w8HCSv1gI7sfm)I
z4BID@RrR4}9u4g^;T%M9ahqXquuoN86RiJ4a7$~beE#?^Q6f}bLs%o0;umgUrXnxd
zu^)^vYEd`06h|{_oiam4eq|}FJXi9X$yOizLz<Pheq??sj*mV;%sxe141YmI{`HI$
ze>l7Aa}o6?)nJ;2+?a}LZbz$q#jWf^7o>&I53QCB+eyNBjI<HBC*WT8KhktkB`S=K
z`uIPy%tW?JlF}r^8iLcXVX0ONJo3U5_<d;?kxSG4@oJqlNbGNRmph>3helTbuXl-=
z^^LP|&&A$jE!2W^3p5kN$4A-ny2!=4XAlt$(V_MP8fW;Y9`>x(S8l!>OkIJ<LY^Hl
zg&4EDX*w>5wQIs(zV4~n<K7yKtZgZdYDx@QsEe#__`Ez=25}|KBC1(;xzXU8PX1Hb
zW%F2g$HVFu?ha1*mfu^4T-XwMZAj!4ivha~QZs%kdYzz|=<Y({1cNN6uYeg2ki?_o
zzTH0f;zNV7$3)>if)%F3G3p}G5?O?M`1IqCPI`=D1v<MADPLJnGRH~;g+EKu)!N`<
z#Jw8XY*y$>L@$SSsY{vP6R;%)hd^PAb2wzUyk%ZuC7Xk`o)60&0la#~n=X4if@4fL
zZJSV~40uw539V#G3k$`8iXOjb9PDsFGRQ~0VB0X-d$~O)%oi7Wq=t`d6{~vv6YGPX
zGpp+v&b%!K{mXXV)3^f4A@sn0*2m&Z4>~%079L`TYf<x*T8Nm7y><PXhb!T)H2Gew
zc1U>}KmYV-PS5d@cKp_-PLzmxt>&-U#zikDl}lcsWYod00AmG%&j|>-y5Y0UIsB7U
zj|T1%ky;UFR~zH+&eoy_znWkrM$z9B_0{yhgN9+XJvhed4x+(WSIg3X+Md85!drJI
z2fcIbYsq(PqofHKZ#NGON_h)}+Ng#6bKCcHY9^n{CAatQccL7umo}%OM-Se$5D^&9
z(c4%%WmS#?-F$cS1kYO#=oY`X;VszwcaYd8jGU`Ay*yt8k#3=mawilp%GHxT{2`-5
zV|mC<8vFgg4$&Lznr*byWLzQOy4oZ7?#U&S<gP3#+zWcGyqQOOblLV(?-Pk!dZAwL
zo!JI;AwV_hr`Z%DgehEpePM)CsoessK>)`c`PI`KWJ(0^I}3~xwJhKuuAw3fP2Ub>
z3=nRr4MCfHDY5@x*|q+tK6&?6(#NFUb#``cb-ylqoD#;RG8gv|LkKH!sD51DQ9d&(
zn>6u0s_MM=n&R?pJND&#NG#E}psh+b8(@V0Ui)!h99n}D1lE@G%ah%`0#rN+r2QD9
zR3(%jXla#Ec0=$BOG4CHV4c<luUt^Sr3}4U`Ap(NFl-Q4K{QO}m95Q{Ha*XA0~*4A
z+xFQ0A;>CiOJ{IHEX}2wBlP;JlhZ5R;%wzm(_>Sid#edi0}glkG=8@ku-(6I`nu@e
z%5l+}938tzzXz54ZDcIWjbPD<<!Eqmcp__WWRm!{0(LIj=r}BCG_Dd5tI~s6dzfXF
zJe4H!$0sUh2YFstt-D#yVUH&xqDr3o;e;EJV2sM2*0B|Lrs(LZ>Hf!e*~f(j5=g??
zMP^^+-e)g8InZc>#hC541KA6zm(n+!U03u34>|LX#Jq|kuUR!lVcNoZrMjs=v@xx=
zkT6J3D+k7BcagsXRUe*<iD|H53j}qTMLDa?e;wxDofTbg`}K*squ>cb-tG$Up!w~E
z=(`M|uf7D9NO(v(mh<&7e6(&XQAtm|#sG8%ySRhm9RrD@OK3<to%Orb0<=`M>$c*X
zqAq)kPTajm-I+i$tz1M+#0u;9=iUK0{G{m=V7D{c^S<oEM*Gv)_v;HyvGkBO7;O!&
zc;Tqfgl#O_$EIm>_Y;w_5=go`IQ~sD4f?%tOG%*~&JaQPmVb^LnW}_Hyp{iWsp=0y
zMI(+28IStQ9A;iDz$H`iX67t3fuU>zuwHtbbI|5m5QewSk_C+YhBbUt@46q!Hhp2U
ze^c-I*<PSiaeZCP@Ez_v2PT`GoG+vp=9BNh+%7*;bTwIX?u&r~kJy!fHKFbjo#(35
zh=UiBZE6I&HI{@+RqVF87l&&IP8-#zFrNQxJGQx>a@#^8^^zgr<)oVxrtIKeo1jyZ
z?@#(_O?p%ma$}zP_`7TFo*w-&dTU@j9+>gg*9&JixQgRak=-WlZTFsGX>^IB<P)IT
z0+^e1U<qLS3Qt_lg57p4cbOs1ZfRxvgS8`bOs8wsSFt^*lfy58ZE7El0%%bY+GEB7
z+dzUEUw<ij0jhslaqBqtye`0$C{~F)xhZEUILe@lvQGMh)8y^1Ao_&K8YZF~Z4bVL
zTPs{^>!tj=&knCFs@U+AY<FLz6WTU_=Ee0%GX07%2k8*z!nR_GL9HmHVi^9$Lnv0#
zm7ri;NbITQcH}*lR*|txC6?MaEgT7OxOFHeqnQn*Ab!6bI;Rc8eM^Z@R<G|Ax&pX7
zCE6pZcPF4r-xuzO!*<F)c6^9BSZh9DklN++DO1rQP>&vPbn;dUe)WsG0u;A&yN~ae
zoLm7;(5>e=znrj~9n286iP*d=z&ZbLqougrG2z}5OQKOF7(4vJ?o<E^%XvG3@>|y(
za{WLyy*d3li;$GQGyn}J#(^Ru3_+t0TyNQ*-9He9=_OhHdgNj7$cW-;J+kgG+Ylsq
z)bd#~#Q<i~1}xrVoG;&0aQ=3DFYTpWAupR$(q?~*618p~N3~fVGIK6&8BvYcO+g`I
zSTL`)40>{~ajhnmnV1y1ZPF}Wh08)(g{7chm&|W{V}C31M)A#}%XyzqsQZw9aE$*J
z_}J>3FISAZ2qC>dEpOKEp{0Ek*Qdkg^uQo%Hx<B=Hyl}I#-7CxpZk3(+N5>PdTEs)
zY^ep56@_~7roBu0$rce)L6K2hc*?n^GiIW@@B%!POjJrvW5xb^K@rVoL}$PcPrV>j
z>Ip9}^VY+ez*ZP_+S;1Up}C}sVs-o@BS5?+fa{Y_hgFI7mU@zapA3vOGCw$9WHR4A
zM=us!!xDnwY%`02FVm?{bua3E9&lIgS*f?|3>uK)Cx0B@`H6&|6dQLL0Bw6hWalhp
zm*L;d_<4r~n%eKRwdqm}i$9fLd<Y4ivo+lNaRQVY#y3G-=3P(Q51RPJKNMo0>ZQW;
z#^(us>2ar#QCtu57=wBrxk>BD_|ieu++4}VdY3BYll(`B2=f&n<QyV&1mbY-dQZsh
zKrM=s6TM{6-$WRP7*sBLEOYexe|NqVi|wpUxa0=6-xJC*nThMp&m8$d8t<rxHzUq(
z_Dx<I%Jz4GH_o1>qw^=m`-(N(m>eHYp{8v#<vgr~OLlCQB||p=7;Km#p{-4ab!Tp8
z*Y>==JSDS9qOhT(LGqOFP+reIJJ96{z+X{;gU?|*PBn-v+vC*n1<F2FH)?-utFmAU
zzw=zanh@uiH@OY5a;XBm`*zIv7;xB00<Pn^g((jU3b<EbpT!d7>^a0UCd#P^%Ddkg
zbW3B@h`zn?)$zs~cB>|?)mZIwo6~Y)<IMs0@EPw<qD9*|;(Za_EW!spg_|XoGfFW*
z?;1!4UKZ|?i*-1Vk%GhCrB2!Qu!S4ss?~DmavLB_fs@mc&3L3ppY$+!hi<ulpMvW>
z`}sySNW?C_X8?h*lw=Q3NdRF^zvMWiy}tr5#2YtGcnLF6?}|0oQIqW;0yC-<L;KWX
z`K~=~I?AcV1unOkqr;-T^ltg?omyzTnyRqeJ6wBuq{Eky5cHe-6WhNN8YXIx?Qg5#
zzrQ$qHRObtOOHOzR_(;LRqr4#owyBIe&pmG>(uL<xt8uRAD`hEfGE}oRRO}^8J29!
zwvnPr{TFsG6-yMDPWHze_<KeyZs**tDpY&|5V%G^Q-}iLm}O8m5-?|{k@fW$U{^1n
zjs<DwwYk8Ut({LL!Oz&E1#VM(!K8{-Mv`~(Vzi*sVQAK!Wun1Gh71567(Qzq9fF5e
zKqen9N{?*`rqSQaN%e?fZ;uOsB?Yz7QjV-O9Iq!^Z-NSUO1q`cY)jl{(<IgSPiHl9
zEeIj^daY<jC=I^oIXy9mZQ<rf##Nd9s==L@70w<XRyDgXwaB4?A+oOY*zUiU4vO=6
zb#mWWMAy3RGP%d~%G>m=criD*7xZ0AMkV@im$X1tpX5dwgiP}OIk{IXL|x8~J0pw}
zyDEs!xB}SIGh+q!OkF6$_GhRB<DN8QZt}3R-5{}_=ZsOPxPc(fyrpJnwUKuU(G^~Z
zO4lN_xRvYV%3-SmFq;AW?&YWE6Z1W{wv-=PCx^atTeGjuXp9jg9GRqGlVcAi{T@Ii
z+<A-+-*?xsENuD2Vl7KCqvt<}D{EF+=GdG-b<0*b1)bUV`jL7Ho2v=06F>d$*g?=n
zXWUm-3Iq_i^l2thj)xTsvpS+iHI5f52$5SzFun2IX74-WnY^IwHx5@(gQ~ih%??+x
z67~skj2AS)KazFA>gw070AqQEZ;VilV?6#7XHt6>9YsZ=YlaQS46&k>Ca-XHJuZ`$
zYpQbe-SFY`@tE8=<M7_LeT$n(!w%hD>>!WJh|sde9cy~WVB?vE?)u34@uKrW2lU;g
zgH0)_NnR-)-#$)0xlcSyfbr1~H@5T0rR6A=_H)s2ne?}v(W^CSd|qTV)96s0oFg*W
z@cP2o0hx^xL9ug9I<Vf3*o>6l>RuswRvbP4K^{Er2Ti9H7|_lEnw#+jwzz1>Mxtr$
zf$p~l#n%cb&b?Pk;(uQwSyu_-E5h)0>CpukRDOhoaS-Avp0am__HPPG@oHSxG8_+y
zz)cdDb38V;TU8dja|UH-mI_1-Dd|OR&VVv`4Aum^UGG1aj+bHWyRNApG{F!c+0G+9
z*-V0_2?Vt%Mz19mo7HVpi04c7woY?D-lM0?hBtP&MM?Q;8b|BAC_Q$^0l7^N*<u8f
zIhhqb3VnsvV9VI|kjZ(hoF?LcytO<q-=W&EuEd09T0E}e4rMUI6FK1vj(#k1NsAyH
zaQgrl_Mi`4*MNL55GkCW$Pm{YS5^G}LsVP_sCTid2gmGHn`t)w6rZ0{UCU_w0O>r@
z9n?r)^Ni#=z0YgmWf2O^OOLTV(aTqbmB|V}Qpw`_I4QK4qCySiZ--<c2bSIB%L>Yj
zZQs>>EOc3138Y|T4;w75*P2(E8d-zL1UTVg&E}V?z6qBy=&S_UkY2s%buuky?;rS$
zd~i||ms<y)8)d|@=;?9~1TYE_^0i)9KOm$#lsfplp?2P5^oUoyHUEm|DeX;v4lLy<
zQI!aO?F76rG}VM5-BrCkO5$fbPCpcA{e74bcKn4ahJp}6Hvn3&<ucY_cYKxpB#<z0
z1(1X-{=v?Tgl?f+4e9qa>WvA|sj%J}!-6lntM+@$@y`TIU%e^je#{;Hu=UsZr;1V(
ztP^jCs*mz)`S`oBYb^vV&Z3s0H&H%&<Fgxs?%?-xKQd+kHtBDyNjW+B@{^HKlj9PZ
zS_bin7HL2);8HpaW7xuZI`{|Kv3OIhAm~8mMWmd+uk*L9h}_g&X)QidaULzpT+yLG
zEASvPSlPV@YuIV#Ob98qaPTikQPeARysvHGM9s9SX*E1}!rj1!O+q*lwK6cq(42Rc
zwPO(zfit-80A}>GmwWvipjrDp1u*wWkz&JAYFsI0RUW)fP*?&bDcJnH)xy2XyK)33
z25gT7$^x^o^sy%Ib3Y9VIL@l;?Vbbop_`VCyp}}F?%e(2=l}U*4sU#~0Nn?^GCFfj
zZ=2HZvW*Zv2Zn8%8RUzMy$o)8XI%P}p3)$<N)7xKddmu9nJUu0d0NdLko8SM!J80_
zpaHr9XJgIqRRIB~jjS_En9wA#_`lQ9U3>k(W?M@)%4?(mm<YX6#uMU9<|JIFbH`*&
zo&C-twfcZAhxEl12JAu6AlecmGE#zB6@$j@+cRz+Qa#YN+gAWv3!zS4YULNp^x<i5
zeR8|*I$xi(dtF>LTenn*_w;y~g=oBOBLu8<D*$PH!AK8Px;1AeDmCpPH&@cPAdFoN
zEiNKG8lvsPqacr+@+v4iGv(<~d&CaVo*uBdNTd1?TiB7sFri*ru+f+}m3;R4otioy
zB^Lw2Gp<cU6aDJ~ACaUv_1RJ5T>@C<V5VlCy+5N~chQZ=i)<1#rkDRK2T~J_fn9va
ziR5e~y1DMdYn=kD;giGlDfl<YkrQJbl?T-kuak%G%d)@L*Xp<FyAW8x{q}uU!Z623
z5RPUEL9<bBxH&|TS=3e7EwNj{M9Ng|9xEGjV$oSCMsrP97ow!H{Qkz%h){+ckA!;J
z*8V_nW^KJ|+^HWu>GBay?7(xEt@LnlS9TNv+CKtPly2-}`9peP7=8NCwMiYh8A%d=
z-RdRz@bdY)Yz!-o0k({cZPu4aq3poxXk9*t=0f`C#m$)Ygai-p$?dEAvxTe|EvMlH
ziOZR&cD0Gs!2eDGv04zm5nDL!X{MP#W4(m4XVG_O``t5gu|aA&K4E#KN2K0+5K(kl
zJ$1@dPD$~wySsj*9{PpFtS$0T4yDRCc#!&qV>l|CN?Y0^-U;O}4ffeQQccH}VlHOU
zBOSn-zF^a=t*P-&Mb8V*>RHvkY*U-J5@FSJ)7-z-?;>s!9zfgZAX2!8F@*i!&e^_5
z-5NT6>-MF#k7ABQ4#fIB;>2Et@96>FW`K6DnI)VJi>L*G>L9h)tjArx3tVfjOfue%
zJCv5It`Xx`C2pf==@||On9>prwF>R*&I-?X2S5L5X)fn(uhRTnDoV@DSHxFYfLwrL
zOu>H{%`m(@m$5tnY?!IiBCK84Xe}Wh53}Vz*i30d1iU?*ICqdRc{CJNt*LY!8}_@g
zF7pR=QN8s5k|7%{*ndI2|DLu$vi?z(sMd{x{{5E&zdU5p2;cfY_$zeZVC(0Yx12ZJ
zW?c`YU&~k&Do?Pn3+c2+?*_g{YDo&#GX7o=X~9TFdj@{!2$m_bNB&tf3wyC;8=>ae
zf5w^UzJSOV=Fa=VVf=USN7Ar2OdGq8D%@$g6eBF~NML$8U4u_Ayjs@7Xnj}}B@$L0
zwngt4ayYWwkvY6i>fucPrzSE8Y}@x9l(<41ELWZbI<7rmUlD^sgX^#RwTI-KB#9x$
zGG|bFtVf*&?X&q;6NUOcNnP^~;PB+r`Z#7#(|G){fGP-2KUKy<{{XbUSzCF*sI5J?
ztjZstIMyoUD{72Eb}CWT((#$rIhi;4DQZS3-Rk8FX7ivTyQv0YC(J=BLz^yHaTe(s
zQ&Ua|`YqZcd;e@w=VL%@QuvZ~dZOaNFcqh%7XGra*1QOVyY=S+^qnvYtcHgI9bloz
z1qN)B)oF|B`dW<99T%~Dz?U@2+dE!;ZEa)>R3e%{r*<x5Ro;q;lYGgq(A|L{h^X#R
zfFv6xLq0gzZLADJbbi+%gs{LgU{;#ww&V<)KuvNWw?uZ97W+xuyvLF?yt;fi<MYC7
zw*vkIzA!A4z;n>8yO4DFAv?Oi_j*;vO?4_+JoTqROhZ1<=}S>QuS8a<CoUCt@ZI)%
zSp~eZ%g{Xq2G$_PZsM;KR~5<h95AX?@U@17xOLUf7)|0i#+{=?*Wdgh1afeG%6}+l
z@PU2oZCG@<Cp^kh;g8E6U&OlU*pJ5Ghi_=RsoCzoKDn)zapQN^93&Tn%J9$U+A0k+
za=rq5f={jQy(U{$PL8+Wf?olGfMZ>XrT?jjdAvQGi<vLsOq@&I6J50)DK6&BX)(!s
z2;`=}TQ=cyuR&(c<fpeCf6}P>rZg#icF7TK;aEj))1O$gVr02zSH_r{KiuP2f@<yC
zC<YPHq(Q&q@}(1nEz!<G#be+X6R){Eo)Z`SL|M~rj>fygT6dGw?swoFJGbi$_lKU!
z_pddghZV}NyWC%khsmD@ofMe#qT{QHA${K?Aa^jkVUeEZnQ~n$CXF#L-%y3!3Tp;6
z=?kKRo%s;9OJosQ0VXBF(;Tm}<n$WrV;UisCd$35k~#iKGm26vN6eQ@%6MeHE6`od
zd+CT)9W3F<=roXfVlI=y9Go2;D{e1z4p%2eeT@Svg>XzlFY#HlJe=5X^&Isf+{5h%
zmElBP4{I{qXhL6XotC9X`B6{Q@lhVO!=yI;4yn22i(65&RQ(N{Zi+T+Wi$9#=FhpQ
z`lnvGzKD(?KUlI=rLh~c-=>l*_FU$40EO(l0?^~kF%xDz9nNMWug3G2bX?B=LL0Z-
zdKY(oQqI(u9_XQ9UHgmy97s+BEVz7eqdq44t&`vVYGDPMyIiGFN1<e(#l=bu91~Oz
z+w|@MZ>-#mODy_x9A9k%UvK>+sT+k}fBfE3#Utkq`z^|Kg?bv$Zy}S&_24VO@0615
zt=5&m1=CI=_j1FM20JWJg8=nAl*2J8Y1WSd-@`dO3Lc3se~kPX`VufO;6xiU2lbmz
zM`roKS>T1hr9xgZWpH)?1Q}pg;V?FYDu1WrRXcI#JSX=3t!}<=H*DT#Z-1Is_v;z-
zx}eZEk##eAZ433jcaaA_QssM4DDtYYM0?=~DmWiZ4BFOQy1vm&vRtm$wjeYv3%u^=
z!=YNAH(b5h7$xoJVb)tHnoQV71|?)sGY^mI%ZtRh$B(Ay<`ws8OB$ZJ5~zuxy5#JY
z_*xu^M8a}{Ye9e(X2;6Q{ATN`mMHF=EoNZ5-i?qwsU7wYCoiC~R{)AuF`~Qhy6SCv
zf&kW`h`1r=K1zKzzGqnW;~K35FiIL1q_<!q>~THllBlzi;qi?RfU~frLI0`vN(X|#
z*s1w;<IDHyTD_>{5kdC*+H#c!9tDU@>`@4hj{dS+9KJKF^qi9j#eg+NH;KUFD2}9i
zK&jxTd>@1$s)5@i%9xU^+I?4FqGfkvbG-DwV<!LoXIrB4!h7t6_coXQJ)d{-#8Y)?
zaumCz_k7bky$LL1Z<{9c`kO`QTll>7!K~1>XF!J`*QLVWDz04%-1i?gi2a#<Rnylh
z`Y!IE$@OV1={I`L%5Q#@vqY4^LD{Jy^=8#{TT=bgP5km6^f!ogc|TN@7u)ayYJ<}m
zdw)`Xe~7!SuHi+m<-@r`Uj6ZGvXF0@K=*Gb2iuc7=8QbfhE4<Z=dknxK`YRMs12+9
z{;~-$40;GuChBwlw4O+W<E|&}5xzuLT(?JOil6nP?fWgW{%=(8vu=k+0W^pITr09D
zw^-lZz4Vxc44KgaDG|2h^<yhzJITWrg!!+E1~$BQLDewj7)t{!i@AW6#7htw5re`S
zW2!qX`J2@21Aqmu!QcCSLq#6Cg=`tocO$MVKZRRh3&E`uC!1Momj=J$x6mL3hqvBh
zgL=jMX?VliUh=t&V!Be2w%T()!Pg;~@0-qcv(GL4vpeK{QJC1XTFD_`4RX_Gwmwih
zW=%u<j@Y1wmID76F&^+6#QQ(v0sqAWssWf62_vwoD?kj?5y<>74rBPZVX`2^q48I5
z#8jWwZ-)X8W<>@HpNHc$3{vL6D)5feGH{l$r*acW_x^3HZm=3*Y;x`C>5GRl6HV?7
zy~BSe_FUi4(9i8puP<qYLG4Du%<R5E*k5(o=5I;~^9wV~>wMyH_)gNQ!6$&YK>@%*
zB$h=2RG76R49hM&BlbmoHfr{_&y?@9CHY3UlzXGpyy~g~b0PDVEiLj_{}*5H9o0nK
zc8j7Q*botrPE?w71?eRs0s=xrO6XA$5s=;?BuWP<(xpZ~q!a0#NEZ;0-lX@Q0D(Y?
zcl^Hl-gC~k&boh0W-V9?W+pSwZ}0u=XAc!YFn=cy{Azw`V6bgEuQTbm+0$J-yLfgw
zlo6tsare=W`-aihPd`ajWn5iq?v14C`LPchl>1|2(bj~Mo?RE1IYkZCaj}RtGF+9u
z@K&8K1wT<&5HfQG20R<BuaQplw!qMb&0;_2LULV$W^|Lf|8bD88rQI@^>xLWcagn~
ztH4RP-F&FP6_Cq}Fk8m>k?euxkqCTDgKOr}sk~mkTy%Kim9cm!YfTSK$8z+CE_t}$
zJ<?;l3mfTXSDf#NUiFIoyiDpXGe|HjRGoIS7b@jwIX<j-dx~0`-*v@FEu$mA)h)t4
zGaVdxOJ$>fskH8?U0l8+_iBdg#e%JURF<w6sv?ZU3<4yp37m8LS?7GDl0hlKSuaEE
zILx)EpTBghoYT@+(CtH8yncE6xf$t&QRtDb1)9z*s9ifTARSmBj1P7X!0DTBKKDwG
z3$WzvGBxkbU{J_Z@)Z&jxsk$1LjD$WMt}(W?Kuvg+iOq(0W6s$8a%W%Dd;ti<T-=M
zxIjn@^7}*^Y`R-L9vR+p<2Rmt|9zZv`h_PF=`rk1Iqh=CuX_E1V4>g&!JT<uqB-R^
zg@+K3z4h$v2?q{EmU(MtSZm_Ak{O|XD<eFdnm1k^Ju5LOajf|97(}?5b)^=^{g*0Y
zc0X@aprJ0dCr0+uEg>4k^xoJNI;Z7iTj!-8B-Y3SQCT{}D4>oB5A<vFs~FEUZS{Fp
zSA)O!=%{qM=E<vp0zZB!%h*d>(^7O(Y|c9QeVVeUNn(6J0eY>PAx*AW$dXL49bo$`
zcK3Yg>!dRCfHyg4FudqVd_WhibX&sc#GEH~umhMcunnbfP^Qvq-@`Q*ak3wd(igv4
z_yo?a!+-w~xsYsqO{PjnVD+}%&h3|VpOQ|bqdRM=znRa!mAyVYHQQ0y`aqmVKKBK?
zB`_JHO2aq`4e3D372^G-8piccER%}^9LHdWcuh>(Hh(@ol3i+8@?>l-)=^?;Mx)A_
zKlq8<!r7Sjqv+6LkDi=E);}KTw5Mlw8fQsGExC5b;kCC;Cr|}P#dJaaEID4@_v4=%
zzu|`W6Q3em1zVUYBU$o)se)t8!=Tu4v`Z)Y+wZ){KlY-&^TJw8Wu$fSFE@K;8>0eL
z3(Uz`3%ei@H{r{8EPJw`II5;?)FpYsGM4FHpUYIDa*TLdhI8fq3Y<)oMz+c#*eE?b
ze2(6Wc`^CEWe;?n8bb5E*ZVHZ&Rr}j`k3+JyTvlE93RNFcsM_mohPSYZ0Y{k;iZPU
z)Owvp@B7*B+gE=Ze*di>`?^HkBh})B<=lFyx#t{!$(~_-3xj^Un<a63mD5$<W@l60
z(n;z_#TRUQxb&D#e$%+dU|*`Rn@Bu_iqrC|kjBi*52?2;?|aL?j;y>29?yMQ{{+wS
zVEMhh@RZa|gQDRmP@Z|>D~-WY;|zFLgMr-FNN?*kx=};PtJ%e}YWIUS>@il=V|3Yl
z(uL1m4=B<skh{Gcfeo)*ATBwPLZAMXN`wqzwDx8StSUbir54`)4zB87tu^o@vz>9{
z%Igo4*A<}`U?uY`fZ&RPT~U$P_6sdWZ%9trpQTsTvL>?v&U{|3sw}IgHa$Ij**$z2
zX{0@J3DSn%`PdjYIfAcoAoCC<?Hba$j1`l(jlyofwvrWxJkz9sZkvh+gmibw`B>`m
z=Wg2!;Yox2Qt#3kgWS*}ay58M@v|~i+^V6`K7)_-bDUF-C`l-P-roDQ({;0lhfg#f
z4vf+)^C3KZ!rD1ff~%Qqmzq_GPaCOS1NZ1cA4+O?)zPuj9iYFg$I6P~cYHG9Yo)8e
zYRK0TGqnJZs)B`-Ah=1+Rgv^gTSFfEM<qJ8WmM@;_{cpb=e=ZU$m4t=AQr^<U#d%v
z2s*?NLI^t$SeE7)-P`k~q%%@Y)InH$Nz1YLmPNeQ%%F4!LEBP~fqApS=e`pP8o4Gf
z8dCK}E2{N_Zsa-9nfE_h*INGdGiu;+b(J`DFx<}dia92{YQDnip$4Mg^&01eFs_oB
zhkvQQX|y<n-bmI@)nLxdHqx1G88-GNqRDUl@#WG|PigXvtPH0o8oXhdv7ZWZBG0VX
z0a*9te6#y8gj}SgLlhy|j5N7p$2w|{g<p-m%X(B;YMM;@WIkw0RV>gb>2>SvXkTtE
zJ8%BzUS0c{*HL>0?~(U>cqb=RMCc*x+J^R>+LF2tj_r;V?$s52Rwysk)|33GE8XBw
z1pbPenm7)`&hM&W!=B*s+ZuD-^X!LKa^DM7CA#uS76>}zzGnQXbp6S#tU1hO%t8ua
zC~X3!P+(osDz|+H+hbXJlc#5k5`;J7k96uLl6#|8+^m%Q4l9H}_0v{`j-{K)Ck@ga
zvR>YCqFBh4u$Z@#v`}L%$xo^@E~@BNio{}ChIG^l)FZD1lwyLB!|#=xp~jRrJ0B2G
z7Y0Rye^;cDzBc6%6w88-^wr_1_{IN29r~vf`2sOVz>4<q2QUvpnk1Q)nJe#@f?ibt
zPTD$Ase`1a!AhVOL;r~Z3^RD)n^{)wnmLJYD0?JF{)%_OE4%W+JJO2GM@lK&eiI$8
z4B_>3^j30B+McYFVa+1Pc5v|Vq2}016H~@|O)^K&Go-!MPJq)1a2X6#I!2{A1Mc+d
zWE%T8IxapR))5JQOkF7IbG`|-6t;V^0H*T&v>_|ylYA<9-fn?Sb$$Qoytv#}`dN&~
z;$JEd$qXmasW=Z_qOC?2-E-`X?+MRUBLD1S^vinhS>}bIUjl*3KYtwQ`M#A#u_F%S
z^{uuLWlzWs{8(ptjgfwlddzBABdB?cqAfTggQ=0|08OhcsiZC<!w$@SjOU?s2Rdd9
zl1p;mf9=?~jy;pIIgd(-B<hXNzT35qyt7k&_n~E9IhA#?Qo^3E^no*p3A`vUI@V-Y
z)D`CCv3*|S$;NQq5#<6MdJY6DEyg1+-%edRD139;>}yyY?rkynq}5V0Sy3}}&T305
zpQn2rai{NG=&zgR$-_}XL&jca^^LOLjTIGRjSq-G-vL$#LS6Xhxcs-V^uiw!Ru@4b
zg@Po(tXm`R_`}aT{%YL2sPgHX1)lq6lX({)_)1*fl<WruJ#d7Xyd`Myi#S{jo6H=b
z(<@TMZ6C4^a=iAXcI{rieMzFyACS7wMXwTgD5h;wHExODeeATAci#f_O|K89l9<zS
z8%IZCp6FsNjlK4^1@v8Q&M4WN3m4R^si(5PYGGAM^dvnSv#cRRJL+QsuW))&XJDV&
zG&fBAuvYR~ERm|_H_j)#i2qB~v&u>U*@R+0Dwm=4N5TkS=PB`D5fS>Z)RXyYNQa|_
z?gY>M3FZ~ltOZ+`O3w!h%k%Z)Si%R74>q{`h#z!dfjnVQzX=D<_9&<~>GYl_s5p{r
zx~16~)k!k>959T`0qbp`f5j5)V#8ra@`QxHRMdc{fCFPqv?6Jb<jy!Rk@Z8V$a_5y
zo;~0f(U#z^H=oU8f{Rj8cmd87Cxx|^)cMmf;yjp&h#~_<L8W(KXIP|+b1F%d{`XHs
zn~~D;qf=|Tw9O+3LC87golIHfH-Z<LR`ThM&fA-_ue;G$JUGN}(wUoJ-&i$ff{eWK
zm&!3QRt6I+zsusI;C2KY(@ZYYo}b8yBNOFv48jN}81!MYLB~;ZGlN$X5BC4<KjE-A
zNDz{EkGC<JaFpH^eoEh^<B@#NeKI#$Zu>+%K!%}HkEL5tjhh)zXYv@j^mTAbT+py4
zwcv3dEot%uJ91O3azk5W0Io>1fXm=C=6B{<H}5+Qk^(Btta(~oV3cNWGud$9DQA3W
zgC*lLN!INCzR5=HD{fyWCSO#-X<ws*Iv(W{Y}gUHD5=OkS0C>C4{R6!&-USydMi_0
zWp~%!VwIx+WcD#~_%o<w{?FMp3NTTyts+F&g)=Q4I8FvMYzsMIX7TPjQm1iNT$Xu?
z-}p1@U+HN{ADfC(XOdqzsMn&ADQj>r;~PYO!1IJLl2v94GNn+7I$_ti6gdvcGk)))
z>yG542v3u)6J(d1S++{ATi$5X2~Sr0@SyccB)GU<9kdS}Zp$dvG6-yR<lh>M_T<%)
z@1GKb{X7++T+t+|>HzpqmEgY^{ZLb;qQAg_@i75enSGvZ9@MsHS-yR7cj7R<3U0Zy
z3+ZkVXdZ{#_`4R`rzL*#Pfwk3?_IA4f1!R)Mg75ajM$}>)GgSic{Oux_r&?E;y`mr
ze%w}co>f%M*qnb#VvAA&xHRywcmLzn5XmD*W4jKQA)P3Cf{tq3?M56=9xaNiOjeY~
zIz;l`9`lV3zsqaIqnK9`%$OkXJ5Z=!urQ}>gy>5bWW8t~)AGDSjq-Ln=W~zmctOaL
zzN24W_DEo`@RCp)x@{%ls0H(Z7)?}x)BOg&K?kR42DNZK0yw{+`O}IQ{oX>WYl-jA
zdB02e0h|EUxlM(4Rz^kZ#qhBtbNg)O3k&Byz<PWd4m)&53-*O%W&%vf?t$4?l~@$)
zha$Daf0`waN$T*-3JX`B-wpRaw0L)PtE_usX@bNW*5O!&m<=6UJP3_l+!m33hb+60
zhw@&RhY{!zBX{;)=-gppfScDmp#(*^3k3v{Md2F2_4$30p6~mTgQ`=IU)J&fEA?cc
zB6K5*Rb59eAwa0_F!DEPyVFi?rCg`<3FWyTiXUiakS`<Uc}@Wt<OiWJ8_q&*RH9yj
zMVOUvh)3l2d!;}tgI0g=SJZ`gzu-DQEWG|@K#!_F0PsX!umX@pllg`M4bKH~w36;i
zq<8VmX+v%JQDf3))5m|V_}e}E#Q)-wE#2LFbibw`y=Ee7?mR55YxPOMb>ElM=oSA{
z)?YR6FLN|wy?L-F+O=wGV^Mka)cU!_fHn!};wCp(@~_vG{V&Env6oL`Y&^~Y`t@%?
zlR!$(?JKmk*yD>H^pB{Js~L75eZwj{z95@o6wbjyj7~I3*BO>_eH4#7M@fp_Nbc&C
zk5%%y=DwWgx7O%Pt!K9SK+{i-#jI?@eV@83WqqZLNd&-aNfL+UTR!gTeIf&eYCB^m
zph0I++Pny=!u!+0z7^U2a?2L?6Q|IEPlliTA}Mx34d4QSlHLiS>I215&o_(Sj|1w9
z#wz5$mGm}LneVoHtDS)kL}Y5dsqu*yk9}bIXvv%fRG4DLkmq%lm9i7kUPbc^bFq0I
zYVOHJze9_D$@I>fRU7SL#t|$-^aENu#topmD)<YgKpdZ;eYMBqrDEK5%h=B#vr-Q!
z(C@Lv>;vk3BPZ|}#~deb{I^xD&}XC4isbgA0+DY5H$?tK>EKz+meGsGWtee>JY!U~
zhvsY7!6h4=SiDBDQ9XF9cDM*?C?hXeZ>1kUc~XS8hv7quCZ2l9+3x{3UH{X64GVmt
z%m6O1<7}c9iLM?CuphHl>9<ac%~cLvB0MX6_x`X%mTS6PQ(X9TNqT$Kc+&5%Z7kZ}
zzjKxyn#-jw|2(VT6!kB=LKaYh7;Yf?IUtd%ggj^43&*w64UfL-Rld3#oU!V|E_rm!
zUIwCE#$i9gaCx;s_(9qYs99!Na>_H<pxve9C)A&xB3_m0{VMq^_k(XGDvF$^?q^t8
z^t+pk(31C>a&-Q=SeW!YnPE%cQ4<G*p`~Jh$%T){Z^pz(0R=uM-3pfd(KRX5Q+BVm
zXrouNR3rEw{@@bzPf)&;_#R=P3*6E@gkw9>%Z2?bodT0$?^eh7A0miP*7yWBZYO~D
zr33J=I8ZHXrzt`LG0Gz{YC2mg>NNAT(pc~kef>e}Gv9NmR!{wYI&4&E<E2W*mbOIh
zmNt6m;ND>m{qFU;jfDg?Cd^}ufyXUY;Gd(4ck#cFG=Qcr9c?Gp0AI3>8s822_yW0C
z_1p}v?p@sdBzf^CxDbYYEHU$^V+hVbXhv}77^k<%j)0ju+;-n+t$#>~;(0ii;b?^J
z-I~TCBdWQn%|`Z>?)$pil-S8$^89A;!&Q`t*ZEnn+qK1dKhiASFI^uc&A6eZtIXJH
zzb({358y=7oV7T+#yME*axBBvp!~q$H0K0Xw6lZMHG~3+a;pNy@d{|h;n=5qqEDS6
z!lSu{gPik8Dg8_CNo4L<x7f@86`Pepv>$p^$?I~BWq1}xlcQXtG}&W!1!=Z$f7D%6
z>As1t6%!P4wmozTpe>7h6McOvtM+_zrL*92)vMMLd{Hche(^pm%P1y{vji1fSyC+z
z1`gjhh!xR2|K!7RN%uufaGQCauY5~D%IgQsH7~=X0Ts*l>wM1a8Q;{VO}&buT;UGZ
z&_;Fz2o9WrPb=q?lWcI*Y6Rc>0CYD;X=sv@dv)2HOPKu245GeVR1Qm!2w+AG#OX5v
zSGlGGxoTQVy|X+}&l^)w*X>kP<cp>s#}oIN?4Mit#2@cJYfRZ6beGcMC7VVFU|0b3
z>qZjWTxI4qMLuZ$d6tI*YvdsE;|Yq<9cI2sHo8dBfK5NxqE+!{qT-S>i5cCV#vacQ
za%MBvcyH6)HQmAK&wI_!vjky`*=gSrYS(vp@S_8t!dfMNNN_4`pI(0zA7daVoqy2A
zPFvGJBuOe9uCv6$zZS}SV_BCR?jLvLHLgR_MU}2J(Z#PACtbd50JBFNH5(m4NJ0RL
zh0z4OH!5Y-tixTaw0rrr%ch|vRc!I0q~&a20qD895jqFCkTHJ=xp<FI?7F0@C1tz#
z`S7|;Z;$I;8>=Sn;^t=6Nr$;bcx3*KwK{lB3aKex_c(GKvdTQQjH7kF`cWJmQ+UiW
zymuUVS`&9H(dHah1L*Du&J7CB$mJ?O%Z82tR_FBFr<pj)NV0QRgYaGSk;IkI{Gi;c
z%52luwS~O2OeNyMA4MjzgiE%Kyc5$MPaJ14O~CU0=jYv=78tn%Fur04kM_%&q_A(E
z*p4p!@=ilBaOZm5?@*A6+3_*xx->AKwPba-j{u>?)d%l#$D8#+s-RGU*vymT3eh0W
z4xglu)<^rres{}uk6<6C1P09ri$r~b!NOw()`)$Tkz%%!&TljD!xlfMT{e{x&F|8<
zlP=H<x@(?UAfEfi2EkVQ-3P}PE80Jz5C;hMSi!wrp0VYI<b3y1dnU_mR;Jlmi~4xe
zZ=ZVjN_<6s`SP8m1!CfSFrB})Bl3g|1vZ{-vMFkz_aWYENx3U0;I9OS_IK#pQTE2M
zHKn9ssMcR9FGUHc_gdH=3oUQOK97CTigIsn{?s0MzJ$><8)oRLW6j>+@8O#8yEzTu
z^Q7B2&5j;z0>h(Gyt+YXJR2l2j9hq)#UIa`=NQ^H-Zg6If^|=yQkR+UvA2dX>Rh3F
zUN!ovp$14l9nc3>?L{Efh#?j~IjoDYWKw^mag?>vCmj3oMg@wv0nOe(Vs(n|)}U_q
zv}_^w#i|24V=`!Moe{2^-RQX&O2SJeo$hMXchUQG?brkeRz;0{Ns!?MN)%@gAr|)G
zA{}rl$~i6D|I9%4K9}eIh^+#;qZ8kj#q49uCq@^r!e|iX(kgbGT}{ijsFw~~O<bUu
zfJ*~dz~}4I@;i5Ow`I@Nv-lybZfuUbps&!aA?yW~GseGESI=R)Deq4(gnQ|KsUnL2
z5YVdB!qG4Eb_Z_)uo#RR9T00dK8$6JxG|gu^b4H<#`Q_k>fUQ-A4Ayj4)T<D;j+lS
z1DYTx(Ow+g0y#cn+paj`p11tzAWo!QN?J&bP(V~Jg<pEN`0c|egDUY$fMVKO@q#nc
z2*Fy<3g8qEVj>Y@ubF21{klo%D%_<K7tOZJ!oqhQ;rx6!*sNTrOr#n+`*t#2NH<>=
z6;0aN8t`|0bG1-A^Ed5X;Zr=0Ia(n{iz!(dl%$Wjh+k>Qox`Q{U`uPF|MTSQEp}Bz
zxhl1uXR<A(b?dy;u}jiZ?JXB@GB4AfwJ#M4dsw6>tIMgFl$vX4pKNl=+8>)`aH%g}
zQ%$x>Dt{pCKBoOUC@SSaR!>c-6WTJNda4kx;oE$fG~G5!+AEe$yu<76DFWC=<9%Q0
zN$Sz+(bCzEa;rZt&MiP6F<lAFOL@8Z@~y}tMtz-t8x<4YTX<V+j37JoUWZ{P)H`D)
z0pU1%=I0@*dcrV{xd&b-`b$+^NrDl+&RxV>&eVoB2@1umziflSu7+8YS7SeS6Rlpd
z({Pa42^gG#E`cA5w!lF@+L-KAIlRgI@Z##=5U<5wDyDivo*fbFIOWXo`I$rdiR7Rw
zuMTfk8PH)li+9*3u!=?djdYH}Xyfqyq+!xv$g)M3p`F?lI-EWE=Da^C9FU{PPf5T2
zLkRxuMbz4?`0B9Ekw^X3+B3(c!h0e=y{|^&>O6lQ^+pcnB<6fg^S>tPYF9cqH3&?_
znmK=H5g!)agWHyDWihw7g!=2uN-iz;BA3qVczD{-5n}!XUZcAe;IIJU*_dY{-F}HC
zDUa^pqd5&@2dbN@KWQ=d#b3L)ckx!x&s#x(^hoJ3JTN#VMY-%=K<R3jC-K5=sLz#R
z*Pnmx;kzGw!lBeZ?%)FX6&8)NMK#sE3nPVvH%aGlDHYo|={Z;#kLY^EM9f#uU<!#0
z^n`SyC0?}$`p^8_`9>g3)%LSHNwr{ytpzqV?&6OGRa|i!1_WoJ3vXd?Namo#x_CwG
zqEyV^U(&k1nCo(ppN*EC%iZ#eJUjO`&Tlkd!zuGgHGeKdqgkKm#Zw~EYM9Trgx9rC
zJ9u65F}3@9KLU^>TD#pg88?7R9<O7ha5Q2sn!+1uZet$sbM8xHHcz@aw=N=X%WSna
z)T6jx)8v!7tu;*w(@zaAjY!iNx-SN>!LZ$W;tL1BRgvGVtJaB-h&S;dOqEtS%EeXc
zR$kQ>pt~^Ot>|7KvBS*Jf*rYA8r5$sDgOIwCh1Rt7Q*q&7lk5ZXXv-uNL=9eOG>Ma
z|EP=Uk$~|6OF|?i0-WCVPkgLu0xgjtfJ_y6H0$1)l3+Jo6X(vonmMY;KvU2f*X6VD
zVIs00__Ducye|8^EJ&Nj`X+WG?3ja$K7!21E(pXe1<<bbNauaHGx1<Dk5*S^=e0t`
z>xx{KH;?=Zxm&5n_A~_FK@Y=-a)@Cb5pq(Cu!#?5k@m~DSyJkhsBXfABfh}jda0+&
z@2<vtd9O-y9{d_??Mx6swL4EQ{iV_a495J}uxRwNyTj;7Hu1cT=i@6cUkgTw(6}dr
z$n_%?U7gRpa<i}KSjbsdo!x9wLU}ak?AO9JuU*H{(PusnW##0*`lw>U?x-PvbDM|g
z1vBWOl(r;2Fa7Ub%Xz^K&-356-M(-8S?Vc}&ldx!6LibQ!T>qLBBTNGppCjBGRms9
z4k+{Z3qz+rj?2QG+#s)-yU%isiu(V37Jn0DvHZD*8H+77O8Gpal4QS4HaQzJ)#H74
zW_)xEumXL`;+Y}K|4S$YmBIl>SQxIT^0*j^Y$uGF5{<EwD}^}8D%94@rjhWU|JsR^
z(eXcGM~-t88|VF{GO;i9=vXMO1GuJ!>i;f|TG`-qj%5jUxOj@gAU8orH<_|n+vn}r
zFur6wIOiQ+=;mird3O0|TEt9x>*Z>{ww^?T7b$A%d5BHlb0M^}`#R8vSx5$7eDh&g
zS;8bD^~tq{jJ!{0Q7f<g14Rzgmt)(CF@d0))%%wnoY(quE!m7M8{veX^jx2J>)AO$
zyU<-RI6c5*0|`>@&P%1#-Dc|P%}Ham$4<FQ)*{RWojH<zPEZ><O6rGhcH%X*0f#}>
zo%<|YiODJ6l~&Do{O?o5?cRcJ`rQImLDtfws#CA|eHGv)6*+QWXp21BpX_@$LNX#g
z^O4oV8K*lIZ=Tf786KxFp2lf^lU+@zqoWV%?(6qMlbGF)@5L^_B6)F;wp|sICf;H$
z2=j|7KcgP7_n0+&)%}NJ%7-dwCR8d<la-6>C^2bdvt_P<`{>}T+NGjX&4~3)!_mZu
z!D{c_oMxu?XA+c4yF{Bmh;DGzo1~>~Ur4CuH%qC`n+0Eku5g`heF3F1>>FOaVf>iQ
zAdQS0_B40&*Qse4a=P7E*&JaT=<`|0UE_yj!}kRsh$N;9^SNJm^;X^M#_l+m7Q-;7
zr7xenubn=pe!-Ywoh^N44X~L$Lf*2(bnG#*o?!aL=PZENis0{LJfrtUh^I7PDl@9~
zKIM=x<sdS%=X-n9>xo6g0~^opzWxJPHdEtbz20X*kxt6|`_kBEZnQvJg%{WhYb~D_
z-i>@^UCx-N_pLm!KJEE7$P*l4kGAo^kCK@Q=|l}5sl^*|xSD2L`e6pbQAE>ryl?m3
z2%jyD&3EPPb>~PB;K+?2%^-t0I36~IG>^s{?@4~o{-J!4Kel0F5X{ta#iw#nCdZl(
zojW(heoeidL6Th$8EV9DayTN-z5*43t)1NT>FyXS8oZ>38WYQnMpapr=|B=+4IW(y
zQ@a1t&LBp>Q>8K>b3(93ef!Xn8`2RQZ6!pg^<9TQ97lUzph*mF`N_Ku7S#y>S=fHq
zw5Vo=F3UC9zyqK#Cj9|tZ-E&^=@ue4v|Yb-#IQhBL*#Y5798N*EWe!ob;<s1EN4bU
zArdf~6C#U+XpM*`IAwcaoLWx3Pznz(8O#f38=&@}U7&Jk8;k%9-Eey>!qcmE88|Y>
zp~4h%`fz{v0Q46A+1Qx~Y^3F!%b=*pdUZ1T#N}wHpM0z{{Dj2Nr9@0KbW4LcBM^=1
z0Tajvz`sRmp+W$b_K@ci(Hk-3PP&b?-~a``VcgETr9t});h^K%F2|i3W5_cJ48XNq
zMymsXR{^peuxaX!+LWtZoH{k%YYj5^o}olv@T3Y4;;pSbVQoADuDVut2Ss%IP9DsJ
zNDuMt%$KA!ZdYu?ZqHFKZo$Ec!;!K3!u&!lwf38S>WUKSTR^Qx55`tr2I9IY16llp
z(-L2mYt>UPuYM4(zSLsw?<Cc;)gJ{fPWU-MC;c1IgQ~m=wAo}Mza>B&K0>b91qW0t
z-49I~yu;SB?8=o8RAsHKfmDS{ECzsEtw2@7O(J$I$tWAP7&dD*L(nLYowO3KwT7|7
zPP9d4n6i9c6KlT`aDz}}x@|iwJJdW^=nYGDoe%%MIHh#$g3v34+A4`^V()Zb#q2Rc
z9qWC}fQ7cREu%VQfFhi*^)b-4MRg^cjri$x0XwL(jyO>@W3FlA^l7RqPW?v6WA)1g
zbPuT(E(frXUnZ=`d#S2F>nqU6fG4BJ_665wY!AQLrZ2mvwajpu8Kk5S`H96JXx#$p
z%Y@SJod5Nr4{F6u1aD9q0@N@n@pH;4IAl<KmPK=P0op$uS||gAvw9b(oR(SqTpvYk
zTF&osE~_DnJ9ACJd+%_31d7nb1q2o4U4Y0SubeB)IqpRZT5Y<=f0)z|Kt!dx%GltW
zpx=W|UY_EKHN>8xX|6AVWx{#l`6cFW6}bB;<-#*llRpjzMLePzb)-Nq&Pi|U%D#@h
zc5R~m6fm}5NF~&I7sIW7H*s3~W=d&}B%du^Tv^^VP>h|syI218we6QrXA_<HVfafz
zoHq`+lnkWHxUTv~J00tkJaHS!c8-(VDXiA4h+Nw~vB;$+X9nekZ6&ufVme3$(m+Db
z+gDrAjk&B$FcolJhLy1Y+jRkc@?Qym)DuW+=NV8ZtUSNd+|m07G(J;&&1A%M<QfOP
z-XGatd608I#LBxaCrn)UKnbb}(B`(*u4x?kqjlkzgQ$}NkU$0wzk^BDrbM^g@CjAs
zJa)$J<z@b!@v)!Y;NmfIG-~kN5W&T=d}mJlJ{hh#pO9$2?_fp=+eO@1WASx0@r!Sk
zE9eoQXx`mIC<E|H6SMG*N0MBi!tc-m!iJUxWRn!j>*Bj+bgMsilD~y_KAlkaO+iTV
z^5sP5Uie^r;3!fb*Y`w6iYs1E>p{}lfMv?dgAyf|$+veqy|&kgKw@9qnrSkxOot9r
zWO^bBk)=~%c>^$3Hpcwt5>OTU(Kh=oI(qdPVDTNvX`z`)-o_(cF4*2Ec5X<)i;HJQ
z+CmS$mUN|mYyjsBPMI&0QM>X)c^_!knQFATMTK!g1jEe}oy07a=9`aQq()zylpSCI
zqyEB2aBWiUk67+L?BEi7O@iL14bL~?^qG|A^@V8Nex`ZvVqSyDoeZj9?f3&sid^yS
zEY;6?(HO7U1!CtB-W*I2#6vIPXyn2q@`38r6she{l>*ajAU4pa{&H!QFrpb3v$G%G
zsl75gxB>oIGL0=16ZiL=oeeCS2LZaNSrL@J;f)I6AG8_syIORjDH(9Vq^Od6xOTkZ
zc72a>cp1Z9v~l5HY%g75fk=)Z3v{2+67-3|uh^ARF!r(X@SIUu4DVUmKlwCwv}FCF
z`{pyl5zwbm=@F(&*DaAHAnj7HeYu;v&~ecOH}djE%unF)?ma*jJLee|%YwF|jmrw&
zxk23i8}RVUiN*S%E$>e*{$CK!KlEb=*I6tDcPY@%r7RUI2B_(^M0L74E6~LlDThn9
z!;$1-5)<*k3}7p48ywA2>|2#^wsB-kTeG3NP!q%A-nAxtiRS7jhc$^vB-VtWTk?r?
z6Pq~a5LB^mns_(rFV)1wh}mt*_W*jo&!Q;K=&B6^ul)+*{D$vj)5xXZCI?+u&HkR|
zFIbJ>j}441dishbcI^{CrB6erRO#o;=b7H@B=?NW;3{9G%+!6WmzLnpzf@5QK<E{W
z>kkkft-H`XT4rP|S{^?9Kp!Yzix0_Kd@Jz|pV5=|;()BXXj|W}Wu(DA-1B^ektCM4
zHM<)Dzz`Nq92rHDFWpGNT*o_09(Hh8<%(z8a?0>l@H~C}lR04BCN)Lyy~5Q)n9ICL
zYvHt(bD<hbXH#Fzle~u;%fAa3B1u5|JkSv_G>vLwi19;)F|l$mMC~nA)W84mIp4ZQ
z=o6ks>P1Y5*3X~M!TKVH*MxJ;l9hXN8~nO^ybdY>3;U4@SF!dtdX2P7GccfnNK-Tc
zTOWwTbma$<rKHS>!a0*4<T-vJ*B@|@swOpx%#cU9tncr3xrc5bl~rU&S8#$mr^~(`
zpoki<O{3A_?Icb(t82msJ_t$MCE%hw&H0PyX^sPi8S7&k*mTgs=(OHv$CndTo9b&R
zu9DaaDm4ugx1y~l${LT#>E4zesPL5r7jl@Sx*Pae?t>JM4~S9j@X%!8jG|}JMdCAH
z5${9dU=O?F*Fd=}pV^z9^XxXap45vZ;n|JZxm&r5M=&<?(^4knG=vnvp*xHI&q)j0
zXR#H|aLY$MG2IbFuHwWVCB(*ugE!3y$2QnZROhaQF&Tw<+bni8&-fN)SBG82QSWjX
zl6pk>Lff<X`l@V9Smg)WR6GcA|AKhwaW}VB39&DZCyt*QKOXFHXF4z2vGSr=+~tSs
zjw<zDOS$HKPZ!Ueni<c#Iz@YFL&r$o6&B$sWM$`2f?27TYufl=CB$eJG_)T?GRlF!
zA-H~XFrzl#YM_e{ebrPedCO<-RuZ-Nsyz3_pUT%%(YFcJz@2h}b&gq1TW&oMyJ{~k
ziLvFeDFz*93&za56zm5L_&O7KHR6gk=CP1i+U_M&E0eETXO>De?#ITBiP^0y=e_UI
zRWf`zi^oLQwFgU<3^Kyb#pZH+56)wr9^;!H)*g^(P6JmGbRwm=#BYXzuRJ%*m+R6M
z%$tT7Ra%y%p7!AUk7aP(ZJ;U(tId4l;q3L@X@t$gri0Cs5$ZybD!;UYBC-owhEPuw
z@Znv)l?=PSsby2pJ2OrBA<BAnuFS<)jY6=ecrH0pS&Q3G-Mlk@w3ojrO%P;V=RFbq
z*d;IWynkb!h$X-JOLa5tAmX%K-G9B?d6E-G(EmE;z{5HEt~&GPZ+3n=cE|icurXB8
zoV~ku3Ou_`{bT!-sI<7#9a;`D`c=^2J_~!(_^mAM$aZH+!#i;BE!mpdNKe_ZAt24m
z?(7S&9(F1}%!%Bl(7^AdEVFh-b3P2#TA9<MANjI&mvt?!a%Po=!Hv#utM$d!np~uh
z8DrBrKU>*uqeRDg=ThN9!62jQ^mRSmV=YV68_Ew5pcGq?m}8AGy0sJ`o91_Amw%kQ
zUgq~n)fS-oNqb2wNf~CI8OFy?ctdFzyMl5G*r*Wa^Uz1KVHNfk%E^ldTVQ&*ysZ=G
zwLOY0&e$`}aG-pwe9Su|{m7EF#}pn2V7=Fe=K*%l(Br7LW<IOE<Uop8jmvtvt{B0$
zu7t-Ih`28~ht0R481ZDu0AdwcV`+YkE3@L>AWz*OOBigeMmTZ9I8y<2!x3zV2RD4j
z^BN}#(x>VAJz()}Z@4fYi6@FlJ&MS09u?H_;pMiRd{4J#pX84sF-`oy5o(WvLig}$
zonNN<;9pRuc1h84$C5XJsfvmG_SDCfq!IcQ^_gkC`_2{4&;zI~t#4`xN&+42cc5Oq
zb|#Ttp)x^G#`g9t{H1EolxRP@^KFLhVVaQzX9Psz_Q{)`V?eU`mJ<oYSAYL(X2Nm3
z9mqDnmH6E9hOy2bII^5A@^eEa+6<ZjsNl}V00#G9`-i{*A^*QOTc64feIJAN6u*!;
znHdceYLep+6vuaY2z`f~*92|xSLT?iQ$0?_`ey{wVgDc*E$h8lw?BsuZ%z#HI623d
zdfoJF*lD(!U-R|KMv50ArTY(4(iVTkbT=|3=!Gu`72zKU3LT+`&9qEGF<pAk9rw)8
zvEYSLa5YyJ4VE6=j=rc{`5gDa3Xy+Dh2zStj*EnS5g|Hq;tS2e?n!O8FB2=QyFhr~
zCAcP#Jh^3_dgYXLX-LJb)%fo4JzBqjk?vQz0SuQGUPgBd8BSkGU-w8^N5-wVYRLFJ
zWimWTUp)oX1rhilH*n|TM-G4+`2QZzjNXt~xKox-q$P;zD&$!B&~`ld<;E<Nk^0I%
z#vL}8=kEe672$BxkH=17i$>@OD~8MMS4=5;EatV<w_+;*ONM`MEAQLZnS*ubJ{WtK
zk~}OtJ1qLr-2?4CGRhe&(LvOsU()81S9)CB+Lv|Hcz=C$oxsDZ&J3MG-TV(PW7fAK
z6S)C6Cqv<<On<k4!DXwy4>y7ByJW|u?RYq~_-Nk9)$lfz(YjZAMZHYbVqfjDb~z0Y
zgfU68iI+}px4v*?k2X@fqC4Q61En-W&Y{aOK=d|m>_?;nBzW9PdbCOR8$m~Xj6d#O
zyn~RqQyH&@unprX^X(Tu;D2@nze(Z4cHDuADf_$S0!g(72j0C<y4fP1)hTPXv7NvB
zJ?0g7T>G{4ddOgJ9<wdY`WGVSvQh%Uf5G-6sO?#aw~PYIVnrnXG|TWTsrp&&hfh>g
z0U`%;yttc^87}n?svWHy?KTpVThOb2$OfS#*~FJh#-5yfnP0LRJe?%;8l3--Py|lk
z0QL#DanR*~q(#s<#88;k`0$&gl2Q`Ca_!7xnx?`FzY=_3)^zP$od5CX+<A<($SH;(
zhI23?FygG+Dfh9^cu&Z+xxAe#_XLD4XX&?n(*6$21jq@dk>#)wBrev40Q)OGExbO0
z9n;($0}-M%2JHb?Z!JizHr3v8b79W#=vn%{oW(Y9AAh$uD$j}$f_OaQY<VZwW7v}O
z_xuOBe1X1A<7l6*;Fa+|yITC;D%>h{TMF$1)(b2Gau#;M!dP(EmjaL7NAxgA;QrGj
z!J39>o5;uVri`IR9{^Q%bnH}CLLA7&{X@Y{!TO?5s(@i}C$%lMun1qm5;v$nJE`ed
zq_t}#?4RGZTwidh+iWB7_geZW+HER<Conh9Ypx^fD0t!Zap_RIAP@o9{>EUp)2I=A
zk40WyUU{o(x53~P`c%J9Kuvz(awO`0A6ZyN(GaC-&iAT!xMuUxcE54PYBU62G+YCj
zNZZt8PPN(~1#LeIur~+b>+Z9kE};!ys^s6`MS2lx&kQS3Dckzcr+Sx~A4T5%_V9nq
z4WGlYl30ifx?P3VIq=6r?LX7r>3p>p$+{EubMWeWKj(OLmAx!H)&{Xa$ZtDCM#JR;
zzWNl6i|ooi|4_R3B4MwO^Xhfip9^@dm>djn``dwoM&NXW*(K1`r<G(Bq`}&juYE0T
zs^u+8jw(LHO_QC*3q4$?_Tc!)>Kc+++RA}KO%jE(GOf;a6?LlFy(J?TD7N(fo-NQ_
z@A-<-X;Z`qjNxmY;xnJfA54>P7HJ{1(nNk(sPTlPsYSPRMlAEtJr9ppB~jN@Y1}R<
zZy$q>jR=RfM0Ggx5J}ZmNz$H2duA(6;9PB^5znh6%5}&u%uWSR-^)0hr(WHh&R-o1
zI72zKnS|#(51RKpl^QC@i6k)X46+l#h<93-8Kx`SFic05`W}s$H+Z@64KsU=YW5Y`
z^Kn(Xad8oJt48O68IO19$sF+XU<ZPsK0r_T4)+bM+)+N06Nu|OBIi5!i+gu}++6St
zUMvsVn!kn+2eQF_`Q`dh8-zVm5Iaiq+$yl~_s!*F5&ppijM)b&Fa=ri`-y-e+l_+j
z(-K*d#tRkyQlShYO_WsbWGp*$Iqh4{p;l^?6^~WxE3me~VkFjF4Y4CK#}x0#s~#1L
zyV=^W4_x2-uB=A;OT!x%L5y09;2Tq0H>U)qAO6ZEOyo)|U1B-HXa4a%j)S~yUCN{h
zZ_gD1tjf}-wTv$<MKx>8AutpSgKQ!@1eed7v;|o1(}Q>CP+s${{4YpP2uZnc0c>7Z
z^>T4XWkpa_T>Yy15TmAO_iz7%_m|j^Jm9y=bU<n*VrHqnX=WzmrOiurGJZLxJ*Zkk
z!vd?^Lq!Q7*%Q{$a4(!iT1r$IL*mzu=5hnKs-9*DqumEZeU*vl2(WO3-~vH1z5QAB
zQ*$%TuM(A+EqLzvcyF-dLfN?v+~YC7iBNWBn{mrgq@Y+z`tJ>l9b~%LjGXQ1Ebzi(
zpJC>S)p#qXyj<6*Nx;c=OkX^m8@w!1gfJr0k4s$>_W9$fsSrPl>G9C=&{6+Q`O;VM
zN5&v5hV{~gi<+-7va>Cv|D`y|?Oi@=r(DX$t{c^ic{(!>zH~}6`X$iDS!fzOphoL@
z!}@CdrTU14Z?t+l@OHo~Vo6x;^z(|%))=<j^1kGF<3pC|`<`R=9_9A}nCD8D7PcW_
zEmub#C|_l?%bVeEvS~TIz$O~d)1$J6alwK^Y|`IU^cTq#E*>9dBxpGdiPVno39ihE
z;x6}9tfr_6D$Ja!OQVaubiS=s1AYBTy2JlG*5|OmTYxx!fUseR-%!@^WiqJ4cxYan
z_Iw?WIlBJW%RwT3vHD~kSeCIVl$)%rTG9?Qv&h9a4r6UARW#=$*MW?9B+(XjZUT6R
z4IC^Q7QS7D@isq+0}RA1{0B)#yVhqT?eJAwc5dc*jLx4FawC?Es~uQ`L6NOuJEVT+
zE^ju-tg0OkGTg}y!ZR<pA+pMG=1;lwp+ppPouvC>#@k|ZXh7MOD;<J7vsXmovDh@1
z{Las;%4q&WkF}I#phvwZgk2NU^w-=o_=x;)oCKWWtnI<Q|5>?Mc31p7GbUK0Zro-3
z>IH85@*4FM`in<sVg;uej+VYsve%GUM@N9>pckk>A^wov3ro&B5E{-W=1lg^t5-cI
z<*i9!q{~elEq$Tbro>gty(6oS6owUqcbD9}oiUq9GjB28KcMgMay)ep3%7z)7|T%!
z;b)`d%0F1tfsTQ?!AS3L!6z}px+=B)d@+8N?0@2sOt2zw4RROi2*|W|9rf7a_btb`
zrZ<fTPdFY*4r@``s1stegyjY_SKj0oLQ1xvtZ84-%Kf9!4~FmeJ=xKEqXO{feFE>L
z=a<Xw=*%X?A6D<dz1fxu2dADv=Ah2K#Or_{eA3<if1tttpu+#LXH`;O+y;onc5<3+
z$WB=`w+6*1aC&@NAlV^;ZBd_5Ra8rk3c4k`0}Kb#lo8B{;v`mQ4rHrl{_k|-CYzUb
z1@;1*+_zo4+GG^SIbJK@Pv8GAq`Qy~w7&yw;9|f3Qdxtis5_BW-dzrg(6KT{8wh=;
z+Yi~qbB|39dwbkvdope?o};@uhJ|*6Vzb1^xlR7wOW-Ju{yB8P?2~~UO%>gu1eEy2
z!x}y=s^ZN*hcL#d;*U#yco!3efuzgM$?mb8LY`4p$vV3#XJ!+aKMR|Sa#kT+PBM|j
zRyk|I{MasKe*Q5`MNLg@bLp8Z@LDU|V+MkPlP8$UM^FQYD88|zs1tDBH`i-7wpO^U
zxuP8yI^CMPM$QHnD@=sgB9Y-e=DZRww!4j=x3fFE`rHGr<L(Py=H^urtbL^OGQ9aL
zhal4RiDRksuKDXgLkAAQ?s!{q4wp{T)B1PP%I}jp=^yB$Fss51`@C<xmqo*Dw^<Oj
zoorDzbt8Nf^nD__eT_1{B?K@VXm6K}Oz6BtWVY48y*xurrsGX~v^F{JMRDpBC+Ham
zh>nfdp38uD3)Z6(!3$-mnkY;Ka)*#+8<h0~c>Q6ya%iSxjA*txqzZ!C7w(PBB@WY4
zm|)O})mReijTtCmp7;uWpWwWpXzg3rlSf+>ds|J{+YOgd<3e?_NK5Hl!b~hvAePzv
z>U5&(l@r&dyns@c065y=lxLsw4NJvNPRaNEllUrL$9+LtXGN<y+jTLPGa3xm`Q#(u
z6yf%LO$b_SEx!0c=cV=iT06x;lp^2;t6ojm5m|ynq3B4eSf(Yt!bNbzu~`1L!s8Z;
zp>?;qTapd;7UL2_>FVxavJZBEkeB{-VIS?@=jNpb_%RXm1El<#DecR|M^at-t{<m1
z#)xlcQH8`l;(a&_8{6d^q#%%swT0G>RTVotalF<=-Yx!WuT=Pn^mD=bRxI#_pRzM8
znzuU0vu#{A<zo7hrGqxPrUbzC40B2fR)u-9(Z=W3Z8}^#tfXqQZBqJf;i>BO+=-W5
z!D%E=q6hkiPcg44&kHOT^}uWmI@jL}$_m^6Bb3#2C~mZV%~Y^=w<f~AshaJVWrTIS
z9f0SAMN(zThDIj>la7?fcMF-(Esy-Y3Y=i}*7b$LC4+17%8k&+`hCxh)$A2hrayCU
zX_qRc&&LGiIb85^)16}cIddJQU7$xNEj@x|4$w7D{a%MJ`<$EhG(00C3(wv6O380D
z$!hgPh0cCXcxSy}C)`F(%_w8F$am%o$-c5?OrYUTjFx0ow?`?yARl~}r|K-yI^_&%
zq2Dc=ZRMU!J)Ek&TOaYYG_F(d-$d&D|Bp2Or#!#`3G9LvIUTlyZu5bgE{cGv9Ojt&
zbXUu4vN~3NHU;d9mEStzd82TMmu8vx&;>bX#j?4FnxBOaytBH=)+?E-jKU-12r%^k
zpcQrD0XcmaNQHeHWQ_G}Fn?<$uGtX7Y*{{IQOuz5fckwt+=<R{h3Y)u3&Ys9zFLgQ
zzI=2H3@3!a1P}%v+Xm^(3~d`OpRH3a;)}K-a1$LeGP082;>05-SY2#S+_V!Om&<^g
zXyUCWtKrcwD<Bd^MbM1kuyhixbU(n!%$eD=Te@i(CAQntbyqW2w9GxL{KI_AwcEO@
zV<qVPt?E-JuN6m8PmxB4Gi4QjP)^bzXtxwN?CT>cQ~~+z$g9B|-BFiW;(8byzXFv_
z*#+UN%)8raJ!D=_ZdQ3<L82$8;`<HSYbDL>Pu7%WA!#ZcLN;J2A<9jYabvnMb%gex
zKRy2Ld=v3XBTqYLXFp1n18%Jzz7zDv#f(t)5S%nV&`YQB@PIT}ymWQVhx1u3PTKz%
zw&oAr5hUv95S8Kn)lCy+Fd|*Y)f$CX-)m#W&dH0-Pv3bn^-NnZG{Z{te^%sFJ!Lm}
zeckOMo`vudhK7|RJ7!?vm<mi#^?eWSlB&rnVr>$|ChY>Fyc^{CGZ~2GkCU<%nbx+*
zP+6z=fCo@r68b>CPKQNdz(jX$({pSJ-7@dHHRjL(tnL>ut-u!|sE#GMZXpB$H6m}F
z-ZRtCP<x^|_d@I3x!oQv>LoBU@~vzm)8^a-0lC=TiPp$Qzf<ezmh@&qH*Hgn7KbT&
z-m72Td<pT8Li7TRl>~3E8x&kj73u-|wcsqpbWTwx6wvD0unsYy4MJ1?z=mr+oGIAs
z<SqGIZ8KyQ@GEqZ#>bQ)96s~Lgoq`PX!EYx!!3o7p1yve^^GqsMT&Q+uC-MZ<X~d9
zjSIsIg{xn$3Mtmi&NzF`j8&esCEa}%n6D?K{x;rN%S-1A5Fq9QZuLXQ;x{}nv?dl?
z&_nXimlhe`d`DE?f>in%(N7;QYbRQKosHMttsUZT<Tm@BOZWV1eB@Ydj!xqnOBTTO
zQU%_bFG37Va4@Uz)`jj?`^%)xF%s+6pzR%B4_b|K6u(QF;5vP0o10SHzZw@D^oh_s
zerTGJ@SciWtE{O{r1LeQ@I`szAK|WvkF%Bn7}}V3*ItJG!s0q#e#_H?R+=lypq5gP
zCP+-MMZx3!XlW3eeH=J0M`G&T=h&n&K7*#9t?~?ON?NQoQLhd)HM3|>;vy*KY&c~*
zExy<BQ*-KXCk?+iITxY7lhbHV*~T7*rd(bzfpaAlNe_Z?ea0w)QSK(wDQC3%JU&de
zH1cB;(`p#OI)^yBdI)rdS|(2cRq(&0m2)XKadYN9EYp1D+IigEY9fVfw}M6)r!g`}
z``Ew(_;muNz-}C;unB$VbhLhJ_pQnL3*}y}>@(^)jJ87`(gFG2dU>L4%F&}*wahNc
zscM~1e4TnNJYi}gX7a^XEIs1o5}H;;yqB}Rix4TBsMrVb105gC5PN)40gLO*;+((Y
zOLjH#a>e_qS}ft|U^#~|@T+|;i4Wkn^9=o-!kvx<igt%L<!H3?pn{YJUt26%qNy?+
z(wIC)5xSK{cfW@#(rEdGi_wRb>#8&yI{p!2wpxys%gHj=VuC{-0SwXO4kpSis_UE1
z=V#rj{-gQq<|Hvi`0g8|{+h|%H+2e=S{Sdd7S3Cx$WgLmPCdi>zf{b(37uG!#>DRI
z)d`mijYez-Wvr`7u9HqQvCmZZEd!7_pG9p%+ZiA%s&h<{@c|=H*awnMWV>OjaTZTS
z)zY*>rQSbB+U7mrYQO*Qt!R3n_-0t1g_YHJlhRx}wuG3V{N>>^sMjM$b;~Sv9I?aJ
z9Ji8;t9d!{=}}91tSC|{mO$@<m&rv66c#|-y}Wf?d9~L^%X=uE^axbQm+zx5wLCPJ
zOm=j-UI{~ycC`nagbU2j6vPyO{+v`UQ~G&?Cp=8exYZp)@)9FmaQ#vx+FJnzzOIy2
zuiv?%>Q9D459}ne!szYGv-OPlD|E@-KtaG7U}90&I^<iss%Qb56b-C>4T^Ro$v;rF
z4q1jbA%i^_{7&1F{`_41%AXpeCCHbiO|HEV5=t9<SM6;vGX1m}(sTcFs<nt-)ky*+
z<)3A1OFr<QVe9{IOJWlV{Be&?PMKj<OP$Iex@$@F^l3aUABHRE5JKm5{*4@vM4Y)e
z-UNNk?Y5=nLzu(`RkFI&8}+mm-a3~owp68WL9pQGqUYBLHhk3}(Y<!olCLMhYR+vm
zjusSBL1RzFKW3jb0-#rjoc$nx^%#aT4#R;uIh-<CL-Cc$4fe7_Ka!It7=_L~jiOQ$
zb+1uydZWC(FGjsY8@)spbB1#knrz>6KC!LH6o{y!+b`tJW+6>Na}oWZY6#GGdp*zd
z4TD8Kzj<(P%F{;9P83f=S;3u`Wq5S)<zysjCLAtr`t44QOo)S?-j<l1zXLjb{P<s6
zs*D>aU~0@0O54oYc6mT++hH@WK}-x0wvkU_sij;tSO!fEE_H{Hc(HVbmERkyLOdPc
zRySDqwq}Hv!dw&$iyCGA+`sl&WPTeN_KaY?+#T9Pj}u;RKeK449#6QI`^{?5kt-$5
z&Ef}pZp<rLi9Qh_s)a*X#0iIOM=as)U#eZHk}WH?p$StUS=xLz<@$6|;Pe0+V9#ud
z4vuEDerOsYr=M&+xO1}Oj0NkH)BjSviM>RS=uG=EhUdO97VjT2n9RQ7t5nyslH)x2
zb>Y*O1rLyPr>`E<?Y{4cLOr6|808j$uhq>B0C*Umh2$+hoo7X@nKY@J)P{PkVA?WZ
z4e@?f(~a*=HvAmjz&9x8d^nf6KK0;i+jux}Ts7^CIt80U7FySjpq~7i-{s0Gv}hbs
zz+}H-<(|Y(axiTYC0aZ2Yrft;gBbF|*|j^&#j0&3hAD5#Fias>)}0~j_M&XKUNN?^
zm4^*W$$9lwWz0HXou-p5u&-Xj#Vba+xF#p=&z;p`zv`k_{QbsykVbE0M4xtusFJUp
zs=AjJKY=E8wNRj_^5!dufev4&Mo+=&Cyo05hp)E|i|Xyy#lb=WK?S5i1f)T_!9q$v
za_CXIQyONJ5NT<pMkJ+1fuUh&q@}yNXNX~7!n3}6zwht2&)NH&KVV(_!Nt0UwVvmG
z?#~^HLHe;Se0^)ewym>rIf&~5qSk5a8P7Z?5gI)#1p|j-0a84yIvQo#H*}odRR$!$
z`by_}yGO87FF36;yy|IcFkI)w+^&QB{OV>-xQvgr(T|hw&N(Fo89wv!3L2l%gWkN%
zfQ!3=%iXWrcuk}(e6n-x#7#ICwZE4iAMkR@|4-<tCE+!0t=_s>sy2OXd`yad&#=fX
z3=IIx__8up6<VVr-v8q$P4cgwNsp(1hpbhajhB$Qe-<JonM}MpXe1C5>RtaJ(e$Az
zq1xa3VAu^RDR9niJaQ2Hw7`6NFAW7`Fm(SJzV@3<R`(-$WzNlqE&BiXK(Bp7=|ua?
zxCdXaa3V5A`w345-9Nn9{C(bV-0$LR1NuW(NG+A;ZOZFpA3GPg=5#_X{ggD7e(za3
zm7EGLlua&)pCq*a!{X5A_**UYXx~yanem5}F(Fr&*4WGxyW`iHSMfQybiT=dzTO-%
zh(45nVmYB<-{t_fJXuVzrcbOJPQl}8PPVkb8gf%nb`P(G^TV?cQs2cu$^2!H5E7_M
zLX=DOCxwj~fzQte1^9n0WOs_QB<DE)8T3YE{7Iu1j>#%$tQvu~O|SNQMW#&}XT3|&
zo3mKLU(Et;tJB%YBbWqrnC<D!1YU67@xI1*u@NsHF!}Q8$g7di5Q`A!lZ<8^Og*9C
z2<n84Q8iDS)f7^iy;e2mqIu-Nc`eB}o^fU->cP8N*;xVaW%vCEztyfVLu?w)ST*{Q
zO4!Sh`T64C@+V>UlE|!#?U}upnzD{!y};yRbn&Y!&x%-GS+`^r132i)3YNYvYN_f^
z_5y*wXlXfr!H;3Zr%g5C*7&jHQGr@erPg);vi&GLaH{LT%1q~G*L|UJg_D5|j`u7F
z^G|rVLP<z+nm=&v!+_HqwepGvUOBU4R;dW;=NRB6;${SzV;=r>r?Oi{_n3-G=T-8D
zI4HG$V1IjtP)+vA-aAB9^-~tzM;_k-lY5o&*OPl?OA$n8?C@#A5MDRs7|)8D@i{Go
zHR~-n^|~)5DXs1ejV2O5`m3Hodz&O<J*P9RL<vrkhaG)cmP>1plbt>o@PyLg7ootU
zE5sQ9VL-9zy+&-ZyY;F_bgT<r-sj%DnVAaD(b=`Psa72X(>qUx7dZ>XGtRMoAH{|F
zy99+T)O&aQFjg=V#U#9CuXOI3eE^Xx^5@ar_~P#kA_$&Yfl@rZQQ{_&1ZoGq3;APO
z#S>|LKhUE7%9d`nPAWT&U+(vO$jDedBi(sz8~8nh1xx)cQ~8?dYCx*W#5z*4YlgAS
zcVAXU_r3UN1B@CA>2GiN=i}%%b}4gNyZf9{6TzwbhZH1$V}6ep>#q~eC_B)nokTFr
zHH6{>3eH2`jX*`wP&RA-Lbs&z$9oPu$SfFWqjIML-i35m{s>0>gph(QW(sjjRFB8M
zpNSTC=D~FYtDvpdgL`pg!4Cli;8%1F#ep`pv?gjaW~V=7OxTm0UBB*rk0&D@@|UE!
zDn_>|Z4s!y8~P}F=hT+-8hh(h4rE}diU8jytWe;XeN7JGM+G@1=>ATYG6C#M(jGfI
zxN=mmLVQ5i7nX&tAl|~=fWFLTWlOtJDA3@X=vh{o!_u`;0&YsKR!BL86(qR^{638a
zT-#>KGXFoi`A?Li1c;kLLx}@_N!~O;S+Ptb10bO|D~9xk(mnJHtfbpLQ<MQ*Q+<+`
zQz6z=MfO59d9;4_&u?0%@zIK@<-aMt+IEe_3aN+$l;bne;P*-*W-#iqO02b^m&>}{
z{N&NZH}CPp3H9CP-W$jiyd4U?aaIJ|KoQ25j;vY+V8A*+nM26OpAtWz8sm(lvb<be
zkO6eK8sa!;<}7-yDMWG~Fg*PwVFtCc$u*$jn`PoVI(Y`Ggr|)U$E_Xq6*?KtZJx+Y
z%^@xX-a@6)LvBxI3v_t8KJ2Ic<S|m2^^t7+&TSy`sc(sm1ABpLpuhpa+?iK|5WMME
zUA}ofu<Tn)oRg14Wn6-<V25PN#8>bJ%~(r=qe=NbrC)A)IC!-VMOO>Gf8d^M2i(h7
z|2VDBGRag~ax>5npgjL<t$t8>gMWg}22|}48KOvRSjnuT@BD#S?Jytf3kR{GkQ5rQ
z6y5;%l)uik!rf7RDgjH0m<7#^r#*%dR62_NRM}Mi7nTp#T+$;zc-~c7`-af-LSToV
z@Pqr=ZiY~)x7QclL1OrC_gv&8f-xs!>lOHfQa3m2s@f#y-g~^?^$ri)3`y3Lbe>wO
zqXjT4*bvmZI+Igeg(ZgoCPehbd|12<7a;p;iCRWEuPj9&+3*%<$jV$##=^!t)m%hL
zZMsWVEa|-!|JL<T|MjhmwLKJs15IpxxwBqpONT#`gz^^Io}@HWPaT7AY`5`krq08T
zJ5)}RK~Sz{oqQrdF0^R`>#UM;vEP0-Cqmt>er46%Vt!5GZrwG@i|b}^OR?Z{;PZ*a
zKSdX<rj`NrdY|&=hWBNg?+yz{IQ2(ZM4i)6wxY-&n@tVPnHY;d8LZO!<As(yKA{g)
zWnODqkKgP6OA<U4xiVy%?H|`ILn&n&m960kt4?uuFwNMSBp;b5U~Sm-#YRQ+VX4D^
zO~+op*`A-fzj~g%D|wsiNFaZ{c02GN8h-m1G30u%Jisf&VSl1bu%Nd4V!S5(&Y)-|
zG(`h1beStNEWwa1^_&i(1eso<6@~~yMstKF+zZ9psB(qkz6|%Jp?<|bCUc3H51Gz6
zJ5%5eyBw09ZT<#72^2ZGU{~y%hQAHRI0rNa*FrZ=6#4*x#Q5;g{}W38SD4)_j{y_D
z_&{gDoefQa`ig0hE7?*ZW~So<L3zJE@@BW^s19?)mL!~KUgN9C#QgL6hG0)H-Q(fJ
zJGELo9e1=gEcVjB#rXfm_1>e-2p}QSEwkLP`7L>QjuODApj2Cx--Fnx@S=qj<?Mz)
zcO)onjSW&^`z)z?|C>F3*Ao{0WvW!Pf_^uxn1dWG_n^s+Eu4RMNfQHxM9z8Cl1&X2
zmQE!I(UmV|4S%<AT_DWDLg`LL_OIhe{KGJl=s(Hl8Q{=&F%cGEL(b$@Yj-#5+u~Pb
zw?BkQK@6elge>Ld#KlM0ueg_crtV#F0-Z`sKq!{QJZXr8Gv#HLGp8UQsfTy-6YB?h
zxla%K!9SL65$`*&U&UX2yOI!*;liX8Q&>3dx-zWaYj2(AXWeTDSnj6CXwl7r*G1mG
zPf9frI@i@XNw*z?+b^^?-376O=GIo%G(=p1z@fZ_vl%}VBnjYYJ|cniXHeF#Uk_Ha
z{T`F5>*FCVG_g6qHu$yIk0k8@&4;>W$|)bABZtUUr)D~Awi7~BlGSw7D|w;Tm*_%h
zh~-=PQDX2QkFHYVw~^PDu)0Kwfu4B;E(<nVAh0iW03<$gGFltvfBroq5!)4Sy!n+Y
zRI<0+nwe3<j}E}CCDloc%#WhD#8?ng0Q3~x8lbyAP%p3QR*j3QiRIh6FtmG<Cp3x(
z56n7Af64_QS4o4cBrQ1;h3;Xu3CiJVh-8y)_?M}WgIB&c^J|?<LpQYstKZR88!p81
z2pf7ySq-S~IyDU^>1^n|!kYU@T4Z`wgvY=9y_Ia`IeuC)evyB|4$z${0<J(K>SH5S
ziFH*0H|uTwDyhv!`2gF=y34;EE#suI51a?{9{-DdAo=fCo0EcSE$;q&ho&^^_e%NW
zeY)Z1`TL2wF`2s2jrJ5cU4P8hO5{Fzjnz-(i`kN~qc^jo4!$z=W9QpL#*~C$&^ojO
z8F9JQRql8Y%;_{m&1h7Y^Ky=^&aQjQ8=FP>i+q~w22u;@xWwuJzK_3Jj8(iBG^niW
zpf%X3_vOQJQM^S=w~1N*tQu#iN6^%l9XZ9qu%aHv?`BW!o$s`(P`<sZ6fHxCb!Q0S
zJ07~(0qU=FqM(U&8_FY63T+}vCL2_W&$a?P3AN!$af}HJpuB`tz9yx+0wvDaD&)J<
z!kmo8eq8Cxfb({d8R!$N^)>xnQI&G%o5VLwIUJupy#e*bCf%tZUFf&xdGLl>128;M
zujYsh`OmQm_A7L0AnYe~Ues>HLKsx5t&%Jf4l0;dT#CX_y8Xv45)Pvl4{L)gbpq7S
zQZn5E!wtLXMb#2{4;q<Joe(L<F&=Cm#F|>{$zXT=>-4Fi;WZ%WfR$8{6fTbKz*#xu
z+=?g<zA|}9Ec7Z!>_yaGcrnziYHQ$&gB}a!d!LX+H(8ViYfk~L`p$D{x$KJ=<{IiG
z3B4XCUQz=RpdKC9dG-jIkAHkAXE$%gYh5)VC`A(_>4cy^x0<R$HgnIViD~blE2Pk2
zq@Q0JyZkDdz%8Dx?2etoQoyBVY(Lh(v-YOgF=)wl;58ndQMUu0`{-_+S+2Xq>F440
zo`57{x{kf2ZK;~(@-OhxUGv3`0d>|e%uN(@w$Q{K&&!26_@hsR@2}r9xP<04H$#{l
z>C<v3CWI>9QWP)EsngDd(M2yzoPQ2yUz8@7W={#mQjs@907TGsJ6J`!l~KWL1s2s%
znCYJ9pDTFg%CtTo@u8?A{}=1@94AMuQluRzPc(6MNCx@<b^ejh_Gg>|m-$!$qa>q2
zYQ0TnT4iR8&Z3)F)!1|F(XCF#bQZKZ8XI8Hc|7J>%3H+SX3;0aT^*1Niq-|0?n1um
zK9Obl$D{rG|8h7ZF%s$2iM+J$V!yHEU?~zd{0o_2z4kHo4D@^^WjSnDz$v<~ncTlb
zFVB?!b&_LIiR1Q|#n%F+v5xULQ#~hL$HH1ImywGEG@#Np`hlY8sdALt4mjW%NR!T$
z>ItAPK7E@dhEeLIvBtP#PeO3Y_!p6el_&ON6(o_m)%rjH)SazjvbenoopF0g=Tqu6
zowjWDHCX7Qkzg<*xz1O257?d%H_7|Gi_fahZqZ#7tRZjqFS9c`nF<lQ3g%~RJItCU
zE2AEM{;^x8oVd>Nk2R7BeNJZ8Br*8IF2H;?kIyo*<!91F+meqf9OAH`R7->{A3o|p
zFIwo6etC9g;3_$)zBAA$f8?4|T7Z1z1DI>>160s^SG;c7o{@)xpXaS_bzCr+yPxlp
z%nuTOUyJvLrz^<<(@ex|D|HtoGBZ}G@z5gO#yHaz!de3N>!2E)%8ZMTK^KSR%h9_5
z`Oz~P&tlihG>m_dQ!iYP0!rT{Z@YF9rM6@j=px&=BV#nbcGXH9lowzOF}2N$uZB5L
zMFM>#a?^!-8i#!vBbo(k7+BB_VNA7)O=v>Fy{x|&i+tTU4ll%wnQPd1DUvFZ43l`s
z8~FgfpH!!zrUi{8Hyxw7M)yYho$;Oa6oUsz(Ied7Ek6LKblZp!SyXKYe8rt)r^b&X
zyj?_0#kx;CsVn5C8GA&)a65E{rknkasgSC{s2w0QyCrVEu*A;!PP6~Q0piR3wC$=}
z;7_>Sa!8jUYwAl949Pn>Le%No6-sRynz$i!3OBXa<HGffhUCp;(9U5OcrgE5IT?}^
ztpEJ($F_B>X^G`&k>vgCERdjnEjIsPy(tuc8~e{MxQbR;Ro=y2xjZzvrs?d?)iwNr
zJb#@a3XZILb$+ipopP8nX<)$1I43`66KM@(yxK~z&m^y29~hO8K@s*Hc0CLi$zHVW
z`-x4hg+L<PXb-fjan&url=>^0#X^24Mg9>H2mQzNbk5WB-v(Fz9$@{~t0PvtgT{ZZ
zTTVN!s6o}Ubn&?@jr4+1V*8bl%JwP&{nm3mW9*b_&d7jQM+ik8RY|+b1ELQd^J_Qx
z=>|vH0-*}%Q)LQZd0;0~V%;XvqgVl;&BR(_I`>j;1<<r$zs8$<IW~z>$XF_dOc96o
zo;IGcmqYmmhsv@x4;RZXU=u0hd@9e#dmYkkqKb=^#ur1s*!%8w7bT_+#c+obR<Da5
zzQ;99czFeU%kn9CZK3XMonx1L#HPrHV4mkcT^|<_S3gf+OZ^@dTf(Ku{Tp20OW?n6
z)g#m^kwO!XP&aPZ^P1<Et&m#h0dHufZ7HZ;j8nF~G%Ijs@DokJk?VLHZe|Y1Si**)
z3Qwr$i%yDdlHZ({CyBang@nZ;s4c5$l9RQPo-(aZ_A=CtyBzPEQx%qqyD~~_{>N5+
zt6*3=waGBvr2p%@4Lo^R|FwB9!b2`2s7^Pnq@rs&ZP(fV#a32$v2DoUKHrJb4KjVh
zLP&dqRlG*h&mPwSEpC8SdhmZ+xO7d}gwT`e%P~6Aozm(w(tLO13zD;JK|k0C0^Pv@
zxF@?gytm!Ig%!+Xh5f3M2unPXejDFW^aDO1a|Bd!FEeQ52*ay8-xW{*4$rxT2guH`
zn%;d#a+?yJ{k*5lFoovK%H>4PKN^+5LEx+;88SP6I?RH18);%-<IQJ^+ZE>sC1dBL
z%ANau(zg5bz*2M#8lBM-pKm*U$RzN*wI)3EQ|!7Md*@3so-u^?I}n<T9or8>@1>Gg
zVPrrEUa4g8Qd+$G<VZ^iFL7`ge6v!ROt)N{ZHi{HY}8fxQ0QB;h=9X-JMf0|>Wqg*
zmzM3NS+b1fH=(aDAmZ|kmn4pFuD!~ltYV=|B3lD11{)FTDFP;;w}c)m*$_t>wE?wE
zv<Pk`Ou3E^?ANBmY)^au!kD~dt>bYtNN?0usyy1D-B=Gs-b1Guwx`si&s=$`?^mVg
zLqWE{46$=KCWF4+f={@~)^9^%F8t2Rg3UTxMh*kWRnPX&mukP^sV$F*6KoH#r4%N!
z)cY1(TRzRzWtJWwM-!X1z~d!eawWH?513AHrE#iVvnf-iu~VS)S9C>nIePmy46mwl
zvfa=SHeR+Nhq)HUahBId>SmJMGGTbmOvd9S@TTR7PRqC80dh7bBofbeDQnBZ!)sO2
zu_x1IfAFaJxALK?LB`l(@K{4rfV=g@)lN98z#0d?{*QeJ#UEXZ-r&VRi-4eC+lObI
zRe!|oYE3(lHZ`fNx$@6@Wyu_-zmmD_$8Av@%Ur8G5b!P*NqIYFjZ>`IYUPrs<a_fZ
zv6UH(kZCoqcowSuc+pa;*g{<qU#TNeUca6gBM#Hk;TrcLyiHqA@wlJWymNlAjkNen
z!g)dSNv4M}r&dcz1KigDL{glc&h=eJga`pZ0)x(ThE*fq<L|D`^~boA`B%9A47gJn
z@Oki|gl>~|pYQ#86>blMBR8?R(#?#WVA3^42pcx9Ly2p2@g)+4DQDfc_PazrQrnEB
zM2W3FznBua@6lq`b>Z`>QM{uaA<F}F%ySD!<io2|Pm`)Fm}F(lqP(eS2lzG_eILFn
zx?i!v!`!7VP4BvT*|nflOc)wP+Al@Js`>+9SDSRnq9wkx0(UG`LK#33GTm!c`iHVq
z_3*I(0tG(sEEq0T%?q|0g9qP&|C;6)|0rb7$tY_6hL0s)n~d|?RLM?Yirmmw*MK6K
zq{Tz-3?5cS<15tbDo<D0Z#xo&Ms0tgRpKgg?PWSTJntkjqaGq7ALIGV9*%K$_T2#=
z?-uOppT;d!NA|f1T(I?=bGtn($P(C0p<Vw=vav37>#|W;9lx4Mv@wrY$&mCnv0%AU
zM)j2K+gRz4E9Ck-Ug@(s3gB^`zrQz_zj)L<#yzA>#~9X@yeLHieK#>EzGfb%h!6DZ
zmKR;o(l81B!kqyY*G9T6s!U#4$HZZjagUla0vdB2>?8;&Iq67ADSVNwTd($G*B#nn
zsQ{<afHy@4^v<S^0X=3Hb@g1e4u{3xSmJr%4#&Xm^}i3DzIuAH7BnR-{06L->3+WC
z#oug_be@m}h6HQjyoi(f*dEhHlzT^Gc>{N5fN8{l7Jz_`6Pi9m;;Zy~a4*=tQR34X
z(I06-kI&;e_jvLj)TT5Ba5uqJ7OG*R9!Sbn|01TJ)e_N@dOLJWgBS~N>a4Oe^<3dz
z_wg{mScK7wn{4Na&hOqz$-qN>#ICBcTl(8!7|Aw)MP#LgYt^MA8lBX=Nhh0+0ow^w
zgH*81bME?=!<@f8%;i5Co6`z5v{Z(yUn1LFl1zbp(voyPf^HPgiM97eLwhFWZX^d-
zTPvQtwQWqdB9VRIXV>kKmkbUFzDbA==0QD^aZ4gCcm70MlN-R4%p6}xyRNcS?~aLs
zgtf7;*qOWY=!Lz`rFYcYvu{rEOU9-9Cd8}`$+V*K4Fq`P1}&cY9xnEV9R0qqUryg=
zT|J6UYztXcVPhWIB<-A1Z&<Ze*p%_2Rw^^_V}zHs+=l$Nq`(TltskvM|8PRThgOSt
z3P!j3#Od_2%Wk|3=++91qazr$x6s>|^XKVhi#VD<`+Ikf2ou%8W$=Zni>*)oe@UcQ
z*&MWsxOp%f-)l1$!P4$r-t3G%?E6a}8}h<FCW+oWUu@Ia)(YOREW)YcO-3Pe)M5L=
zN+V@;^tE?iNDGx|ww_Qy^t#foR>;cy9!>*F+67GGf+bZooV2vM=FB8g$4)FjS_!Rn
zlXQ-2j@|jD=24|*TFszjfZA-y!mf2fm-GC<)`VCEmy5Sw4)iznPAs1w(<LwD!9e~A
zN^jxOIfxM7q}?F#2+DC_pGBf6sP4%uPI~ZO=i9ZZ4HZaco9|@`(6aQGgck-w+Y)Q3
zQSd=P#|O3y2F_=+|DMDAk5_})knqcL{QU+OJsofFhh4M|(h~7Kq)!gj9~em1qlPud
z5yMez?3F0bZg{{Cv!_;`S=yr|JwO=_-H#u_YW2Rtcej!fyib{zfh4>!)y($K7Lbt^
zo=JL8eS-J&wkxft$vve#Eq>*Uj(YMuh1;z*m*5V-f56^B{Siqck#JWSvD$2<$h0E7
z`m-hNDrIG!+jEY1ZozNwebHHO*6&eIuE}hb)8WrQtq#oHS>LcI5RUFO>(^2iKc82o
zur>B!ey!D0+UR#O1^$iL{6XZn_+zV!e|}O@4H`U#X&#u^AKcoz)YtmcL$dkpJB&Bm
z<Q`&-z43!?Rl2h1qohy|G5;bfmdttH;@J&QktA($6^`1e;QntfwG8zM#bLx#uEOWT
zIvS2Lj0y%~=6^|sm4^_qlm+rhO`)P`lBVjfigI#NCS5zzB$)?ipmHNG27i0^R?Uxd
zt*?(vl}iCL#AMTan~iypM@(ADsG5yS1{c7uNK#fFEE|o)#Rm*${&|$lLf!q%ZkVTl
za;~K5#O(r96gVqdKg?(BGI|l$^K>-NZdS|@f}l)tS84|&T8@1kHo-Os5D(9zY|bMm
zSGUx7zW?AnY=Hmyf4X@7eWg<cqlF?P%Cv?*xve0=?XS<J%M8)_SnrccZV+V%1%9gB
z$PXMGpIRHfLXNZzsn^bt{8+cgX!b}fQzjth)F`O<(X&`gcvz#;RL(Ab;IGp$e;((=
zyYa<$B}gvm+!1b~crYuSp14EgL<=Q^pY}9O9dg!2_+L*D_RKDk`?ZWx6zEm(!KuwJ
zMb@HT*bOPH9f*;PKJLYpc+qpRdO=cw89Krbz9cz>@Rl4unpn5$VH_g+wvLjaGeXA8
zob1r$cygvOKWd?;I&FY9r<W0j^983)#4qPU6-ayi161#3&e2#n+adkf)k5GV3(6ZW
zYSXKnq;-^XC8`EF`g=LkeU;Xao%ng-TP3O7M%vzf?naB{E&GJSJ8`_lPfdjG0imD&
z41}ch(JC#l)YywrsE^LoUFYfF{_Ya1!5Zzh#sP*uW(8EXc&b5J8J70xSz9wu>ev2R
z-A#@iO*uq$cb@fRyO_tXHSdt@YWep7Ue-AQRg`(<H1_(#DFjK_`Q8I5&wEAea#_Eh
z&T3TLxW)Y88dYs)t$kM-y;R0;5qm%8=Un_<&pEH${Jr&AM-sgWU#5UjI@8~85loZk
zuKAiL6gsL+_g;wB{(4HNiAjyXo48gqc!rIEz~E7mFRGv^;;R3$xE&vZ*E32fpAa@)
zDoezVbVJpy^BNST%*JsSA+qaXSgX^xOUvWq80t@eI|v@lG`Hm6jxqZvJV-15DRi3U
z%CAQrdE_lyj9fWRCEI^?#Hq3B6!NaJHMJ?X3|2IbT*QhNWi5?q`Jb&bc{zh^DvjTL
z;Y4st^^0v3W^#nt4ln4jraQS_SG!#oJHbTU`t2`rJM~Y|Q~fx(*G6$N+zbVCn%vth
zC)lkhyiTVbogK={E35X0q78o354UmtaDrIQz5k41!u8<ptlGhMp^cYjKbWJqPMN<X
zzrW#egMUONq*=}*kM5=b7evRh`IVNcQCU_#;9>&+a;X?O@@-29&Fd(sh=l$ItHA8?
zjrYn&7$|uzNJ7o@Fkr=mNiu2CC>VO@g&0Xl?MsCCbg{ej<sE>H_Q5|_rbWITj=1bh
z*wlDO5yS^>>~*<am3(M=AMOALWEaa}L?Bc$9FDnO`BP^2qfh^VbOq!@XbRYH*$pwa
zP);-({cb=7fAoyD(P^VIO8IA<GcCGa`(D+;SRBD68UL)rIfE`es`~4`!gJ^A2dSeB
zo1r#TNqqZH_l&mTR333^by^mROhFPsBgtWUF;%KD{#6}HqJ-?cWCN_!7&fnc<lea@
zTXSlD-LF_Lt(5$<Rz|{{ThANs9ut%fCJ|NJlPwPoa}RFBPKocF%fxT8W2rp$h@(le
zjBTrpvlq&kKZGxC3Vgv!cUx`>V(8P{+z>Kl)7^g(dm9p1Dr6VQD)vLxz}Z_shPXMi
z)~2jMNX31*@o(p`fqgLxW`1P;b1S+^Q#_XmG=p!d7=sjER7V(A!_>yj<WDoI;b^Il
zw<^K8I4Ph$I1d@*O25{xdM*Fv>f~LI_(1Av-j$%>wS?C<v_eeWJy|yzG#wvm(w4BY
zGKe)KetrtnF79pnTgP}U3bzyNg?|JE>jr9wRHy6kl!P!V0BdffC|>##hGE(iCs}6G
zSaj*ukgo*5nF56e-~W}p(1~<&PY?@c#aapm%RWvpL*H`tX9(HmxO!em{k*SOsXS-m
z;=-~X=Z=5j{z<u7s`8=NWCI_Q4X5vu;}awstVzUD72Ltbk5<&?8s6g;S<rRMqSx`6
z)gdk>xe>}5*e4g+Z0`K74Ag=z8ftij_yC353#Ne!XTTPYlv3Ij*oMz^U~V9>MY&KH
z{HywgKd1f0e#Tw*;IsBztd)=7MrcPe^k3~B2@Q0g_QpJkG_I*e*LE>`3e`Wf;O~Ay
z-bbds&e{#x&8eIB?}oI(x0fh`9~6Dj{pgz0F`v}Lw_A}jnvJMh6dk>kE=)jEDWZk_
z<&yd<qk_=BS?5)I45vuHG24owP5N%-P~roibJIxz(3o|jA6pBh?sI)QG}NJHrV952
zCYx;EV5NY-9z4gd9QD}@3tkOrS6$_uvMnF`Os_}sPAMq?A5^>rlvT&D-#k^0Q=&;$
zMK8c%ZUbELFXU}&JxA8uR}@wM>E&@=rk<t2xK9CEog<n~cP7njVMp<|CC|vo-kw}{
zVd;6W7kd)=R7OVpRire^TQs0WH?Z!OMA3bWMbYe%RY7&2W{|q=MYq|i?{(Yy)E{dP
zVS?$PMJ1#YtDl*raz5gu_TU4~K?PMVtRRURQae;o+typ%zSCQ<HJ7Iuzd#@AJ=$b<
zLL{eKZG?w~Ju&VKykL92rvU^!IC#c}_uRuQ!8}OUgMn7(IKkM_Uv}0>>b*SUQy^CC
zoDNNBJbmPx{txCMSCe*JxANi<_jjC7vvuT6e?z^G{Km06JPgXdw-z*(9RHH|Ra?Rj
zxT+!9ILUcd)t-!k$vjU$l{;HirH0GV8|Cw}3b4+y0VM0grkGhYa>nYBGr{hOl3_eI
zk792yl+&^o{0FQWEeyN%)=6<o4ls80B_#~6C}%4Kmz8*?J)+ounE`~VbBd&Q0=jC*
zB1FuaoeA)-d853$G6aMCc4rkAzPrhg=0}XPeD(}|Iz%OaHbuS@I2OU|V*}ozkeCWI
zBzeXEdJSx=%a2m?-1=lC-p-IUMIoMKp9J2poTXI+qPi^LJ{}=DV2MH<i?n+cad4#J
zh4}AnAlvRH*5hAq(*H0m`OkEdF(ooc5A+_?k@fzOE>>hE3Xw^{NbCn5GUwWwINMp~
zH{xnvIZV{MJ?retLx3&t+A*-Sn(irIQ+hP#zl8}45jr}p7AkN|8)GC6%U>xp$s`r~
zmR(ZtQOzavdz~_tQ~Cguk7YZ5jeWbCiWBPj1fSC#J)DV5m)PWb`Jw9ExQzcbFo&I&
zI~d-%vH%T+7+!$r(KoCk%R!`ld0OR6+J_=Tn8Rw-T?L-cw%BNiU-LtTG!s%~`Ay}J
zVIy-KxR~I$<OF_MVzr@T9_^8BjA}=Ut*h=R>{kmLoc;bbU#ws=;VueoQkGNbBBJoy
z>(94rVF-~teXW_EtFUq-@ox$Uj{4XBy?Kcg*~y3w>EIB)f*AHM$*(1s&Q-TwIe&D+
zUK$oBZFzObfSaBaFK$Cgh9|%A)E8EN3!32lysns4e-ZDP@Bm!?ax@K7I8pq0U3Oh$
zsz5673vWLQ058*4WJ+~gy1g>u*OP`gm%W@CWxdm6CBm1zU-wIeM7T0cZ>7vfs!J&y
zZM_zE%gs~_Gz=(OXn%L;Y}~ymmKqvx?vX7IGmVapH}P(tdSU%%E2ZHVcd}>ej^ReK
z-_-)aQD1O(UitSj&mlix<tqN)5)FCFptM+=8p=kc>1hCqNT?CVi5js@Rj~&V0E$IA
zIxjrUVZSjK!+Vo+=*x{S>pmo`rAxSldian`PZxw~IfB<3&2N~FZ-{KxBV=fg0JPb>
z?j`jte{iR{-I^d3V>ck)N>4t2>$ZakfX<b?!rX>-`YO?n&F*#5>zcMwNz3AT7~A8^
zJgBeoL?6SMA!GM6Td1#?U1<Ks8McQmT)fzpzhkDfE6|Uyu-r3mcFwk2b^ftbmQ-)o
zQ>f$aR1WS9c0T<)?0qIErNv${5@@01R1S^qRV6Nfz(d2kZd8|w>tC7UIi=DkODf__
zGoUMrfI6XyXka>VR@w}?sPHNMT2TanbKaL3@=zq>`OsQYtqUGH+neYzNOc<Z%F3Q|
zty%J2EQht|=IY$mmMHFe$!Ee<mb9NV0_;RX<4roQMH_}qJ7lOI%OP5UGt^Xe!8%dK
zuS%);5%Mao|HhBm|08~k(;aV8aV=bz4@jb3w$xeMF~{VtI+x-rj`*liMU0m@2xu#e
z0<jcD4*`eob7#}l^vVZ+?ixFiT~pY)%JYYb_-z@P$F;$`DwELyJK0SJG;YzZj+@_M
zDI<7v7g9~;nONRu;NMc0dLM?_7l9`yTYB307OV<nYjVu>C*Vq3@gdIoE9dRy?yGL!
zI8cbprl&Q%-VX$>DIY1xo&2%6BtLim2(+FMlL6c#ywx)XbzX>-{`^(bMgh3B#3hoj
zzA3ngaa*COs(WDvUDKV4>9wdU52>G!k$}$9G&=y$ypk6VL{Fu1W2}-cT8<k1+)I_y
zt6F%w?F~0Rbxow2CLE~O9$Tc_l%-pDxVPN#7soha-HZJj*C_Lhh`+;zKC9*Rii}uA
zb9PjT^WVO>_SvHuDGroO>o#F0(5j~Bm-UoVQ%p@n_ryCF51!<5#(Ib&=s+@Cj7MN&
zU;qbhk)F4}CcMCg6y1m)8s(li0&@k7)|^H`qH~AqJqfee9HhevONETN@??JV&cOyd
zR-Vwa#$uQ;-VMUsUKGUxSP=RJ7$Tf)_5FWC1PjzYyYp1pGLJ*u*SFTO_zGXxpNs;H
zcx#W5q_U!a!a5&Sl*u$o3{O&HrFpu_15mZAZs9}Zj<e<2mqi}-A7Ajmp_TCqNRd%F
zd;!4W1WJC$#rtC6D_9d>ouavU(&QweL$266iprzPy5Lene8(ysw~kV$WOY4wiGf&j
zCLKxF45wZhTU6*j%p_a^uu-(qI$!0>s+YZDrFo%@YQH)EMElUXk!`^avT^dT4@-Bo
zhQjVE$d^}X*(0>)?1(bAFhi<1u0W<LoCwgEg>0Q!9u3yqbZWV8-}WB1WW0X-CwbgE
zDKc!%kqcIVbawy9hc;W5s<&oQay?7od0p9l=qd4}hBmPBD1k@@&ZNV8_FVW1bO>`B
z?`ODW@mKiI*}{=GYCM~>bD|_C*vFcf<2ulrFHm3eX*aZv`)~(`gNkE_iSvdK#><Ws
zD6Jz=5=AI1T`{YJSL3!jKMykY)e*y4La85wQcFURKvs|~bT;ob0Cb)SozX}heRQ|E
zarlR&nx`o9e&FLnxyK9&%L#uHsIj5_U8dX>1uJu3;DYYf_c|oB>TUGrnTS!#Y>Mrk
z-PAd32mWR`tjK0IbckFz?B!pQduwdeov0jvB+E0#V49<KoH|4vE|8+D5`sprysEF)
zly`hoClk*t+{4&<^%3m#t`TgO$)}UF<^o+E^efBeE^M+;tGLCYFD?@_jn75jFPr^K
z@=4@qvo~liipS-w|Cd{UQVT*mzp*;0{m@wCPLG7J^*cqw4{c;<I#y^#sx9A;Ue3YZ
zF4Si%)|xgKQB8jKmt?ws`3`mkt<G#2-tyu52cMu+c^=t|2g5j3uD>Le<p-8e!QcLp
z{M6}~#^1#$I-cvW%?;mCyA1D)_f4*OOkLP%)Kgkp4oO&1r5Ia{z06j4;jWS7!_^=!
zkpeEuXxL3wV(&C31}i5Iy|XAW2u=*C$aG&%uG-!YQt&JjEqPY*>HYQBk4a9iWCL{$
zgTL^pb)X?|ZS4W)+hzEzd91kF$EnwolAoT`-z(8%4OhK(Umv!}R&sOgDA4}TUlJ$f
zS*j^a2-c5#^-JO88OfiWw87!853Y(m-uoqX>#VQh<^I%`^rxohL8}YCNBU^)f%NqA
z(dw~5j*EONFP(3fRAf)V5&m7POutdby^rq!k!;1y=lIV_b9g;7-}q*$;i+J;<)TcZ
zc;{Gfeb_nZMBN8a>RA#Nvl%sj_hJ3{<tt3WjMKoQm{RjMb|#}@em=ALYpgAQNx)F2
zGYypcdmtxx8Rc`D;ahW0UVi5)MX2X`3&}-Q_hJ_2^4+=rhDjbMB6XzI)>f91+QXT^
zc&ThJ>nk6Is$=1{<*yD%RU;JFYwJz;`i<<Lff{`Ok|;O%^@(DY1UuI}4YC}KHaya2
zl43bo*?nA%_EeoE<bd0n8}7b>6bI5z)X5<ey3DVy#eFwp{KN_l{zd~d`P{ky?PNn<
z0SnjjU?ZXs;irN%=WjN7#?@b+u6_>X)x8Nw<T{cMwL<L4^u2myePi&WO778id11OC
zHb^7?a)1FWX3zd_q|m>~p?_WJn?zPMk!1dv+7ozXK0vCg(GYz+`*c#)+GBeuj8_u3
zw6fri^M3NHIf*BH(X2-{uAy+_cG#hKG+vC5gC~K}Dam1<cceR+E}2XUx!PZar;HYV
zCf=%>I?O0mA$>yPL2>|2$41G>uB^BR@F^!FkdGi=SMZN=E&a~j=}th%%_#iDS;3;B
z>eg#raK<q#&zt6mE$<}H|BOBRsR~Q(fKF3j!cg)c*z{wx?DH=vL-JR(#A(K&Z8_c`
z{Xb~8*R6InWQ`iQdX<i4#8ns*=E)KF2NqsL&4kAO>QdpJ!k^#=pQ-ckBx^Z7IWK*!
z>CI>8{&xQ#g;QR@6I@e9y!r`|j?rZ@icv~-V?M-5+9OZsRnoG^!umMn#*p#)<gX7Y
zg>ojbkyk(Lyt*6}zs@qQr;dbQsnLC$QYM&mxDse^PZJ>j)VUYhPYaFuq-5>&fQBwZ
z#{b+R3`6vd5F>&)TgG)CC$Hf~HC7A)DRYPu*-qmqRH|vEZZycXkt^M3@%Lyd-&97C
zwy>UCFSqZKP1^o1l>lU{vg2(LwFUZ!JXdWOXxW2&e(zYD<3L}Ab?=>xMG<UyId;5Z
zcgj{MYil1Bmq;a=pg&Gnbw;&3zI^V{bK6_5O>1>eIAy6GDi@Eg&7vhx&(`U*YICg$
zo7Vj<6XKctetxi_qYCAdVOv@~ip`)7zyG4XkshSGb|14AMAM^xu4;Y^`MP6i286FN
z8>315-w8VU4kmsXvU?K(#zL26l+LzJLB~@J{%2hZpuv}^;8I<vUFw)KZVU^{&a%4f
zBz{>@zKXT~DC;y)a2DNvU|97}9z8qxaYM2Nm(u2@X78+7J(~%8nxECclAdFVE;zbR
zTdPm|0QR$+pO34WoR2HfN^kV1A*;H~l}aCK2D7;<f>rWF4P~ZFZI)&HUW{`POA&=!
z1;$mxS9vwF+WN!+Rkg_5SLc+D7(j~_mcPq92ly{IUvRP{5s~@B|NCs~KTIE0|DO;o
z?~M3b&mYN{NDk$?Ch}PnPdgAPpyq-FX{VWa%$aUJ<BSL(!5_47KKNWOa{wN1Vm`IX
zRqwdl=^yXcSfIVDl=K6wEJ?@CCw?&&P5hk7RiMh5_qzs@D!A0gEFx?#a274*BXZY#
z*MthksodH`-7{God<)fR0c25bV71##M$BB2YkMgzlumNQhdjhWNZ|okcO3hU68rHX
zkavn{p6&>^l~VJ(2Y>5=6?IdgJn$?vmdQ5X_tHhwtd0C2Ugq<|2d5++*P*)WO^WF#
zB9qLkhHyuHupOUB<{_@u_p<7uRDLzf&fC&B*22WFm^hvZ<(~1?=zXUUxq{q>=3}A#
z_2|>3*>mgL>yPwisM%3G>wc{djuCs4MvooS66bb)cDY{@sF7KH@+s^OX)B!&-2qox
z&%k)O+|bECM~SarF95;o^-rTM!?-8@zG%>uttV<xJ(PN@-aEY=38O9Qa0Gyzs(dxs
zV}i-NL>tUBe-E>)SUv=1?;l{t&a2Uc$Vi^yv=N97%9TCceq_C8!V(t!3Q+Q4h93am
zN&fwXTO-~SDm@?|6-+q=@8LS6m-&u}nGS18SpCbXeOn0NHEN@F1HAxFpW!=cyPkmk
ziOQU(|2R~uNuTAz)FE5*1$=gw=#Ew*mu+`0mq89fB2Q~0yuEvEU>Zr_P-rm#?=EI8
zU;i^Xxx;X0J`9}P@@Onw75ymJk+jcrzLy;Ky6g$j@Up7h>Cd*D;sWOToX~!5z=r$b
z_a9c!2hPV~pm8v0!mR=z(@cB>29}JtU|Zlpuv&D=h1Yfnd=Z&me}Vi>m}^th$z)Zo
z!XZx1|B}GJTDAZ~oKPra<Y;-AR&^PEUniT>IGsSc5QP)!_qY=%7#aKSb{PJZ^JYUx
zb7xvreoY0spP5Yc*>euE2cdFz!5>gPcdh)*#0tKwSpEvENQ%LJ$7kTOGbT7OEC30T
z+bP5Jh2`0(rs|~F4)-Q(v7*L)&y}tVG6XP1WZmpx8ElOLr55u~IGe~x<~(3rSyb;U
z)2@z0&=JGlh*nn#N(x$#`5Q#1fKyVZmWS6UE$<iJXnrMW>)2L@G}s@SjOMMV%EC_(
z$|20^DCF8E(#D$4IY@R~aOiP4ZD$?Dewl^x@{Oj&mJ>pY5-EE$JJl2zwSfFuj`h*}
z`C4<{^w@%gRRfk4!a#*U*+OsnXSR>6xL^>H><b2N@0PSp6hVZB!)C!t*i_c_&O57=
z-&5uY>-WM5dP~Gb)60c-<OFprpIaL22A-R);Xv)wSyFfV+AVO}#HA*V9ovar!g!*9
zWJxw>XYkKHY1@?cZjQ$J5&>_*GN3%m+kBWM{>qjNt&!n8Dh4c2J8IY^CSGl0N+Da+
zk7|PXJN(xjADvD)0xaU4z@SbiO#vb<T*y<QJnLn&m<~%0+0#LDc&hSpK9q(1)tF$@
z?Ufx&Z}9gM1A*}u)Q8`B2!(p<9v)FMrI!LR*rfDZ8<Hpd4Rxg|P>IDvFoclgo{NRu
zw(*fk5H0vn^ga0;=1^`!JZNm>+@hYPSd-l90Y}wdE`A#m%C)A8XiOx=x!szsW3gHP
z)q|!n*P;bo07JPR&;VLe)c>ouL8e!7h#9d(6{AgkOOx~&gVWvWhe?{f*J+J<-YZZP
z<6oiEV7dO<98$SIG&f$9d6c#Z4g~QR+W{xZ?g;WPC5d@2h4quwxl4h)scBBK+=!Ng
zAHkYOJLhhbR*rnG*^$P?6IG29q9Q&mz=j@}I#YaAI~IBBt4l-2^`q}GBgs~w7wG~D
z)Tszs-fX#4ja#cr>-A8m#Km4!P_S=UQ|4{GWZCk@d?&;W#X;$i9gT(cjepAZ7L7OM
zAv4Dp5n~loBAmhbYuCI)PdwO2uwyrVT*2zE?Nhz2c}q!OL2hpHoB6sO&u97jyP_vo
zv*71$G;L+tW~mz&g-^|P>k<5~^_)!&!Tg(XDm8FzXNiNOM+e;aw=rlC^;bI?e64)m
zSy^R)$&4bup7|i3e$X~zqrrI}Q3f7qc+}{)swE{qNcFD1=8LU`_wX;W&mjqtZcQ&;
z1uU?PklDzHd<)H|O>dTeESACi-CHlT=kaR^8<u&??w<nv5>A1svPwG5OS0^%9;oE$
z%LLD!WB5p&K&MZR6`N-E$Yq87^&`EsvD&!;p(6U`WLJSbuKI*QKpnA_{Mw{j$868U
zWvS6fz8zpzOBTh<Eb=pJ65Uf4;JqSqdn*U$9mv^^F0`}Y@gunJETXJ@RIP}+m94L9
z4Twmz-vgH;fcUNZnkzl9@BZ>#HHo}ZV_NwOYV@0L)&|3Aw#jv-8VSzl5SlImnB^NL
z!+EO#Uw)!Yw`wk3+g4w`I!0jM@?P!(m~BI4X<%*tm$~?Vy27mp2~g`eC<cMfo7+Ah
zz*Y#7e|9r6V?WVVK6{Dm;UHOl$7&oe{*i}9lT5>~RAa=fDp-oCP&nUFFK!*9`U~2|
zscc6v%oc3SepeEabepE#t=B<q^%A{i3#d`-BttsUC$v|UrIfY82joPFKK*!cY)~E$
z6#nSz3X)`2=wotzJ4Yzj<;K?pw74ixjKdwJ=p1a$at80HnjbY85;mNz_Kn_8-#xeA
zzj$QEl1E-#bmX?=N3@`@+IhcqBgKM!^-4)hd_Y^l2?1$}uGii7?yq+~fbJ8kL*NA1
zWa+IQ4+ru1*3pgUn508vX;ti#d}+bJH-=E<xwCv}GQ}UqxxjD8e*JRXo%xzMG-&Kq
z{a&(mXFbhVk|Qlg0@>DUdWlI7Wbg!U@_@z|&3N)FL9J)!PeO*YUYv-ZDZekL6JvcA
zz+-T(G(q_v!)IH`^;>7<wD-hPdi2@eC>=@kq|{noe{o`cNc1U*8{pRKSbXKRnHxZH
zR+yZetm2Rw@2LKd1KK8n4hF{-X``BvnX44Pg+SNh4o;*<6-4y>6{wo!rgHpYnCz+=
zbxu@vO0C!$g-}l~gHogFK*!t$JbD#+*WWvA%%no<P6qDQiBA7~+y?yRsHOchYKg>?
z?7qbD080@WW_BYIh7?fM?yX%9%=WhA$JT$tsxW{p3p)GTLZ_p+hQxo9T}5*i!zlsR
z^Y9&inAg&-xC#ZdpI7-E?0Ic6HraduDuo7*rs?VBb=1j2AvJ(#L1k|A7~-5uxZT12
z@>5rL1vCbE&IhSVLY9ATg6WUp^>MFu>1Pt!eMhDXb&hPWsNZbuCR<?Hs_^c~9nNvK
ze9(+$T{CyC)nCvUFcTA%8Ga+ji($C(V+1^CuGM35&Y%O6?$-X^Jnrrwq9&jcI7_rY
z)uHk*Qg8QI@B5ipi19*f66}t6c7g!F4t^B!MAuN{Y|cV~UFo$MuCyO|R!Z|4oiY|$
zBd{KZS55m%vIp8R=Um~T@l1|IeX%DOAzB)obGYs9WoY8bRa?>0QCZ6ECAwW!v?cIg
z)Pa`qv#P{Jhix~wRb_(De013SS($}u_xsxoPWPZKgqF~s<2pYUz89yh1{m`f_lqb0
zAsr_OjN)(Mc(9pWdOeSjOb))<xv}XCj%teGPuAJ(<|dIp>g2Z<64&vFlc**`!}1m;
zU?63AiagjcIg7$?bmb^*)`zAFA_Ntsr&f!JK_WO`Y4AV*;;=~R+e~93Z0;L-gb$Gf
zIKSzj?{UFiRo+k2qb#3TW5|0O8=80h3%jA7;@&ot6fy|8k7C*Jq|4g{MTJ%^CO+zz
z;{QTU706-}I0XfdFi=+APScHNrqGbs>2YY|?F!^d0&_t~zt~U5h5)S#vfAzeTJ_Sq
zbFTQA4H%U&FeIVID%<u%MPfFNPjKs94eSo}3scyroh<S*-ZkSyBX07N$0Q6Sf#jwF
z&0IAKrm6OZi*CSaN?-inOOdiL&|r*YVP2g&;0!957A5wh99;SDHgas;&Wz;zwS;T3
zL6hf*Id3^(MCQ)hF~qwBc$2$pK`%?9xuwvaOHbcPXR^!#za>tn9D0^e#TU-ETwLO;
zZ9qKpJ~kin&JivxKHG$b{RDvEHVempX%iuvUP8UhOba8DYc6j*xtaO0?&a`X!WXlI
zE)~cUXq60qx8g=Rkz8kaGS)^VZ(iK{57h24(FSxtfXev;%pNDud$_B7X^^lZa_fkY
z2qLxspjrL>KY;VsrH8JH*yg6Ze88i|#D~$`!iQVr(KpnAP!T;>Zr@wr7ye5Jb7*bl
zNH8gaLWY~7*#9%D+_gh%_p`y+)4LnqyL;fAiCr*axF-y~wwhmS`qneg(aBZfjOXYZ
z&%3&P^OI9@c~IIEswX-aL?tlHZ<s4EQds{0m%p$9^jN7kLVbJAkdaOD!mNE9<b4xP
zN&x{<)d{`aP<ens0B6FyLVf^Kop$uq1-uOU(HC6b^e&%Ay#lq=<HvwIt)GRcY|77$
z<;Em%v0X=Nf9p91fzW2fZMczc<CD{`(BALJ#wLCr?)EQa`Vf<l`QzFZ>kH~m$?KiQ
zLs}IrMB^oN{TUtbmvY(f)1gHUnsE=2AABeh=<!lD_9Mbsy-??SBobRdiYB!Gk<t<$
zhLhb@-zLs3F3A?8dQB|xvuZBgvAKQQw30-t0-J!k&|AqS?xjJ+jNL|wj7x3$RTmJ2
z^?d>AGRvd{b>T&=cOmRP?*LTnyvvS#1XQ)3irm{(x;<HRx1)@`#`p#cbA-8C{h7mr
zj`GRy_<I-P1oEd#!*fgj<mK=i&d*GP$~nk}-9(cn*Rbh7X2#!{$CLG6+7<Y$4B9Q8
zyh_qE+q;$Kkk6tZ#(5>*KKI&HEa;dO;7V7N?~K4>icm_h+iHs3efY7xGK-38<pTbx
z{w9@Pr;!c=Y&tmlo@puEa#k`({4D-|d?G*_!`<$ms_@3x+;ZphfqhAeGzY_k%R1G5
zCdf@%2;CW`8E0S;tV4*wYn9Co)HDo8acUuJuWm*Me2U>C_TLb^xijn|iJA!6idO)s
z$`+-spbTY;`-BlwX0;4URdIzG+jC}lYxqYiv-P&GT!RgSjKna3r-%X(gQNQ}8`x_g
zJof@c_$F}hmjugGyH5aG$92M)yOp?Om~xOe&A8tVHs`icDz0;w5hlR;V1CFjl-v$G
zf_7K?ll#oIAVtf+LneMf_H&Y)O>o{q1SIR9#tK&!sZW4xGW>;ZgvwPC(rHrgpO$N=
zSkdW>uZNGM<9PbDao&g{V4>488v-q`$M`pQCbhr*sQb)x{Jdx>Rgs?1G=@y78MxS6
zi?sxGvFa1xe367p4Ky;A{mMI7j8Ts3AF04H^%~~79-gp>&(M48TjRc*pF?Uf?_>J0
zsa{YdC7{QQ|3DCkh93ntpS*R1MnF_iISw1D>?U0^Cz>brgAhjaaFnv0;%f)+-7;Wm
z`=rk9xIMeKI<fhil`Mm-XqMOf!Myr`q)4Ri<wmt7H<0!F=W3cmZfZKu4~iUv%kF6w
z%yx)QrxNQgv}K|s(ClW_R9$BQX9;_N15$COWA6mqXnNksehUZ|P~OwuL*B4W)*W4!
zt@~+fZ)=`)<B9jfibICCAGQ5^m1Vn{=+Zpn8D9@PxX3HxS2-Rp5*p7{9Mds#%1PJm
z8Sy|eT4U|+?7gu|mr4##m*g)kvu8zE_$>~apXz*EVz5RlwvH+2(m2kJKc{9X-6-pk
zY5ypDdw79!C&x1tXC2H72GlQAe$my&E8><EQcaZ<=Pe`DN1PNrXu6qGYh8zio^T-W
zuAJe^)V}FBP=TVP>rL%eN7xUL8Lq?SkIzD5C5OZH3t*l88{_X{uSLH8p!wxO<{r(y
zDp`)q0$3ys{nC5o!1SGeHqKnobiG@}b;jyV#qwp4p}%~EvwYR!K%G6`TCqWCM<T|!
z5T<lb2XaI$Z7Vr7)2QX#?_&ee0i>zHOLR&cJB#e0jt?^R#Z+myES(vzx%|%c60c7M
z!2`GKA`Uhj4(XC!aGMyv`2omH;(XHO;&V}cYgW5b7}ui3ZPUudxz;{w!{d#`_t2kD
z7&v9gn*EkguVGg^mXWiWmg(wI96!-lo|nyrzs^m2cYTOH-uB+fBJ!t$^~$Fo+6MUr
z+WE5I$Fvl)$IJ@zGaP68pI;cYkoyPvaOw$5@(rh{7iKJ&6jf;pNj5JQ{V%%SI;zS4
z?;l4&KtWIh34tLg-3^<lw19Lo>5>NNO+=+ZT5?JwF&fFyDIhH!qkH6lvG?zK-{0To
zeD8Cg`}@bv4*Ub#uD!13^N|swiZt8Iz<K|_OEV33E^v<sS3Ylcpk^TIF18F$5LD*s
zWn!<tQ>)TY@c|ZmMr{<1qLt<*1Y)~+lLr%<gLf#}^5Loe{59JdyRFT>{B3z*XOH;#
z=3DFdcH~re8f%OW2WdEgJ|O|60u<p<ZcDEuMdx&pnreG>)4Om4vp{oObLGrJN^Zc%
zV+){FP_mR+vD}*e+vQW%6qJm+@=|ftpMM$i)ZfrU%Hf~d>hY9WDY*Zl$xmpnJ5PQl
zY4?FWm%gDEL0MwMAC4zYBBdWR%)DKACO;&!fLOVmlP5r;!+rczx=JZ41GWB|i!0so
z8%3Jf)VJ(R{i3vn9B7$v0b!x|qrs73Cke!L?#WMW(PYy{a`}x%Je=Psn}(EzO%EB=
zY9G<-y)*D!aO9~gDKl|+-{WGpLuM3)QD=D$OWQS*i~!KmM=JB_tY%K0(I4LUd#u50
z=J6~b%w_CT^t-T6=%Ea2OJKR@^-Vne*u2y)x>dz|e?%(BVBpB>XH&id(zr<x=ybhT
z;?y{q9rn&{%C&6Y3e?RPa_h>z%nuR{5D)MN&WfoTe%^nz91I1No9f`tc_UF1uHx#U
z5`}WpBH2jZRBLRuGq8?Fz>@qes7{aN*h|ze3ohAq5}FaYYXCE_;ODzFo@Q01E8VS>
z1WzH$wuos`3@C>mUqGo~qgTvdjm7kSO{~+?E3Ee-E;0~Zgm|#;Dl)DSAd;$5MDL5-
z)*5u{{Q8^`Wg(<Rw|m<AP!TTgp&T=B-mN-b5_K+y_?vrOyS0{)VWH{b4{AA@4h`b%
z<k!ppd|pdCS8|BbOMjy^qzDndkQA}d99!r+?!#3#J+`|_e4P`COV{yq)$x6Iqa>`Y
z%JE%WQLkm1{~cs{ARpD(j2LJ-{$0qFWoY#7&RqaW{LQ+CU(3!8WHkO_|7pNc20J@i
zcxOsOF?g?DBSA5OOs!qdhkg0pCbHS-__i1phdR$lyflzsIGrAIZHmvlK1VKLb+@Jw
zcJsB>k?|el`<RLAoJ)P&rq7f-W%sYd-sTMc7QXV#%14JsFAts5<_08!*}c=lqWSmR
zANyzB{L;ow0rrXr0c8L0FJkx|Mi{Ji)Q@&qG6K&crj_S7W;&}TNmlaRGR)2DF1;mw
zOMIW1=q6UooIYrY$!xIn_aMHQOyM3Q`OSDkxO96T9i9Q7wF0_>wf^Mq^rkb=qy%rF
zU$cH}pdFVOt$PB=`c={mh>;tLr@M}I;a173t*!@JV>-@#m(!hix``muB7WqTy>hp5
zx{x|*{^$#?@63RfO7J$U`=UNa8vk_Xu1y%x?Ch$lN9;=#m_7^oyE~9Y!t~^2_0dLl
zO8Wh>maH21`)Agqxf9F27bW^v9R(?yYiC{ziDL|G*+<Y06eSKE4e#L2pEDh}MeHd#
zJkz5PU-egt(Vlto&#0n<yR@`y9iSq*{hx99KhF>rxJyRhE_AlHcX0LoqW|Q&_b_Bq
zf<XXr`n%V{6#5e0{zmS;$BN+Wl2J<oxeM;4iwoSfTT_VD$jJW3(-pV2$jc3s%fJrL
zFBuW&AgB+QC=yy2RYHp!`8V*bY4|^Cyni)Bg!>3AD@WuG>gdgai1#2F=)RV~b2(78
z;=_@`C03NvE~Xw&`Y=}w?mF_2thCw@_NZCsZyS-haTf`57hWGo;vQz^>Q|T2Hs|h!
zxt&4sn|VTG)w#j7?7*af`~v{a{|}!9ZoMXX=E}ryloI(B?nXH~AphbC@-|MQk?NJ^
zki{|WO1RwIQzoV7iwge`*k5;KT$|$;2z6u8oe)AKooU6Kf9Z?$*V&bA$}HZ$vn~l^
zwffI+gRAwCr@Okv2{zS@zWO};>AR%h`dOXXS>DCXLogH8M@ILHb9%>+GpYRHn9jay
z{Nhz>JCHSgqJ3SV)B8l7wN%ac+cJTO>M3>dsoNgqwZsq_MGItr)3gzX<*I4?RHtXX
zTJ~mRyJHOzhAh63_c(N)rnLH%D-86d(zZyJY}1`39;-v;Q_7f?0<$c!a6(<C@Bl@J
z#{SdD6Ssqft+*nHZQN)3<%{<FPt=+<QjdGw1U*T?-UwkGN}ngX_*;qrOf>k9wrRnP
zg?E+jgc*F6Dnh<0ySO-kM0;Ro8)7*Nc&eK>xwv)~Isg<G{q<k8owpm}2>Oduz%DV)
zriMkEfFelw&%gcgmyGFsRJ4{Y-0Ufu@PxpycGN=CCw|#4jEs-jQdKi!>k15HdZs&O
z#~Nl>9h<%KFye`QWNrxMtaI+)f<bYwv4*RtkbwNDM6|Pd22l%?uP>%?_L8RacDa;g
zvO#-0qYYuV$Jv$m^*gMN7A<TD%pN<WUr-q<dp2Slspu;Y+58rOTSu-U9!W_s(xfZj
zeUVqFro9oBvhAfRwcxNfd1%|(d!}^@^XgZLU<zvuwUd%rRo%#|iL%C){iZoU>c)Fg
zy&qQr@DbmZD8NIx*wAj@%9VpWbBW2ww|i!CdCbd|;T}EK@`1so7H-$9Slb}(Ye}ry
z!h@&3Drw6?vwEy-`yt98&8(6!-s|hmy=-X^#d5`{$uYM&PWJ*aztHj!=hl;!S^e&i
zCfrTao@N{56TA)G@N?Vea$dvkEIyY?A5sP9$;Pcmp3I(U&3>GA!N);xr?G%a<uIWT
zK)#T?$VCzVljUucV1Er<&VyV&+uGQF=};9|#w_B|%r2Oh=K=K?%>r@cEi)hs2OG&L
z!gqv=AMQ#}-=`;6mG;P7TVu#wfy9i>Sj63j{dq6!*ce*S?j!Q7y7dkC<k4|7y6951
z?=rx!WXdpFjwT;_q8tdKG4A#y^lS<nqQ@Ug>VsDc{*@;8|0_)n?p2g~8KrSLQ#PFo
z5djPMophS~L(m;Z<EM<y3Y#%hoN{}&S33s?0@doi@P<8ng|@WUXiAlm9!O}aX3?5E
zA3HR~(1xC^B_E!mS4x<q7C+r$Pow$!Re_~6GH6m-+VEUF?%FO$s{V*;pKTRRIn&R7
zF=Jo*IM?yo@)5hIM~{FI$4os>+dk>-r!+f&6c8`9V+g9a=v?sav{!9`-&?5vbS5Q1
z7G?TNvF@!11tTG&M<!FAs1S=ynfKc&a;<w)ovXi07e3XdQ01lPAM!R>uG{Z1B4$)a
z-IPfsHk`lNP57mJHjz`ioE?)I@X-9Z{$9IFX?OOxyzJ*sNsr7iZJ>0zg?V=cABr*W
zLIFXL{KmNka7yzAu4oeHz*LuVX%g!PT_oU0x#NQ|b1)$7SWURQ?8O-0mv13Ka>s~S
zf#`t(a#BQqv%|i5B9~sLOF<w&oR(h&YKMNGX;G{fnk;R6JT!wywhxkw?5B2C?I~b(
zIo2<@EFNBXROO^z%9$&5Q&f^80~m+GFOPt8mLR94^IYI2@=S2Hvy0Zt++01+&tC1R
z>*18pAzv8L%LnYYv1{2cKHeo^0X2d_@OBB<Lo6}%Nsv5ei+e6{u*rI%-R_k590=BL
zL2TRQ_qxtC3CUd^RahclO8hdSw_PtL2?&B71tp!;9xb)^u5xwBlRZ~2Dm68mV5y*`
zDz^DB$ok!i<c*{~j`xhP2Xdr=`ot{s4Zt->I~~1%Ud}~z0`Qz9HR$6sDSSDJnc=9n
z3rV&MXfuKoe*W#w*?D8JFuTMvfAhmC-log-$35Y>xNkz6@)ds8-B_Lq=f&vKeV^%d
zc;8--U}PctX#z@leADfB<q8tZ4()UieW?<pTI83Z+sf1G($}Cu>rMzZa|&}xoQaCh
zuI<B_0*spMwupn4VB6Beoi{rU>SV!=V@8~y1u8{`8n_Pf#(6)0ch$0s{<HMSwcXA8
z>$tLP)sRg8;Dk%J3q%Z@8E&@(oI#6EL$$E^g=Lk^FFDG7i$s(i=rJ<I%7#t?NCD&9
zkA5;Yg;>;)eJ6scnI0U95!KRo(V^QS&CSsi(iQ(}-N+sZ`}9Bsk_H9v&OJ|vP28hx
z?t)#)l5Go?M=d{6C<Ppd6h#sqyl?0#J|VY9BzZu6<2~3#dEb<l86E|D?-#=3F*7|c
z<?ih2Od(cLzpsGX=VjU4JbuJvJ*(Lxe=@s?O-)~En`Sx8TKI6_*A;)T6PJ89%n{zs
zi5HUm2!_mK?XI5JfrV;?i?|5P+89)Z13a@m(G9j&ps{ppIafzB*iySC|4<Ntdx9|U
z#3q{(615J;0oc&(uAH|4Yq}}gGnIfo12Pa8v{zM?J(La=Z*eN4ZcsZft=*_EV|EUh
zblIH1d%JQKL$U<4uXRS!7i(gk?wEpN3;^R*O9_{wlD~98TG+ev$oP4s&v{Svl~lP1
z?(row(aO5^B@GAOv;11GYjzk8=E1@{Mcz&N$ihWgsG~%xZe9?^S-?Af!Kv^Q50#AN
zI0|bDzj8FXlmHn^-2EV6D5o6x71nJ*y#jMUR~~Ibtte+TZ!#_4Be11S2%oQ5+iphn
zK|W^UAV6T~skAVjT#o95vrRe0)~>q9c2>~)49+?04MY4{h_ZhX7E7@#8;)c@#ZIn5
z9`;yg-<0LB-Qf>QMwC!y91Zg(`#PGZzcBu`mf)#+CXR(Z4CWE4$ta9p1~}Fh{bMBV
zwxkPu4g)hsIro91f2Bug$@BuG^!tPjIF_*#5W&{lG+$q*hNB#VS=(*2C{5>gs6vjV
zne|?6Hb5j_Bbv5k*F2cnZ+_I?*shuLqsQNR9ha)nx19<o?+}zzMB<)=<6O`b>>Dw!
zo%2b~9bx+c=7IYG{fK&ZKsy-3)j3~P7^-jAItdNFnz@d+QhWs6F=Qy~&(O<uPRl46
zSpSRXO8*kcd;EXwP%iZ)ZU#$fS3yr1wBLNJGGVV%0j4XlZP$!)ChcIOW!-!=pN-=2
zD9-@)<T;q?y?hnzs4P_unx+7bo!(Jn$yUL-`qeq_q5U@8kc|bl>R^|Z^{MX7Fe(d%
z>I4&E+#ViJq=0q+aY4yJ6a3>aEHzTA;WF%jUw5Z5%?bm4Hs@Nb1-QyW94Q+$%6oM^
zd}(rA8{^qP`;2x~Zb6RZX2xu4!CC{nnZHxLZk_bAWm1&>c5Xc~+#6gSp)b|Rpc&`q
zpnqghpW0g!gp#lN4gyLsSKzr4%BfNl!@TN`AnVYku;qCKQU;}87S}12xP2#mb^ME|
zrj*|W4EGNK%|MGZv^-U6@bJL`6vO_R_PV&OU<<~K!pLK5kM^^M&f4jZm!q4kCqn)N
zPLEmK2b`|*xv%0@x65mzV)A6%gcK6TZ-Im2E-Hd~C&gSSMmp02OdKTY&fkyT{QVKl
z8mAkww21nC3<I^<h&_HH_~V}5^$Q-}O_!yNCJ6DcR3UAqfe*&V)*yIe?ix8h_;M3O
z1XQ{&mSP6cfhED5_m{+k<K@-}@gsere-9v<lY1~y0J!H}*1vCA&>QGU%DQ@bca6yN
z-kOVU_RgL6HHK9=xs@li9{NrPr@&-hwE3SE;D37t{~O5Fjg{#+St0o%ekqqXu_x|U
z(W6PIN?s{jaVF*P=Gx|JdJ}N)!rN_vp7<08i66mQT-bsWRVTP=vW5Kk<(&G~0)w-{
za1<!8^I7a8-@zYcXK?=aH!%B+X0@`|l$6n+)1}kPI{>(=;`e{?bQ-VDN<~QcJE~Le
z&j)PAgx)+x_TayH3(37SzsDfT@a4rD8X#7TY_+Gw)r+Y$>wZ-xVb_Hq%O-x|#`(+@
z#gAJ4(8~1(`~b&|Em=i`o0Ju#I>X_Xw)Dw(|JM&S*R2&&yzmK9Z#07MVoCs5-oMCy
z3PQLC?tC}^fL2fz|F~xN<B~n-dxdocBpSj1n1MM~-W@(X1DG5h?ib+Gh3~BcbEdnT
zscAQ<%hSYm4S2;cKQ+M=O^yC-8voW7@4CptzANk1rC!SWK~$r#IrPm7+4o-`-KnxX
zaP^oTjFXmE(ju_2iNpq;$KO<*9ma{sf5~Pam6M`;EUczJkIvOZKWSLwT43~H^YMp9
z$Y~V$bs8hIF&gVbH)n&76CCH8DzCB^7w%{yR|TXnZlbH*e4O-wjG~L&*JIG7RJVf?
zfw&B>tBX%K$u{@hSWeWTb9q@{<v#@X4sSM*&i}sU35eX!)P}YAK}6+n31;e8UbibJ
zn70EX#V`GW1J%C%wy#I3J@LD|<b&sDRj=cobQ%46TXp5?!!c*mSa+`K^X|mUZ0>G?
zWqwqTh7uVjIANiFh8k|`He>qc=6kckd6xQc)zMJW2lIuHFW=6Nb4Cp0M-)l8G6}CJ
z0i8JIjGy7F+VLP+7n-b?B&%nZ5${W#WN*DbLVX5U@;iR8akZneJOh*9Br7S)Xv;Tn
z9ZglP1XZqd6m%|=u+2!5r2Y8i5>nP{1a)CO%UfvJw3T|CN{9Kv6(wwTeBsc$9Ofpu
zsC6{s|9=F!z~1%d*8l&w<6^HGYB46T=fkn&5=hq0^ACYPy;}U`BWMbOs`*af5vn&G
z8Fq%jC+E+L@wL5GYte5zwdLuY`3(DMIE<=??{x}x-H`f~07-w-V&N){A+(#V^=61v
zLVi}Skcp4VZ*oviFd3mv7y(0aI|{kUBJ&xyXR=-#3niQ_3?|jU3nas4XDh~~&_3eT
zDYNyhO2C@#WzE7lPnA2xv=!m>hzYJO-@V;yO+UddJo%qf!F|8e1!t{_`kI81Hz2$C
zENi`v$Cprcn>*~w(IFPy7EhL;p^H>EMG(7@yJmhJ?F@U4+9u^cTp!E+%6_;mG2Py`
z@Fb}8-nHSKG8`v%<XZUP7gic+hDCLb+T47xIa!bJmpW!ljD@d9*<`DQK<A|d*oThI
zh?!dj@18}AeljUm2pkaLE_c%L4RBh&D?T-7eJM#3zur(TQp*Tt`)jD1;9hc6m)%@T
zRW)t<tZ?kmYX;?*K`Gl&N6TW<?o)yJBZAn;Y1XN(g<zl2AY+5LJ#vFyAK|VHauNg&
z_l^1=_;`H9)G<lT%T$hRjZk8zRWUk*E7#Y~iP3uoopC=w-(ajL(`CU2A{6YMKk4{v
znAA}ot;WEzMpcvDXwHe8z*%>JNrsR4OuMWq67^;+uFU?7Yeh;%jzSI6ih;FzJ8a|M
zD}ba6ab)N=b2W=r7>BWKISmH7@da2E<UX;92&K!HJJSEsIsJ6HVD@tJ+!SMujP{08
zVQIU^?$1ufbT4MVew)8O;bbQ?Ecx_n>+f7za;68w^mI2O*o&iV%2+6g+{+b>7mA;p
zC1R*`st%0oh~#u0&htga#A1<sG#b{+Zrz`*ZJ@T(K^#9zl3ZVT%?*%V#q>!Z@Sc)?
zd^1!*)u9K&iAMq4)P+HS<M>Oom%rYC$7r`MmMbq+c|p9oaoVCWsk~WiTEc2qnfV=*
zwSwenp1n62{f_EiI<`BB&i0P_0s)V)xs-AIE7jO;GK+8gvt%w7H2)B2)o$s+e?MNi
zMFL##lEYn6@><c0AjthzZYQ#p<l~wcZR?4eKcinF1D|}Te{j1j+5LOYS5Lwog>}E)
zFRPv1&vbPJ-HQ4S@Lob-uO`u;F_j$xqL7oU`D%KK$D&(y^cv*D)Vo&8Yv?kW$_uCQ
zM#E~8U1zmf)xhdcxn6P<&T`Z{y6yc6gQ{bxElT;#`97P>E><^Nnj*l;{SYV9ICcu?
zs3=kAy<t<jR8DW6-pwZFh{pY?Ga{Fw@8dCgpe1Yiv{s8?$Bi?~-I}s-AwT@!%evCo
zs7(A9BI*U|P`M8~GWvsG#)7=O*i*J0SN1g>CBOZSZVc*5km!*}yfwVk+5<K_h~_Dt
zyC2)LOl35(q#daLagrb9e`mURyWDhPfI6q#bG2%ZQyTTtBF3kMzU9e31chGkD@Ap+
zDeWv{SplaS9PNrJg2}H}byO@E^=`_|vNH6&$A;Ppt<-)Bjh5~>*XUvPoVG?n@5kqF
z+d~LScyGVtCVJ5JRe^|#RD2pgWEiG(lY=9adV@C%uQ{I$T-|`2q+b+Th<@MIPH>vm
z_?M!&(LULw<nftg7<F76l4qFKn^uC=;>fYwIrx#K`|sq0myOa~_<FW&z0w3Wq?ULQ
zhr&!Uw>}6F&`ggi!kBdON>qnk_jhKlPM<P(yosz*P^8wQ2Eac8niDP@jqI{DP8&;<
z%`!&Kq}(nde?XymZFtN64T(n;w@9GUV(saI;bcuyu&;VU)#R$`z96duXGH5>cv}hg
zIZM2`AEQ}?RJh?64E$(!qei;Y&jI~xIJ(Xm|Kz}a6%=-My>xH4E&vfGU|T&QcA2m`
z>o?3y|KN+ke+AgK<A)H8Wq9eJ+oN*U!E44(O6(;E%!uk!IBk*=v<4cKED<X#>9}v$
z$)nc8WmI=?V8`rVH;7V`17SsP252$%b%2bB(77el7T(Smq>+y^!?K}4Uzp`9BhPzi
zB{>+$HHgs8f<-odoy0q2A4z$5PX%~7b$i~fM8?fHF2MOv;IJ<D1^<;w-A?tN0dRl#
z>uX3SjAsmIp9e7Up=Z4J4SFo7HilQYBJi=FLN6z;aI+Jrx5yayZM?`r{oY1n(z&o2
zL$5*I#P`~h9J$YF7K;7!JOhIt71~%LZyp?RPf>oXiNhMP{&7ZGM_nuL<$Z&VR`_RI
zz{B3}0qIG0M)#z|>Hm}$(!XB_D08`S2!;_pF{EB+m@s3wM&#mKG)#C}^GjwO{@nG6
z0xF35+x+v5rU97`Z{idlf^}Bzi`;SE)cu$;`uKEB)Kbf$w>C)0r@KLrg7ZYqMMkTp
zkcMmf$q6LEetHeZpXotZXBt{;I4tBMXlydLF+3KD8FD+%%Q|jl9LtF#32XS;y3(s>
zW{tuyplg?+v)QI`-3=23AGxKD_c0F$$9YFKoH6qk+uR+-=ID>--y-0ia+Q}2$Efru
zh)F*z7N?tLVl-kO7I%?9B$yB;aw*NW73|#A2oJNSL|V+5@CGkQJkM|aaCUfU__Q29
z3T7V5b=%wgHNioy$ETePE#K&6@~7Y49`a*an6JMO6hE!f5DODH7PhnAQC*wx!Pdb~
zb4DubzvzE1d?CLckO-9=1hrBE1G;4&KE|J!VjNI5@(xzjO5F^mL%pXj12kIzUC`Ah
zMKNAebBOfi$YEx{+pi=W1;5JRJ+t+Z1Pu5eKAxdidc4;4ng=4tz?=q~`oIG?f;=>)
z9OH_01ouu%Kv*IdN?g)SBI~j%Xr)B1{*Gp5>g;+mkh>yn0SyO~8OJ1(pQ-gEO`;#a
z%_!Cu{+kWw*=b*{{I60~HGR(WXx&6ugn1^m)SUdjr0FB=7i7Mi2~bHJ4Mp3+D1O_f
z*k|=$H=p#ifwC-G>H>@E+C|$If8N6T9BeSMrP&xo%QNmIcJln((P!gl3zR_9a68VW
zX1MT;%LP$BdMlbvv$HP#`04m)k$6e%n5KrZ#YNrKg7Ar%vTMUHZ-t#+nhgKdYtx``
z$=6GJ(UiZZ3<_5={?1qMdqSEiOpnEpE0iIvJebzrQSZR~diPQR0h0KKx;V9w7&K}y
z5jqY;Own}E1E#_Cv>Puwv_=ufy41o1RFY&34)*Ac&Qwp-OQC-YE*@3O@p2m$RC8M^
zX#O2FLT7Gf^PGwX9LjYfo+kf0RjYA*=I=?0G&1-oZ144VzHIp^y37Y}E6fPI+E2{9
z4kRec0+6Bj`+nAK<yC%O%C)iHE`xF(hD0*0c@7g$ifzd4Y|wIa;^)=e<mDCh{E6Vr
zT`qq}1|+NcK<4+!rDR;UCNdFrI3S}K6n|VF+Dd{g?0pJ)IVPak9AadqDfcZh@gf=V
zr1!c(eAj~PCUU>*TpfL*alk0VNhVw;Lh{D^i;A50y?`ss!Gz%?k0Xf95M)p=%SO@x
zA(w+JD?+iaPBUaxsUL0<EeVqpiAb2my_-6t*3Z;QOl-0QriuSfZv0Dd{BI9JK!1tl
zqwVVRcKehKIx4-7Pb=F2xqP)|QMmn8;gn-N4s^hPJ4F70V)?>CT*_PN{j|DzdHpSa
zN~E=~pwqb$5kKnCH)t#@*l?GT^Ly2n=b<kgfUuI|hJGdYqw|MqlUw-l|LW~qzaHI=
z<b}5{ku@u#&p}7wj<ofa35G*@bEGUbp=_47&N+VplqW6E4?aza8prY2<IE(h7E?#T
z=H0esVdE@^l_T$bo!nXBs3b&=YTa9H3(WG!U#NbocWQ5XZP_PdC862kYy(hFek=_g
zIe`);Az1nxeg{7SraWR}PX|W<shRIAJ$y1E2n(N1!(K@E58T;g4h0Q0yPQ@p!)P{g
zKNR!1*ya}tdO&2VYD`WF_bIn5D6RxBsZ__4y&Y0AT4jd*wd<Oaj+Yv3m)5|hu(!K9
z5?&p*w;*Er#3}?o8B|8Z{y>FCZT%!}g@m5I$9WFSjPEj$h+`DRDKsMt8VS<)SHK$e
z+#egR_rI`*-4P8rj`cdAHpC#AgL$#jQI|!wXOHiNj;F+77*2lP>|S=xT!E(MAD&t-
z;~%XfV+t_IlL&EBPp);<z`A3E><Q1-6$c{Bdu#8MOx>B+sJe=d?O{)Yw3I&GS^}(~
z3>etO43WAtI{GE51D?Jvb^lcmRX>malB<->{4igQM_8?&!zR@x<zG2w-ya@lvb$%d
zePuO$b`8JMJ}OhIn6Fv?;fYl}M`~f|0YQ2bIzl~z7rqxmUjal{X(zMmF4FYaUPTLD
zzi1uB-oNI>LYJ9x`oFiiouz10?fCML55@OId&dNaN$r@R-YhTO^MdhtAf>JuWv@L=
zYH#|s)_3!Qu0XfCnOSW!W%AXi&TFS<qGtXf2rw1YfX1(aszJC%n6cISUg-H}H1GS}
z@`x7_+liNBh5sRV-Q$H*{)s3n+eg>btCT$iBxt1Jku=)R+XyH=P)uC$UTuE<rOdyK
z6P&ENdXMVnD$Tp@$Ez9(gyY|>QgHNoNU+VJ?E_tC@~oOuH!gyWfV1;-^W(q6$p3r<
z4PG1JJO23nUBD(TuW`GT$<mkC9KGlc(i9opyag&|%YF3s4BLwD2o^Xtb<w0RD}QR0
zpvXh*_t%HK@M~}ktky6C=X;YXCx|)dP{Q1)sG7R%5~^4CruqjH;d!!O&(xM^O|dkx
zz4^tFQ>?%A20efAbilxq7jBJsV=&6L1<Kn5M;<{VX*|F6-br8dPKh)f;H}0xAY1s6
zITkMgncD=m<?)Y|7WMXk#5)ZY?@?9#fS2RZS9Wdu?pOH!k5goE$ug1-$A{V6OIww_
z4^)b4MH$qLat?i|chJ-p8eq*RCLMU6>?1|uBz1VWbRWL_gP8~u{E2M!@*qgIs49fd
zSE#xjMJUVh9A>t8rz2a&&^|24QklKIHw^o+u%b(Q3_L!+5vBocDkwIteO6R6*Jw8H
z(kbp`_PRPVoq0CK9Lm+==rbBPdvK7bxm{x1I*oSQTLAbThIhx<64g&T5f6{uzTZr?
zu>}3df?ZupJRhNxo9@hJyUFq)qjnx4TvROT_$20xvgsuX9s}!!CQf*xIZ-d0h0!gP
zz3&UOh<6$6N|@(UY8q7Q;)3(g7~~Kr21<-`yOs@lJq>Ly3uI189jg)9J}`dFy8cz2
zAtYL0e*Ha=f}+~xXWz-p!aYKxd=X==pEs83GKq#<`>RbsWDp<^S|YXd4ucDt@qB$r
z=G~gy=@qB76clxk@p4qVOQ63u19|aBj3j2f#4A>+eSh0LCFEMW$8v5pOGJT%aI@y^
z%K%)tk}Pl^KfOSTkHejAG>`w71SySqk32A0-t@erO6;Lnz89O76znv8e)u8C0=}vp
z3}4AedHb06Z}ilDKVF>A@aJ(aA;~lw+QW=+Rrz+k?iW$}$Q;!Af>J}mUatPY<9efE
zkzm%qQ>o4menUJ#s>yuf_CUoXci|xHKySkSIEjO1O1at3cXcETr%5n=_KfW-AcB-0
zNdl6C-`O7xd8@4|l7FJH{Hn|nd6{yt5?gW~IV%?8Qfl%j#G_!i{+Z@`O#Q}guhyDV
zY#3VJu+xS9Pc2!KPk4%+F=gtA*^f(2cbVA9&X?OyvW^yPLsx%ibrkxY=(-KPa4ZI@
zjLZ$b)Z*hBBp;0VwbBhG{g^eYr8@Qu)#QKg;g&mbRnU41fc5HN!;cpkrb@wVSjShY
zWy6iI;s;x<MDMJ>QJyDA80;tMPR0b2e20xOoTsjHmew^!tH7+btCT^9f&_8qg-Ge5
zRv~P4C#VM5F}HL#NTYs2tjOnntRVmU-!2?V<Ou>w9`PTAREeTb;;*^x1hfB=d>B=z
zQ^2YBWb|jHvoheRc5rO1QeK-kKMh#j12cGUub|Ccv!uCx&Z&A14yshI>^o~TV%x8&
zw;RWdFH=$E;u2%7KotC#;{~KS%75pT*W>3hx1L=fA0R7G_JRaEOVx+nOwR3Kfe4^4
zb!3&?Id6yde9PL%w`Wy)AhY?7WMefb$wmTQs}WR*;cA<PkmKN`SRl_d0j=DeFqx<Q
z`5W&8Cisly7v?U*GcIxu3+*ndVsa$CS`ApL(JT69#tmk$H-Yrtch0>;g=4KH`p}yG
zDOCCNK_dE3pY7Bw@D|iL2a43Cts0juU(5fceO%hA%YDzBZC;(vbzsh`p^uJ-F05x=
zN09QK!m2K+o$B&Jz+G&^fR^OJA)w+;<rW}b{Yy$=T!P3~7XIbXZ=m~Cp`Exj-N+_y
zQ}ww2W@OUm%m8K|!swhy!?T;t>$=+yj_O|o8&1qtVS+ahR$_<;Y!;3lvc%Qj2+r3M
zoBH41`Ti~w*GxrxW|<5wyS}#_pue;qvAkVo7rl+KP#5VA*hze4aQjVSM+21*S@j0k
z*eYBKDa<TfhhbL|3>Q-Ok`PKyqN8>EeCt(OTb2^Z;?ovJ3f`8RRPSOyg#H>!VmNLW
zrZa}HsH?qpsb&i2h_?beNIAr^Z!Ozr+89Q}bv)B9-NNtW<?_xMsn(p;MWtO_*zK2%
z$wxotAI|464m9y_8rqg(pEif`>|K6G=4UiqttjeWKQcz*)FO+9g9;&BT$#SQSAlx*
z7NrS9U&VAJUwM37((FCrV%j|>UFDl_DmTPJl&8m)*I=nUw3>QLA~?gJH$E0N69%v!
zs|$i^aj?Q-qqdg%;SxlFmi&FI9Rx$OXg=lQPNiCx4Ji!}MZ81G6o@dvoKW!h9cSG5
zf|L;0pDzRtT$t+!KUbHI-;59%6k;8Rp^?h^ivBGZ`_efIPfd9>l-zG!*71Dw=n<LO
zZAYJ!-q$_PYySFc)Vsu?;gZ3hK~uhCe-ndVY$XxKEPWK2EBwaY`sdR+w&)noHNTF(
z>yct<S0O;wyiSUzFk1@QJM}?k`9R|=I47xj7W03`=8z_<;xx5(%{B^Gv$RZ%X$>cU
zawG$aUH0DtUEa%?9&=+TsUcW0y#VvBK%4$g!1RsQfHciQTG&n*kz0cq)78I)YqpUG
z#d^+%YtV>s*z{5r7hn+r8HMR#?3(3dSD&#M!6seUUG1Z7kQsN^F^OM@7~k4_xZ{bc
zvhNrn#P{avHI3KC=ys<smei3nnS!d6sR=f%UJd)R+a&Z#y}7--kxz3(-#a!ERr$+4
zv`x2ADe)k4t;x7QztGhb<ePLu>t{-0ZiSiTF(}^o0g|V3`-gy2UfuYn2)V&0#w0Pd
z;Q8EfCjVW)vJ1h*0l0S18Nm>wT0XF$q3NvWNg6wS@l-wpB)OsCIbn#F9*HzFMwnZR
z@|4L>*!F{P#Ed0J5VHX}v6XkROL~LF1&sr8K)O&{;cwt3a6$VY1=}XyL3dhf-Z(HF
z-_!B>SHX52-mS=<5~4^w!(n77DpGXDMWeSpT$99IFgL?*O1-0$WVW%FYFM$u^SL)-
zuk0kP9`TBP1U*(=9t7-(;IA+N_;=)}2;{C*Q#jwzj5ivzQPh%tW`?VWuDr&+Tk0C7
z?{}@F+me1Cx(Ul(-@AF|{onpn-#j+wd=qbSX+25|v8Yut&bIsgdpOu=0|f=X(rH<N
z>Jq&#()URlQ0lF(5QV~*UtCVZ2DcWaT`7D({<U{51nmAHAS`dd$S2nWJ`c(qa(oM~
z%x=$~>f<*bWND>u?kKE`gw1Y79R-O3$6%4ETN*XFHOT^|2F3~0{4G`1M~M5xYEjMq
z-F{g4^lLprBye8~xJxbhTywrW^;zM|FJ9%=dMJ41&o79l^F$^!#0`j!FiS--XFBSC
zCO!Y;iof)Nq*t!jo7}7~2F=V9&cL*<^c`3dj#pUglv9k2cyd1eUOOeAC}{nDRKMrH
z62nN0p-ye=`gSX{&oWMG?elAZ$p<7Yak*IE_C~EwIW|W<8MbdhhyD+K(q4TPfiv7>
z@b<aW^9x#k_;K&eK4}p$T^00oBS<T9h$qm@+;}q8zaKSJ7o?0{3IRO;(nm~>l9R5~
zw{`duo}BVhS3bTqx(WeH>9*)qeR$48ALE_P*qZoev~2MVov_SeoG$jPblLMof(p?w
zv8L$6^xyiaH#_fjO`RT={xM@yWPc}8UMi$;&XNwqM>A7rtb}#ND8FnLummv2zwok`
z0}>w7j_tklMDLNku&-vc%p5Dl)($;qNA*QnweI7y%436wDX{&{;vGzLRrKXsXR%@{
zeiFQ3#f|G_F#9!Yx09b&Pdf5bR$Q3i0sl*;-;tit!D@`fa=tXFig1J<-+L~BGDan;
z(&7nIHU1v;(GoGZyVa3VP)AacRdJ7dV2`RQMDHMNH=FLJaOt-Z4*K@9aYOn{`0QQ$
z3J_6rZJ3L-UZsdZGYW<su4L;pMZellq5y0rZ!x=Lt!DYLN|A~V-%7aqSpF??xX6BV
zrHqkyIjAnY-aQ5#Ihd$VRw)a%r$>omD7de{?i>C?hJqU%lkon-7kGC!EfqDgzN>@C
z%khpSCOL(b&nmMkZp+W9mA%QnifG;cx^SHsed~`^3m4#0tZ|JhB}GpgxJbNj<CqRI
z21iR1g1=i&p$eq5dJm{qT}|{$<irinoo1Bqx)z6cqE%enCivNrU<B4HJ)c4Sp=$6?
zL$^)Jr~@4f`yblc#?ba+(}<ewm0uB!a+*w9e`0w|ZhmGN8GJ0(RPEkLpA=sZ#q1<)
zDrwR@(lHS`b-etZ6Uo!}zww*@_jd(@A`0~WGup~Jy#z3^3~j@}`&?YMzjk<m50S?&
z@KvNz^W@&Q9(NsjB4oCbM7gFPPIk4y9D1HL#!n^?XB+4@m?`QK@g!ErQ)`2Fr5f?*
zmo7_V&xT&2Y0+Rd6iY6hj|Dj=`FV6*uD2GZ>N)Ps`?CSWwpI!xVjXd0kYOYMu{Bye
zxv^nbsBXvO`}O`IK$o6fra|+7cTfUFxzDjODF3AQv35bCy<h#}D-n^JV?Iw73N(qs
ziE78bG$DI*XrCWY9}?F5r~+%KG+t_>@(KLTT`;pxxKjmYcl|vxF+s#1nS>Z8`;2uX
zJ-U4VxQ;V_Zd;8BJb{EjRuIPOTZpbqg$Vr<a(zBAzU10sa0@M8P%8urq4P6bPK^q3
ztibv{pRkQp`P9!BfBB#<jE<K*LvPcag|KhU!=iA-?XR*<_L^jjPZO_$-Vbh@7|}iv
zUxCeg572Z{uBzx**tZ-M)hen0!eDPUah&d5;T~)aa)jXS<)gQqoR91GGDS}{mT}qR
zzc+Ph#BpP5VR%9^^{ErbOcEe^#7>LYk3+lc^xGL=zo9?;hhU7pVfcM~1<2=(bF2P6
zf;R+ApZRHU<6UybSspI%(oLg*<jt}y)d2&PO2IzOI(2IN4cXVD9_@}(7&6I{`%joY
zY0=_>soF8j_3+=cv&)l(d(TQ*A04e9#^OvJzhhr4oZYv(G*RJOWf^4Ulx!94C|@6B
zCSFMpHK%*WALOE_PN^~MW=lPHol+o<p{}U#0VV?85=X(7q#|TKm<~uA5XY$_FTHSD
z6E0$;l$?uH0_A~6?-Bfgp7f2U$UN9%SU)onR>dkqVHDkh3ik+vDR#~Dh=@r1xX@?P
zfE9PPl6w&QI(_>Fc=e{>v;$;7;#m^zLKdUWF@U^B<x@Bdq8!%J!jXK#BA2K4LebFZ
zV`Yi7!n)La;Y;LUdn7AlJ00zvh2QE?x=V9r-;VeTYF{GHmPJt)W0Z=*Sk-@Z%qhWV
zLfJuBdbf9m4WrOMa{Mb4FFS*3onFpOL7f7C>#vFOvp+PgJQ`;mE~hpuI_%_*sLI3t
zmV{L;;7Spel|2<b#rJ-)63{$vBm8p@Xd2lG1R-4i=Vt#uf0j_0a+u-N_jZ5DO-~f?
z(BcWE@SNl*tZPK(umcd`llGKex|~;ry0F3PDG7q#AX}Xmo*_XpE4ZfcqF|`-4~F;?
z6QtNwCCsrPz~8_trl03J=)U1y|93YAL;hkT!>CA5<x(7;Y`%6+LdVJA4cHnrmX`NS
zR+Oc)(BfB{{0NMdGTMUsqZjXPV{+H0=-RFxPj>>si?n3|Vj067^B`cYzr%SX9vg8K
zA7uJti-EuUChJstNsFB?L6scwjAPZ-j#cZKrxy8NWDh2rboD5C!Gh+u{R=mT1~r*W
z&BxAF&Cm>pdj4uF?W3177-nMe@AXc?DOvMexd(gG31AtFYXc}&HMMK@eTMihH!+4)
z=y%r#Ny(IfQY8^d!b=acylm1>hy{&jS05aY{SKy#dlMZ!h>QsD3nf?2ukqlGN>Unc
z@)zZb|2&#$1rjd$HohSrV}<J@O2R#$&T5!5&l<pk<z{>dQcPKC7Fo?pVa~e4bq)^|
zR0MeedWOblMt*jO%FXIowAv|C8*Ja!!@n(nqYGK<tuV}uIA=)%neVfD8G~<+C4z0~
z{+$l=y5F|Zu!PR*Ob@I!yRPo}(0=KiE;ON?)cc{NUN`(0N14-!RqUiwD|r~2E2Fcu
zVp0kXo;bV^nH{nB^YS99)2L_fz<WM!ThXpdCR$r-dN`h4Yw$cr={SNn`z{@OVC+Pa
z;f55>cX~&rJoD`$E`A+~y?0^Q&vwf9m~uyNl0!#F-PInL?T)w(AsLqXwdTy@W;M>i
zu2Zwe#4O)7BWzwM5ZMT^eY|xQV&G8jM6+Z0lc&Z8&GFN*W;)j-H>ZrjFI>jQDxbE|
zY5S_cz435wblL5}!d+yLyd!zY3(BgdIRO!^`IuzYwUg-6Q<{Frp6CiEMBv9N4ISZa
zzhIev2!0~%b5JS+&GzWh6hvi*z0qgp;k<&>LfPb20|?|dj%~SlwXn9^mYT8Yr$-d5
zT(16d9us;^j#c>%nq_7sqVnVl<C?oMyd3gbq`k=udb+$|#s9K1z@@uvjf<NVI)7XC
zSnl`3w&gWLFGUO4o?-dlIV+}eT>#I`ZwvrXDjxV2V<|yWDK3JEqP4$)Wf_I^@NM8L
zWAO#Z0_z4nbka_4oDw|G{=p%<8Ew7Z%RHGFShBFAMx5*8;ZD7Fm)*X1BY4O#r<LSM
z<^~{$sgKA~|A#<qU2v#+-`!t{M6iC+{pd3)ML2HlD}dB(WCL8szjh?Orb&i*kigW2
zZe~zPcYw5=Zsag851q=1>V-!fnlp9PeS<y)#GAKkW$SPyiB*)uKHe(jMN%(tvp(T^
zIp@c9+HK_*XFBQQ!s92_qHx+2L3?5N@I|bZ>-T;4mlR#T^z9_^*Ay>LgI6;#-1SOH
zfdfBN5Eo>#LA6k}V;s6?m}$Va4@d$W$cV#dt_Ap1!9{;vO$Nr8%ud+(qv`jXxQ=Ns
zZarG0*f|9%t{5LCYtzxy)F4A0vwk`G4|WFD>yjh&8@(JKQy4QA32*Y`j0xu)`&~fc
zAotlavfA;SkYt0G{T+!7$#LwN(|h!WqXMZs#c@N7srs7tAl~`hFd>5z@FS6{=LVV3
zb5~+;bq4&BPY1Xa#g&!I**ZSB=}XYv6M~rJdZGQz9y;ISYGRt)fRo{cW`k5F`D=Dd
zd`5j_(^`L5EH1V))Z~3ABCBdO5?YTpl&0sRzq0NG5)=*U{Vp+_C51*x%D75{U+%%}
zvk=VA4KMGNoO_D$;C_W?2}gAaw=6DqkOaV!5J?+&84u>S>vXGVRUf@vCa_z&GoZ_q
zOWEv7O@##5DQqDGkAG^hsd%wH25*X=UW7rcq4yoXNKPO4FlkTR-!gmoD3kms*L@Sx
zjTfK81}-msZsvP*P5WXz9xZLm9yJ`Vsk*YnzcdXbj$LZqUvFTP&o=*3s3iY`wD+*e
z@Dh=>k!ct`v^f%kl&QkwGOo)PTG4BilvolojFIk23!8R}(%Wnsi9Mj882UtcBK-g-
ze$DDvZTfe%%Ggmj<w^5peSdxIi{7SeEuz@W)z+UmEy=3Pj3+z^SRLg^Kj(Aqv3KW?
z$TR1;O>(+C(h8-3phDRQCVwL4K*R4?(`CfxAjaHl+E-O4gv3<C*+JjY^pu=xH^g^*
z^z@7JEBoaX)ezJlBcBym{d)_dS%ubVg)pBekK<GeSWdiqu7TFlR#S;L*#W!>kp05i
zKo|XKF=zP0t!V>>D*u$}crg*=>k@KC=HBdD8t$GNCh3yGTrK+@w=YOOaihu5YRxMU
zn`3IHc4e#${%PN-{7%47-MIacTci&^nE|A=Itx8aN}-2#ACY={i@B$!1lNF({bbuN
zryMzG{kRyKj>wXBW(qjoeSF=G*tQRA)?yaubH5Bt-Z4#qqhsU7u2coa0P^0j4dnFi
z3$XZuQ-`mV#7QbNWl#1KUAlbex=pP7GNu<<jC^r~+!qhFiULKsJjVK_v$B2lSn#O9
zXE_gUA(1Pw@*`N8pcy3*Z}gO8GpLlquYS|{+<w4=i^#^zl)`NKE<@a9nIlrQXH0$I
z{2;ENnHOXF0`oCv+jlg-v)HYA=(@p>kp1nEy9i82nPjYy=aQlkeh>F{x|x01c(LU-
z8{TN)5f4R{bl>*h*aB3(pYFE0FN2(7D3C0ggp$<MI&`v#--63}D39h0==;HG{~>VI
z$0e<{?)FZZYXJs35#aw9E?6obd2Fg~DFB~RBCmY7^ZYX8ynTDRHh?*~%y_qM!<=D<
ztSE%dTa(eA{rwdKJ{fpneNsL5j5S4QIGm3c$1N(}C!Ezbk8HZKF9%(G%A0chl*AV3
zWpV^dG>jX#IOYcqwf`Q{354Czh~I~Gsu1l=*QNXjNGs6m)zUqH$ZVbo#<bFnlN^ex
ztXuzi5yKhaOGt?HGKH}owS+~UUer#P<@|~OGNr+`Y*IV{A4U9`3#XdNRyRAAum)G3
zG#uRp_*`TEMpa!Fxob`l(l0YV?<L8Hajc?fF{yV}*5d|*yq#7IySYZV+kKO$a|<sL
z9fR{ykmpb`tkx&Lj%Ii1a)B~Fb*xnb8~vq{rpWYl87F?wO>rxra9=jplsATF0KYvE
z)bz0@+7B(wPSK3657;<^&W7qX=!gY_ho`wh*~IcF{{*}Tf+_rCXmC;KerG*1`P0)a
znNT(NPw-=BY4}?2nSt?g81Z@?T<1LDP+AT8c>;c{TRH`k#AX;fN;+w|JlVYK`cde^
zIXUrRSAuiqgm}r2(WZV=Lxc3f^^Ec5+1fQ?xI`P8X_u*-VefhwE0Pbz4F7vBW<XvH
z`oVeolPl=dyix6>y~lupI1-~}_AI9gr+{{O(gC=BIArQF3eu|Aqe_y(`=jk0tVE+@
zvvV5VzsV_qb8^2Fau1zGdcr-MguAR9R-W$j4k^jcsRr*&<TUSg!E5b9f}}aR+{*a(
z&Uc#zYd&vKWi@@w``~YrO|sY~M<wLZ=@)j69V^43u!>^$UmQxG+@Ifet1b|F^m}1R
zUN~f04i=V(mF#I)Z!JsKS3OJDOKw`s1+vdxu6a>D^mF4w)%7a0js^i62fL2^Ra{w6
z*eYVqzDH)p=MTKP#sN3BWf?k@#3iNV=*zt$@}gV?O~~4O(gmlMQ(oOVV8aMBl~{Co
zU$1>sFBGa=XbL~6A$ZP0K+x7B3@uwjC{nQYm>6dZB$KwrZxnMZ?k>bFNFIYpL@-O-
zoeOU(PJZI{?_6Au97cjCF^HbSdqI$|y{l!O19vAo#7KJ_zb4O1D%06%$9g_zjMKKT
zUq#NLUW{K9eiDmO){W=jZhdd+kVOphfrxsAe9+gVlwzYfp~%Wx*g0g~mYAm1Ah$Cn
z5>9;<TEa*{(6?DAZW#ornpJLTsSKIoNRf*EVvs4sMbK0Fvy-tTnGIqFTk3=;TF|!F
zV6vxa!NsjA@MW=mj!pj>m~j^vVi&KK;$1PiU7fllTX-C4pR)_9g~qkAIC&U5OjkIT
z1=o$TR7O}Y9;kg}|5}~P)Mg{qa|@uJ79dB@wQdigk+IN6Fm@-1O9C0$vl?~U#`t_I
zuGAsh6S{;U2GbXJ*)vtA({an=tm9>z4JiY%*SQWz9y8I>Cn7)Ctf>-S@|6~{zMPAt
zfV)AKBmW^FhOcPj+rf+tO%XXLOib6+O`BzCozlY&<;stArg&oK^WSdw)-ws?4GL42
zi~l5Bm^dd0Yff*ZikdVPm7GXiBfbC;oqi#Rl?IG%ndZkVrL}ce7q%zdE_SC?b?4wp
zz%F(?e){46!TJAhIsk%lQ?zfV6j&GsWKW6~(I`&tEz&I{`bBD=D09w7@B1kAC@<`q
zYtU#Xeo9K&O6%4X5VV8o`pum`$3VW=y#J%oT79qe4=Y%2IiSX^Z8MtK=|sc1voS18
zfC&7wvbiED`qQt^Iac84JFw^=>!T$cn+2rhm3hSisHXcgDDhHa1#8F=Y)xF~$kCcl
zZusDJlTEB1MXWrj+(T%AwtJUujLP2RNMDK7&hoBWjK1&VX_vTAmge+{e+Y8bS>cUc
z`h7vVft0w~Mab+M=Uul4u;In~m?wvgfA-6k=<Yk1%u{0TJwSac7c{(ys4-r({Nc=y
zU-RjHfsg70<H^?_qP-}rVOaW6tX9_Ux3|Um!JDN0H~W&<>;uD|v@f~^lr%c2N%rim
zYx`K{hoG1Z0{@Dx#b50D;W!U^!l97fayo`n7t?djsg(={=E{7t-jN}helj2QXT$wN
zBbn5_WVAZ9u8Y0BpV4xuq{_V;-~AijW#L7_BMXWB^&P=xE*;26gy3@U6j{$OSH}z=
zTx9>50$xB8r}-X2KMPqhPTk4HIRXnt>I^(U>w2Sf<XYBmY6+h9-5en(J&S4e*#07o
z;}y~R1mlV5bGz@H7aGgQVR`u(nOSz~yh7iUZvsw)mKvW@!@e@ABsUbRUiRYhK275}
z9q@ExvnR&sEBxs7Sy@%P<2IrcONL5)0>;YafMZ^&-Sea)p#5Ovygi$j3E_~wI2IUg
zwRGXd=b)R|Ytg`@=mPI3!l=mkId_4uf*F9Z8Ts_45N;cz2_dn-rjK`AO(A>>txQui
zL|(<N)3@0rt`i9HnZmzIkKFNh0AP5{(h1w}u9EXCLA*|Yck9=g-Ks1e=H_akNG;3>
zXegNOJligB3g&Z#l=@_p<7Zn*$AXX*>OY;b?R<k%#@&*mt$ovV%Qc0w!=!+Ww*H?x
zDTa0R45=BSv)yZLf|ieRly2R+t#)t!Ni4CDXnbUC0oGzu|IeenQHP&4Nqt-ydS9M=
zAU3oFK2!lEe~m>f@<-<lHCE|!mU1|v6pcjkOYN^)Ip45|rY%lBQyz5z05T1PDr$de
zMF`?s;1_;l`3b&nA`YMPRUAEA|KjpQFI6*TY52>5M08re)vZ_T_rH-Ow1?i#zeNI)
zFxxT-lF6vWr9PIRKs7$lAkGzXm2yGSwO_`%3%9lJ8l1-6bHL13?OHq?UA${(>4$DU
zVUZ@1p=2Ru1Nr=7iBD!1xlV#b;h5CG=THNSYQRxcS5qS=v!o(7Ij}374`MHd)#FAA
zYXs+H+h$v;Rg27*wp|&Ksa_6yU2mD%x|^*teMU%PKb>7E_aN>%FWE6bBg7n;_XLya
zn_eYHS(8<Uznu@MiQbGhjB-RAX+j=3Sul;)@&;x!4F;>-$>#RHk=?K8%U2jsWy~xd
zAqt5fmKSPe!4H{$+W#RS(F#QGW_cX`pfgJ9b#P^h>-(9&_OO>LMatl<6rh)|KQ&vC
zdxe-37T;EPwh)1Oe@Eb72ZIT>?_?;A7T?o%n^A?ds4az@$;bX6E_P(<Nt2U&@cIWj
zfr}zG&*p(L;71(NfM(0U>Tl8<qtuOb&Nz%MTa7tPqdZC3?50VjE%JdKd7L@psQNq>
z!#^Ef!_Q?7&{LL@M?DF79nvaRY;)W^9eSO@EFab@d_Ud^l5jj#>DJXWhB?x^<!%4M
z3uhvM4F>sg?<gBGR)1}9tNcJz>&OF|c7C6xzjxJnE0tH||KsdEqndiWbYT<_6c7dJ
zB`8R5(m^093IYO3?@@{ndha9(0#YL&pn#wh0TCj-6MFAe>AjbP5&|jS^Sf(i?%e;H
zHS^y0LsmHXkQI`1&ffdkPwB~>RDYsFA76R{MiR`PvoqOL0B%I?EX#~Op(zmbV8LIq
z<SrVFYB=TUFs+TbQD{CG^Jo_+)gQB$d?pa=J@@WJa<9s!(8r7|OH({MBo6*|Z}BGx
z!IvefO+=LR7_WazPP~`~zRwP^l^G*W+c(*j2_;WD2Q84oZknzR&MK3`<$$w7bh{Bx
zzNm}h-Te6ps|#=0KEmY5j%Cy6&r=aIo(VD6f=D|-OfN%uePS3jFU8F+kxUGI%kb5J
zY^|_v<B^{cB^8G;3gi~lz4Mu@F(0nJR@l-UlC~%d$m)WeyH)}iZ=SKsUosYDA!koM
zdDz{N+v)tk`xQP;h`UbBQ~i*ttT;Dnb%-;`$2RWk)RfWk;_H&lnon9a)D9TW%wJV$
zhQyDglfyOda#El5r8KvFGP>eQ*T!15ixpkHCO^`p@k1DMRT4pUnxI)={--Y~G`>nm
zyu|*A;yqRe@gMPXUyV;*85^%CUcWhtsJIqtfPp0Z8cr(pj)57J=tJJEO)q&PX>rmw
z@qK~n98)qy$gX=D4#5}zZ!po_puv^2(_Z4Lv=iV`UxGDmWG@}g&~%VMeYhm7SRh-p
zn5R3Z6w;DX@<*v6mmwO=zT#N=gIm_KnqB$VAU}5gkwsvi*u~k7mkAmkUUvPc|JAgE
z;30`kBF!zTxL{w;i!(4?nbNz5g@Qq~!Kn>4`Jkuy^}9!m$2WtMsD2%t6GeIiOLt%@
zp7UZA0>=eUNIpZtSyX5poukvEA|sd)y>mD=cRc;McxC^D?z)vJqGk1b{4d#ZVI}I~
z$_^+U9v=|TG_So)*L`bzqf6bovWS$Z!34RaAf8|6k%Y+C%KrIze-S-l4k42J(hq{W
zzR>Th<;lFUgkB^Scr~yh+C50V%O}SD+bmtZ-!V7fXV*b%HY5areX{8tP)h9nk`0{M
z;&r#UN&p@6Dd;a5XD*37_`zSYY#mz?QR6RJw@8~~$zQS+yMS>L%m4t6oe2Z*73*3x
zQYcV(TM-0=AE@#$3P0lvI@<t~6kp82CfJ2n_fnN|FAhzoLCKlZk)sCSJUF|inbCLD
z0Vmud1(dr<)j2;__?PT}mniB)aGi8%<_&0@Hb+-7{GL01CEmDaqmS~9oLgI6SKKQ$
zgP02<hZTkq$1s&5^-_1iKA&;Tu503h1rDzJuJ^6Z9JV;`wkkp>^@}w`;&QK6&V0;F
znM#Mat6up1GoE&WEdK-IH4qoKt%_^v{~>swE5h;ulhdWOT?7zk1M6W%SWcl9vqSbt
zY@jaNkVqiP@8)SnISOz;c|4r9qAT5t)6G`3fB(YWvE()JPE}1-Y=eAm{Pz@wyP`*u
zM$e^8li!dXXd-oRfoK*>c~6KFi+hgCOZ6cKUgbFQw<Es8<5eM;(4yhO=G{8BCaMSE
zYp3ux7xW!v?i>LOP*Iua>r1}$xKq!w=EpBB=*E3)v&~5*OYXVGG@?a%Y}qSZ<}t%b
z8J=OzjVGDSmf6zh1@VN?uGJzvx+ML0VRzl2Xf2&U4O5A1aL<L)7`Zd_mcBOMK0Bq9
zX8h4hTd_A+^nNkjefYhk&-9V3f0*{y@IlAtdG-za`kLfBI(m@FTiZXLKTqhj@WUPl
zp-hShW%_PE=hhqiXBNXogLwY)rb-dG?Le-VG~a>3U!xi)_C8%^(AF-91s(GH$rtcs
zGKPt))5n0RPP5+y6<R@+`^`9Goqr<^Hdet#?zUT=iG!iphK&!GXF*KcjHUrGQz8x8
z5i^(`Df(KKhg|PlsXjO>RjwL~OPOfAW@J+qO}2o6c~`|)I{W&{oT8xQ+z!a8Y8}EQ
zd6`kl-K15fTud(3b)uQQ=d7MO!*EnnV9;=&IyX7`!5$fY=~C7=uS>^DB=;3$uHTa{
ztJV_a=BD-nfscf==>C%V%GF8C9EjxUkB|_{*&ykViYvcxKRi3k%%ze&la5l7H%_6N
zr|#b686Bz5!#_{H-}{<5Y1RmJ$_6?N@Uv@Oqm5!kxc7<6L?sd-JRBSBpGJj5hg<0;
z;uQj>&Eu^XAJs;4aLzsbkTU$E;R;dYS;N*~+iwfy$Gb+M?su8a)P(?Ezk5i}fa-^k
zhvqv;`I>=GYJke2iEQc2=%&@gD_^53mUGG^i8OtB`Lu|%gV?ps%_DU-->LUi>dMWV
zQGYnp6==yl=SqHBTC7fdf6{Xm2z?8;Le;aa?xFKI;VrS#TZ$&(bGJX5jr0rEEI@(F
zc056&bps}b;kNa)xR(x8?7F{Xd4k!>!$p=>oWnBlv#Bd}?(g9=C)v?c&pTHPGgYcX
zK7UKL`*0v^!Pb|#N4`Nd%Y?0t#S7vjG^3wPMht!3@!qhUYu4jCy742cc6)X=t6L2R
z{}x~$9st)EcUi<^EOP2Lm8+!!&yXV?ilMaC*Rq&z7BH2^ywG0ziHPj<q_q4e8gE_k
zEZLQKutzY%2)sFL1efYLp$DdVgYSNnPwHN4z0Vi-F3Psjs{kxVrKoIVung4G`4Zy(
zJ4W-j)nLg%Y?yb4`wcsV)ua~h=~9*Z*d-hsjy}W|V)EGcBv>a-6;CZ3c-PZ8qS=k?
zRa!(1FPL6Uuph+`oiDZfIO|84O}K5Vh&BTJ{nGBX*Q&g$MH4B)GPcYa<_mg%$y_0C
zy_`eEXO14aT}15HA<kU-QcIDEz0d6^J+q{)4!d}ZWtEQ3J*t<>ewi$D=QpB)m^Y*e
zlKWHhX6<bsC6-+F3j_#wXuS<1Z@-&RxB8ci5sJ~io#I2}Hha#<Jo4;FBkNr<`xREO
zXt;9vZuct>HgR^-C{4{0WXbGl{qb+KkmnWeY!kz~(H{*K)8>KlU*o!6EQ5Ll`ppRm
zaFSpoP0U!<#|<-d=Q>^_SLDaBuCGVb(mr@zmUW9ZF-Z;$MjIf0Z$8G!GiR)V-(fYs
zZ!4t?|3>b;8cudsH1<i7B~iXixE3cUT#NsW9Azf}Uqo1Ah+l>&raA?UfBKPz>yFF>
zvSsH?N`!ji!X>&U_<Zh!>6nfGRc!Z0;zz68l9gsA?kQ503hg3QWSjkwzDa7T`-;~g
zd{+&SO9Lh6x&%oC*AHZQq3d&ZVB(WY<0a_2fHS49HsIQ4p9OeeTS)<{7iwtv@V#bk
zoQTGVvRtIfeYNr^y()uAn;6;nOWk;35zHou+8a&R%uAZl-@VAw*ZxkB9Or&+DP+`?
z;qx?#OWpe5i3;4W3NH&`TPcN}#<_~#vi@K>JOx?5)qH#cx$1>1UMjZDt11#V+U6pi
z%b)!tI!ytU08jsS80r6T%x#a?!nw?E`32?Cq*i?B6avk;y(6;Pk|?e3K@5P@9z;hF
z-F*DwTzz!z@R=qjsAY-7bhJ<WnC^NqH62^-?C&@q%F223JO=N^*%@qpJ$3<{3@ClP
zkF6zIgE^mh(m;`&@RqWFJSHE9{M)+Y7MY<^>X$2J1(xPczq&o0eL2)*^u^awSa0;T
z1LHwVdW^)&IYJH*ux+r7*T6zB1Ib&MVgDtfuD;K1n>_2)&7O}~-?hsyG~4b6Whs=K
za`2KE?~4CBqz{<$k6Ix#=-q3@Zy!7~Z0L<%dOYL|yTo)}fCnfst@o9U&@B3O6OC8h
z{a4(tmO&X-7VezMiA=k8I1f>Q8!d~-1>8I#-`6-bX)TRt4NG3QEQ|sF&iM%#JaokN
zDsT7I<kFcHY6+$><O5{W1RiZx5GaXFusg6}ZcwZSI^r~*6zeuH^4rOn<nx?a(KRmL
z?zyh+$gGzpIbZTyOfj|`v)GNisZkiPYFQg+!QkrZ@@aO~+=1<PjCU5*Kt~Nq>o`ax
z-u}1gq;JaIp^M+U0R*-0MZp?<^_C;$7}?2`iQ|>A4d+M+-uam)K0_UAN%qETS-=Lm
zS+EBEy%_2%o?;&;R??<h-gA8QM)4JnM?J+^_rpym3@7^~Q^pPQxk8fQK@&Uen{^2+
zIMe9-+0U~FO^B)c$lu)$C-xZNG`m5W+Damwe0|A9RGCW@Jv<kG$s%6v04o&nd`=f1
z9j~tSqkEfe=*#yvp&{O+CqV!!%>9NyOfz)=SidImwZ%O1kWI^G@-+r}m4`jNg*?+{
zkoOZg-vpSeM~H4&ugp3%wKa+`z)B5JAL!gSuwc{gf%J5!xlLc@uOmB+UYkks40}|@
zBiTb0W2Lc_9WV3AZ#RpxsUe;kGVl6j8a#h56+B=vH8T|Lup*|iFS~0hA(n?9efD<Z
zf|Gl7>KA;ObxK*_IDj9&kB$9HCb_R`?3z0!l4S);;NyxMwSN67dFl9Rm%=BjZd0Jy
zf&}!Mzkqo{PgFvvZmddS=daG-za!bi$6_1p8uQY3Lb88^h);>Wg6e+$@u0BbkGLzh
zz?0MSHx$9sZwN1mpEfX%=z<A?h_9=Ir{9i4b!}WXtIMk|W|tKCE<d!^v~9>V*{)8+
zT<JD!wz_W_6DnZv@rEe!Bd^Zxy24$4(1EmVvv6q@x_W-o8A;t&Z?r0hy?k4&vAY?k
z*PlG@(aN;c#&$>_JaPB5UOTVum6?Q4aODrxKI`67U%uhI_Dwcpdx?eaohpdYppw`*
zGALhmzI&#0Bu1422S#s_n4-cB^io&%<r%fy+&F20@!Rl+|IFpL$-Roo4&*;AKHqr7
z=XqiaF?6~KRl1cI@)GTq)C`@u$ghggBhS24moUGjx%xd+#aW#`3_{=N>XN49mRvA=
zgO$!%x<l!Vx;=~D+Bt;krhUj0V^brU;u3qUyg*?Id0pd_22BALtliXi#RM9u-kdUD
zp&Bg<qwf>jsJjaInnUJ^G}D4_N0vcN6Of$)elouWPkrQjq~V$+cr_eiI7jM(QD;4{
z85ekG%b|auPmdbtm@)*;LPP}rhf4pTHK5DCHq1@6$tL?<ApFP@f<enTz)qIV76U%L
z!p+`m-_~XW5ThhQUdU3GNb|GT&le4Eonqpb6#04gjGM5#SpHE<@b9WG`ciuRO|~Jl
zXDw{2+h$Q%4yps(R|G!F0Z34qj(v}Ah`0x0Hd3qpxD4tQ3prDX;TG6DfpHGiggPj~
zT#2*XQq6L8<Qoa(I1mPyn%g`j2xtW9LUz|5{Oh!@N40lFfzQ&*y64;Qh~iH?5Gs{z
zonQ$~uTGQ->fca@^OPJeG^Q`mc(^(%+Ix&w03WzC=wsun74skssjQ|kc&6D#?`j&`
zvDgF=1R5(l&5mbu%;KvkOKYz$nQy{&YWF|58^a=Q^sYUez3}&JU5^-|-g!!I&kR`h
z!oR{7&r!HZqk1Aj+04oEbEU&$HYu7^nmD`c&zDNs0_a*!`PN0tLVip^!U_`?^|}s(
z^6KD~;*HCiBykpn*430@x&6Onp?}Fvix$zDPyUiA^8sYo2sN}xi#z_|pPe+&wJI+8
z7vWFq@M7)BqH2CpZ2n#HLTo&UvHA?uAGNNjj&mj7rY;UpLqnWh5Me-p=$y@X60lSy
zZuau-l^W|Q<hoF`vsyPyz-br6CNQ1!rTO$$f;WlrJ!<n^_Ss18fBaRlh_gp={v{i^
z1z)lQ5xk0bvMUN=L%=p?S+~a-X8LH^?BzYGTb^=}Ck67n44O_$K!e^&&B<C?3(yIU
z7qAF{p$1Potx&N!8U<mPg4hb$C(7a=CN@Jrrd21jD<qRNTWs@t9>CJ?&OCoAb!j&y
zJ@M8ZN^3IKTU7+|gd+9oJ0;I#6ou;KS?8NgYjPSI{LHNyD)5l2*D4DQA_9+f`0_=}
z4`v#~m=>!4ncRd%ZW-xlSk#PDT|5hipRuHtYPoYd*O*s4!HP-E>dm$2rj=_dLY7s{
z-UtfB#S`p~JcqY4IhF+&F!e?rrZ6q~e@xC_9gU~Mv1|65wo3uI9gR`%=89H|kBuNl
z2h1OvgYrT6z=D}g+t8xc0)DqOnk}VdoH2j?s$58EdWOc?EOJRic&P2rYC?bUS<DJ{
z$M#)<43zH!{!%Z}1P$8rFWp6OF34taaZhAcK8SW>_52-DoS3D}aIbIr6n=%sB}E&X
z)a(#qWRGmh2Y&fhYzc8=oMZ@>!MGz7yP$&_m#5g1Nmfp>B=KzQjt*Q}IZ5;TggDhF
zYctLzf4+-rCy*#``R03qcX_BWcu)Jv=3u?rMlC=)6TiBDRvku9F+FO12`9gN7&&?y
zy^sx>`S@~vfFg~qi+%`ppz*wxdve0$gdcQO8wh_a`j7C(KcKfZ$3eBxeg1BG|3r&l
zoSxRT2GC<&(KT<m6>}<-;`bapY5{vCgYNlf6s){FtGrVUGlHcV7Pftv>n|2`mjQj@
z(|>h?HMXjGdtxE-N8IHkRGcHaVnZu6vA9_mh$G3&a@LQzl40>rg9WU-zg`9&^peD4
z@M|tcY$ndt_xo<@NNJTbO?7gi?OC<K4QjT7I3qQorGx;Uo98!69sRS|rb<r^Z_MgA
zz-^@;dQ$WmKef|W6Coc7qejj-VTNZkupDUlefePSZ!25kCxdR1KYYa$rxZ6HNHRR{
z>sz9InOOUh%(FApgx>RVzl3_zynIda$=>jnVJVGIGbJ2C|1?@)NfHI&#x&Rw9L>zl
zffl9Qtbgh7CF<bkH>%BawLgY_JG!^*K8RphmWSU|9!4xsjy0+*D?DcROLR#cDcpI$
zS$6tb(Iz-a$0mf9m>nX9SHb})F3YeiF`EyOb+w6&0P~%kV?7y%5FmS{u1hxPS2dWu
ziI6YMrF8O~Z8(M(*oLGDn%QwAH!hH@pY~!7<Mx_4aRSEE1a*>CLOfJ#ZL`i$zk^=i
z^UeJvN|<uUQ{1<r5GPzsX+VGHZR3L@gP^TkuLmqnYOGX;!YhJA59wV9960aHmF+#3
zk$=26xRi9lAyZ;|2ULoVDSV%fNrZdT?cj1$OT+NY{Z+&}WvU$Bt06{3>NV2h^HW&Q
zRA?TRvg_l|eTl=VpLVT*qA`*m-a6Sm>9=S-nt;4G(unVn6<)y@{xVEiNcT_ngG_(E
z0GbnuXT&c6l=YO<sr0(+M+@&H0e){<fG?zN{*AW%u<t#=5AbPQ2*k*WQN0Si5f3fc
z^3ZU#e|61bwk@3Img6@gV16fqF*%{Kq(iy_tL3Vtz8@#UVsp+#crTm<`d@u~>A1AL
zPo2MWfy!tA2(yi=WB-RX`(NdL{1>mG7iW0eAiqh%I$D1`ujJLa_$7MKZ%D$G&V)B%
zLID~iM(`CY^Nw$&w%op`($g)f341vENqHt@0&_3p&lVAM?iO%<K#o_VQg{;FZ?uCN
zbPSn&Lczo*Pw@hh3qnNv%lN<v>KOhL4ot0`ctslkqkMN_(R-J3F26Qx8$O?<Qhq(y
zt@SYde0rsTaPzqBA~n`iqIEvlWL(zrAVy5<acu>7&z_e-0RJZ>h#k)!bK>#2TAI?`
z)+m40_e|wFr>;e0=U+1Bbaxa4<$W9rz;|IPA!@Y~C23aEgxk&zx32`g%36Wm=5(Nb
z^?Yzxr^R!^Bn>tNE`tMZWcGBrqo;pzCM#8*G&Wf2Qe%0aUU}wqo39Jk%f=^3MX~&b
z==Kxx9zyTq9@Biic|<wX_Ihr<)4g-=cAniisl>itJU7}YZcr;S9lWYVv0WQY30Cmc
zyn75-3v2|kCa|{kCC7^t`MA~)B{~S2In5EoGWbg_e=M{%)S_+Q6B(c~%#uNsZKH>t
z6ULr=petw|djM83+4}v=p3_&>I1b4|bpDNdX3SiAJ-5JI&S4Bykw&=H-?2!h%5H`;
z4`tMy-jf+y8PnVh_#Vv`>mn+}_V9H#IoaEhYlo=iW2H?Z^^6IL7JxigL{|DlhKt+J
zLj&uR^GZ*HP@`WeD+YX71s{EneUbxk(GT^a7SU!RBKuCPssaIWR;qoE$mf4s6Tjtb
z7DbGDNqTwwS$*T(J%g1Wb@G#(`y+>Q<2AE&xmjmw$H}d61w|)<O<n>=Ys58E<J62F
znBX;@RMrXy6*)dn9P#0c!3_!ZNuGNZ7|?SF_?)^|)tqOWwAbZ_1Q%U9YA|-z4%$Cb
z5_5i3j^WP2rV~q=_A9f4gmSMJj4YjTHx(yMD08joQTJ~WWG0(uUuS+1G4e}!dT^&s
zF;SQ7l4*IEW)G){`;(BsQ!O1%YXl}&r?!zah}mPnIUBxwSUurUrt|yLcYKm~02NQR
zmX*U&XR@{IVVIkn56wYcmFurME_sb{?soFz58hVt@0Ar_t(3^_5i}j#gIxtoHy*Tb
z-b~DTXSbA~;dI=Oe5cBTa~7E+&`wr3O6})3iQ3h;H$IK-?Mw1|bCpd|6t~i-d>kdU
za3`NVFoIImB%sM~3E$>f`IWFTt}>7e?zS_!Yamg&T?f6}$lstVj!8O7a%__C0No5x
zIfvbWqGsfvA0+k+?dx`STd2sgm$^hr(9;Pr*(|d^(<x6FQ`wHrY>al#J`mc!?GL}`
z^i+cHb3hAroby90$J|4t;ru4j$P8z}A*u4~Uo(%xazR0rRm0&yEO9S<C9ciE9p;`X
z_mu8g%D86|ep68?f_(rUg_UJRtb)ZtU-3_0N+313xIeYr>YMdc8nH7TKX9?NT5J1$
zufm+3(h5*d&d{XChOVohA&6}$4G#_+k)pNJ1aEX}4*S|oT&&ImmcjRoJmfrAUdofV
z+yzV`i&T{8uZ8%Igy^kz%WSmmsJ*?lN|w1Ag>U}SZ9XfEF}T~g!b~pZ)w+V(T(})9
zcM)6xTN{Vd<@^8M93b2qlC0#Gmw3^+jM`kM+Gp53d=iX-hM2^sc~|5laz1^Vdpjla
zAr()IIZ^2T4q|AgI_@7=+@)ql*hn)Yu3-fbA8kQsOCt^mcS>&gOSf~2<@rk_GA3{e
zJ-W)t5YV&HfK*WioW_F!8~j~{2NOPb=ihjcI!aOfIG!b^um$v{6x0EyK-B>3d;jha
zyFn4fTjrr%U-&cMuu9mb32dmSb2$6=#w*!oMHU%m)YO;cpH52f)xgXxJC#1fpc)Uy
zbtI1egl_s<>j4H*zuKYpxie3t?@7-I^77ESQ0;iZFIkr!@ID&5@gtWn`3+@A1BSOF
zF(9LB=6gPkM56K{Z(@y?Aa&j*Yu}Gk<R&D~{g$@CGGJEekKePbW3V_<AM&g5@4r%C
zBm*O!z?KU)2#=1HcF#;rkLUM?MGCEnX)?&sPS%M~X90bTZ&Ru9_}St9na1kEFXSn{
zMB@(h+BXu@68v=cJR>}WV#QiG&Xe`Ms4Nxshrqye01ytDCVwZCjVRKoDs9K9u&y4L
zb*Fq=cdiJ~s{?^G{+~+=x2otFXMQs@#38pbS9hFgUSOUKl8oq++4c&OPi2E!Q2%ux
zwt`|y@5z;>nif!5d_pE>F642uQuy0sXV*{5R9Upyfl<0|!R(~6rD1Xf=XaSRc#ykQ
zXnnO`=qf;hzpp5<aYz{KT#(+(e2}kh)D$m(6`Gf8(1I`?n`mN{Y1*5aCJ3)WvF@~{
z^NYi+_K{NfOA7rr+v2hcE;r>aFYeik4M06HQl}-YY|MUfGzr<TN7P)c*XZ$+hvsro
z4$FfoVlo^S!S@|Wd(XRWY815`9vT;89mk&%+^SzGZqu=>*x>FjFGQnBli$#=FYb28
zvp)}^pv>hj(XYARWQjP%h^W2o?VbMxP*QIEYuorg9A82s2u%1D_T6S7wEx?aBF4dL
zO<9Y!78eBkVC8O<NR_^yJ60<lD1Bj}+h>ckC%{5H+Wfw@!;c+*=y%5?2KjcVOV3fN
zzGUmXYwmTWa~A3VK9C}@;AfTI3>9_}!=1JyOB$U#-S?89+fK~=lAowQ|AvPUE_{j4
zwY?{p5%My`9LExjSGL|J-EtK_Xn%q}%?on-ER%>3Kfhsrz?8NwCW92+`?_xanu7La
z2x!b8?j9#&i)26gx>mDLm-vgOWvau$>gtX1??~tQS=&Prg7dCMEN&(+QdU5NHuIxZ
zi6TK#Q)S;@-nE3|0GJl>vZN7aa3xDS?v&%f2dC7GR}&@ba8Gfi(xBXzDasd&ADl;a
zk2$b#+<YP73NFg^ry~$<8TZ%(`!&w$k)Fu#rKsn@H@9@*ngr0wJh(fHwQMRx%%jE}
zmU|=pDocG<hab<{jX;VUflN3=L}M_wR6jBGF2J?W*VKVaZOb%b)1ZDjphn!=)}!S(
zhsv+0KReE#gXS{Wm;0sr(*ML0PauZd{;iY$r<?!(`k=pr+g)13Jt5efF&D>L=}Ge|
ziEM5{1~y3)BGG{KL?}Qg#Cut2KbL&}1?*xTHA`MF5{KwkDc(3!kMHZbR-So7om%$n
zoL_!7z?Tgiqy!$#{)iE`IHk47G^tZDK^hT#YycwVUbfPIto{CzpMz8Y!wyx<HpA03
z{n9w8pY#+rf?|Mhl<Z$rZHO4@^NWHASD%KG*|-^PjyvbOb-ZnIo|!4b)`5yxCy^l5
z^xshL7a6ffep1DcvTl1##yAgJ;w-QLT_DWZ%u)3!=@0Sqv|&AJyw$l$IPl%RKP$z-
zn<bDW-22Naheta-dVa7n)kI1ykfT%P*^AMMVqWqvYUMC`C*mLy0bVLVm7M|;ITyG?
zB8vU>JY4(y)TjDcX#E8F=g%4@h%k#Hoj*g9-7_A*>Oe0`!NsC!At`vV;^>$tcdmTM
z6UE!{DkXhE#NB9f{o?R^9fAb?>iRJ=yoXp)%o{RzW~z7+KXY$!ROE=e`2_gL=CEm{
z5Njsz_2#=yeyP&GdmA1lEqB-~^Lbe>1}@e5^@Vw<eNf$E+Qs-Wd>6P<paV&nzcBCq
z+|K&xd8fkQ>}Ru}Y&YPmK8IfL0N3y(6Z4UujT!|_-+`=kpjlXmacnO-QBdhdI-R#;
z(-3OJ50$nn5K3G=0UYeJazB*c%=}@<AM!Wka-9?v%qHZ@UWV+8)p~?mu{t=aVx47F
zL^T#zPem`&rg=Tx+^e@;jR<Y*Ip28`U)!p<f>Lf4Bp6ONOXG%iA-Kds?II`t06N|`
zKVnpO?j+pQcuB^YJdq4BN=7$XwoR6C@2h@UEU@3iabL0R80|;u(I4vlz>7?ya`2<r
zf;|qrgC&g~g9ZjFd%w3z?3t6ab$&~2q(Hjtxh+Mi(Y4r5XCToyGU=+2;ic|w&`p91
zv2<qAU3GLB&g?9;$8Gw`_L3BJnC&MEJ5F9(<?6)@UvD^C7s<43eHDX3RDn4x&0D{l
z2T1L&;U5?qdwM$mq5y}+zPuFPOWqU^k@lW><^*D@GL-m3t!|}$e73*;#^%9gIR-Nm
zK_ep#YHu@#OuF!))tJ^*^hb<gy4K{K44smdg)w8@ow1qjk`I`S{vDt#@HA+hY8o#P
z=-E3pusCXtUbsI;6iMz<Drh^5O~QfN*mrl7^0TsJZEfBUgv~W`2>yBgM^QMhnpM7t
zYcmoKjfK)DuUZJX@>3LRUOsrB@gup`JKLNvfV09hbb_c<?^pyZK4PA!!hYe4l||cZ
z>_~;(s1cb^Un`p1N>UWK&XIzJ$HCDCTpBCY$socV7(04BVm|9xjY)Z3U6L%#8KYMr
z<(FR>>u!0M;=9qbdf|14G{%UF==A#M)wM1LUOCxDZ1?+UoW<-edR<^^jgR_pk?#t+
z%<P<ZzYAQoH!3}fskA$5+GiS*@ZQgk$*xKR#0lXrZo~^r&HKGXHz}T)H=hq2cneL_
zM3v%_-$+TRLsc)6CYxyhV>?;mw<Q&wS6Rx$T)if{80_>04;wG7`gJKbP0rJy2RKMO
zxLPi@cvdb2llC<GS*^#2RrsZ>iTG?e-0;x1g!MmFn8c*Ot}GO=`D9s9$;L*7tp{K7
z+<2M{oxW@l$cC_rucg$#&UEei(~}MfCT9<y+%OEIU`beppJ|L=NmN>ac(INvn_-$y
zwL*48r9!y`TZ>tVsGYv+NKuf{>e*>?+DB;jV73Wt6o{#Rt4fZu3G^Xa=wjp=`Yg7C
zd{?yaz|<)A<?4;XRlegbx}d{%zDS&=vQYlc9c?IV5y~`#_Y<QzeDLQ{ps;uANheGT
zH;%oJiHhV})QFt5sHXg_^7E~op>;%V2v-OEn)}+#tk3vj(pBhcU85#mDxSs6pM}IQ
zQ81ntX&G(vC}i?FM`AedSFUQ?Dp-H`pn~^%uivWy!5~^Ky~Xt;SBRS<;rT3-Z+^JP
zv(6a)!FK)K@Gt51>ZS$9hWqaQ5|OYnSI5wsB~a9?l1}Hh)ThiF(u|ti@;?&8E3={~
zmaxb;RgoQ~(8Gk&TWHwn*vsU;nk$c%!`}Q{O7db0m&D#_9+=)2ucBk#p<`Y%o7ZDY
zxF4252n$$X;pKDSaY&-{1OaRO*YN8h;G@mDC_LoFGf<_Kpm2P*5NO_7{h{KkUF*9c
z*;Ma4+d=wqR*t2ngL&&r4KLiCPpgpmhjziWw7ZO9Z4+(kKU0M6*YZ!;;d@>7T_u8@
zUFS)@XsVV)27-FP?BnsZ$f1XcfnK!FZL`#FrmfkoUujwd=J>tD<wM%X<v1j)F0fVg
zwe4Ga{Jrquf`O+BTOyN%$VZYdCYDMPiY%Vw9Nj4|3x8Ibu&5I$5^&q9teb)2G54G6
zWJQiSKUe>sfs20+I7K6If}njGPc&%BCow>McW*%eqnxWd&MG$@rEvV3ikIX0k{ry~
zNA{rVd=&2(5M@XN*gP&J_gg_UpWXSx)<f>)cm4D`Ka#(&YPUW3kz`}z)nM~_6^eAe
z)26``qhE}E=&gk}^FL}PnJ70dG77W$v%@)?CS4Ekf!O1+12Fm_{ErEVQR4jts&nzy
zgE43ptI24i>R32_x4?4P<5Dk&{v)Ntj7ahW%tdyIqp>RAcwPmD=}QSQubrN*127;_
z^7=`w;{1`A*-W_&s?ci%(cwU?!m6#yyRtRMeiY6{u#<{fPPZg=f7LzbYGl6x5@6by
zVosfVtuxMSvX4qi8&BN_E3l<Ed*PFf^{HG?gOy~+z}a-a{aWG@>K1q@3r3AWWq{wQ
zjxT5Yk$PCI>RNpeC|S1TrJ(+z^EGM6`Z6YauW02@h{~!>eNC7zRNA%3BD1?-j{8#L
z)p>^Aw(Q1l)h~}ah-^AZvgvY~DOQ{}`qUqCzcX3ad*hX<s>=2qqW9b8&RJw)QgGLE
z0E}??i2ULIlql<&U~568jN$;hs%t}cRxT9!S9E+Dob!Knzj(m$fc$Gt^Y8NqKui2z
zGP3`>*O7F@ugX-!x5#+?R!?ZrJ#n%X#h`5bE^=D-z$jzokQPX4p|{+=h`OpoD2e;@
z+lw`nE{N^5?2>a4F)bG;{%-;mr_3v!$rOd={|tpM?S(WvF`qhdM$kJ=Gn?lGde16q
zfF^=(hN;9GAxBs5@}>XDM)Zj@cqUZ<$h-mHKU3wuK7{z<?bXc;rGB+i*xlTMI)0^p
z132J*L!tG4D|0~$LmuFG-8tFK8C8+_{7j-pf$4L82mvuxd|e-*QJ~WopHjFzb<vfr
z^1XyY-Z<?=rL>w9Rf65CNMnO`AqoKKW<Qr-r6({*v~f!`XB{uAmIW`H*-=L1B@)X=
z<Z}*QZqefh6WZfU7VInyt{D;|mGpG-owxs-o@aApdhGoeo_)nI?OWY!MFTM3bQfn~
zi-0$0i832TYENT@C<W*Tp_)LmQAtfkS9|(dV}UA_++%nU@xvL@&;cQX@eqV@pFQ*V
zBar+Jxz9>C>s=UnyfgED^|JO-h5eKSq7vjnbnp<utLmEC5yFdA@U1_M6z$4VPhAge
zd4DvJ@lj1M8YL_{+ByeYGCeKqx9~$5xrMcP-a>zD$`a44OHDi6g%#}7#FHd&2Fu?b
zV(2^SnInPid9n8H*4IF#&`CdvQ!W*!nhpf(4*~;*xnU`|M?$%TednleW9B8Mr=O-=
z>S+Om-Svma9ss|hZ8X3fcbZ_EsVnQJec$ifPn6#3uZO<4*Hqpu-tntttxSPUws;9X
z1MUV>htRChQ{f;c_2VaBT&-zl6?rMY@wp`TFGw>=t#i21uUxu;Q@sZyPisMo3vy}z
z7-66gqwVi2Buf=6TZ#$;FtfM*Qi{;4uWgewd-;=ML{1o2{+S4_iXQThALM4Wnen9a
z&n2mMn$Z{XesCJwVXpD)sqai@@sQJVe{U_t=oJ6P#eS>9m9`@klbkMNA}2tl##&;2
zl>`^4SE0S)9NV`!9Pwj?kcF6ZAvykzkmcxymj}SMcKph6H7F~8l!!jk^nCv{WJP`E
zXULb1E9><W3#^!LBfh_pjS~L;%l)IRsSD~!m!HKvDnDA`xnjyFWRttD*wR|Oi)_U7
zqz^n_j3ztZpcr_^-zg&49#R}9bXS4R;4MTxZn+mPG@5%?*h@z%bL^vC*7o;IbC~l+
zXfsW`<nQ!f#h;(A7c-|9Yu4(Dxg4Ye$i`Z5_gk>bpz+At)qSa1qjJ-JjuO$g%1_^Y
zOEQi1V2iQ!Pl?;4;xTC?+#!BF8H62KWJj=HWPGYkm8IkO#Wf>Dwvg+ue{FNlq_xbv
zh2J8Z-g*$~%h|A!U|W@YKHdLEX^ELERkTe~4wJc(H+5J=qd5a+w$SjFNvunNrb+N`
zf}0s{bHjyOq**K-xXK8<T_BC?90T`2R{gdHqMZvnJ{(y8ameeLD%AF*mUBr4I2UKo
z6}OWG+Nly5A3#@E6j<FgNvo-R;g2G!6o{N@>>#7S(A|F5qLmD4H<QK>AhGUA-G@XG
z6ne2M`#LVesWqg!LXIlMNBMj2e)3WBY?$-WbPF&~`c*9(u(_ItJ{r5_HO2nZMqQ-Q
z)%0MMa^aD5|Bu2=B-0*=j#%tN=QIItvq5~*!@hNHteN?_pw>g<dqb!#MZj-FdP^FT
zVS`?f2=J=Q`If%SJ0;YYW3tQCZ;qX8HZOw=jGk)KEh9K&uZ?Dz#DwxqKx@{)LGj3P
z{Q`l4`i!y*)&#x>qaXU(dBQ(D5eunUrjMTa9_~!r%rwb4qhJzm)bXKy4Y(j)9tb4V
z$!;}s5(}*^7sUE3k;-SP>51rWpu+$y&$q(?@#nz$o^+6Rn~}My-6jRLM#e<yc;lV^
z&M@P97()4Bn#Sz9L8Nf%Zc1Tq$zi`hAEC~HAJ5(X#H(Jf3q`wwzwGpDwjz?xlAPMo
zR`K;EjTZ@ln_SnGEJU_s?>{||`IA!{W<SrFsNmzdL%kG392OcnB480yQhSeu+Z*}v
zn*H2{dv{<W>2|e*B;>CF$>V(0@z~gNE0K#FbHSV@wSoV(llWGG%W7|TrNW*L7)N_(
z<t2S7N=%^Prm`kQBBzBgh(}SKs&uvEge_ydt<_A|fP9b5dUeqnde%$HoefD7jrl}C
z33sfN_XtfFun2aURMz==jur&+jrosOE=1L}2Rvg2%d{_hv(>)H{Ae#YgV`9>Hav-i
zR=Mg!oqYe3xt#eqybW^m6hSG;&a@Uwb~o<2|6|P^zpU&RF{gS|u(2Rm!7UG5Jxf^~
zpHS+(dh+qChP0%BO)_6(cdh(3V|$23zs5cHt7az|P4e3>_|k~Y-*$$l6lz+y?=0h|
zTJFZ=)wh~EPi45G@SeTryyJp}GDNLK<4Vnw-kjRVhbaNSB~>nBvQR<!#z36M$Gm*z
z(sX|ve;qeO`_9;xtM$HjCQ{>NUuVA#ixDiLZTmNLgI<DI29q2_YhXjCedD?9!wu8H
zWu}VYosADSH!e+_JMyKGF}@$Z0<bA@xApcsu-LRk4%qEk?adt_{)}bEgIg>yzZoj#
zH}PX~TdgeujPiFLIKRPRmQ+lzSYkCn5$(`gV{&yFMXz2T6(jx<EcP+b_(SrSeC(&(
zXW><AT(NI2@|WqaZZBL$f6w@ado(6OCj!{g_6^P4eiz0%apw-6lFZPPA4*J2>As{%
z!lIhQceq%HP3MrhU5^AEV##OawjabvI^I7M03B(6acDlCT`&hF47T}iPn&;lHvgMv
z<yq`oTrjaBKi)wLA#dy)-Iib%JH3PIR}E{_Z?5pYo^N@Xx|Ql1GP_xdpr}ecJunk=
z^ZP=XqlZFt!#9%c?3VK`4t{R~VpMp#P14vTn0@r>->pnVtehSm|2e<NbOi`-`XLJc
z9a3mAl>JXo5fc>uJT%P=t<xD9yV7r|A=c5ep!@B*B(u$%5B&s*FwYc44lN?f6QAXK
zDIT&$PEwVLkpgsjY&v5<N<SZW?`h(%wBm&zkX>`^ulnKlRrlS?D%5VqEKD)<?L{Y<
z_1dY`?&H5wawhixTh&MKmmhI;4*sj|;n;TprStNOrX32Cqw4a4f6201=y9y9_}{}*
zl_qx5nU?n;mfnpQSm&w+c>96ZuyDIJ-Ki0W8b+@PAhlS4BCkivi|`2dc|2t088ay+
z%|foZlNey*s-l%sVsUaaS>NP}UM@dPun1g(GQLvrI}n7=s57lI;jeQyddlP)-`5m4
z%TArn6;;tRNZtMoG-tp6v2^|4U7B!bWU*>```$pld^D~8oqon&w3i&`m~6=9q^`@e
zBcD85W>54?uMcJO*%^xaxZPVMmOe^cuTctJ(+?aOgvv=pUOB$Ae6KS1OG^crwmXmA
zGCi3p7e5f2R*%<+bfat6;6P>D>^I9$E4TH(+4`zT`S}4@?kbzCdJ1RPa_(I--G@{Z
z?C5(Tijl|?3D0w<=EEaLd`Jc2n3Lgohxt!%6N*HEG8{xw6Z9rSID#r1Youow&}{5O
z1};O14%Z%$_K&XiCV~W#Gpj?9z}&T8<}2#iy~-+sj~aNf#9Z}py{`<ohitk{-5t1g
zT|-wR9PW~l=KZss4&%)$-9!DcVO88tpY7Al#KvC~S=V*VXY5mU84G~S0?@s8{@NeD
zf)Xj-U*F&jGkfun%OikL6R&)?ueMrl=+})7IBd|+_>WL`YL2y*G1z>Wo|*TfHi_|G
za+@aci%Q19xp|62Moil4(7I(%d@~5+`;j4kX8X|lUKr~k7w_@hJ<G?-ao%=_^VfqD
z<Und4j1!~!ZCPa8bGf+g=b9JIx$9B?n+FR`AdUoi^0?_uNh^c4h_&jZgF*4fsS7>~
zH@W(XE=}60RX-V;TS^?CssBqB1*$BFhH*TkE6q?gfMP1u_6FI1{J4ELMN%^VjB7Zo
z<2dt_WU>OvIepe#S=I>ptQ%oe$v;!$^P_Dn#)|P>#@NvgTyvdUPBfG<az**py<1sF
zVdmH-#RZ1YnM`9WLhS_Ao?UsmUUv0=*LzaXgOD1Kks=i^IB@T(;dYlxJSSD>T-IKF
zsnJ}2yJvAgszOJ#EY8fduee9MoVeebjpsku5Xzh*8@_Tn*Z0;?+AI8Sf+AHy%Kpe!
z(&8igQEo0RCbG0jPod}P>T4h=A8^3+6iihusxzAwQZXdCXqpSE62w1oBaLO~n{4mP
z9@KLgEFoxdM<)Hdq2`Uv1NM9K_+X{z&iub*x+kF^gPkn_?N6!N4raWbDfWs~TThgl
zpN|1KYmQ=Hm@f{-H|XEz+fgK}k%xD4PP;AxbP<cO<kvM^&EMa8utlZu2Z!{x63RoJ
z_iFvOpCs9q&@R#_F_0WlQ2H+)?V4`c?}X^WPn?E4{IiUrBLc7~+x_c5bBLC?Tfu`&
zn@XU4i5$qDl@raN>Lb<HGk4>I{2`ytNZgY%jxfowMy(D<rHK6T1T767@%zLD6zK=C
z{q`z}b0t``0;HRlHM&s=%p_OMAyJ9AuE3VYhwc7>saNvuG*62GEb+Hp7=n&4;1Jk&
zE}8Q$vmz2q%ntzx#K2hQ=AdIT^69*A=%bxky@QE^OZAA4WvmCq)^9cWoQ0+OzNJJ`
z^YS3nG>f+^`L}yDeN+`jkf=(T+W7=ix{^~SuZ$IUy6h+vW7U@%$TYWPHnGT+#Rm7!
ziYmHom(H$yfo#ynh6v)oG4}!lyC(98`KoitlXt5V{Sm}^cpiz(Q+TD6t8JANDN`q~
zn64yuHl;Oop3cpQvCduQIIP5Z9axy@(bZ5|z9g<I(oLLFaXF7Sa^ZDeimJ+2@vYHU
zS|GgpZ>!X9A9e=PF>QLJy`NKSm1XQJ(~nachn>nVpo94F#o7bAD@ghyZmDpyEH<rB
z{-&AOs=O+{y@N(Xv5y4f_5y#knGeluaHTyRzJ@YQnb1F}2e|Gq!J0M8CfsBi-pYMe
zo*GL)Fn)Z6OK=_yxsBIyGJp9r#N?XO4W!v~iVRG5U$7s@05X}$agy10%x%u8_GF}p
zfNm^88T|x)jpXx6xTL55XuDL4yamrsfJPwTIAM)=stsrnjx=gVO|x#`V)FvlO#_1V
z?LJ?69nq?3=s7e#;ibsNIW-U0TK`;1qtxEb@}`8nGHWbc9}=`X?}pe6=@#v>J6(_H
zevzUS8_=tOsyjLU6A?JP{}BJs95#faIgouF?S5L^#T+D;E;R<Ii`oRAoX5l+xcISB
zc?+rtbh^i|>$MJFhTy`xyx+U**^1t}!R_BV%kB{0k$@w!OCsa*0Fn+7Z_T^+V=>tz
zc1G-*%6nT%q6p_DSN><8j8fh^59Yn9u6|U;CUrUBD5)s`FM9XtY8j+0O1C`isf0B$
zr$IsJMad_d+i%<ck}-q^sjJu#mBdSXE<{LItMH&XSU==SK{GDN_;>}J;6VC~QQ3Wo
z%3S{}c6RB^_WGZ0j+sXaJoGt*IVh3t)gK$C(JRgkeY%Gy+Pz{7TZmbyJxWd_ogU3Y
z^LxgSVwmgBqA}2Vl}cCsdl~!x`z&(56UJJNiHuN*{85+is-3=x!|&CBh<Z$+uHjlf
z%WEJFUDjq<*5|~bKT8-rIQ&lb&z>wW6=bpcBnC@j0olf5U;*fo1s=UrIg=h=C`O(o
ze~}APy8v}H2iI36^=&+LznPoLFTB-G`E&<}`aNpwQj8EmMZ2LO>M+eO>kNIxf1Ih%
zsPRd7D=(nUA6I_<uNU&a9Fwme;za$U#12$xaTX?yH|EWqA+~0}<b|~<N=;Neo(YGl
zniJ5-5fDyPqO(Dpo;)D9^+ree;*PwUN$Ml`>VjZ<S1otx5CMDhSGO_@4tVnmv2}|_
zN$u=!!;kCEkqotsK6Q{M@P8P2|5w7<|B0fam{N+na7mbA)V!KV>t0sYL~rzJUe6;y
zR^_1RdWlf7L1l!!@Lw{OA0^Ej!k6ut{3!LMBaFn7lamv*V%9pAJaDMhhhS9{Y%@1)
z12<P{`CF@0Rdb7IJiLK`5I>NvIfEIR#WZ};Lo=b~i|QIlhdwqe5!P34N!xvF4JmxS
zK3ix>j`(`YC7_Q^{T^LxVZEJ}xboP{*3r&7;ME7}H4kS%D-re@HkoymnC)|T3%baB
zxIP9k&RTE|MppDIyD?d5NyeKNQNA6L^ygL=nM6S)aKip?bI?swY3L|DRf*<W#F+`p
zl)8%QmAygEK5lh`@EV(N;tpY0uIC<8*;syLzPOd>!mR(S_sJd$Yu9&Xrn1R9>#nZ3
z^Z#IIae6q@`lbO3+53onG9}r*t9L;{FYt?~x0w<1H9U6gP4w|4uc37NC7m_>DMvkt
z+OGmyTaAWRGt0=yGZ|pFF;N}bsP}1+tKC<$KTl|lR!Rj5Cg8g6w8r0X1&KO+KJXrN
zFoO!ZKWpZiw2chli<=ERull?gSN@A%X2#23CbnhRTIASG1Wm;|eE#FL0<|KlMn|E4
zvR0wS{gA&AF<z_D_4Bo5E-kZzpu}3y&rlZG6(n)$^VuBx;FpQ9lBx-b_2fvtub!Fq
zYA-W46}miU!%b9!0Yk3@MPBz@A*vIe>E4B##Qai%Q10)UkcVT3?#$ABbVc1tz1XC8
zh~zirwu1TZqQJp?SwY$ov(k^o?M%vQrayk`xk9Z)61L|dQX@AGcrOIi()D}qmB((g
z7iq$mJS()P=t!s$MbH=(qdn*Nl>*Ra-hKhhOfH*5(R4r5A~(bw!yVba7gRM~y1)L~
zT>%6^Wi>BaS4}%+S6%t$UKR4NVAhImu2R&-6{Yq;Z=LA8Tr4qr<Y+;>-%ca7bTZ+`
zxtb7?yf}LV2)R(zz?!5adPVS5j9pJq86m3ba~szuqLJ!WK#{X{y*mb1=iwfw%3ZwV
zV4N?wzqe7Wv94v3MlvAieJ}cigLVP=#uGwqc{Q7e^0c8mQPdH<1)r=&%n7;wqmbpu
z5_-fuTkY6SU%KAYty{ahJ?NY65K}p52k$6&US4-zO**i*&DNzKKW4(5MK!wT1~dRZ
zaqmow$9;LPE4R9OC4D;1bi1Z29UgbVKCwGh*+RUlP{3Ps>T*newbw1vULCG7O?rv|
zflhpu8yC121-o?9gUz~rs#<^vU$U)VK*YCEH>>t<zx_3NN=f!o<k?UY8NMWO<kEPR
zFT4D{0Mc|2ge^CRJXY|qJ(_QhYb3|r9M&Sr3tT)$Z-|c$NKQKT*T2X#8I+YEd>|~!
zUC3gtFx9>Eyj83**vA0p&x9Y@E)*h##!yb$CeLLDg;(T;`LB}4g>ry~|B{uAKv9*T
zNME`f=c0YKvW^jXiq$X^9^c(e&nc8nzp~BJz|2CoOab#>vM=@U3(v=dRVj2@>`?>W
zr8>9f$h?5`Va;#C*f@E$gacr-s2ADZU#jl>?7u{tT$#BPG?i(<hlzTdGI1j8MZXFO
z4jVuRehftW=IX1g-o1PFeK@MizP7R6@>9Lkcf6Lk1m>yltru3^H<#hndhg7^dvHe6
zu7N2aStUv8fn_Q}HJtxpHulX4QYP$`=QT~xye};nfZkT<-|=D&!>YvYY=_ZoX^A9$
zNKjPo4v{p#ne=kAB|W{9K18@xTk)Z*XWoR00!DQQm8P0JclVU=Ju*~Xl}ObORMxFU
zoP*mUBe(z-PcS%r78;UN*)&s}4qi#v-Q)m(aKQ=S&X02>_5U+$Gb^$SG3D6&EZ_+b
zqc1F7ZT(9IszT9<dhAfj-Zxye>xd@nB4hB|p5s1ke_?${k5P4Qt4)dR^OVX5A2fkJ
z<{Yv~2mx0(qsnq)Mf9(~WJZIMm}2@vF_TR5DTyI5>0I2GC1zqX*pZ$RpddEKwu9&@
z_jZ&f_fO{}b%J?ZLeuBvlQ)>isFVf$lv8GaB(dgO=;kj#iw&It#CJ>MCndr&_qFev
zvbgx6b0QwU_)SS>vPKLtX#>slcP(CtyAc343?3BczLFjM`6~@^vUi^KFPYEVi|G@X
z(aGno*FJ?WaS1VN-+vZpS{R>x{5qczYH6-1A_$7GX-3qF=H$B=tGzfj^!F@0v`bBl
z8>`pONph|6=RHY}L4)2w(9coS0hpxeZf${x!yZ=X>_F9r)l4m_{>8&LN?v+QhwQLi
z_hPdr-uZfTgOxkniZ&kGcd~Zc!+-$c@gz@vvksr8YK!;rPo{>Ac3Gej=^7iKOEnNZ
zJHev9Y!A;%K)L4NWV?wMPl6pj{#QJa|JT=;HbI=Kx8QAR<W3~6Sb2=ZVa|)ewuB}B
zR~Lw{NR5A7ql%Vxgew<N*@@i=En@?M^|HnYbm-_2WGdzq5f2!PhM?pD3A`nP`3?n_
zirouB_v)^-9wg^w*+<O8q!wKEPj=#K#Zb%gid>^0)%brBb?S%yyTuXt5g5CPJ9EH4
zCXJ*gR4wQ6YL!LH%nEUobi80zbszmamw?yWOvhZDf@igtjzX>z^9b5G(4PJR<rQqV
z$lJAnoCn1_H!3u;u_FB<XS8+trsi6R*1nQ$+auB7LX4~mveVxKIH`ZF{ZIIjC^Wrf
zD%MY2CO9KjbK(YHq#DPYpr!EiBr#(-)(IDvg9qU~YQNe><FYeZ`wn1w1IrC|oJL9_
zS<MUyLgqgiPa#FK;R9u6A$7kd?djF07pm>YZSxmIzthAv2wi<a>&=s{{L)QAc%S!6
z$0ZIww!^2*^iB0-+n2>2D0i_=O+Dl*Uj<7>5giQ@Xn1z+X992C(cv+~{=G6I7C*g;
zTnSAj@30)Ev$s;uIW~p|jiYT(#QYB1l^R(t=sHqs{05s98GmH2J!GTcrsm_O3$;LN
ztp3bqkYP#X4-UqQ75iW6SEq5HeYd&pTGHhu`EZzaQLJ@^rdD-`8=`vO^8B9Vo#9)7
zU7nn83Wr8#e<6ROmOg<gM4RtHVfR996f1F+#}1tSas&RB_X9h_qvi@6$q)ANgw41R
z78bNP3s+bgHbU69s2HkH&`>?511ngtv*tgZMoe01G1V(*SECr9Br03?Rp>Yf8Y9pB
z$H3$(=F89Q-BeRjF7FBQ(KN~?)MjF{J*ewTz*mMRZ7K~utYmacXFD41?UB<chefsk
z|BS&aSbMQbj_2au)0yi3tGVlpYO3wBLGeWpL8VIx0*dsGs6YT;Ktw@$CnCKF(pyM;
zL1|J{q$o`(QX)i}6bX?UsgW9b?=|$0=G=E?)_gPH)b-7*nfZ~Go7`k2circC&N=(p
zXK%gJNwYx!#nN`=y8ufs>gw|V=jkjU6j=1KQqqU0>DPBLaeido4Uf_-V=<cgCT7n^
z%Tg)RCMDK2A*W81;Sj0j8TG9?tz~YmDgLhWooGFFq}VY-q2!H0o7ao_LXJt?@f6ez
zlB7|}a@W2nSp1doFHmP5P+=8-3zrag8%k_szZb0-JU}c~0%r!2wu`<DO9#}Sb5B>8
zP+(?bf5EtZ8|;6AzsMTz<M0cV_6<wEciuJzdkIB>Y4Rc(?*0N@rp(BaZu%x!pAd5T
zkT!m%ONbLAP0{_6+|bBf{ELc>6t*ofQH-iIW>5Dv>h-EJn62^7CqVV0ysFp{1ls2T
zo&I#%iD`Tg<7UWa`GL*OW{U3n{s-FwtXW3{9PMxKz?%3Zi*|C=5Ozv@$o^_&s-LLY
z(If4UlrEUTT%s_a(<Pdo>03A~$%u?IR@q@8zO<qm`;d2wcwqGd>62lpMO$(xV4Gy^
zf_s&iuwdKwg{HGUEARJ#^2(q|aJS~DhTUjf`NPCw<dr?|?~|e1mYnK~!)~bAT<ZP)
z8~x2CVfd)bke3}wPlx3vaXe;&#bwt`^`;=ZI<!QO28GeCI))dr#8?*|?@L)8np5{O
zJ$ex@CWgI8KxYCp8yxnRRZgGpiDZ0*n2X~xP^aimqdG=ApH^^Ejsy_`dqZTfZmfz`
zUapZ^w$aaW4S0|$vcS`!`&gow2qkk90Y-gts?w(<(u3z25YM41?*cx*Vx1#;zDO>h
z#$SWH0C-S_j704>3-ecpadGA84Bdhg)+G!OUmO1*zOSsR{?I}~a!i)Vd-G@snIdt<
zC&mv3Yu}R%X;TltjBQz-tz!{49d=U^T}Lk7Lm<YW%J0CN6Xd{2nDx-YfGTtjkeu4$
zbT$br_`W|OZ8-g!QR>3%ej1-Z>Vfk^!pe*48HP1?=Q;gzYRUtUZ|AEeF0p|6*aK(%
zEjGwp(FC3}o2W!SJlEDbs-`P{uYY5Gz^8<Q&2f}f*)&0>&ruU0OUr&I8?~QG{;ZWW
zLYn{4s+aD+@eI<@Ra!t?R%hXtD;%bXUQ}+|uKR<5lrr@e5j+$`3OKrV7T{vbs;p-)
zWf4xwSF+zNRaf1UPKBnd=N>kw>*t|O7Fe4V+?)Zpl+%H{KmK7!&5EI@cxxgl#{&BC
z(eB&b>fw7nG`bUIhxCzdj|Q{!{8fAaaK8ijuKYXyo8d<;!4xxgZ)M)~#XRv3z%Ot}
zRnBRnq@^u7r&r<H;Z}pZlZZemHuvr6lVNC`z35NDU1)R-zl<qeS&f9dlncER`)j5z
zSU4l9os}o#&O-vjOC^4`_Mjcl5_b#MC%$Zmb0y8#;%(Nany7)g+hFkn(dbN~l#ec=
zljhp&Yrv%F(b34&jw!8O#t6#;6w^xehyT@@e}WiH*svy6EbvO5o;!R#o`!^zuSBax
z=qm_tXmm>61u+BSXTLy|hMP04WLcO%)laD$iEINGvUy3+yY}{05X*qZZFUgy`<$PK
z2p-tbZoZ0fwZZ*Hv@zRVN$`5nI4ziT`-z}FkJXFft2stFaT1~75x<8n%8G8rQ@Z1;
z;vOSauin_!-dF}wy(b%c5Y4%sGKV$E`*@60V>*SOdJ4FnSL#_vq+5_RwIar@ss{c1
zFexNWd!1E>KJfHjxvniWOO*?C&#A6ou=Z7g%h)vNCT>cLtA2~}BADIC=~!CftvZ)u
zdmwH!SEqrV7;Z7-HJX*Ib}107p$s-S{NXWE;S*byos?X>cEk6X96Co-6vKO$>bdq_
z+*XLhi{-TjyAk(IshKd(g8XU{V6pkz%}C27NrpTm+W~jyLAi}3Aq_b<myNhTB&qSJ
zzOFW;<L(czEZ@R=a%FssUcfEkcfBy9`@|bMgMCHCSTJG5Kb+34wAhSJ2Q+I@09Z0n
z3y<Rk2M<QCSSb>ERlA-kG-eu1E~tVSSKdtS)vR#P!XmDyV5pDhtsY(PJCLo7+yW{H
z8E~ol9oi)18--;KwdpMotzIo<!?x434&K`eQ`$jH029t)c4c!;TYY+3zkWc%G}JjY
zQb<s;PY>{UE5LuDNKm6yLwBW&!@D^M)Tr6GL&=WKv$*aHXK>yhSi%lb7Ggj4nU7Fk
zx>xTNU>Y)Okz+E-1FD<dH=3Q<spZPm4&WP}B&nIMmBf*qKj7RUG4swRUkNP9#U7Pn
ztr<d+uXl5^Y3l(rA&ITz7r6vO$e9rS$GIKR2zKa|smz2{FP4xt@leuj*_&(jOZlrK
zaDl<czyOo_>@{mwq3Ls}ozJn~Oal}lakR}(IL9AfW4qi?2wsXoLv&~y>-D%U1q?SK
zBYd!HwITVWa2q=;6Gu5QOm5;vG%EzE>2}u<Yd^OZ{r=Q?(6DXLG3l}nFZ&@M1OF}j
z0GJu!f;s~elIxtEV(OO>UJF&(-_~4tExd?=&V4UmkEhPWz*K*zJRWGCODS%eM6ZQ?
zqJhbQ!h3uhiT(++-8Q@j>n3(8xmOBUL^u8n3tq6||7e@-Z#id?PO&EPHM~nVA=o-d
z8(4dqaa$qA4Rwxo4mZ}>EzH??EY6sW8zUB}29j{7u3W*IxrH6rl$^gfChX?qlxk&X
zz;m~m;b;7l@Z&as?iti{BXPSLo!0OHjNsOz=JNjnjc**H&W%*-V^}@ptvw&=`S@Nu
zc^$LP>botm<45UAbKu451wuzwDJbPm<dRACVbr)CWLCb6-N-&}5m?v%wd@Bh6wuiR
z0RU6D2b2e90x0e>ofR1=inA~tEPSPV)Dl-C5|`Ws_9#siIsMF!B{(8RJ7`w!uyVYs
zh6^w~aPeD^4X8NC7DpO(_k!b~7u0z-W|$gvNV;uLu90<yjDFA29@6G{_2zWRYS&#s
zSrI;gkk9w1{?sajGg)RleC}#rF8f1WK%wD5M=~IjE0**SGF%n7Hwi27qib~(m)J|k
z&aehzUQA!F?}uM;PY*LF@$kKhZh7Jr<gCVH=wht7&WIYSRNdx5t9g}5;ny26!uy|R
zuIE^pJa}}3$ls^*cmSM%!H)kdd1oRy(imgNH^+cJ)6P&NalVEBjs~InlzDVWYpbvS
z6(Ny8Vdf8ve930Ek>_DJXfcAI4~^l1(^1o;$^(mG?_hceY2@Y9))UDJERTAR(6m~X
zz~MgSL1Ks`b3K2dOd#x-!s;JGc*&8FEZ>lXSt91@=w5D%uefez^b7=eHy8uP(K_&q
zzho<bT=tzd(xyER;c$yOAyil(CED|p=h=zHx&es4lLOxP@+e~$ks(#Bf9<gaReZ%=
z4Rl-ckh>cW!nY7#w^z)T3$!g;P?zf6%MY&>a5RjOdlBNU6Ct53l?#02^XWFn!OO*D
zlcY*~A-*9uDEa!Z(#@Led*Nu!?%t=%x{vibMW>O&A|oS5G>HeCVm{?7zI?t~@^;A@
z5}S}Qo1;rs2gJbKMDyKygAM~fC_lK0`3{dA@p%cbUJCy!qC&f^9&Icxe^X3vs;|oT
z2|6IDDkT_wVjSU=ysf&@ZqN4vIz$*&t1<oj@m{Y;^@z2WnDxX|z{}B-);WB0GCRz&
z$r47F2lW7b9eL*{K@Z*O8U@6cbA}I~EjZ#e(?RZSxSZ$9OAFV!@AKx?o>xdXb(5Z6
zy%%b;*5KTf=y3j13cdNr$m*r*c@Il2Cq1JZC_;@9gTGIvNGz3Pbk2)Q(>5Ul_HCM<
z`w>K;k7C{Hf3B52l{+*XAf$bNUwI;e_kTrnC2+Ms&I(~$sh0-29)Gi*jgo3jSozK3
zYQ<JoxCK<3gUrc#CZ>B$oZH&>IcNVrXg&2*sFaS5Cw&$2_7AUDo;X2k0T_RN)_*n5
z|1F=B;O)pcJ=hpnbaHD5dMj$_exu&SBlR3?{8(Ojwx02uEdKWjNF9#ZwIPd_QwqyF
zV3^zxj4lX1nG=!8r|TEB=;KHo+(a<rZ$d5Y#@dT*RB!Yb{OnS0xD{<K@ci-~G<K$H
zLd$;tyGDZ_&aJoW7YMJc29L@0A85i#<L0b~b}#E=bT=mU4j^iMD(0UA4xhpg5b31{
z+ZeD(ms`=KO{e?q_Ml|;itNuxg;s_i84Bf_-W;pnM7p&A+!M{e&e!aH$$IeaCN64`
zssJfCNy=PGi`~JYq%CSonI*i0CL+rFiV%^+9j3-nc6qS1*R)7N&#OP%`XP#eDSqt$
z0>-%g0*wiGK&k|4mqOONaRbW{UmMDbW!!yN&pFC9(Vg%zL?tA7_DCGFwW;%e+4X+u
z|B0B_g*O`9xqwxx>Cw9T394hxE!4zM1?t#TR?p>)+{$&;ioN|+3$~?{>2ck4$Y>DG
zR_=!p>6%gd+g;AWgkxvQ6d2{x{gdc52#E9_$~GN@2ij-<0zpjX#w85f;mjkdj7YVZ
z#A>weU9=(k%+j}J!Q=5+zVH#}h__CkP+S49&w2BkBOd|!>A%-OMpsLiX-)@UgbTVg
zuw<m@DWk$U@?z8dLEG$K1iq)_e0RZkDn<xRw34*TqF>SnjReUSHeUG9(ti26*+aEI
z+s#jaRv!9y1V#1h)S~5~nzMF;tqu2&pEk*tC{${FAaAA_L#B>UgO@4zgkAeVHl}lK
zNXiU_J{dD-3#n@Ds+u+=KnZ_Z%)s0a|3#i{uSveINGi1=>dtNF0<Imb90ry8=d10#
z_S!N8!SkR>F@_J8nJTB6qJc@3ZHP^g82|XS64&c5l#S5_x52Z*UfVYl_Gdr+LBN}!
zhZN5llRo9e@%JwF*jt!#pRJ!`D;clj5dm}-ntQ7x)xoKLB||S8CZ>Wsj#oFw2W@V9
zMVM>8nXh;-^rR(aCa|~E-KMm`&`$bWjv85j)_lvgng7;|6)~Cba(evIgC2z@UO*}2
z)Y0QxoI5xV(ye%jg$m1ty3YM!#3GutU$*X+rHY@iJqme9Lj##@xnuF)GBN+w@2tT5
z30VEzMf!E4P>V+n(vEZZ(O)2-FQJuVz6p?AC?)cY>&McDomjc#PMNYE#h6h-j+G9a
zg^L5lXMy%Hwew+h%z*jf)U;ae+9;vQ0Nm;loM~INjWNUT@2lU>L*oI^AEb!8ke=Jx
z=3e`xkKKh!eS-ptS@irtv6<z!Xtp?`G~sf$v!y3j(eS`$^=Slh^Ql!in7@F(aBK${
zLzI2FAG=nb-UUA3Wg8e7_V=+?82fL<T5-?!XJX<GIwc$Ro!yJadgMg7WIy+-9Ge4_
z8B_w4@W3Y;%ytlmZzXAkn3y@MKUjYE_U^>j4cLiS{W{1bDM4bDNuua{0_u6X5>FjQ
zlxo}3#F=!}Z#dGk7o3pGk8s^#qxI1gbG3=D9YmNv!*M~+YHId^-tfpp(*rO}z#|rI
zUo&#(f(-o{_yR@Ew}80vplUc^qHy9Wx<~7GADF;%*T|dvnj8e^Rt0cvy*@EvPq6VA
zS2yn+#aQ{{@tZC=;Xw0X&MUcYJ?<613dy=YOIM%Oce)${9V-28<yK8-@Cn@Bwbcc7
z!-P58_V#kJH<E%&T+NFo?Jxbg#sOpKFOO;p@9dPxJUHGf!0?PEuEe9^Z62?un~t>(
z<OArGJ|Boq<QXGMlgLWufoV|lw#@hEj$V}%q`UPMhF^~yl|KY!RqbPclttnB@T**#
zj^uM|kV<sIcv6`!XU%-~;@)!8#nhw`g~-Rx1fJeq>O1%7DFess@}y==x5HH)G1E1U
z8NiMu+{#-I3Km(+a+vAv#w-RvDpodYi)h`s=L82IUzps$&l)1_OZOG)eu2yj_LqvL
zj5<QSPIaJN0PS^{c34K~8>HE^p*cK7Cqb5Ax`w>^1!$Fv`-j}xcx<e<*tS6VqMl+U
zpSVz(3yID?Wj1+iV4YGxQ+hS770S8$%Gu{Q`3l11pxoA6If+Ge*x3iSefx`*x3FBg
z)LAGl@6a{Jt>OGi>`vOmov&%jL94|I$$nvU+n#t;I<o4-TA_5KS7)&7qRkcR0HP5Q
zOAa96BY=Jt%MkN6n^n&K3GErTlPpq&YRVz<a8HFoD;FZ<Z)0~6OM7G-ri*AEeQ8)u
zfI^N)r6C}7UPh7^#WhM}qnJu%*~91z0ADgH4*4g~>|Zqk{BOJ`md?35^}XM&<4wl`
zbJEAun2!u)>VWW_2T1`3b0n*-y&pMgR9<;dr@DDM0H|%~)-iuzYeHS)X*{0lERy5s
z%nSxf?aP%|nsCuVJjp!r^eNs~p5x6cbqP1ViS~lkXum-F_4}bQ%#U_!-*SmGekEhN
zu$=)GKc~frq92IaIfhl4645I?UvJAt;J0(Tl>l!*YW88GVD<ZR?IkTYTWH%xz|V>@
z5Fjril(|$}<`sQXPk>2+?Qjfn<$eAwA#`CHvV>U<rE;j}7f!BmUE?>O4tLizO=Y^V
z=)rV2Xl$X2>yno4{nK4V?ypM}a1MrG0v&CD#E@Q6w1^f?Lj5uYr!A{rsO$5L&ffpP
zEa`1<cC}nb<tf*~d+N7ceAH*!l@JB9pN(3P`uz3-`g(tWDCLEEJlhJd!Y@ed<^tv*
zQx>zeZ4oxD0tzp?h7{i=+}E8-z03SYhAdfV87>ft8mu!>PLqGxIvo5~c^d#~Py5tS
z$Z>wdFG`Vv_M5eP2m+#K1-SX7h2?*m&uB7@ae*v50q_3>;&DppxKKTPnK0xY41f2%
zK1k%wX|Rs?BI&c2JmYVTw_BU|n|6cHu7yOG5znbR58pkit&Iy-?ZeV(9ACrOA{=+d
z`X-K?iBgUqS*7i)7pYZdbw;$WbI-b>QuaPDMd7_SPjxi;$N57yHx-_PcSQ5{?4YZS
zxNyO4I6Btk{jo4|KZ0wWlB_>)fV5*Zh{S1sjqDp3rhnK>|5#;SLzcr#g9)@L9(Eno
znc4K2D)A^U{?<=A?SLj;q8=(F-_y#nQ5LW@t)mV^=!8O^DXr#kQZqkGGIh=)-^mp4
zW#sqxQW?qE1JrGfOoyip27o#}igXA#3Tr1~ty;44r&ck&l=2W-YW~zl`s4Eg)1L1#
ziH=&>b>E-i`1}yvBkymGVloNdoCE5g&fX1Wt?+5UM6VyBh3^1p!ta-*syQoI=Mqn?
zOp<c_&5_F$Fl}qpH*LRiGJs!?&Z_9LY7MkC1y4BBd`LgL3AiXl+*zAYIJi2VIYD7T
z9zyO3RVvZu*x%18)$8)!By|PM>4>ur=mh{7rfJ$gude<tk1d7*kamwJTah-+{ugPC
z4^=3MzHmWmPJonU)tbwK!RvbJgpVP43G<O-+Q^PwF7Qj=!6*FT=wkaLS>mwA!4TRU
zpqxZ+%v2>+6`dNF92t*fc?K50L%ab7<dsRds6!|#8?~lrYND8SBVm)m`;oG-E)Kx0
z=u$qt*yvQDLLy|3!kX1afL|&HEZT@DEVS)JI-cZeLG=CTS&(gbuPVs<bIB4A3jdHk
z?|%!mIrnkeX>DM7toBhkzwT@+q%5quE`=46Bn5}yCqvAL5FSHo<An6dUm$)6TkCa{
z@esm=w;Fe3fb^01@cvrz;Thy_0Bpz*=he9<^{D=IqX~-EqaCKW?{(F#R3;}kJ6Wh)
zJHv=rj7h_!_DRQ2H4>fY&oNW;bulXI#=?TYj)lKf@MWo`jBDEi%aVaJY%=>Cc&3AO
zn7rZgQ`1)S!rf++X}@*6dbRl{I(rD&3XWChr<OEIcbAA*`@~5-e0knlWnUzMQm=u1
z$8(@su4n7CGJ3u23o$;!I=)QITE8UKL$*zGuo%S&m!|{++E#|-o5bD&!?-jpMoZ4x
z`q=cZXbm_;{=y9w9ZkAu9Sx;6{1yXN<_{9X79-(kr#1INS;E)a(*Y(rTD`8<$(3zW
z8F0ab$g?8g-sEJ!WMC#1^IPDFg=YT_%qp)j^NCM2GAKuvo|1Q$O<re8q1iU;HziV#
zep_|b*}cb?6SemY2PsWEGPo+Q$Z{sz1Gcy7M`%P3!9!v|(QtHUzBbln?*JELFE527
zO<i--71;h@;rjuAb*0oHA+J-&3UlREHNQadR%P=K6_SGQN{k`j&lV#%8@WEep&H63
zStJ7evCo&*0NR2G46v|EA>RTN>}9D%hnbl^(Ia2io;Wd#`|R-N)zpgSr7^Wt(AG=T
zaD4bCq5|zKba$g&Ui7Fudv1$Ge)cAMsM#6%P}m$d2uX#%g`KBHan0toZb4deF5L3Y
zc5sUFKZR53d47JcOY2!*n^P8v?imOymg74DeOBb96w>8W)nNaX!dlW9!li&6Lc>%S
z&P-YV0wLyMXfh|QIm}`ceR`dCEoZ`JH=^t94R@XkqdGdwPk&nx&HAAs^7K@~Hi-qD
zexI7L`M7yR>2?XOK<@LM!zsIKDFnmTFuks|ji<D#`ji7va3S0z!`E5TOxL_1YmriR
zGX8_AOV`X@syM9DAsfHFb7H@<3_EsrR{p3G-@@bix<SOvDtT6-xL=)tm~2DW)PD3k
zq}^{GLJ3ZzO04oqsh__EmNAUv76!LoJ_CMLf^05ptGQZu{ptql1gsl9+a))G)}bl_
zRp8G(94oD7q~C#JSgbDxgvp?d3dBzS8^jemTuc-HY7?jUxOA=$A9BE)cM(uf%oKrh
z)M$r{CScj(X(ChKyqXpxIN7c@&(;*`iOvKce*yNTV-F(#d0P4Z_?Sf(n@he)OI@B8
zA;$nBBSuieRDf&SxWDkaeI2}ELtT%QIgE^#*vKd3(wQvWl3}4XAJA%Mh!7fNo|L?z
u%`xj@qp%^L9MAB{5?Boi5cksJCFXNhR{O_*%;B!XUqcB0L$4Wrjr|?I79h<4

diff --git a/scripts/clean_script b/scripts/clean_script
deleted file mode 100755
index ebf3bfa96..000000000
--- a/scripts/clean_script
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/bin/sh
-if ( test "$1" = "all" ) then
-   (cd pysrc >/dev/null ; ./clean_script)
-   (cd exsrc >/dev/null; ./clean_script)
-   (cd logs >/dev/null ; /bin/rm *.LOG >/dev/null 2>&1)
-fi
-
-(cd libcdms;./clean_script >cdms.LOG 2>&1; /bin/rm -f cdms.LOG rebuild.py rebuild.LOG >/dev/null 2>&1)
-(cd esg; /bin/rm -fr build *.LOG rebuild.py *.log >/dev/null 2>&1)
-(cd Packages; /bin/rm -fr vcs/cdatwrap */build */rebuild.py */*.LOG */Test/Good */*.log >/dev/null 2>&1)
-(cd Packages/visus/src/pyvisus ; /bin/rm -rf build >/dev/null 2>&1)
-(cd contrib;/bin/rm -fr */build */*.o */*.a */*.pyc */Src/*.o */Src/*.a */rebuild.py */*.LOG */Test/Good */*.log >/dev/null 2>&1)
-/bin/rm -fr build *.pyc installation/build installation/cdat_info.* *~ rebuild.py  >/dev/null 2>&1
-find . -name 'config.cache' -print -exec rm {} \; 
diff --git a/scripts/get_git_version.sh b/scripts/get_git_version.sh
deleted file mode 100755
index 7d27fa7fe..000000000
--- a/scripts/get_git_version.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env sh
-
-if [ "X"${CC} = "X" ] ; then
-    gcc show_git.c -o a.out ; ./a.out ; rm a.out
-else
-   ${CC} show_git.c -o a.out ; ./a.out ; rm a.out
-fi
diff --git a/scripts/git_hooks/commit-msg b/scripts/git_hooks/commit-msg
deleted file mode 100755
index 672bfaae2..000000000
--- a/scripts/git_hooks/commit-msg
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-
-# placeholder for custom commit-msg hooks
diff --git a/scripts/git_hooks/pre-commit b/scripts/git_hooks/pre-commit
deleted file mode 100755
index e7b50ac4a..000000000
--- a/scripts/git_hooks/pre-commit
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env bash
-
-# Reject commits directly to 'master' to encourage use of topic branches.
-if test -z "$HOOKS_ALLOW_COMMIT_MASTER"; then
-  if git symbolic-ref HEAD | egrep -q '^refs/heads/master$'; then
-    echo 'Please do not commit directly to "master".  Create a topic instead:
-
- git checkout -b my-topic
- git commit
-'
-    exit 1
-  fi
-fi
diff --git a/scripts/git_hooks/pre-push b/scripts/git_hooks/pre-push
deleted file mode 100755
index 424f890f9..000000000
--- a/scripts/git_hooks/pre-push
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-
-# Reject pushes directly to 'master' to encourage use of topic branches.
-if test -z "$HOOKS_ALLOW_PUSH_MASTER"; then
-  while IFS=' ' read local_ref local_sha1 remote_ref remote_sha1; do
-    if test "x$remote_ref" = "xrefs/heads/master"; then
-      echo 'Please do not push directly to "master".  Push to a topic instead:
-
-  git push '"$1"' '"$local_ref"':my-topic
-'
-      exit 1
-    fi
-  done
-fi
diff --git a/scripts/git_hooks/prepare-commit-msg b/scripts/git_hooks/prepare-commit-msg
deleted file mode 100755
index 1571a7d20..000000000
--- a/scripts/git_hooks/prepare-commit-msg
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-
-# placeholder for custom prepare-commit-msg hooks
diff --git a/scripts/git_setup/.gitattributes b/scripts/git_setup/.gitattributes
deleted file mode 100644
index 3323f94b9..000000000
--- a/scripts/git_setup/.gitattributes
+++ /dev/null
@@ -1,9 +0,0 @@
-.git*            export-ignore
-
-# Exclude from source archives files specific to Git work tree.
-*                export-ignore
-
-config*          eol=lf         whitespace=indent-with-non-tab
-git-*            eol=lf         whitespace=indent-with-non-tab
-tips             eol=lf         whitespace=indent-with-non-tab
-setup-*          eol=lf         whitespace=indent-with-non-tab
diff --git a/scripts/git_setup/LICENSE b/scripts/git_setup/LICENSE
deleted file mode 100644
index d64569567..000000000
--- a/scripts/git_setup/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/scripts/git_setup/NOTICE b/scripts/git_setup/NOTICE
deleted file mode 100644
index 0d32c02eb..000000000
--- a/scripts/git_setup/NOTICE
+++ /dev/null
@@ -1,5 +0,0 @@
-Kitware Local Git Setup Scripts
-Copyright 2010-2012 Kitware, Inc.
-
-This product includes software developed at Kitware, Inc.
-(http://www.kitware.com/).
diff --git a/scripts/git_setup/README b/scripts/git_setup/README
deleted file mode 100644
index cf468fb68..000000000
--- a/scripts/git_setup/README
+++ /dev/null
@@ -1,80 +0,0 @@
-Kitware Local Git Setup Scripts
-
-
-Introduction
-------------
-
-This is a collection of local Git development setup scripts meant for
-inclusion in project source trees to aid their development workflow.
-Project-specific information needed by the scripts may be configured
-in a "config" file added next to them in the project.
-
-
-Import
-------
-
-A project may import these scripts into their source tree by
-initializing a subtree merge.  Bring up a Git prompt and set the
-current working directory inside a clone of the target project.
-Fetch the "setup" branch from the GitSetup repository:
-
- $ git fetch ../GitSetup setup:setup
-
-Prepare to merge the branch but place the content in a subdirectory.
-Any prefix (with trailing '/') may be chosen so long as it is used
-consistently within a project through the rest of these instructions:
-
- $ git merge -s ours --no-commit setup
- $ git read-tree -u --prefix=Utilities/GitSetup/ setup
-
-Commit the merge with an informative message:
-
- $ git commit
- ------------------------------------------------------------------------
- Merge branch 'setup'
-
- Add Utilities/GitSetup/ directory using subtree merge from
- the general GitSetup repository "setup" branch.
- ------------------------------------------------------------------------
-
-
-Configuration
--------------
-
-Read the "Project configuration instructions" comment in each script.
-Add a "config" file next to the scripts with desired configuration
-(optionally copy and modify "config.sample").  For example, to
-configure the "setup-hooks" script:
-
- $ git config -f Utilities/GitSetup/config hooks.url "$url"
-
-where "$url" is the project repository publishing the "hooks" branch.
-When finished, add and commit the configuration file:
-
- $ git add Utilities/GitSetup/config
- $ git commit
-
-
-Update
-------
-
-A project may update these scripts from the GitSetup repository.
-Bring up a Git prompt and set the current working directory inside a
-clone of the target project.  Fetch the "setup" branch from the
-GitSetup repository:
-
- $ git fetch ../GitSetup setup:setup
-
-Merge the "setup" branch into the subtree:
-
- $ git merge -X subtree=Utilities/GitSetup setup
-
-where "Utilities/GitSetup" is the same prefix used during the import
-setup, but without a trailing '/'.
-
-
-License
--------
-
-Distributed under the Apache License 2.0.
-See LICENSE and NOTICE for details.
diff --git a/scripts/git_setup/config b/scripts/git_setup/config
deleted file mode 100644
index 6fd06e8cc..000000000
--- a/scripts/git_setup/config
+++ /dev/null
@@ -1,2 +0,0 @@
-[hooks]
-	url = https://github.com/UV-CDAT/uvcdat
diff --git a/scripts/git_setup/config.sample b/scripts/git_setup/config.sample
deleted file mode 100644
index bba2382c3..000000000
--- a/scripts/git_setup/config.sample
+++ /dev/null
@@ -1,22 +0,0 @@
-# Kitware Local Git Setup Scripts - Sample Project Configuration
-#
-# Copy to "config" and edit as necessary.
-
-[hooks]
-	url = http://public.kitware.com/GitSetup.git
-	#branch = hooks
-
-[ssh]
-	host = public.kitware.com
-	key = id_git_public
-	request-url = https://www.kitware.com/Admin/SendPassword.cgi
-
-[stage]
-	#url = git://public.kitware.com/stage/Project.git
-	#pushurl = git@public.kitware.com:stage/Project.git
-
-[gerrit]
-	#project = Project
-	site = http://review.source.kitware.com
-	# pushurl placeholder "$username" is literal
-	pushurl = $username@review.source.kitware.com:Project
diff --git a/scripts/git_setup/git-gerrit-push b/scripts/git_setup/git-gerrit-push
deleted file mode 100755
index 2471490c2..000000000
--- a/scripts/git_setup/git-gerrit-push
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-USAGE="[<remote>] [--no-topic] [--dry-run] [--]"
-OPTIONS_SPEC=
-SUBDIRECTORY_OK=Yes
-. "$(git --exec-path)/git-sh-setup"
-
-#-----------------------------------------------------------------------------
-
-remote=''
-refspecs=''
-no_topic=''
-dry_run=''
-
-# Parse the command line options.
-while test $# != 0; do
-	case "$1" in
-		--no-topic) no_topic=1 ;;
-		--dry-run)  dry_run=--dry-run ;;
-		--) shift; break ;;
-		-*) usage ;;
-		*) test -z "$remote" || usage ; remote="$1" ;;
-	esac
-	shift
-done
-test $# = 0 || usage
-
-# Default remote.
-test -n "$remote" || remote="gerrit"
-
-if test -z "$no_topic"; then
-	# Identify and validate the topic branch name.
-	topic="$(git symbolic-ref HEAD | sed -e 's|^refs/heads/||')"
-	if test "$topic" = "master"; then
-		die 'Please name your topic:
-		git checkout -b descriptive-name'
-	fi
-	refspecs="HEAD:refs/for/master/$topic"
-fi
-
-# Exit early if we have nothing to push.
-if test -z "$refspecs"; then
-	echo "Nothing to push!"
-	exit 0
-fi
-
-# Fetch the current upstream master branch head.
-# This helps the computation of a minimal pack to push.
-echo "Fetching $remote master"
-fetch_out=$(git fetch "$remote" master 2>&1) || die "$fetch_out"
-
-# Push.  Save output and exit code.
-echo "Pushing to $remote"
-push_stdout=$(git push --porcelain $dry_run "$remote" $refspecs); push_exit=$?
-echo "$push_stdout"
-
-# Reproduce the push exit code.
-exit $push_exit
diff --git a/scripts/git_setup/setup-gerrit b/scripts/git_setup/setup-gerrit
deleted file mode 100755
index 6d46e3ccf..000000000
--- a/scripts/git_setup/setup-gerrit
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Run this script to set up the local Git repository to push to
-# a Gerrit Code Review instance for this project.
-
-# Project configuration instructions:
-#
-# - Run a Gerrit Code Review server
-#
-# - Populate adjacent "config" file with:
-#    gerrit.site = Top Gerrit URL (not project-specific)
-#    gerrit.project = Name of project in Gerrit
-#    gerrit.pushurl = Review site push URL with "$username" placeholder
-#    gerrit.remote = Gerrit remote name, if not "gerrit"
-#    gerrit.url = Gerrit project URL, if not "$site/p/$project"
-#                 optionally with "$username" placeholder
-
-die() {
-	echo 1>&2 "$@" ; exit 1
-}
-
-# Make sure we are inside the repository.
-cd "${BASH_SOURCE%/*}" &&
-
-# Load the project configuration.
-site=$(git config -f config --get gerrit.site) &&
-project=$(git config -f config --get gerrit.project) &&
-remote=$(git config -f config --get gerrit.remote ||
-	 echo "gerrit") &&
-fetchurl_=$(git config -f config --get gerrit.url ||
-	    echo "$site/p/$project") &&
-pushurl_=$(git config -f config --get gerrit.pushurl ||
-	   git config -f config --get gerrit.url) ||
-die 'This project is not configured to use Gerrit.'
-
-# Get current gerrit push URL.
-pushurl=$(git config --get remote."$remote".pushurl ||
-	  git config --get remote."$remote".url || echo '') &&
-
-# Tell user about current configuration.
-if test -n "$pushurl"; then
-	echo 'Remote "'"$remote"'" is currently configured to push to
-
-  '"$pushurl"'
-' &&
-	read -ep 'Reconfigure Gerrit? [y/N]: ' ans &&
-	if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then
-		setup=1
-	else
-		setup=''
-	fi
-else
-	echo 'Remote "'"$remote"'" is not yet configured.
-
-'"$project"' changes must be pushed to our Gerrit Code Review site:
-
-  '"$site/p/$project"'
-
-Register a Gerrit account and select a username (used below).
-You will need an OpenID:
-
-  http://openid.net/get-an-openid/
-' &&
-	read -ep 'Configure Gerrit? [Y/n]: ' ans &&
-	if [ "$ans" == "n" ] || [ "$ans" == "N" ]; then
-		exit 0
-	else
-		setup=1
-	fi
-fi &&
-
-# Perform setup if necessary.
-if test -n "$setup"; then
-	echo 'Sign-in to Gerrit to get/set your username at
-
-  '"$site"'/#/settings
-
-Add your SSH public keys at
-
-  '"$site"'/#/settings/ssh-keys
-' &&
-	read -ep "Gerrit username? [$USER]: " gu &&
-	if test -z "$gu"; then
-		gu="$USER"
-	fi &&
-	fetchurl="${fetchurl_/\$username/$gu}" &&
-	if test -z "$pushurl"; then
-		git remote add "$remote" "$fetchurl"
-	else
-		git config remote."$remote".url "$fetchurl"
-	fi &&
-	pushurl="${pushurl_/\$username/$gu}" &&
-	if test "$pushurl" != "$fetchurl"; then
-		git config remote."$remote".pushurl "$pushurl"
-	fi &&
-	echo 'Remote "'"$remote"'" is now configured to push to
-
-  '"$pushurl"'
-'
-fi &&
-
-# Optionally test Gerrit access.
-if test -n "$pushurl"; then
-	read -ep 'Test access to Gerrit (SSH)? [y/N]: ' ans &&
-	if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then
-		echo -n 'Testing Gerrit access by SSH...'
-		if git ls-remote --heads "$pushurl" >/dev/null; then
-			echo 'passed.'
-		else
-			echo 'failed.' &&
-			die 'Could not access Gerrit.  Add your SSH public keys at
-
-  '"$site"'/#/settings/ssh-keys
-'
-		fi
-	fi
-fi &&
-
-# Set up GerritId hook.
-hook=$(git config --get hooks.GerritId || echo '') &&
-if test -z "$hook"; then
-	echo '
-Enabling GerritId hook to add a "Change-Id" footer to commit
-messages for interaction with Gerrit.  Run
-
-  git config hooks.GerritId false
-
-to disable this feature (but you will be on your own).' &&
-	git config hooks.GerritId true
-else
-	echo 'GerritId hook already configured to "'"$hook"'".'
-fi
diff --git a/scripts/git_setup/setup-hooks b/scripts/git_setup/setup-hooks
deleted file mode 100755
index c07985ae5..000000000
--- a/scripts/git_setup/setup-hooks
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Run this script to set up local Git hooks for this project.
-
-# Project configuration instructions:
-#
-# - Publish a "hooks" branch in the project repository such that
-#   clones will have "refs/remotes/origin/hooks".
-#
-# - Populate adjacent "config" file with:
-#    hooks.url    = Repository URL publishing "hooks" branch
-#    hooks.branch = Repository branch instead of "hooks"
-
-egrep-q() {
-	egrep "$@" >/dev/null 2>/dev/null
-}
-
-die() {
-	echo 1>&2 "$@" ; exit 1
-}
-
-# Make sure we are inside the repository.
-cd "${BASH_SOURCE%/*}" &&
-
-# Select a hooks branch.
-if url=$(git config --get hooks.url); then
-	# Fetch hooks from locally configured repository.
-	branch=$(git config hooks.branch || echo hooks)
-elif git for-each-ref refs/remotes/origin/hooks 2>/dev/null |
-     egrep-q 'refs/remotes/origin/hooks$'; then
-	# Use hooks cloned from origin.
-	url=.. && branch=remotes/origin/hooks
-elif url=$(git config -f config --get hooks.url); then
-	# Fetch hooks from project-configured repository.
-	branch=$(git config -f config hooks.branch || echo hooks)
-else
-	die 'This project is not configured to install local hooks.'
-fi &&
-
-# Populate ".git/hooks".
-echo 'Setting up git hooks...' &&
-git_dir=$(git rev-parse --git-dir) &&
-cd "$git_dir/hooks" &&
-if ! test -e .git; then
-	git init -q || die 'Could not run git init for hooks.'
-fi &&
-git fetch -q "$url" "$branch" &&
-git reset -q --hard FETCH_HEAD || die 'Failed to install hooks'
diff --git a/scripts/git_setup/setup-ssh b/scripts/git_setup/setup-ssh
deleted file mode 100755
index 8920a5bd3..000000000
--- a/scripts/git_setup/setup-ssh
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Run this script to set up ssh push access to the repository host.
-
-# Project configuration instructions:
-#
-# - Populate adjacent "config" file with:
-#    ssh.host = Repository host name
-#    ssh.user = Username on host, if not "git"
-#    ssh.key = Local ssh key name
-#    ssh.request-url = Web page URL to request ssh access
-
-egrep-q() {
-	egrep "$@" >/dev/null 2>/dev/null
-}
-
-die() {
-	echo 1>&2 "$@" ; exit 1
-}
-
-# Make sure we are inside the repository.
-cd "${BASH_SOURCE%/*}" &&
-
-# Load the project configuration.
-host=$(git config -f config --get ssh.host) &&
-user=$(git config -f config --get ssh.user || echo git) &&
-key=$(git config -f config --get ssh.key) &&
-request_url=$(git config -f config --get ssh.request-url) ||
-die 'This project is not configured for ssh push access.'
-
-# Check for existing configuration.
-if test -r ~/.ssh/config &&
-   egrep-q 'Host[= ]'"${host//\./\\.}" ~/.ssh/config; then
-	echo 'Host "'"$host"'" is already in ~/.ssh/config' &&
-	setup= &&
-	question='Test'
-else
-	echo 'Host "'"$host"'" not found in ~/.ssh/config' &&
-	setup=1 &&
-	question='Setup and test'
-fi &&
-
-# Ask the user whether to make changes.
-echo '' &&
-read -ep "${question} push access by ssh to $user@$host? [y/N]: " access &&
-if test "$access" != "y" -a "$access" != "Y"; then
-	exit 0
-fi &&
-
-# Setup host configuration if necessary.
-if test -n "$setup"; then
-	if ! test -d ~/.ssh; then
-		mkdir -p ~/.ssh &&
-		chmod 700 ~/.ssh
-	fi &&
-	if ! test -f ~/.ssh/config; then
-		touch ~/.ssh/config &&
-		chmod 600 ~/.ssh/config
-	fi &&
-	ssh_config='Host='"$host"'
-  IdentityFile ~/.ssh/'"$key" &&
-	echo "Adding to ~/.ssh/config:
-
-$ssh_config
-" &&
-	echo "$ssh_config" >> ~/.ssh/config &&
-	if ! test -e ~/.ssh/"$key"; then
-		if test -f ~/.ssh/id_rsa; then
-			# Take care of the common case.
-			ln -s id_rsa ~/.ssh/"$key"
-			echo '
-Assuming ~/.ssh/id_rsa is the private key corresponding to the public key for
-
-  '"$user@$host"'
-
-If this is incorrect place private key at "~/.ssh/'"$key"'".'
-		else
-			echo '
-Place the private key corresponding to the public key registered for
-
-  '"$user@$host"'
-
-at "~/.ssh/'"$key"'".'
-		fi
-		read -e -n 1 -p 'Press any key to continue...'
-	fi
-fi || exit 1
-
-# Test access configuration.
-echo 'Testing ssh push access to "'"$user@$host"'"...' &&
-if ! ssh "$user@$host" info; then
-	die 'No ssh push access to "'"$user@$host"'".  You may need to request access at
-
-  '"$request_url"'
-'
-fi
diff --git a/scripts/git_setup/setup-stage b/scripts/git_setup/setup-stage
deleted file mode 100755
index ce6ec4574..000000000
--- a/scripts/git_setup/setup-stage
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Run this script to set up the topic stage for pushing changes.
-
-# Project configuration instructions:
-#
-# - Run a Topic Stage repository next to the main project repository.
-#
-# - Populate adjacent "config" file with:
-#    stage.url = Topic Stage repository URL
-#    stage.pushurl = Topic Stage push URL if not "$url"
-
-egrep-q() {
-	egrep "$@" >/dev/null 2>/dev/null
-}
-
-die() {
-	echo 1>&2 "$@" ; exit 1
-}
-
-# Make sure we are inside the repository.
-cd "${BASH_SOURCE%/*}" &&
-
-# Load the project configuration.
-fetchurl_=$(git config -f config --get stage.url) &&
-pushurl_=$(git config -f config --get stage.pushurl || echo "$fetchurl_") &&
-remote=$(git config -f config --get stage.remote || echo 'stage') ||
-die 'This project is not configured to use a topic stage.'
-
-# Get current stage push URL.
-pushurl=$(git config --get remote."$remote".pushurl ||
-	  git config --get remote."$remote".url || echo '') &&
-
-# Tell user about current configuration.
-if test -n "$pushurl"; then
-	echo 'Remote "'"$remote"'" is currently configured to push to
-
-  '"$pushurl"'
-' &&
-	read -ep 'Reconfigure Topic Stage? [y/N]: ' ans &&
-	if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then
-		setup=1
-	else
-		setup=''
-	fi
-else
-	setup=1
-fi
-
-# Perform setup if necessary.
-if test -n "$setup"; then
-	echo 'Setting up the topic stage...' &&
-	fetchurl="${fetchurl_}" &&
-	if test -z "$pushurl"; then
-		git remote add "$remote" "$fetchurl"
-	else
-		git config remote."$remote".url "$fetchurl"
-	fi &&
-	pushurl="${pushurl_}" &&
-	if test "$pushurl" != "$fetchurl"; then
-		git config remote."$remote".pushurl "$pushurl"
-	fi &&
-	echo 'Remote "'"$remote"'" is now configured to push to
-
-  '"$pushurl"'
-'
-fi || die 'Could not configure the topic stage remote.'
diff --git a/scripts/git_setup/setup-user b/scripts/git_setup/setup-user
deleted file mode 100755
index 1af439c45..000000000
--- a/scripts/git_setup/setup-user
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# Run this script to configure Git user info in this repository.
-
-# Project configuration instructions: NONE
-
-for (( ; ; )); do
-	user_name=$(git config user.name || echo '') &&
-	user_email=$(git config user.email || echo '') &&
-	if test -n "$user_name" -a -n "$user_email"; then
-		echo 'Your commits will record as Author:
-
-  '"$user_name <$user_email>"'
-' &&
-		read -ep 'Is the author name and email address above correct? [Y/n] ' correct &&
-		if test "$correct" != "n" -a "$correct" != "N"; then
-			break
-		fi
-	fi &&
-	read -ep 'Enter your full name e.g. "John Doe": ' name &&
-	read -ep 'Enter your email address e.g. "john@gmail.com": ' email &&
-	git config user.name "$name" &&
-	git config user.email "$email"
-done
diff --git a/scripts/git_setup/setup_aliases.sh b/scripts/git_setup/setup_aliases.sh
deleted file mode 100755
index 977170816..000000000
--- a/scripts/git_setup/setup_aliases.sh
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-echo "Setting up useful Git aliases..." &&
-
-# General aliases that could be global
-git config alias.prepush 'log --graph --stat origin/master..' &&
-
-true
diff --git a/scripts/git_setup/tips b/scripts/git_setup/tips
deleted file mode 100755
index 784e1ed89..000000000
--- a/scripts/git_setup/tips
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env bash
-#=============================================================================
-# Copyright 2010-2012 Kitware, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#=============================================================================
-
-# This script makes optional suggestions for working with Git.
-
-# Project configuration instructions: NONE
-
-egrep-q() {
-	egrep "$@" >/dev/null 2>/dev/null
-}
-
-# Suggest color configuration.
-if test -z "$(git config --get color.ui)"; then
-	echo '
-One may enable color output from Git commands with
-
-  git config --global color.ui auto
-'
-fi
-
-# Suggest bash completion.
-if ! bash -i -c 'echo $PS1' | egrep-q '__git_ps1'; then
-	echo '
-A dynamic, informative Git shell prompt can be obtained by sourcing
-the git bash-completion script in your "~/.bashrc".  Set the PS1
-environmental variable as suggested in the comments at the top of the
-bash-completion script.  You may need to install the bash-completion
-package from your distribution to obtain it.
-'
-fi
-
-# Suggest merge tool.
-if test -z "$(git config --get merge.tool)"; then
-	echo '
-One may configure Git to load a merge tool with
-
-  git config merge.tool <toolname>
-
-See "git help mergetool" for more information.
-'
-fi
diff --git a/scripts/last_update_time.py b/scripts/last_update_time.py
deleted file mode 100644
index a0bd0aed4..000000000
--- a/scripts/last_update_time.py
+++ /dev/null
@@ -1,12 +0,0 @@
-import os
-
-ln=os.popen('p4 changes -m 1 //depot/main/...').readlines()
-
-
-for l in ln:
-    sp=l.split()
-    date='_'.join(sp[3].split('/'))
-    date=sp[3]
-    print 'Last change on:',date, 'for more info run: p4 changes -m 1 //depot/main/...'
-    
-    
diff --git a/scripts/nightly.sh b/scripts/nightly.sh
deleted file mode 100755
index 29e23fa35..000000000
--- a/scripts/nightly.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env bash
-
-
-## script to build automatically (UV-)CDAT
-
-INSTALL_PATH=$1
-GIT_BRANCH=$2
-GIT_PATH=$3
-QMAKE_EXE=$4
-XTRA_ARGS=$5
-
-here=`pwd`
-
-: ${INSTALL_PATH:="/lgm/uvcdat/nightly"}
-: ${GIT_BRANCH:="next"}
-: ${GIT_PATH:="/git/uv-cdat"}
-: ${QMAKE_EXE:="/usr/bin/qmake"}
-#: ${XTRA_ARGS:="-DCDAT_USE_LIBXML2=ON -DCDAT_USE_SYSTEM_PNG=ON"}
-
-echo "XTRA_ARGS:"${XTRA_ARGS}
-
-cd ${GIT_PATH} ; \
-git checkout ${GIT_BRANCH} ; \
-git pull ; \
-/bin/rm -rf ${INSTALL_PATH} ; \
-cd ${here} ; \
-rm -rf build_nightly ;\ 
-mkdir build_nightly ;\
-cd build_nightly ;\
-cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} -DQT_QMAKE_EXECUTABLE=${QMAKE_EXE} ${XTRA_ARGS} ${GIT_PATH} ; \
-cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} -DQT_QMAKE_EXECUTABLE=${QMAKE_EXE} ${XTRA_ARGS} ${GIT_PATH} ; \
-pwd ; \
-make -j16 ; \
-
-
diff --git a/scripts/setup_for_development.sh b/scripts/setup_for_development.sh
deleted file mode 100755
index 190f39a81..000000000
--- a/scripts/setup_for_development.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-cd "${BASH_SOURCE%/*}/.." &&
-scripts/git_setup/setup-user && echo &&
-scripts/git_setup/setup-hooks && echo &&
-scripts/git_setup/setup_aliases.sh && echo &&
-scripts/git_setup/tips
-
-# Rebase master by default
-git config branch.master.rebase true
-
-# Configure remote push URL.
-if url="$(git config --get remote.origin.url)" &&
-   echo "$url" | egrep -q '^(https?|git)://github.com/UV-CDAT/uvcdat(\.git)?$' &&
-   ! pushurl="$(git config --get remote.origin.pushurl)"; then
-  pushurl='git@github.com:UV-CDAT/uvcdat.git'
-  echo 'Setting origin pushurl to '"$pushurl"
-  git config remote.origin.pushurl "$pushurl"
-fi
diff --git a/scripts/tarballit.sh b/scripts/tarballit.sh
deleted file mode 100755
index 1217260c6..000000000
--- a/scripts/tarballit.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/usr/bin/env sh
-
-svn export http://www-pcmdi.llnl.gov/svn/repository/cdat/trunk $1
-tar czvf $1-everything.tar.gz  $1
-tar czvf $1-cdat.tar.gz --exclude $1/pysrc* --exclude $1/exsrc*  $1
-tar czvf $1-pysrc.tar.gz $1/pysrc
-tar czvf $1-exsrc.tar.gz $1/exsrc
diff --git a/tests/cdat/test_cdat.py b/tests/cdat/test_cdat.py
deleted file mode 100644
index 3c87d0c0b..000000000
--- a/tests/cdat/test_cdat.py
+++ /dev/null
@@ -1,500 +0,0 @@
-#!/usr/bin/env python
-version='%prog 1.0'
-usage = "usage: %prog [options] PACKAGE1, PACKAGE2, CONTRIB1, CONTRIB2, ..."
-import subprocess,os,sys
-import optparse
-import time
-import bz2,ftplib
-ftp_site = "climate.llnl.gov"
-ftp_dir = "Shadow"
-ftp_user = "cdat"
-ftp_password = "Changeme1"
-
-import cdat_info
-default_time_format = "%Y-%m-%d %H:%M:%S"
-
-def get_shadow_name(test_dir,test):
-    fnm = os.path.join(test_dir,test)[:-3]+'.shadow.bz2'
-    path = list(os.path.split(fnm))
-    while path[0]!='':
-        tmp = os.path.split(path.pop(0))
-        path.insert(0,tmp[1])
-        path.insert(0,tmp[0])
-    fnm2 = '.'.join(path[1:])
-    return fnm,fnm2
-
-def get_shadow_ftp(test_dir,test):
-    fnm,ftpnm = get_shadow_name(test_dir,test)
-    f = open(fnm,"w")
-    try:
-        ftp=ftplib.FTP(ftp_site)
-        ftp.login(ftp_user,ftp_password)
-        ftp.cwd(ftp_dir)
-        ftp.retrbinary('RETR %s' % ftpnm, f.write)
-        ftp.close()
-        f.close()
-        f = open(fnm)
-        s=f.read()
-        f.close()
-        s = bz2.decompress(s)
-        f = open(fnm[:-4],"w") # open w/o bz2 ext
-        f.write(s)
-        f.close()
-        os.remove(fnm)
-    except Exception,err:
-        f.close()
-        os.remove(fnm)
-        pass
-    
-
-def get_shadow_local(test_dir,test):
-    fnm = os.path.join(test_dir,test)[:-3]+'.shadow'
-    if os.path.exists(fnm):
-        f=open(fnm,"r")
-        s=f.read()
-        f.close()
-        shadow_dict=eval(s)
-    else:
-        shadow_dict={}
-    return shadow_dict
-
-def get_shadow(test_dir,test):
-    # first try from ftp
-    get_shadow_ftp(test_dir,test)
-    return get_shadow_local(test_dir,test)
-
-def set_shadow_local(test_dir,test,dict):
-    try:
-        fnm = os.path.join(test_dir,test)[:-3]+'.shadow'
-        os.remove(fnm)
-    except:
-        pass
-    try:
-        fnm = os.path.join(test_dir,test)[:-3]+'.shadow.bz2'
-        f=open(fnm,"w")
-        s= bz2.compress(repr(dict))
-        print >> f, s
-        f.close()
-    except Exception,err:
-        pass
-    return
-
-def put_shadow_ftp(test_dir,test):
-    fnm,ftpnm = get_shadow_name(test_dir,test)
-    try:
-        ftp=ftplib.FTP(ftp_site)
-        ftp.login(ftp_user,ftp_password)
-        ftp.cwd(ftp_dir)
-        f=open(fnm)
-        ftp.storbinary('STOR %s' % ftpnm, f)
-        ftp.close()
-        os.remove(fnm)
-    except Exception,err:
-        print 'Error putting ftp bz2',err
-        pass
-        
-def set_shadow(test_dir,test,dict):
-    set_shadow_local(test_dir,test,dict)
-    if int(o.upload)>0:
-        put_shadow_ftp(test_dir,test)
-    return
-
-def make_tests_string_machine(machine,dict):
-    details=""
-    details = "\t\t\tlast successful run: %s" % dict.get("last","never")
-    if dict.has_key("time"):
-        details+="\n\t\t\tduration (min,avg,max) %i, %i, %i seconds" % (dict["fastest"],dict["time"],dict["slowest"])
-    if dict.has_key("count") and o.verbose>1:
-        details+='\n\t\t\tSuccesfully tested %i times on at least : %i independent machines' % (dict["count"],len(dict["machines"]))
-    return details
-
-def make_tests_string(dict_all):
-    details=""
-    for os in dict_all.keys():
-        details += "\n\t\tOS: %s" % os
-        dict_os = dict_all[os]
-        for v in dict_os.keys():
-            details += "\n\t\t  Version: %s" % v
-            dict_system = dict_os[v]
-            for m in dict_system.keys():
-                details += "\n\t\t   Machine: %s" % m
-                dict=dict_system[m]
-                details+='\n'+make_tests_string_machine(m,dict)
-    return details
-
-def run_dir(test_dir,lst):
-    lst.sort()
-    passed=True
-    output={}
-    for test in lst:
-        if test[-3:]=='.py' and (test.lower()[:4]=='test' or test.lower()[:6]=='cdtest'):
-            Dict_all = get_shadow(test_dir,test)
-            if o.query_mode:
-                output[(test_dir,test)]=Dict_all
-                try:
-                    fnm = os.path.join(test_dir,test)[:-3]+'.shadow'
-                    os.remove(fnm)
-                except:
-                    pass
-                continue
-            myversion = ".".join(map(str,cdat_info.version()))
-            dict_all = Dict_all.get(myversion,{})
-            myos = os.uname()[0]
-            system = os.uname()[2]
-            machine = os.uname()[4]
-            dict_os = dict_all.get(myos,{})
-            dict_system = dict_os.get(system,{})
-            dict = dict_system.get(machine,{})
-            dict_system[machine] = dict
-            dict_os[system] = dict_system
-            dict_all[myos] = dict_os
-            details = ""
-            last = dict.get("last","1980-01-01 00:00:00") # ok ever ago!
-            format = dict.get("format",default_time_format)
-            tlast = time.strptime(last,format)
-            delta = time.mktime(tlast)-time.mktime(time.strptime(o.date,o.format))
-            if delta>0:
-                if o.verbose>0:
-                    print "\tRunning: %s" % (test)
-                    print "\t\tSuccessful run newer than threshold %s vs %s " % (last,o.date)
-                continue
-            if o.verbose>0:
-                print "\tRunning: %s" % (test)
-                if o.verbose<3 or dict_all.keys()==[]:
-                    details=make_tests_string_machine(machine,dict)
-                else:
-                    details+=make_tests_string(dict_all)
-                print details
-            t = time.time()
-            out,err= run_test(os.path.join(test_dir,test))
-            err2 = []
-            for l in err:
-                if l.find("Warning")>-1:
-                    pass
-                else:
-                    err2.append(l)
-            err=err2
-            t2 = time.time()
-            if err!=[]:
-                passed = False
-            if o.verbose>1:
-                for l in out:
-                    st='\t\t%s' % l.strip()
-                    print st
-            if o.verbose>0:
-                if err!=[]:
-                    print '\t        FAILED\n\n',err
-                    if o.verbose>1:
-                        for l in err:
-                            st='\t\t%s' % l.strip()
-                            print st
-                else:
-                    print '\t        PASSED\n\n'
-                    runtime = int(t2-t)+1
-                    fastest = dict.get("fastest",runtime+1)
-                    if fastest>runtime:
-                        fastest = runtime
-                    dict["fastest"]=fastest
-                    slowest = dict.get("slowest",runtime-1)
-                    if slowest<runtime:
-                        slowest = runtime
-                    dict["slowest"]=slowest
-                    dict["format"]=default_time_format
-                    dict["last"] = time.strftime(default_time_format,time.localtime())
-                    count=dict.get("count",0)
-                    count+=1
-                    dict["count"]=count
-                    avg = dict.get("time",0.)*(count-1)
-                    avg+=runtime
-                    avg/=count
-                    dict["time"] = avg
-                    machines = dict.get("machines",[])
-                    if int(o.upload)>1:
-                        mymachine = os.uname()[1]
-                    else:
-                        mymachine = "private"
-                    if not mymachine in machines:
-                        machines.append(mymachine)
-                        dict["machines"] = machines
-                        
-                    dict_system[machine] = dict
-                    dict_os[system] = dict_system
-                    dict_all[myos] = dict_os
-                    Dict_all[myversion] = dict_all
-                    output[(test_dir,test)]=dict
-                    if out==[] or str(out[-1]).lower().find('skipped')==-1:
-                        # ok the test havent been skipped
-                        # we can replace stat file
-                        set_shadow(test_dir,test,Dict_all)
-                    
-            if o.skip is False and passed is False:
-                sys.exit()
-    return output
-
-def run_test(test):
-    wd, test = os.path.split(test)
-    cmd = 'cd %s ; %s %s' % (wd, sys.executable, test)
-    if o.full_testing:
-        cmd+=' --full --extended'
-    if o.extended_testing:
-        cmd += ' --extended'
-    #print cmd
-    P=subprocess.Popen(cmd,stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True,shell=True)
-    
-    out = P.stdout.readlines()
-    err = P.stderr.readlines()
-    rmv =[]
-    for l in err:
-        for e in o.error_strings:
-            if l.find(e)>-1:
-                rmv.append(l)
-                break
-    for l in rmv:
-        err.remove(l)
-        
-    return out,err
-
-format = default_time_format
-date = time.strftime(format,time.localtime()) # Now!
-
-p=optparse.OptionParser(version=version,usage=usage)
-
-time_format_help_string = """format for time, default: %default                             
-Format can be constructed from the following keys:                        
-%a 	Locale's abbreviated weekday name.               
-%A 	Locale's full weekday name. 	              
-%b 	Locale's abbreviated month name.               
-%B 	Locale's full month name. 	                 
-%c 	Locale's appropriate date and time representation.                                      
-%d 	Day of the month as a decimal number [01,31]. 	
-%H 	Hour (24-hour clock) as a decimal number [00,23].                                            	
-%I 	Hour (12-hour clock) as a decimal number [01,12].                                                      	
-%j 	Day of the year as a decimal number [001,366]. 	                                             
-%m 	Month as a decimal number [01,12]. 	                 
-%M 	Minute as a decimal number [00,59]. 	                 
-%p 	Locale's equivalent of either AM or PM.        
-%S 	Second as a decimal number [00,61]. 	          
-%U 	Week number of the year (Sunday as the first day of the week) as a decimal number [00,53]. All days in a new year preceding the first Sunday are considered to be in week 0. 	                      
-%w 	Weekday as a decimal number [0(Sunday),6].                             	
-%W 	Week number of the year (Monday as the first day of the week) as a decimal number [00,53]. All days in a new year preceding the first Monday are considered to be in week 0.                                           
-%x 	Locale's appropriate date representation. 	         
-%X 	Locale's appropriate time representation. 	            
-%y 	Year without century as a decimal number [00,99].                        	              
-%Y 	Year with century as a decimal number. 	              
-%Z 	Time zone name (no characters if no time zone exists).                                               	
-%% 	A literal "%" character.                   
-"""
-
-## Adds options to test utility
-p.add_option("-a","--all","-A","--ALL",dest="all",help="Run test for ALL Packages and contributed Packages",action="store_true",default=False)
-p.add_option("-P","--packages",dest="all_packages",help="Run test on all packages",action="store_true",default=False)
-p.add_option("-C","--contribs",dest="all_contrib",help="Run test on all contributed packages",action="store_true",default=False)
-p.add_option("-p","--package",dest="Packages",metavar="PACKAGE",help="Run test on this package",action="append",type="string",default=[])
-p.add_option("-c","--contrib","--contributed",dest="Contribs",metavar="CONTRIBUTED",help="Run test on this contributed package",action="append",type="string",default=[])
-p.add_option("-s","--stop","--noerror",dest="skip",help="Stop on errors (default: %default)",action="store_false",default=False)
-p.add_option("-S","--nostop","--skip",dest="skip",help="Do not stop on errors",action="store_true",default=False)
-p.add_option("-v","--verbose",metavar="LEVEL",dest="verbose",help="Level of verbosity (0, 1, 2 or 3), default is %default",type="choice",default="1",choices=("0","1","2","3"))
-p.add_option("-u","--upload",metavar="LEVEL",dest="upload",help="Level of upload privacy (0, 1, or 2), 0 no data uploaded, 1 no private data uploaded, 2 uploads hostname, default is %default",type="choice",default="2",choices=("0","1","2"))
-p.add_option("-e","--okerror",metavar="ERROR STRING",dest="error_strings",help="Identify 'none' error merror messages (removes lines in error messages containing this)",default=["ppmtogif","pnmcolormap","pnmremap","ppmtogif","ppmquant","pnmcrop","Definition of","DeprecationWarning","self.nxo"],action="append",type="string")
-p.add_option("-d","--date",dest="date",type="string",help="Will run a test if last successfull time is older than 'date', default is now: %default                                      See --timeformat option for date format",default=date)
-p.add_option("-f","--timeformat",dest="format",type="string",help=time_format_help_string,default=default_time_format)
-p.add_option("-q","--query_mode",dest="query_mode",help="Runs a query of successfully run test only, does not execute anything",action="store_true",default=False)
-p.add_option("-F","--full",dest="full_testing",help="Full testing (more detailed testing) default is %default",default=False,action="store_true")
-
-
-# short test is default -jd082007
-p.add_option("-E","--extended",dest="extended_testing",help="Extended testing (runs testing completely) default is %default",default=False,action="store_true")
-
-
-(o,args) = p.parse_args()
-
-if int(o.upload)==2 and o.query_mode is False:
-    print 'Your upload level is set to 2\nThis means CDAT will recover your machine\'s name (only when running the test suite).\nTo turn this off use option: --upload=1 (no private data uploaded) or 0 (no data uploaded at all)'
-    print "Your machine's name (%s) will be stored for statistical purposes only" % os.uname()[1]
-    cont = raw_input("Do you wish to continue? (y/n) [y]")
-    if not cont.lower() in ['','y','yes']:
-        sys.exit()
-try:
-    time.strptime(o.date,o.format)
-except:
-    p.error('date must be in format: "%s", or specify format on command line (use --help)' % o.format)
-# Handles case where nothing is passed!
-if not (o.all_packages or o.all_contrib or o.all) and o.Packages==[] and o.Contribs==[] and args==[]:
-    (o,args) = p.parse_args(["-h"])
-
-if o.all:
-    o.all_packages=True
-    o.all_contrib=True
-
-# Append all the Packages
-packages=[]
-pckgs = os.listdir("Packages")
-pckgs.sort()
-for pk in pckgs:
-    if pk in ['cmor','cdms','regrid','Properties']:
-        continue
-    if os.path.isdir(os.path.join("Packages",pk)):
-        lst=[]
-        try:
-            dr = os.path.join("Packages",pk,"Test")
-            lst = os.listdir(os.path.join("Packages",pk,"Test"))
-        except:
-            pass
-        try:
-            lst = os.listdir(os.path.join("Packages",pk,"test"))
-        except:
-            pass
-        if lst!=[]:
-            packages.append(pk)
-            
-if o.all_packages:
-    for pk in packages:
-        if not pk in o.Packages:
-            o.Packages.append(pk)
-            
-contribs=o.Contribs
-if contribs==[]:
-    pckgs = os.listdir("contrib")
-    pckgs.sort()
-    for pk in pckgs:
-        if pk in ['spanlib']:
-            try:
-                import spanlib
-            except:
-                continue
-        if os.path.isdir(os.path.join("contrib",pk)):
-            lst=[]
-            try:
-                lst = os.listdir(os.path.join("contrib",pk,"Test"))
-            except:
-                pass
-            try:
-                lst = os.listdir(os.path.join("contrib",pk,"test"))
-            except:
-                pass
-            if lst!=[] and pk not in o.Contribs:
-                # first try to see if contrib has been built
-                contribs.append(pk)
-            
-if o.all_contrib:
-    for pk in contribs:
-        if pk not in o.Contribs:
-            o.Contribs.append(pk)
-
-#Now adds the extra arguments
-for pk in args:
-    ok=False
-    if pk in packages:
-        ok = True
-        if not pk in o.Packages:
-            o.Packages.append(pk)
-    if pk in contribs:
-        ok = True
-        if not pk in o.Contribs:
-            o.Contribs.append(pk)
-    if not ok:
-        if o.skip:
-            print 'Will skip Package:',pk
-        else:
-            print "Package %s does not exists or has not test suite" % pk
-            print 'type "%s --help" for help and usage' % sys.argv[0]
-            sys.exit()
-            
-        
-# Ok now runs the test to see if packages are good
-skipped=[]
-for pk in o.Packages:
-    if not pk in packages:
-        if o.skip:
-            print 'Will skip Package:',pk
-            skipped.append(pk)
-        else:
-            print "Package %s does not exists or has no test suite" % pk
-            print 'type "%s --help" for help and usage' % sys.argv[0]
-            sys.exit()
-for pk in skipped:
-    o.Packages.remove(pk)
-# Ok now runs the test to see if contribs are good
-skipped=[]
-for pk in o.Contribs:
-    if not pk in contribs:
-        if o.skip:
-            print 'Will skip Contributed Package:',pk
-            skipped.append(pk)            
-        else:
-            print "Contributed Package %s does not exists or has not test suite" % pk
-            print 'type "%s --help" for help and usage' % sys.argv[0]
-            print 'valid contributed packages: %s' % ' '.join(contribs)
-            sys.exit()
-for pk in skipped:
-    o.Contribs.remove(pk)
-o.verbose=int(o.verbose)
-results ={}
-for pk in o.Packages:
-    print "Running Test on Official Package: %s" % pk
-    test_dir = os.path.join("Packages",pk,"Test")
-    try:
-        lst = os.listdir(test_dir)
-    except:
-        test_dir = os.path.join("Packages",pk,"test")
-        lst = os.listdir(test_dir)
-    tmp = run_dir(test_dir,lst)
-    for k in tmp.keys():
-        results[k]=tmp[k]
-for pk in o.Contribs:
-    print "Running Test on Contributed Package: %s" % pk
-    test_dir = os.path.join("contrib",pk,"Test")
-    try:
-        lst = os.listdir(test_dir)
-    except:
-        test_dir = os.path.join("contrib",pk,"test")
-        lst = os.listdir(test_dir)
-    tmp = run_dir(test_dir,lst)
-    for k in tmp.keys():
-        results[k]=tmp[k]
-
-
-
-import cdat_info
-Packages=[]
-OS=[]
-Versions=[]
-Machines=[]
-CDATVersions=[]
-#code to display nicely all the results
-if o.query_mode:
-    for test in results.keys():
-        pnm =test[0]
-        if not pnm in Packages:
-            Packages.append(pnm)
-        CDATVersions=results[test]
-        oses = CDATVersions.get(str(cdat_info.version()),{})
-        for aos in oses.keys():
-            if not aos in OS:
-                OS.append(aos)
-            versions = oses[aos]
-            for v in versions.keys():
-                syst = versions[v]
-                for asys in syst:
-                    full = "%s_%s_%s" % (aos,v,asys)
-                    if not full in Versions:
-                        Versions.append(full)
-                    res = syst[asys]
-                    machines = res["machines"]
-                    for m in machines:
-                        if not m in Machines:
-                            Machines.append(m)
-    print 'Your version:',cdat_info.version()
-    print 'Total Test:',len(results.keys())
-    print 'Total Packages:',len(Packages)
-    print 'Total OS:',len(OS),'---',', '.join(OS)
-    print 'Total OS Versions:',len(Versions)
-    print 'Total Independent Machines:',len(Machines)
-## else:
-##     for test_dir,test in results.keys():
-##          print '\n\n'
-##          fn = test_dir+test
-##          print fn,'--------------'
-##          tr = results[test_dir,test]
-##          for t in tr:
-##               print '\t',t,':  ' ,tr[t] 
diff --git a/tests/cdat/test_exsrc_ok.py b/tests/cdat/test_exsrc_ok.py
deleted file mode 100644
index 923dfc9d7..000000000
--- a/tests/cdat/test_exsrc_ok.py
+++ /dev/null
@@ -1,107 +0,0 @@
-""" Test external packages dependencies for CDAT
-Prints out Packages that need to be installed and why
-"""
-import sys,os
-
-## Test 1: Pyfort
-min_ver=8.5
-a=os.popen4(sys.prefix+'/bin/pyfort -V')[1].readlines()
-sp=a[0].split()
-if sp[0]!='Pyfort':
-    print 'Pyfort : Not Present in your python distribution'
-elif float(sp[1])<min_ver:
-    print 'Pyfort : Version '+str(min_ver)+' minimum is required, you have: '+sp[1]
-
-## Test 2: Numeric
-min_ver=23.1
-try:
-    import Numeric
-    if float(Numeric.__version__)<min_ver:
-        print 'Numeric : Version '+str(min_ver)+' minimum is required, you have: '+Numeric.__version__
-except:
-    print 'Numeric : Not Present in your python distribution'
-
-## Test 3: Pmw
-min_ver=1.2
-try:
-    import Pmw
-    if float(Pmw.version())<min_ver:
-        print 'Pmw : Version '+str(min_ver)+' minimum is required, you have: '+Pmw.version()
-except:
-    print 'Pmw : Not Present in your python distribution'
-
-## Test 4: gplot
-a=os.popen4('which gplot')[1].readlines()[0]
-if a.find('not found')>-1:
-    print 'gplot : Not present on your system'
-
-## Test 5: xgks
-if not os.path.exists(sys.prefix+'/lib/xgksfonts'):
-    print 'xgks : xgksfonts directory not present in your python distribution'
-
-## Test 6: gifsicle
-a=os.popen4('which gifsicle')[1].readlines()[0]
-if a.find('not found')>-1:
-    print 'gifsicle : Not present on your system'
-
-## Test 7: ghostscript and  fonts
-a=os.popen4('which gs')[1].readlines()[0]
-if a.find('not found')>-1:
-    print 'ghostscript : Not present on your system'
-else:
-    jpeg=0
-    png=0
-    a=os.popen4('gs -h')[1].readlines()
-    while a.pop(0).find('Available devices:')<0:
-        continue
-    for l in a:
-        s=l.strip().split()
-        if 'jpeg' in s:
-            jpeg=1
-        if 'png16' in s:
-            png=1
-            
-    font=0
-    a=os.popen4('gs -h')[1].readlines()
-    while a.pop(0).find('Search path:')<0:
-        continue
-    for l in a:
-        if l[0]==' ': # path lines starts with blank
-            s=l.strip().split(':')
-            for p in s:
-                #print os.listdir(p.strip())
-                if os.path.exists(p.strip()+'/n022003l.afm'):
-                    font=1
-        else:
-            break
-    if jpeg==0 and png==0 and font==0:
-        print 'ghostscript : no jpeg nor png support built, missing extra fonts'
-    elif jpeg==0 and png==0:
-        print 'ghostscript : no jpeg nor png support built'
-    elif jpeg==0:
-        print 'ghostscript : no jpeg support built'
-    elif png==0:
-        print 'ghostscript : no png support built'
-    elif font==0:
-        print 'ghostscript : extra fonts not installed'
-            
-## Test 8: Netpbm/pbmplus
-a=os.popen4('which ppmtogif')[1].readlines()[0]
-if a.find('not found')>-1:
-    if sys.platform in ['linux2','darwin','cygwin']:
-        print 'netpbm : Not present on your system'
-    else:
-        print 'pbmplus : Not present on your system'
-
-
-## Test 9: R libraries (not python module)
-a=os.popen4('which R')[1].readlines()[0]
-if a.find('not found')>-1:
-    print 'R : Not present on your system'
-
-## Test 10: VTK
-try:
-    import vtk
-except:
-       print 'VTK : Not present on your Python'
- 
diff --git a/tests/test_script b/tests/test_script
deleted file mode 100755
index 883bb3b5b..000000000
--- a/tests/test_script
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/sh
-# usage: ./test_script [-v] [targetdir]
-# -v prevents run of vcdat
-#
-unset PYTHONPATH
-unset PYTHONHOME
-unset PYTHONSTARTUP
-if (test "$1" = "-v") then
-    vopt=0; shift;
-else
-    vopt=1
-fi
-if (test $# -eq 0) then 
-    p=`which python`; v=`which vcdat`
-else
-    here=`pwd`;
-    cd $1/bin;
-    pdir=`pwd`;
-    p=$pdir/python; v=$pdir/vcdat;
-    cd $here
-fi
-prefix=`${p} -c "import sys; print sys.exec_prefix"`
-echo "Testing $p"
-(cd Packages/cdms/Test; $p cdtest.py) 
-if (test $vopt -eq 1) then
-    echo "Testing $v";
-    $v
-fi
-echo "-------------------------------------------------------------------"
-echo "Tests completed."
-
diff --git a/uvcdatspt/scripts/MHTScreenshots.py b/uvcdatspt/scripts/MHTScreenshots.py
deleted file mode 100644
index 4396c6074..000000000
--- a/uvcdatspt/scripts/MHTScreenshots.py
+++ /dev/null
@@ -1,170 +0,0 @@
-try: paraview.simple
-except: from paraview.simple import *
-
-import sys
-import os
-import paraview
-
-if len(sys.argv) != 3:
-    print 'Usage: pvbatch --symmetric MHTScreenshots.py <output file name> "<input file names>"'
-    sys.exit(1)
-
-print 'input file names are: ', sys.argv[2]
-print 'output file name is: ', sys.argv[1]
-
-# trying to import the library where I can specify the global and subcontrollers
-try:
-    import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set
-except ImportError:
-    import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH
-
-paraview.options.batch = True # this may not be necessary
-paraview.simple._DisableFirstRenderCameraReset()
-
-def CreateTimeCompartments(globalController, timeCompartmentSize):
-    if globalController.GetNumberOfProcesses() == 1:
-        print 'single process'
-        return
-    elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0:
-        print 'number of processes must be an integer multiple of time compartment size'
-        return
-    elif timeCompartmentSize == globalController.GetNumberOfProcesses():
-        return globalController
-
-    gid = globalController.GetLocalProcessId()
-    timeCompartmentGroupId = int (gid / timeCompartmentSize )
-    newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize)
-    # must unregister if the reference count is greater than 1
-    if newController.GetReferenceCount() > 1:
-        newController.UnRegister(None)
-
-    #print gid, timeCompartmentGroupId, gid % timeCompartmentSize
-    print gid, ' of global comm is ', newController.GetLocalProcessId()
-    globalController.SetGlobalController(newController)
-    return newController
-
-def CheckReader(reader):
-    if hasattr(reader, "FileName") == False:
-        print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName()
-        sys.exit(-1)
-
-    if hasattr(reader, "TimestepValues") == False:
-        print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information"
-        sys.exit(-1)
-
-def CreateControllers(timeCompartmentSize):
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    globalController = pm.GetGlobalController()
-    if timeCompartmentSize > globalController.GetNumberOfProcesses():
-        timeCompartmentSize = globalController.GetNumberOfProcesses()
-
-    temporalController = CreateTimeCompartments(globalController, timeCompartmentSize)
-    return globalController, temporalController, timeCompartmentSize
-
-currentTimeStep = -1
-def UpdateCurrentTimeStep(globalController, timeCompartmentSize):
-    global currentTimeStep
-    if currentTimeStep == -1:
-        currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize
-        return currentTimeStep
-
-    numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize
-    currentTimeStep = currentTimeStep + numTimeStepsPerIteration
-    return currentTimeStep
-
-def WriteImages(currentTimeStep, currentTime, views):
-    for view in views:
-        filename = view.tpFileName.replace("%t", str(currentTimeStep))
-        view.ViewTime = currentTime
-        WriteImage(filename, view, Magnification=view.tpMagnification)
-
-def WriteFiles(currentTimeStep, currentTime, writers):
-    for writer in writers:
-        originalfilename = writer.FileName
-        fname = originalfilename.replace("%t", str(currentTimeStep))
-        writer.FileName = fname
-        writer.UpdatePipeline(currentTime)
-        writer.FileName = originalfilename
-
-def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views):
-    currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-    while currentTimeStep < len(timeSteps):
-        print globalController.GetLocalProcessId(), " is working on ", currentTimeStep
-        WriteImages(currentTimeStep, timeSteps[currentTimeStep], views)
-        WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers)
-        currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-
-def CreateReader(ctor, args, fileInfo):
-    "Creates a reader, checks if it can be used, and sets the filenames"
-    reader = ctor()
-    CheckReader(reader)
-    import glob
-    files = glob.glob(fileInfo)
-    files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering
-    reader.FileName = files
-    for a in args:
-        s = "reader."+a
-        exec (s)
-
-    return reader
-
-def CreateWriter(ctor, filename, tp_writers):
-    writer = ctor()
-    writer.FileName = filename
-    tp_writers.append(writer)
-    return writer
-
-def CreateView(proxy_ctor, filename, magnification, width, height, tp_views):
-    view = proxy_ctor()
-    view.add_attribute("tpFileName", filename)
-    view.add_attribute("tpMagnification", magnification)
-    tp_views.append(view)
-    view.ViewSize = [width, height]
-    return view
-
-tp_writers = []
-tp_views = []
-# ==================== end of specialized temporal parallelism sections ==================
-
-timeCompartmentSize = 8
-globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize)
-
-in_msf_moc = CreateReader( MHTFileSeriesReader, [], sys.argv[2] )
-timeSteps = GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0]
-
-XYChartView1 = CreateView( CreateXYPlotView, sys.argv[1], 1, 549, 583, tp_views )
-XYChartView1.ShowAxis = [1, 1, 0, 0]
-XYChartView1.ShowAxisGrid = [1, 1, 0, 0]
-XYChartView1.AxisLabelsBottom = []
-XYChartView1.LegendLocation = 1
-XYChartView1.AxisLabelsLeft = []
-XYChartView1.ViewTime = 0.0
-XYChartView1.ShowLegend = 1
-XYChartView1.AxisRange = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
-XYChartView1.AxisTitleFont = ['Arial', '12', '1', '0', 'Arial', '12', '1', '0', 'Arial', '12', '1', '0', 'Arial', '12', '1', '0']
-XYChartView1.AxisLabelColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
-XYChartView1.AxisTitleColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5]
-XYChartView1.ChartTitleColor = [0.0, 0.0, 0.0]
-XYChartView1.ChartTitleAlignment = 1
-XYChartView1.AxisColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
-XYChartView1.AxisLabelsTop = []
-XYChartView1.AxisLabelFont = ['Arial', '12', '0', '0', 'Arial', '12', '0', '0', 'Arial', '12', '0', '0', 'Arial', '12', '0', '0']
-XYChartView1.ShowAxisLabels = [1, 1, 1, 1]
-XYChartView1.AxisLabelNotation = [0, 0, 0, 0]
-XYChartView1.AxisLabelPrecision = [2, 2, 2, 2]
-XYChartView1.AxisGridColor = [0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95]
-XYChartView1.ChartTitle = ''
-XYChartView1.AxisLabelsRight = []
-XYChartView1.AxisBehavior = [0, 0, 0, 0]
-XYChartView1.AxisTitle = ['', '', '', '']
-XYChartView1.ChartTitleFont = ['Arial', '14', '0', '0']
-XYChartView1.AxisLogScale = [0, 0, 0, 0]
-
-DataRepresentation1 = Show() #GetDisplayProperties(in_msf_moc)
-DataRepresentation1.XArrayName = 'reader_mht_global'
-DataRepresentation1.SeriesVisibility = ['vtkOriginalIndices', '0']
-DataRepresentation1.SeriesVisibility = ['reader_mht_global', '1']
-
-IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views)
-
-print 'ending'
diff --git a/uvcdatspt/scripts/MHTTemporalStatistics.py b/uvcdatspt/scripts/MHTTemporalStatistics.py
deleted file mode 100644
index 18cfca03c..000000000
--- a/uvcdatspt/scripts/MHTTemporalStatistics.py
+++ /dev/null
@@ -1,26 +0,0 @@
-print 'starting'
-import sys
-from paraview.simple import *
-
-if len(sys.argv) < 3:
-    print 'Usage: pvbatch MHTTemporalStatistics.py <output file name> <input file names>'
-    sys.exit(1)
-
-paraview.simple._DisableFirstRenderCameraReset()
-reader = MHTFileSeriesReader()
-print 'input file names are: ', sys.argv[2:len(sys.argv)]
-print 'output file name is: ', sys.argv[1]
-reader.FileName = sys.argv[2:len(sys.argv)]
-
-MultiBlockTemporalStatistics1 = MultiBlockTemporalStatistics()
-MultiBlockTemporalStatistics1.TimeStepType = 0
-MultiBlockTemporalStatistics1.SamplingMethod = 1
-MultiBlockTemporalStatistics1.TimeSpan = 0
-MultiBlockTemporalStatistics1.TimeStepLength = 1
-MultiBlockTemporalStatistics1.TimeCompartmentSize = 8
-
-writer = XMLMultiBlockDataWriter()
-writer.FileName = sys.argv[1]
-writer.UpdatePipeline()
-
-print 'ending'
diff --git a/uvcdatspt/scripts/MOCScreenshots.py b/uvcdatspt/scripts/MOCScreenshots.py
deleted file mode 100644
index 1cb05ea8f..000000000
--- a/uvcdatspt/scripts/MOCScreenshots.py
+++ /dev/null
@@ -1,535 +0,0 @@
-
-try: paraview.simple
-except: from paraview.simple import *
-
-import sys
-import os
-import paraview
-
-if len(sys.argv) != 3:
-    print 'Usage: pvbatch --symmetric MOCScreenshots.py <output file name> "<input file names>"'
-    sys.exit(1)
-
-print 'input file names are: ', sys.argv[2]
-print 'output file name is: ', sys.argv[1]
-
-# trying to import the library where I can specify the global and subcontrollers
-try:
-    import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set
-except ImportError:
-    import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH
-
-paraview.options.batch = True # this may not be necessary
-paraview.simple._DisableFirstRenderCameraReset()
-
-def CreateTimeCompartments(globalController, timeCompartmentSize):
-    if globalController.GetNumberOfProcesses() == 1:
-        print 'single process'
-        return
-    elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0:
-        print 'number of processes must be an integer multiple of time compartment size'
-        return
-    elif timeCompartmentSize == globalController.GetNumberOfProcesses():
-        return globalController
-
-    gid = globalController.GetLocalProcessId()
-    timeCompartmentGroupId = int (gid / timeCompartmentSize )
-    newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize)
-    # must unregister if the reference count is greater than 1
-    if newController.GetReferenceCount() > 1:
-        newController.UnRegister(None)
-
-    #print gid, timeCompartmentGroupId, gid % timeCompartmentSize
-    print gid, ' of global comm is ', newController.GetLocalProcessId()
-    globalController.SetGlobalController(newController)
-    return newController
-
-def CheckReader(reader):
-    if hasattr(reader, "FileName") == False:
-        print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName()
-        sys.exit(-1)
-
-    if hasattr(reader, "TimestepValues") == False:
-        print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information"
-        sys.exit(-1)
-
-def CreateControllers(timeCompartmentSize):
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    globalController = pm.GetGlobalController()
-    if timeCompartmentSize > globalController.GetNumberOfProcesses():
-        timeCompartmentSize = globalController.GetNumberOfProcesses()
-
-    temporalController = CreateTimeCompartments(globalController, timeCompartmentSize)
-    return globalController, temporalController, timeCompartmentSize
-
-currentTimeStep = -1
-def UpdateCurrentTimeStep(globalController, timeCompartmentSize):
-    global currentTimeStep
-    if currentTimeStep == -1:
-        currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize
-        return currentTimeStep
-
-    numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize
-    currentTimeStep = currentTimeStep + numTimeStepsPerIteration
-    return currentTimeStep
-
-def WriteImages(currentTimeStep, currentTime, views):
-    for view in views:
-        filename = view.tpFileName.replace("%t", str(currentTimeStep))
-        view.ViewTime = currentTime
-        WriteImage(filename, view, Magnification=view.tpMagnification)
-
-def WriteFiles(currentTimeStep, currentTime, writers):
-    for writer in writers:
-        originalfilename = writer.FileName
-        fname = originalfilename.replace("%t", str(currentTimeStep))
-        writer.FileName = fname
-        writer.UpdatePipeline(currentTime)
-        writer.FileName = originalfilename
-
-def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views):
-    currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-    while currentTimeStep < len(timeSteps):
-        print globalController.GetLocalProcessId(), " is working on ", currentTimeStep
-        WriteImages(currentTimeStep, timeSteps[currentTimeStep], views)
-        WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers)
-        currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-
-def CreateReader(ctor, args, fileInfo):
-    "Creates a reader, checks if it can be used, and sets the filenames"
-    reader = ctor()
-    CheckReader(reader)
-    import glob
-    files = glob.glob(fileInfo)
-    files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering
-    reader.FileName = files
-    for a in args:
-        s = "reader."+a
-        exec (s)
-
-    return reader
-
-def CreateWriter(ctor, filename, tp_writers):
-    writer = ctor()
-    writer.FileName = filename
-    tp_writers.append(writer)
-    return writer
-
-def CreateView(proxy_ctor, filename, magnification, width, height, tp_views):
-    view = proxy_ctor()
-    view.add_attribute("tpFileName", filename)
-    view.add_attribute("tpMagnification", magnification)
-    tp_views.append(view)
-    view.ViewSize = [width, height]
-    return view
-
-tp_writers = []
-tp_views = []
-# ==================== end of specialized temporal parallelism sections ==================
-
-timeCompartmentSize = 16
-globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize)
-
-RenderView1 = CreateView( CreateRenderView, sys.argv[1], 1, 549, 583, tp_views )
-RenderView1.LightSpecularColor = [1.0, 1.0, 1.0]
-RenderView1.InteractionMode = '3D'
-RenderView1.UseTexturedBackground = 0
-RenderView1.UseLight = 1
-RenderView1.CameraPosition = [15.0, -2624.999755859375, 14496.62787197619]
-RenderView1.FillLightKFRatio = 3.0
-RenderView1.Background2 = [0.0, 0.0, 0.16470588235294117]
-RenderView1.FillLightAzimuth = -10.0
-RenderView1.LODResolution = 50.0
-RenderView1.BackgroundTexture = []
-RenderView1.KeyLightAzimuth = 10.0
-RenderView1.StencilCapable = 1
-RenderView1.LightIntensity = 1.0
-RenderView1.CameraFocalPoint = [15.0, -2624.999755859375, 0.0]
-RenderView1.ImageReductionFactor = 2
-RenderView1.CameraViewAngle = 30.0
-RenderView1.CameraParallelScale = 3766.3151510583625
-RenderView1.EyeAngle = 2.0
-RenderView1.HeadLightKHRatio = 3.0
-RenderView1.StereoRender = 0
-RenderView1.KeyLightIntensity = 0.75
-RenderView1.BackLightAzimuth = 110.0
-RenderView1.OrientationAxesInteractivity = 0
-RenderView1.UseInteractiveRenderingForSceenshots = 0
-RenderView1.UseOffscreenRendering = 0
-RenderView1.Background = [1.0, 1.0, 1.0]
-RenderView1.UseOffscreenRenderingForScreenshots = 1
-RenderView1.NonInteractiveRenderDelay = 2
-RenderView1.CenterOfRotation = [15.0, -2624.999755859375, 0.0]
-RenderView1.CameraParallelProjection = 0
-RenderView1.CompressorConfig = 'vtkSquirtCompressor 0 3'
-RenderView1.HeadLightWarmth = 0.5
-RenderView1.MaximumNumberOfPeels = 4
-RenderView1.LightDiffuseColor = [1.0, 1.0, 1.0]
-RenderView1.StereoType = 'Red-Blue'
-RenderView1.DepthPeeling = 1
-RenderView1.BackLightKBRatio = 3.5
-RenderView1.StereoCapableWindow = 1
-RenderView1.CameraViewUp = [0.0, 1.0, 0.0]
-RenderView1.LightType = 'HeadLight'
-RenderView1.LightAmbientColor = [1.0, 1.0, 1.0]
-RenderView1.RemoteRenderThreshold = 3.0
-RenderView1.KeyLightElevation = 50.0
-RenderView1.CenterAxesVisibility = 0
-RenderView1.MaintainLuminance = 0
-RenderView1.StillRenderImageReductionFactor = 1
-RenderView1.BackLightWarmth = 0.5
-RenderView1.FillLightElevation = -75.0
-RenderView1.MultiSamples = 0
-RenderView1.FillLightWarmth = 0.4
-RenderView1.AlphaBitPlanes = 1
-RenderView1.LightSwitch = 0
-RenderView1.OrientationAxesVisibility = 0
-RenderView1.CameraClippingRange = [14351.66159325643, 14714.077290055833]
-RenderView1.BackLightElevation = 0.0
-RenderView1.ViewTime = 0.0
-RenderView1.OrientationAxesOutlineColor = [1.0, 1.0, 1.0]
-RenderView1.LODThreshold = 5.0
-RenderView1.CollectGeometryThreshold = 100.0
-RenderView1.UseGradientBackground = 0
-RenderView1.KeyLightWarmth = 0.6
-RenderView1.OrientationAxesLabelColor = [1.0, 1.0, 1.0]
-
-in_msf_moc = CreateReader( MOCFileSeriesReader, [],  sys.argv[2])
-timeSteps = GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0]
-Threshold1 = Threshold( guiName="Threshold1", Scalars=['POINTS', 'reader_moc_global'], ThresholdRange=[-1000.0, 592.3663330078125], AllScalars=1 )
-
-Transform1 = Transform( guiName="Transform1", Transform="Transform" )
-Transform1.Transform.Scale = [40.0, -1.0, 1.0]
-Transform1.Transform.Rotate = [0.0, 0.0, 0.0]
-Transform1.Transform.Translate = [0.0, 0.0, 0.0]
-
-a1_reader_moc_global_PiecewiseFunction = CreatePiecewiseFunction( Points=[0.0, 0.0, 1.0, 1.0] )
-
-a1_reader_moc_global_PVLookupTable = GetLookupTableForArray( "reader_moc_global", 1, Discretize=1, RGBPoints=[-151.5101776123047, 0.23, 0.299, 0.754, 592.3663330078125, 0.706, 0.016, 0.15], UseLogScale=0, VectorComponent=0, NanColor=[0.25, 0.0, 0.0], NumberOfTableValues=256, ColorSpace='Diverging', VectorMode='Magnitude', HSVWrap=0, ScalarRangeInitialized=1.0, LockScalarRange=0 )
-
-SetActiveSource(in_msf_moc)
-DataRepresentation1 = Show()
-DataRepresentation1.CubeAxesZAxisVisibility = 1
-DataRepresentation1.SelectionPointLabelColor = [0.5, 0.5, 0.5]
-DataRepresentation1.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds'
-DataRepresentation1.SuppressLOD = 0
-DataRepresentation1.CubeAxesXGridLines = 0
-DataRepresentation1.CubeAxesYAxisTickVisibility = 1
-DataRepresentation1.Position = [0.0, 0.0, 0.0]
-DataRepresentation1.BackfaceRepresentation = 'Follow Frontface'
-DataRepresentation1.SelectionOpacity = 1.0
-DataRepresentation1.SelectionPointLabelShadow = 0
-DataRepresentation1.CubeAxesYGridLines = 0
-DataRepresentation1.OrientationMode = 'Direction'
-DataRepresentation1.Source.TipResolution = 6
-DataRepresentation1.ScaleMode = 'No Data Scaling Off'
-DataRepresentation1.Diffuse = 1.0
-DataRepresentation1.SelectionUseOutline = 0
-DataRepresentation1.CubeAxesZTitle = 'Z-Axis'
-DataRepresentation1.Specular = 0.1
-DataRepresentation1.SelectionVisibility = 1
-DataRepresentation1.InterpolateScalarsBeforeMapping = 1
-DataRepresentation1.CubeAxesZAxisTickVisibility = 1
-DataRepresentation1.Origin = [0.0, 0.0, 0.0]
-DataRepresentation1.CubeAxesVisibility = 0
-DataRepresentation1.Scale = [1.0, 1.0, 1.0]
-DataRepresentation1.SelectionCellLabelJustification = 'Left'
-DataRepresentation1.DiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation1.SelectionCellLabelOpacity = 1.0
-DataRepresentation1.Source = "Arrow"
-DataRepresentation1.Source.Invert = 0
-DataRepresentation1.Masking = 0
-DataRepresentation1.Opacity = 1.0
-DataRepresentation1.LineWidth = 1.0
-DataRepresentation1.MeshVisibility = 0
-DataRepresentation1.Visibility = 0
-DataRepresentation1.SelectionCellLabelFontSize = 18
-DataRepresentation1.CubeAxesCornerOffset = 0.0
-DataRepresentation1.SelectionPointLabelJustification = 'Left'
-DataRepresentation1.Ambient = 0.0
-DataRepresentation1.SelectOrientationVectors = ''
-DataRepresentation1.CubeAxesTickLocation = 'Inside'
-DataRepresentation1.BackfaceDiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation1.CubeAxesYAxisVisibility = 1
-DataRepresentation1.SelectionPointLabelFontFamily = 'Arial'
-DataRepresentation1.Source.ShaftResolution = 6
-DataRepresentation1.CubeAxesFlyMode = 'Closest Triad'
-DataRepresentation1.SelectScaleArray = ''
-DataRepresentation1.CubeAxesYTitle = 'Y-Axis'
-DataRepresentation1.ColorAttributeType = 'POINT_DATA'
-DataRepresentation1.SpecularPower = 100.0
-DataRepresentation1.Texture = []
-DataRepresentation1.SelectionCellLabelShadow = 0
-DataRepresentation1.AmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation1.MapScalars = 1
-DataRepresentation1.PointSize = 2.0
-DataRepresentation1.Source.TipLength = 0.35
-DataRepresentation1.SelectionCellLabelFormat = ''
-DataRepresentation1.Scaling = 0
-DataRepresentation1.StaticMode = 0
-DataRepresentation1.SelectionCellLabelColor = [0.0, 1.0, 0.0]
-DataRepresentation1.Source.TipRadius = 0.1
-DataRepresentation1.EdgeColor = [0.0, 0.0, 0.5000076295109483]
-DataRepresentation1.CubeAxesXAxisTickVisibility = 1
-DataRepresentation1.SelectionCellLabelVisibility = 0
-DataRepresentation1.NonlinearSubdivisionLevel = 1
-DataRepresentation1.CubeAxesColor = [1.0, 1.0, 1.0]
-DataRepresentation1.Representation = 'Surface'
-DataRepresentation1.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
-DataRepresentation1.CubeAxesXAxisMinorTickVisibility = 1
-DataRepresentation1.Orientation = [0.0, 0.0, 0.0]
-DataRepresentation1.CubeAxesXTitle = 'X-Axis'
-DataRepresentation1.CubeAxesInertia = 1
-DataRepresentation1.BackfaceOpacity = 1.0
-DataRepresentation1.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds'
-DataRepresentation1.SelectionColor = [1.0, 0.0, 1.0]
-DataRepresentation1.SelectionPointLabelVisibility = 0
-DataRepresentation1.SelectionPointLabelFontSize = 18
-DataRepresentation1.ScaleFactor = 1.0
-DataRepresentation1.BackfaceAmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation1.Source.ShaftRadius = 0.03
-DataRepresentation1.SelectMaskArray = ''
-DataRepresentation1.SelectionLineWidth = 2.0
-DataRepresentation1.CubeAxesZAxisMinorTickVisibility = 1
-DataRepresentation1.CubeAxesXAxisVisibility = 1
-DataRepresentation1.Interpolation = 'Gouraud'
-DataRepresentation1.SelectionCellLabelFontFamily = 'Arial'
-DataRepresentation1.SelectionCellLabelItalic = 0
-DataRepresentation1.CubeAxesYAxisMinorTickVisibility = 1
-DataRepresentation1.CubeAxesZGridLines = 0
-DataRepresentation1.SelectionPointLabelFormat = ''
-DataRepresentation1.SelectionPointLabelOpacity = 1.0
-DataRepresentation1.Pickable = 1
-DataRepresentation1.CustomBoundsActive = [0, 0, 0]
-DataRepresentation1.SelectionRepresentation = 'Wireframe'
-DataRepresentation1.SelectionPointLabelBold = 0
-DataRepresentation1.ColorArrayName = 'reader_moc_global'
-DataRepresentation1.SelectionPointLabelItalic = 0
-DataRepresentation1.SpecularColor = [1.0, 1.0, 1.0]
-DataRepresentation1.LookupTable = a1_reader_moc_global_PVLookupTable
-DataRepresentation1.SelectionPointSize = 5.0
-DataRepresentation1.SelectionCellLabelBold = 0
-DataRepresentation1.Orient = 0
-
-SetActiveSource(Threshold1)
-DataRepresentation2 = Show()
-DataRepresentation2.CubeAxesZAxisVisibility = 1
-DataRepresentation2.SelectionPointLabelColor = [0.5, 0.5, 0.5]
-DataRepresentation2.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds'
-DataRepresentation2.SuppressLOD = 0
-DataRepresentation2.CubeAxesXGridLines = 0
-DataRepresentation2.CubeAxesYAxisTickVisibility = 1
-DataRepresentation2.Position = [0.0, 0.0, 0.0]
-DataRepresentation2.BackfaceRepresentation = 'Follow Frontface'
-DataRepresentation2.SelectionOpacity = 1.0
-DataRepresentation2.SelectionPointLabelShadow = 0
-DataRepresentation2.CubeAxesYGridLines = 0
-DataRepresentation2.OrientationMode = 'Direction'
-DataRepresentation2.Source.TipResolution = 6
-DataRepresentation2.ScaleMode = 'No Data Scaling Off'
-DataRepresentation2.Diffuse = 1.0
-DataRepresentation2.SelectionUseOutline = 0
-DataRepresentation2.SelectionPointLabelFormat = ''
-DataRepresentation2.CubeAxesZTitle = 'Z-Axis'
-DataRepresentation2.Specular = 0.1
-DataRepresentation2.SelectionVisibility = 1
-DataRepresentation2.InterpolateScalarsBeforeMapping = 1
-DataRepresentation2.CubeAxesZAxisTickVisibility = 1
-DataRepresentation2.Origin = [0.0, 0.0, 0.0]
-DataRepresentation2.CubeAxesVisibility = 0
-DataRepresentation2.Scale = [1.0, 1.0, 1.0]
-DataRepresentation2.SelectionCellLabelJustification = 'Left'
-DataRepresentation2.DiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation2.SelectionCellLabelOpacity = 1.0
-DataRepresentation2.CubeAxesInertia = 1
-DataRepresentation2.Source = "Arrow"
-DataRepresentation2.Source.Invert = 0
-DataRepresentation2.Masking = 0
-DataRepresentation2.Opacity = 1.0
-DataRepresentation2.LineWidth = 1.0
-DataRepresentation2.MeshVisibility = 0
-DataRepresentation2.Visibility = 0
-DataRepresentation2.SelectionCellLabelFontSize = 18
-DataRepresentation2.CubeAxesCornerOffset = 0.0
-DataRepresentation2.SelectionPointLabelJustification = 'Left'
-DataRepresentation2.SelectionPointLabelVisibility = 0
-DataRepresentation2.SelectOrientationVectors = ''
-DataRepresentation2.CubeAxesTickLocation = 'Inside'
-DataRepresentation2.BackfaceDiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation2.CubeAxesYAxisVisibility = 1
-DataRepresentation2.SelectionPointLabelFontFamily = 'Arial'
-DataRepresentation2.Source.ShaftResolution = 6
-DataRepresentation2.CubeAxesFlyMode = 'Closest Triad'
-DataRepresentation2.SelectScaleArray = ''
-DataRepresentation2.CubeAxesYTitle = 'Y-Axis'
-DataRepresentation2.ColorAttributeType = 'POINT_DATA'
-DataRepresentation2.SpecularPower = 100.0
-DataRepresentation2.Texture = []
-DataRepresentation2.SelectionCellLabelShadow = 0
-DataRepresentation2.AmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation2.MapScalars = 1
-DataRepresentation2.PointSize = 2.0
-DataRepresentation2.Source.TipLength = 0.35
-DataRepresentation2.SelectionCellLabelFormat = ''
-DataRepresentation2.Scaling = 0
-DataRepresentation2.StaticMode = 0
-DataRepresentation2.SelectionCellLabelColor = [0.0, 1.0, 0.0]
-DataRepresentation2.Source.TipRadius = 0.1
-DataRepresentation2.EdgeColor = [0.0, 0.0, 0.5000076295109483]
-DataRepresentation2.CubeAxesXAxisTickVisibility = 1
-DataRepresentation2.SelectionCellLabelVisibility = 0
-DataRepresentation2.NonlinearSubdivisionLevel = 1
-DataRepresentation2.CubeAxesColor = [1.0, 1.0, 1.0]
-DataRepresentation2.Representation = 'Surface'
-DataRepresentation2.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
-DataRepresentation2.CubeAxesXAxisMinorTickVisibility = 1
-DataRepresentation2.Orientation = [0.0, 0.0, 0.0]
-DataRepresentation2.CubeAxesXTitle = 'X-Axis'
-DataRepresentation2.ScalarOpacityUnitDistance = 287.4628538795667
-DataRepresentation2.BackfaceOpacity = 1.0
-DataRepresentation2.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds'
-DataRepresentation2.SelectionColor = [1.0, 0.0, 1.0]
-DataRepresentation2.Ambient = 0.0
-DataRepresentation2.SelectionPointLabelFontSize = 18
-DataRepresentation2.ScaleFactor = 1.0
-DataRepresentation2.BackfaceAmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation2.Source.ShaftRadius = 0.03
-DataRepresentation2.ScalarOpacityFunction = a1_reader_moc_global_PiecewiseFunction
-DataRepresentation2.SelectMaskArray = ''
-DataRepresentation2.SelectionLineWidth = 2.0
-DataRepresentation2.CubeAxesZAxisMinorTickVisibility = 1
-DataRepresentation2.CubeAxesXAxisVisibility = 1
-DataRepresentation2.Interpolation = 'Gouraud'
-DataRepresentation2.SelectMapper = 'Projected tetra'
-DataRepresentation2.SelectionCellLabelFontFamily = 'Arial'
-DataRepresentation2.SelectionCellLabelItalic = 0
-DataRepresentation2.CubeAxesYAxisMinorTickVisibility = 1
-DataRepresentation2.CubeAxesZGridLines = 0
-DataRepresentation2.ExtractedBlockIndex = 0
-DataRepresentation2.SelectionPointLabelOpacity = 1.0
-DataRepresentation2.Pickable = 1
-DataRepresentation2.CustomBoundsActive = [0, 0, 0]
-DataRepresentation2.SelectionRepresentation = 'Wireframe'
-DataRepresentation2.SelectionPointLabelBold = 0
-DataRepresentation2.ColorArrayName = 'reader_moc_global'
-DataRepresentation2.SelectionPointLabelItalic = 0
-DataRepresentation2.SpecularColor = [1.0, 1.0, 1.0]
-DataRepresentation2.LookupTable = a1_reader_moc_global_PVLookupTable
-DataRepresentation2.SelectionPointSize = 5.0
-DataRepresentation2.SelectionCellLabelBold = 0
-DataRepresentation2.Orient = 0
-
-SetActiveSource(Transform1)
-DataRepresentation3 = Show()
-DataRepresentation3.CubeAxesZAxisVisibility = 1
-DataRepresentation3.SelectionPointLabelColor = [0.5, 0.5, 0.5]
-DataRepresentation3.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds'
-DataRepresentation3.SuppressLOD = 0
-DataRepresentation3.CubeAxesXGridLines = 0
-DataRepresentation3.CubeAxesYAxisTickVisibility = 1
-DataRepresentation3.Position = [0.0, 0.0, 0.0]
-DataRepresentation3.BackfaceRepresentation = 'Follow Frontface'
-DataRepresentation3.SelectionOpacity = 1.0
-DataRepresentation3.SelectionPointLabelShadow = 0
-DataRepresentation3.CubeAxesYGridLines = 0
-DataRepresentation3.OrientationMode = 'Direction'
-DataRepresentation3.Source.TipResolution = 6
-DataRepresentation3.ScaleMode = 'No Data Scaling Off'
-DataRepresentation3.Diffuse = 1.0
-DataRepresentation3.SelectionUseOutline = 0
-DataRepresentation3.SelectionPointLabelFormat = ''
-DataRepresentation3.CubeAxesZTitle = 'Z-Axis'
-DataRepresentation3.Specular = 0.1
-DataRepresentation3.SelectionVisibility = 1
-DataRepresentation3.InterpolateScalarsBeforeMapping = 1
-DataRepresentation3.CubeAxesZAxisTickVisibility = 1
-DataRepresentation3.Origin = [0.0, 0.0, 0.0]
-DataRepresentation3.CubeAxesVisibility = 0
-DataRepresentation3.Scale = [1.0, 1.0, 1.0]
-DataRepresentation3.SelectionCellLabelJustification = 'Left'
-DataRepresentation3.DiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation3.SelectionCellLabelOpacity = 1.0
-DataRepresentation3.CubeAxesInertia = 1
-DataRepresentation3.Source = "Arrow"
-DataRepresentation3.Source.Invert = 0
-DataRepresentation3.Masking = 0
-DataRepresentation3.Opacity = 1.0
-DataRepresentation3.LineWidth = 1.0
-DataRepresentation3.MeshVisibility = 0
-DataRepresentation3.Visibility = 1
-DataRepresentation3.SelectionCellLabelFontSize = 18
-DataRepresentation3.CubeAxesCornerOffset = 0.0
-DataRepresentation3.SelectionPointLabelJustification = 'Left'
-DataRepresentation3.SelectionPointLabelVisibility = 0
-DataRepresentation3.SelectOrientationVectors = ''
-DataRepresentation3.CubeAxesTickLocation = 'Inside'
-DataRepresentation3.BackfaceDiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation3.CubeAxesYAxisVisibility = 1
-DataRepresentation3.SelectionPointLabelFontFamily = 'Arial'
-DataRepresentation3.Source.ShaftResolution = 6
-DataRepresentation3.CubeAxesFlyMode = 'Closest Triad'
-DataRepresentation3.SelectScaleArray = ''
-DataRepresentation3.CubeAxesYTitle = 'Y-Axis'
-DataRepresentation3.ColorAttributeType = 'POINT_DATA'
-DataRepresentation3.SpecularPower = 100.0
-DataRepresentation3.Texture = []
-DataRepresentation3.SelectionCellLabelShadow = 0
-DataRepresentation3.AmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation3.MapScalars = 1
-DataRepresentation3.PointSize = 2.0
-DataRepresentation3.Source.TipLength = 0.35
-DataRepresentation3.SelectionCellLabelFormat = ''
-DataRepresentation3.Scaling = 0
-DataRepresentation3.StaticMode = 0
-DataRepresentation3.SelectionCellLabelColor = [0.0, 1.0, 0.0]
-DataRepresentation3.Source.TipRadius = 0.1
-DataRepresentation3.EdgeColor = [0.0, 0.0, 0.5000076295109483]
-DataRepresentation3.CubeAxesXAxisTickVisibility = 1
-DataRepresentation3.SelectionCellLabelVisibility = 0
-DataRepresentation3.NonlinearSubdivisionLevel = 1
-DataRepresentation3.CubeAxesColor = [1.0, 1.0, 1.0]
-DataRepresentation3.Representation = 'Surface'
-DataRepresentation3.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
-DataRepresentation3.CubeAxesXAxisMinorTickVisibility = 1
-DataRepresentation3.Orientation = [0.0, 0.0, 0.0]
-DataRepresentation3.CubeAxesXTitle = 'X-Axis'
-DataRepresentation3.ScalarOpacityUnitDistance = 388.2163580108114
-DataRepresentation3.BackfaceOpacity = 1.0
-DataRepresentation3.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds'
-DataRepresentation3.SelectionColor = [1.0, 0.0, 1.0]
-DataRepresentation3.Ambient = 0.0
-DataRepresentation3.SelectionPointLabelFontSize = 18
-DataRepresentation3.ScaleFactor = 1.0
-DataRepresentation3.BackfaceAmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation3.Source.ShaftRadius = 0.03
-DataRepresentation3.ScalarOpacityFunction = a1_reader_moc_global_PiecewiseFunction
-DataRepresentation3.SelectMaskArray = ''
-DataRepresentation3.SelectionLineWidth = 2.0
-DataRepresentation3.CubeAxesZAxisMinorTickVisibility = 1
-DataRepresentation3.CubeAxesXAxisVisibility = 1
-DataRepresentation3.Interpolation = 'Gouraud'
-DataRepresentation3.SelectMapper = 'Projected tetra'
-DataRepresentation3.SelectionCellLabelFontFamily = 'Arial'
-DataRepresentation3.SelectionCellLabelItalic = 0
-DataRepresentation3.CubeAxesYAxisMinorTickVisibility = 1
-DataRepresentation3.CubeAxesZGridLines = 0
-DataRepresentation3.ExtractedBlockIndex = 0
-DataRepresentation3.SelectionPointLabelOpacity = 1.0
-DataRepresentation3.Pickable = 1
-DataRepresentation3.CustomBoundsActive = [0, 0, 0]
-DataRepresentation3.SelectionRepresentation = 'Wireframe'
-DataRepresentation3.SelectionPointLabelBold = 0
-DataRepresentation3.ColorArrayName = 'reader_moc_global'
-DataRepresentation3.SelectionPointLabelItalic = 0
-DataRepresentation3.SpecularColor = [1.0, 1.0, 1.0]
-DataRepresentation3.LookupTable = a1_reader_moc_global_PVLookupTable
-DataRepresentation3.SelectionPointSize = 5.0
-DataRepresentation3.SelectionCellLabelBold = 0
-DataRepresentation3.Orient = 0
-
-
-
-IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views)
-
-
-print 'ending'
diff --git a/uvcdatspt/scripts/MOCTemporalStatistics.py b/uvcdatspt/scripts/MOCTemporalStatistics.py
deleted file mode 100644
index c6d51900b..000000000
--- a/uvcdatspt/scripts/MOCTemporalStatistics.py
+++ /dev/null
@@ -1,26 +0,0 @@
-print 'starting'
-import sys
-from paraview.simple import *
-
-if len(sys.argv) < 3:
-    print 'Usage: pvbatch MOCTemporalStatistics.py <output file name> <input file names>'
-    sys.exit(1)
-
-paraview.simple._DisableFirstRenderCameraReset()
-reader = MOCFileSeriesReader()
-print 'input file names are: ', sys.argv[2:len(sys.argv)]
-print 'output file name is: ', sys.argv[1]
-reader.FileName = sys.argv[2:len(sys.argv)]
-
-MultiBlockTemporalStatistics1 = MultiBlockTemporalStatistics()
-MultiBlockTemporalStatistics1.TimeStepType = 0
-MultiBlockTemporalStatistics1.SamplingMethod = 1
-MultiBlockTemporalStatistics1.TimeSpan = 0
-MultiBlockTemporalStatistics1.TimeStepLength = 1
-MultiBlockTemporalStatistics1.TimeCompartmentSize = 16
-
-writer = XMLMultiBlockDataWriter()
-writer.FileName = sys.argv[1]
-writer.UpdatePipeline()
-
-print 'ending'
diff --git a/uvcdatspt/scripts/MWehnerTemporalStatistics.py b/uvcdatspt/scripts/MWehnerTemporalStatistics.py
deleted file mode 100644
index d9f2f4c1a..000000000
--- a/uvcdatspt/scripts/MWehnerTemporalStatistics.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Script for computing temporal statistics (average, minimum, maximum
-# and standard deviation) on hopper.nersc.gov. The input is a single
-# file that contains multipe time steps. The time compartment size is
-# a command line argument.
-
-import sys
-import time
-start = time.time()
-
-try: paraview.simple
-except: from paraview.simple import *
-paraview.simple._DisableFirstRenderCameraReset()
-
-import libvtkParallelPython
-import paraview
-pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-globalController = pm.GetGlobalController()
-pid = globalController.GetLocalProcessId()
-
-tcsize = sys.argv[1]
-
-fileName = "statsmwhenertwod.vtm"
-
-if pid == 0:
-    print 'starting script with tcsize of ', tcsize, ' and output filename using ', fileName
-
-V_cam5_1_amip_run2_cam2_h0_1994_nc = NetCDFReader( FileName=['/global/project/projectdirs/m1517/ACE/cam5.1/control/0.25_degre
-e/monthly/run2/zg_Amon_CAM5.1_0.25degree_control_v1.0_run2_197901-200512.nc'] )
-
-V_cam5_1_amip_run2_cam2_h0_1994_nc.Dimensions = '(plev, lat, lon)'
-V_cam5_1_amip_run2_cam2_h0_1994_nc.SphericalCoordinates = 0
-
-MultiBlockTemporalStatistics1 = MultiBlockTemporalStatistics()
-MultiBlockTemporalStatistics1.TimeStepType = 'Months'
-#MultiBlockTemporalStatistics1.SamplingMethod = 'Consecutive'
-MultiBlockTemporalStatistics1.SamplingMethod = 'Climatology'
-#MultiBlockTemporalStatistics1.TimeSpan = 'Year'
-MultiBlockTemporalStatistics1.TimeSpan = 'AllTimeSteps'
-MultiBlockTemporalStatistics1.TimeCompartmentSize = int(tcsize)
-
-writer = XMLMultiBlockDataWriter()
-writer.FileName = fileName
-
-writer.UpdatePipeline()
-if pid == 0:
-    print 'finished run in ', time.time()-start
-
diff --git a/uvcdatspt/scripts/POPGenerateImages.py b/uvcdatspt/scripts/POPGenerateImages.py
deleted file mode 100644
index 86f61e47f..000000000
--- a/uvcdatspt/scripts/POPGenerateImages.py
+++ /dev/null
@@ -1,310 +0,0 @@
-# Spatio-temporal script for generating images for POP NetCDF
-# output files. This one pseudo-colors by TEMP. It has a
-# time compartment size of 4 so the number of processes
-# also needs to be a multiple of 4. To run it, do:
-# mpirun -np <numprocs> ./pvbatch --symmetric POPGenerateImages.py
-
-try: paraview.simple
-except: from paraview.simple import *
-
-import sys
-import os
-import paraview
-
-# trying to import the library where I can specify the global and subcontrollers
-try:
-    import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set
-except ImportError:
-    import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH
-
-paraview.options.batch = True # this may not be necessary
-paraview.simple._DisableFirstRenderCameraReset()
-
-def CreateTimeCompartments(globalController, timeCompartmentSize):
-    if globalController.GetNumberOfProcesses() == 1:
-        print 'single process'
-        return
-    elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0:
-        print 'number of processes must be an integer multiple of time compartment size'
-        return
-    elif timeCompartmentSize == globalController.GetNumberOfProcesses():
-        return globalController
-
-    gid = globalController.GetLocalProcessId()
-    timeCompartmentGroupId = int (gid / timeCompartmentSize )
-    newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize)
-    # must unregister if the reference count is greater than 1
-    if newController.GetReferenceCount() > 1:
-        newController.UnRegister(None)
-
-    #print gid, timeCompartmentGroupId, gid % timeCompartmentSize
-    print gid, ' of global comm is ', newController.GetLocalProcessId()
-    globalController.SetGlobalController(newController)
-    return newController
-
-def CheckReader(reader):
-    if hasattr(reader, "FileName") == False:
-        print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName()
-        sys.exit(-1)
-
-    if hasattr(reader, "TimestepValues") == False:
-        print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information"
-        sys.exit(-1)
-
-def CreateControllers(timeCompartmentSize):
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    globalController = pm.GetGlobalController()
-    if timeCompartmentSize > globalController.GetNumberOfProcesses():
-        timeCompartmentSize = globalController.GetNumberOfProcesses()
-
-    temporalController = CreateTimeCompartments(globalController, timeCompartmentSize)
-    return globalController, temporalController, timeCompartmentSize
-
-currentTimeStep = -1
-def UpdateCurrentTimeStep(globalController, timeCompartmentSize):
-    global currentTimeStep
-    if currentTimeStep == -1:
-        currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize
-        return currentTimeStep
-
-    numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize
-    currentTimeStep = currentTimeStep + numTimeStepsPerIteration
-    return currentTimeStep
-
-def WriteImages(currentTimeStep, currentTime, views):
-    for view in views:
-        filename = view.tpFileName.replace("%t", str(currentTimeStep))
-        view.ViewTime = currentTime
-        WriteImage(filename, view, Magnification=view.tpMagnification)
-
-def WriteFiles(currentTimeStep, currentTime, writers):
-    for writer in writers:
-        originalfilename = writer.FileName
-        fname = originalfilename.replace("%t", str(currentTimeStep))
-        writer.FileName = fname
-        writer.UpdatePipeline(currentTime)
-        writer.FileName = originalfilename
-
-def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views):
-    currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-    while currentTimeStep < len(timeSteps):
-        print globalController.GetLocalProcessId(), " is working on ", currentTimeStep
-        WriteImages(currentTimeStep, timeSteps[currentTimeStep], views)
-        WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers)
-        currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-
-def CreateReader(ctor, args, fileInfo):
-    "Creates a reader, checks if it can be used, and sets the filenames"
-    reader = ctor()
-    CheckReader(reader)
-    import glob
-    files = glob.glob(fileInfo)
-    files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering
-    reader.FileName = files
-    for a in args:
-        s = "reader."+a
-        exec (s)
-
-    return reader
-
-def CreateWriter(ctor, filename, tp_writers):
-    writer = ctor()
-    writer.FileName = filename
-    tp_writers.append(writer)
-    return writer
-
-def CreateView(proxy_ctor, filename, magnification, width, height, tp_views):
-    view = proxy_ctor()
-    view.add_attribute("tpFileName", filename)
-    view.add_attribute("tpMagnification", magnification)
-    tp_views.append(view)
-    view.ViewSize = [width, height]
-    return view
-
-tp_writers = []
-tp_views = []
-# ==================== end of specialized temporal parallelism sections ==================
-
-timeCompartmentSize = 4
-globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize)
-
-RenderView1 = CreateView( CreateRenderView, "POP_TEMP_%t.png", 1, 549, 583, tp_views )
-RenderView1.LightSpecularColor = [1.0, 1.0, 1.0]
-RenderView1.InteractionMode = '3D'
-RenderView1.UseTexturedBackground = 0
-RenderView1.UseLight = 1
-RenderView1.CameraPosition = [24413625.828416377, -24592716.541236263, 5758186.884780747]
-RenderView1.FillLightKFRatio = 3.0
-RenderView1.Background2 = [0.0, 0.0, 0.165]
-RenderView1.FillLightAzimuth = -10.0
-RenderView1.LODResolution = 50.0
-RenderView1.BackgroundTexture = []
-RenderView1.KeyLightAzimuth = 10.0
-RenderView1.StencilCapable = 1
-RenderView1.LightIntensity = 1.0
-RenderView1.CameraFocalPoint = [1.78529588937719e-12, 1.4505529101189668e-12, 64147.750000000015]
-RenderView1.ImageReductionFactor = 2
-RenderView1.CameraViewAngle = 30.0
-RenderView1.CameraParallelScale = 30343845.664423227
-RenderView1.EyeAngle = 2.0
-RenderView1.HeadLightKHRatio = 3.0
-RenderView1.StereoRender = 0
-RenderView1.KeyLightIntensity = 0.75
-RenderView1.BackLightAzimuth = 110.0
-RenderView1.OrientationAxesInteractivity = 0
-RenderView1.UseInteractiveRenderingForSceenshots = 0
-RenderView1.UseOffscreenRendering = 0
-RenderView1.Background = [0.31999694819562063, 0.3400015259021897, 0.4299992370489052]
-RenderView1.UseOffscreenRenderingForScreenshots = 1
-RenderView1.NonInteractiveRenderDelay = 2
-RenderView1.CenterOfRotation = [0.0, 0.0, 64147.75]
-RenderView1.CameraParallelProjection = 0
-RenderView1.CompressorConfig = 'vtkSquirtCompressor 0 3'
-RenderView1.HeadLightWarmth = 0.5
-RenderView1.MaximumNumberOfPeels = 4
-RenderView1.LightDiffuseColor = [1.0, 1.0, 1.0]
-RenderView1.StereoType = 'Red-Blue'
-RenderView1.DepthPeeling = 1
-RenderView1.BackLightKBRatio = 3.5
-RenderView1.StereoCapableWindow = 1
-RenderView1.CameraViewUp = [0.0471859955443886, 0.2695389330828218, 0.9618327533293193]
-RenderView1.LightType = 'HeadLight'
-RenderView1.LightAmbientColor = [1.0, 1.0, 1.0]
-RenderView1.RemoteRenderThreshold = 3.0
-RenderView1.KeyLightElevation = 50.0
-RenderView1.CenterAxesVisibility = 0
-RenderView1.MaintainLuminance = 0
-RenderView1.StillRenderImageReductionFactor = 1
-RenderView1.BackLightWarmth = 0.5
-RenderView1.FillLightElevation = -75.0
-RenderView1.MultiSamples = 0
-RenderView1.FillLightWarmth = 0.4
-RenderView1.AlphaBitPlanes = 1
-RenderView1.LightSwitch = 0
-RenderView1.OrientationAxesVisibility = 0
-RenderView1.CameraClippingRange = [15039199.876017962, 60476974.08593859]
-RenderView1.BackLightElevation = 0.0
-RenderView1.ViewTime = 0.0
-RenderView1.OrientationAxesOutlineColor = [1.0, 1.0, 1.0]
-RenderView1.LODThreshold = 5.0
-RenderView1.CollectGeometryThreshold = 100.0
-RenderView1.UseGradientBackground = 0
-RenderView1.KeyLightWarmth = 0.6
-RenderView1.OrientationAxesLabelColor = [1.0, 1.0, 1.0]
-
-TEMP_t_t0_1_42l_oilspill12c_00060101_pop_nc = CreateReader( UnstructuredNetCDFPOPreader, ['Stride=[10, 10, 10]', 'VerticalVelocity=0', 'VOI=[0, -1, 0, -1, 0, -1]'], "/home/acbauer/DATA/UVCDAT/TEMP.t.t0.1_42l_oilspill12c.*.pop.nc" )
-timeSteps = GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0]
-a1_TEMP_PiecewiseFunction = CreatePiecewiseFunction( Points=[0.0, 0.0, 1.0, 1.0] )
-
-a1_TEMP_PVLookupTable = GetLookupTableForArray( "TEMP", 1, Discretize=1, RGBPoints=[-20.0, 0.23, 0.299, 0.754, 31.338409423828125, 0.706, 0.016, 0.15], UseLogScale=0, VectorComponent=0, NanColor=[0.25, 0.0, 0.0], NumberOfTableValues=256, ColorSpace='Diverging', VectorMode='Magnitude', HSVWrap=0, ScalarRangeInitialized=1.0, LockScalarRange=0 )
-
-DataRepresentation1 = Show()
-DataRepresentation1.CubeAxesZAxisVisibility = 1
-DataRepresentation1.SelectionPointLabelColor = [0.5, 0.5, 0.5]
-DataRepresentation1.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds'
-DataRepresentation1.SuppressLOD = 0
-DataRepresentation1.CubeAxesXGridLines = 0
-DataRepresentation1.CubeAxesYAxisTickVisibility = 1
-DataRepresentation1.Position = [0.0, 0.0, 0.0]
-DataRepresentation1.BackfaceRepresentation = 'Follow Frontface'
-DataRepresentation1.SelectionOpacity = 1.0
-DataRepresentation1.SelectionPointLabelShadow = 0
-DataRepresentation1.CubeAxesYGridLines = 0
-DataRepresentation1.OrientationMode = 'Direction'
-DataRepresentation1.Source.TipResolution = 6
-DataRepresentation1.ScaleMode = 'No Data Scaling Off'
-DataRepresentation1.Diffuse = 1.0
-DataRepresentation1.SelectionUseOutline = 0
-DataRepresentation1.SelectionPointLabelFormat = ''
-DataRepresentation1.CubeAxesZTitle = 'Z-Axis'
-DataRepresentation1.Specular = 0.1
-DataRepresentation1.SelectionVisibility = 1
-DataRepresentation1.InterpolateScalarsBeforeMapping = 1
-DataRepresentation1.CubeAxesZAxisTickVisibility = 1
-DataRepresentation1.Origin = [0.0, 0.0, 0.0]
-DataRepresentation1.CubeAxesVisibility = 0
-DataRepresentation1.Scale = [1.0, 1.0, 1.0]
-DataRepresentation1.SelectionCellLabelJustification = 'Left'
-DataRepresentation1.DiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation1.SelectionCellLabelOpacity = 1.0
-DataRepresentation1.CubeAxesInertia = 1
-DataRepresentation1.Source = "Arrow"
-DataRepresentation1.Source.Invert = 0
-DataRepresentation1.Masking = 0
-DataRepresentation1.Opacity = 1.0
-DataRepresentation1.LineWidth = 1.0
-DataRepresentation1.MeshVisibility = 0
-DataRepresentation1.Visibility = 1
-DataRepresentation1.SelectionCellLabelFontSize = 18
-DataRepresentation1.CubeAxesCornerOffset = 0.0
-DataRepresentation1.SelectionPointLabelJustification = 'Left'
-DataRepresentation1.SelectionPointLabelVisibility = 0
-DataRepresentation1.SelectOrientationVectors = ''
-DataRepresentation1.CubeAxesTickLocation = 'Inside'
-DataRepresentation1.BackfaceDiffuseColor = [1.0, 1.0, 1.0]
-DataRepresentation1.CubeAxesYAxisVisibility = 1
-DataRepresentation1.SelectionPointLabelFontFamily = 'Arial'
-DataRepresentation1.Source.ShaftResolution = 6
-DataRepresentation1.CubeAxesFlyMode = 'Closest Triad'
-DataRepresentation1.SelectScaleArray = ''
-DataRepresentation1.CubeAxesYTitle = 'Y-Axis'
-DataRepresentation1.ColorAttributeType = 'POINT_DATA'
-DataRepresentation1.SpecularPower = 100.0
-DataRepresentation1.Texture = []
-DataRepresentation1.SelectionCellLabelShadow = 0
-DataRepresentation1.AmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation1.MapScalars = 1
-DataRepresentation1.PointSize = 2.0
-DataRepresentation1.Source.TipLength = 0.35
-DataRepresentation1.SelectionCellLabelFormat = ''
-DataRepresentation1.Scaling = 0
-DataRepresentation1.StaticMode = 0
-DataRepresentation1.SelectionCellLabelColor = [0.0, 1.0, 0.0]
-DataRepresentation1.Source.TipRadius = 0.1
-DataRepresentation1.EdgeColor = [0.0, 0.0, 0.5000076295109483]
-DataRepresentation1.CubeAxesXAxisTickVisibility = 1
-DataRepresentation1.SelectionCellLabelVisibility = 0
-DataRepresentation1.NonlinearSubdivisionLevel = 1
-DataRepresentation1.CubeAxesColor = [1.0, 1.0, 1.0]
-DataRepresentation1.Representation = 'Surface'
-DataRepresentation1.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0]
-DataRepresentation1.CubeAxesXAxisMinorTickVisibility = 1
-DataRepresentation1.Orientation = [0.0, 0.0, 0.0]
-DataRepresentation1.CubeAxesXTitle = 'X-Axis'
-DataRepresentation1.ScalarOpacityUnitDistance = 313870.26193506655
-DataRepresentation1.BackfaceOpacity = 1.0
-DataRepresentation1.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds'
-DataRepresentation1.SelectionColor = [1.0, 0.0, 1.0]
-DataRepresentation1.Ambient = 0.0
-DataRepresentation1.SelectionPointLabelFontSize = 18
-DataRepresentation1.ScaleFactor = 1.0
-DataRepresentation1.BackfaceAmbientColor = [1.0, 1.0, 1.0]
-DataRepresentation1.Source.ShaftRadius = 0.03
-DataRepresentation1.ScalarOpacityFunction = a1_TEMP_PiecewiseFunction
-DataRepresentation1.SelectMaskArray = ''
-DataRepresentation1.SelectionLineWidth = 2.0
-DataRepresentation1.CubeAxesZAxisMinorTickVisibility = 1
-DataRepresentation1.CubeAxesXAxisVisibility = 1
-DataRepresentation1.Interpolation = 'Gouraud'
-DataRepresentation1.SelectMapper = 'Projected tetra'
-DataRepresentation1.SelectionCellLabelFontFamily = 'Arial'
-DataRepresentation1.SelectionCellLabelItalic = 0
-DataRepresentation1.CubeAxesYAxisMinorTickVisibility = 1
-DataRepresentation1.CubeAxesZGridLines = 0
-DataRepresentation1.ExtractedBlockIndex = 0
-DataRepresentation1.SelectionPointLabelOpacity = 1.0
-DataRepresentation1.Pickable = 1
-DataRepresentation1.CustomBoundsActive = [0, 0, 0]
-DataRepresentation1.SelectionRepresentation = 'Wireframe'
-DataRepresentation1.SelectionPointLabelBold = 0
-DataRepresentation1.ColorArrayName = 'TEMP'
-DataRepresentation1.SelectionPointLabelItalic = 0
-DataRepresentation1.SpecularColor = [1.0, 1.0, 1.0]
-DataRepresentation1.LookupTable = a1_TEMP_PVLookupTable
-DataRepresentation1.SelectionPointSize = 5.0
-DataRepresentation1.SelectionCellLabelBold = 0
-DataRepresentation1.Orient = 0
-
-
-
-IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views)
diff --git a/uvcdatspt/scripts/benchmark.py b/uvcdatspt/scripts/benchmark.py
deleted file mode 100644
index dca7f2ab8..000000000
--- a/uvcdatspt/scripts/benchmark.py
+++ /dev/null
@@ -1,626 +0,0 @@
-"""
-This module has utilities to benchmark paraview.
-
-First, when run standalone, this will do a simple rendering benchmark test. The
-test renders a sphere with various rendering settings and reports the rendering
-rate achieved in triangles/sec. run() is the entrypoint for that usage.
-
-Second, you can set up arbitrary pipelines and this module helps you obtain,
-interpret and report the information recorded by ParaView's logs.
-Do that like so:
-1) optionally, call maximize logs first
-2) setup and run your visualization pipeline (via GUI or script as you prefer)
-3) either
-- call print_logs() to print out the logs in raw format
-or
-- call parse_logs() to let the script identify and report on per frame and per
-filter execution times
-
-WARNING: This was meant for server side rendering, but it could work
-reasonably well when geometry is delivered to the client and rendered there
-if the script were changed to recognize MPIMoveData as end of frame and did
-something sensible on the server which has no other end of frame knowledge
-
-TODO: builtin mode shouldn't show server info, it is redundant
-TODO: this doesn't handle split render/data server mode
-TODO: the end of frame markers are heuristic, likely buggy, and have not
-been tried since before 3.9's view restructuring
-"""
-
-import time
-import sys
-from paraview.simple import *
-
-try:
-    import numpy
-    numpy_loaded = True
-except ImportError:
-    numpy_loaded = False
-
-import re
-import paraview
-import copy
-import pickle
-
-# a regular expression to parse filter execution time
-match_filter = re.compile(" *Execute (\w+) id: +(\d+), +(\d*.*\d+) +seconds")
-match_vfilter = re.compile(" *Execute (\w+) +, +(\d*.*\d+) +seconds")
-
-# a regular expression to parse overall rendering time
-match_still_render = re.compile(" *(Still) Render, +(\d*.*\d+) +seconds")
-match_interactive_render = \
-re.compile(" *(Interactive) Render, +(\d*.*\d+) +seconds")
-match_render = re.compile(" *(\w+|\w+ Dev) Render, +(\d*.*\d+) +seconds")
-match_icetrender = re.compile("(IceT Dev) Render, +(\d*.*\d+) +seconds")
-
-# more for parallel composite and delivery time
-match_composite = re.compile(" *Compositing, +(\d*.*\d+) +seconds")
-match_send = re.compile(" *Sending, +(\d*.*\d+) +seconds")
-match_receive = re.compile(" *Receiving, +(\d*.*\d+) +seconds")
-
-match_comp_xmit = \
-re.compile(" *TreeComp (Send|Receive) (\d+) " + \
-           "(to|from) (\d+) uchar (\d+), +(\d*.*\d+) +seconds")
-match_comp_comp = re.compile(" *TreeComp composite, *(\d*.*\d+) +seconds")
-
-showparse = False
-
-#icet composite message comes after the render messages,
-#where for bswap and manta it comes before so we have to treat icet differently
-icetquirk = False
-
-start_frame = 0
-default_log_threshold = dict()
-default_buffer_length = dict()
-
-class OneLog :
-    def __init__(self):
-        self.runmode = 'batch'
-        self.servertype = 'unified'
-        self.component = 0x10
-        self.rank = 0
-        self.lines = []
-
-    def componentString(self):
-        ret = ""
-        if self.component & 0x10:
-            ret = ret + " CLIENT "
-        if self.component & 0x4:
-            ret = ret + " RENDER "
-        if self.component & 0x1:
-            ret = ret + " DATA "
-        return ret
-
-    def print_log(self, showlines=False):
-        print "#RunMode:", self.runmode,
-        print "ServerType:", self.servertype,
-        print "Component:", self.componentString(),
-        print "processor#:", self.rank
-        if showlines:
-            for i in self.lines:
-                print i
-
-logs = []
-
-def maximize_logs () :
-    """
-    Convenience method to ask paraview to produce logs with lots of space and
-    highest resolution.
-    """
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    if pm == None:
-        return
-
-    # Not used here...
-    default_buffer_length[str(0x01)] = 1000000
-    default_buffer_length[str(0x04)] = 1000000
-    default_buffer_length[str(0x10)] = 1000000
-
-    default_log_threshold[str(0x01)] = 0.0
-    default_log_threshold[str(0x04)] = 0.0
-    default_log_threshold[str(0x10)] = 0.0
-
-
-def dump_logs( filename ) :
-    """
-    This saves off the logs we've gathered.
-    Ot allows you to run a benchmark somewhere, save off all of the details in
-    raw format, then load them somewhere else. You can then do a detailed
-    analysis and you always have the raw data to go back to.
-    """
-    global logs
-    f = open(filename, "w")
-    pickle.dump(logs, f)
-    f.close()
-
-def import_logs( filename ) :
-    """
-    This is for bringing in a saved log files and parse it after the fact.
-    TODO: add an option to load in raw parview logs in text format
-    """
-    global logs
-    logs = []
-    f = open(filename, "r")
-    logs = pickle.load(f)
-    f.close()
-
-def get_logs() :
-    """
-    This is for bringing in logs at run time to parse while running.
-    """
-    global logs
-    logs = []
-
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    if pm == None:
-        return
-
-    connectionId = paraview.servermanager.ActiveConnection.ID
-    session = paraview.servermanager.ActiveConnection.Session
-    pmOptions = pm.GetOptions()
-
-    """
-    vtkPVOptions::ProcessTypeEnum
-    PARAVIEW = 0x2,
-    PVCLIENT = 0x4,
-    PVSERVER = 0x8,
-    PVRENDER_SERVER = 0x10,
-    PVDATA_SERVER = 0x20,
-    PVBATCH = 0x40,
-    """
-    if pmOptions.GetProcessType() == 0x40:
-        runmode = 'batch'
-    else:
-        runmode = 'interactive'
-
-    """
-    vtkSMSession::RenderingMode
-    RENDERING_NOT_AVAILABLE = 0x00,
-    RENDERING_UNIFIED = 0x01,
-    RENDERING_SPLIT = 0x02
-    """
-    if session.GetRenderClientMode() == 0x01:
-        servertype = 'unified'
-    else:
-        servertype = 'split'
-
-    """
-    vtkProcessModule::SERVER_FLAGS
-    DATA_SERVER = 0x01,
-    DATA_SERVER_ROOT = 0x02,
-    RENDER_SERVER = 0x04,
-    RENDER_SERVER_ROOT = 0x08,
-    SERVERS = DATA_SERVER | RENDER_SERVER,
-    CLIENT = 0x10,
-    CLIENT_AND_SERVERS = DATA_SERVER | CLIENT | RENDER_SERVER
-    """
-    if runmode == 'batch':
-        components = [0x04]
-    else:
-        if servertype == 'unified':
-            components = [0x10, 0x04]
-        else:
-            components = [0x10, 0x04, 0x01]
-
-    for component in components:
-        timerInfo = paraview.servermanager.vtkPVTimerInformation()
-        if len(default_log_threshold) != 0:
-           timerInfo.SetLogThreshold(default_log_threshold[str(component)])
-        session.GatherInformation(component, timerInfo, 0)
-
-        for i in range(timerInfo.GetNumberOfLogs()):
-            alog = OneLog()
-            alog.runmode = runmode
-            alog.servertype = servertype
-            alog.component = component
-            alog.rank = i
-            for line in timerInfo.GetLog(i).split('\n'):
-                alog.lines.append(line)
-            logs.append(alog)
-
-def print_logs() :
-    global logs
-
-    if len(logs) == 0:
-        get_logs()
-
-    for i in logs:
-       i.print_log(True)
-
-def __process_frame() :
-    global filters
-    global current_frames_records
-    global frames
-    global start_frame
-
-    max = len(current_frames_records)
-
-    #determine ancestry of each record from order and indent
-    #subtract only immediate children from each record
-
-    #TODO: Make this an option
-    for x in xrange(max):
-        indent = current_frames_records[x]['indent']
-        minindent = 10000
-        for y in xrange(x+1,max):
-            indent2 = current_frames_records[y]['indent']
-            if indent2<=indent:
-                #found a record which is not a descendant
-                break
-            if indent2 < minindent:
-                minindent = indent2
-        for y in xrange(x+1,max):
-            indent2 = current_frames_records[y]['indent']
-            if indent2 == minindent:
-                current_frames_records[x]['local_duration'] = \
-                current_frames_records[x]['local_duration'] -\
-                current_frames_records[y]['duration']
-
-    for x in xrange(max):
-        #keep global statics per filter
-        record = current_frames_records[x]
-        id = record['id']
-        if id in filters:
-            srecord = filters[id]
-            srecord['duration'] = srecord['duration'] + record['duration']
-            srecord['local_duration'] = srecord['local_duration'] +\
-                                        record['local_duration']
-            srecord['count'] = srecord['count'] + 1
-            filters[id] = srecord
-        else:
-            filters[id] = copy.deepcopy(record)
-
-    #save off this frame and begin the next
-    frames.append(current_frames_records)
-    current_frames_records = []
-
-def __parse_line (line) :
-    """
-    Examine one line from the logs. If it is a report about a filter's
-    execution time, parse the relevant information out of the line and
-    collect those statistics. We record each filter's average execution
-    time as well as the each filters contribution to the each rendered frame.
-    """
-    global filters
-    global current_frames_records
-    global cnt
-    global show_input
-    global icetquirk
-
-    found = False
-
-    #find indent
-    cnt = 0
-    for c in range(len(line)):
-        if line[c] == " ":
-            cnt = cnt + 1
-        else:
-            break
-
-    #determine if this log comes from icet so we can
-    #do special case treatement for frame markings
-    icetline = False
-    match = match_icetrender.match(line)
-    if match != None:
-        icetquirk = True
-        icetline = True
-
-    match = match_filter.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "FILT:", cnt, line
-        name = match.group(1)
-        id = match.group(2)
-        duration = match.group(3)
-
-    match = match_vfilter.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "LFLT:", cnt, line
-        name = match.group(1)
-        id = name
-        duration = match.group(2)
-
-    match = match_comp_comp.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "TCMP:", cnt, line
-        name = "tree comp"
-        id = name
-        duration = match.group(1)
-
-    match = match_comp_xmit.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "TXMT:", cnt, line
-        name = match.group(1)
-        id = name
-        duration = match.group(6)
-
-    match = match_composite.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "COMP:", cnt, line
-        name = 'composite'
-        id = 'comp'
-        duration = match.group(1)
-
-    match = match_send.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "SEND:", cnt, line
-        name = 'send'
-        id = 'send'
-        duration = match.group(1)
-
-    match = match_receive.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "RECV:", cnt, line
-        name = 'receive'
-        id = 'recv'
-        duration = match.group(1)
-
-    match = match_still_render.match(line)
-    if match != None:
-        found = True
-        if showparse:
-            print "STILL:", cnt, line
-        name = match.group(1)
-        id = 'still'
-        duration = match.group(2)
-
-    if match == None:
-        match = match_interactive_render.match(line)
-        if match != None:
-            found = True
-            if showparse:
-                print "INTER:", cnt, line
-            name = match.group(1)
-            id = 'inter'
-            duration = match.group(2)
-
-    if match == None:
-        match = match_render.match(line)
-        if match != None:
-            found = True
-            if showparse:
-                print "REND:", cnt, line
-            name = match.group(1)
-            id = 'render'
-            duration = match.group(2)
-
-    if found == False:
-        # we didn't find anything we recognized in this line, ignore it
-        if showparse:
-            print "????:", cnt, line
-        return
-
-    record = dict()
-    record['id'] = id
-    record['name'] = name
-    record['duration'] = float(duration)
-    record['local_duration'] = float(duration)
-    record['count'] = 1
-    record['indent'] = cnt
-
-    #watch for the beginning of the next frame/end of previous frame
-    if cnt == 0:
-        if (id == 'still') or \
-           (id == 'inter') or \
-           (icetquirk == False and id == 'comp') or \
-           (icetquirk == True and icetline == True) :
-            if showparse:
-                print "SOF" #start of frame
-            #decipher parent child information from records in the frame
-            #and save off newly gathered per filter and per frame statistics
-            __process_frame()
-
-    #keep a record of this execution as part for the current frame
-    current_frames_records.append(record)
-
-    return
-
-def parse_logs(show_parse = False, tabular = False) :
-    """
-    Parse the collected paraview log information.
-    This prints out per frame, and aggregated per filter statistics.
-
-    If show_parse is true, debugging information is shown about the parsing
-    process that allows you to verify that the derived stats are correct.
-    This includes each and echo of each log line collected, prepended by
-    the token type and indent scanned in, or ???? if the line is unrecognized
-    and ignored. Frame boundaries are denoted by SOF, indicating the preceeding
-    line was determined to be the start of the next frame.
-    """
-
-    global filters
-    global current_frames_records
-    global frames
-    global cnt
-    global showparse
-    global start_frame
-
-    showparse = show_parse
-
-    if len(logs) == 0:
-        get_logs()
-
-    for i in logs:
-        # per filter records
-        filters = dict()
-        filters.clear()
-        # per frame records
-        frames = []
-        # components of current frame
-        current_frames_records = []
-        cnt = 0
-
-        runmode = i.runmode
-        servertype = i.servertype
-        component = i.component
-        rank = i.rank
-        i.print_log(False)
-
-        for line in i.lines:
-            __parse_line(line)
-
-        #collect stats for the current frame in process but not officially ended
-        __process_frame()
-
-        #print out the gathered per frame information
-        if tabular:
-            frecs = dict()
-            line = "#framenum, "
-            for x in filters:
-                line += filters[x]['name'] + ":" + filters[x]['id']  + ", "
-            #print line
-            for cnt in xrange(start_frame, len(frames)):
-                line = ""
-                line += str(cnt) + ", "
-                printed = dict()
-                for x in filters:
-                    id = filters[x]['id']
-                    name = filters[x]['name']
-                    found = False
-                    for record in frames[cnt]:
-                        if 'id' in record:
-                            if record['id'] == id and \
-                            record['name'] == name and \
-                            not id in printed:
-                                found = True
-                                printed[id] = 1
-                                line += str(record['local_duration']) + ", "
-                                if not id in frecs:
-                                    frecs[id] = []
-                                frecs[id].append(record['local_duration'])
-                    if not found:
-                        line += "0, "
-                #print line
-            #print
-            for x in frecs.keys():
-                v = frecs[x]
-                print "# ", x, len(v),
-                if numpy_loaded:
-                    print numpy.min(v), numpy.mean(v), numpy.max(v),
-                    print numpy.std(v)
-        else:
-            print "#FRAME TIMINGS"
-            print "#filter id, filter type, inclusive duration, local duration"
-            for cnt in xrange(start_frame, len(frames)):
-                print "#Frame ", cnt
-                for record in frames[cnt]:
-                    if 'id' in record:
-                        print record['id'], ",",
-                        print record['name'], ",",
-                        print record['duration'], ",",
-                        print record['local_duration']
-        #print
-        #print
-
-        if not tabular:
-            #print out the gathered per filter information
-            print "#FILTER TIMINGS"
-            print "#filter id, filter type, count, "+\
-                  "sum inclusive duration, sum local duration"
-            for x in filters:
-                record = filters[x]
-                print record['id'], ",",
-                print record['name'], ",",
-                print record['count'], ",",
-                print record['duration'], ",",
-                print record['local_duration']
-            print
-
-def __render(ss, v, title, nframes):
-    print '============================================================'
-    print title
-    res = []
-    res.append(title)
-    for phires in (500, 1000):
-        ss.PhiResolution = phires
-        c = v.GetActiveCamera()
-        v.CameraPosition = [-3, 0, 0]
-        v.CameraFocalPoint = [0, 0, 0]
-        v.CameraViewUp = [0, 0, 1]
-        Render()
-        c1 = time.time()
-        for i in range(nframes):
-            c.Elevation(0.5)
-            Render()
-            if not servermanager.fromGUI:
-                sys.stdout.write(".")
-                sys.stdout.flush()
-        if not servermanager.fromGUI:
-            sys.stdout.write("\n")
-        tpr = (time.time() - c1)/nframes
-        ncells = ss.GetDataInformation().GetNumberOfCells()
-        print tpr, " secs/frame"
-        print ncells, " polys"
-        print ncells/tpr, " polys/sec"
-
-        res.append((ncells, ncells/tpr))
-    return res
-
-def run(filename=None, nframes=60):
-    """ Runs the benchmark. If a filename is specified, it will write the
-    results to that file as csv. The number of frames controls how many times
-    a particular configuration is rendered. Higher numbers lead to more accurate
-    averages. """
-    # Turn off progress printing
-    paraview.servermanager.SetProgressPrintingEnabled(0)
-
-    # Create a sphere source to use in the benchmarks
-    ss = Sphere(ThetaResolution=1000, PhiResolution=500)
-    rep = Show()
-    v = Render()
-    results = []
-
-    # Start with these defaults
-    #v.RemoteRenderThreshold = 0
-    obj = servermanager.misc.GlobalMapperProperties()
-    obj.GlobalImmediateModeRendering = 0
-
-    # Test different configurations
-    title = 'display lists, no triangle strips, solid color'
-    obj.GlobalImmediateModeRendering = 0
-    results.append(__render(ss, v, title, nframes))
-
-    title = 'no display lists, no triangle strips, solid color'
-    obj.GlobalImmediateModeRendering = 1
-    results.append(__render(ss, v, title, nframes))
-
-    # Color by normals
-    lt = servermanager.rendering.PVLookupTable()
-    rep.LookupTable = lt
-    rep.ColorAttributeType = 0 # point data
-    rep.ColorArrayName = "Normals"
-    lt.RGBPoints = [-1, 0, 0, 1, 0.0288, 1, 0, 0]
-    lt.ColorSpace = 'HSV'
-    lt.VectorComponent = 0
-
-    title = 'display lists, no triangle strips, color by array'
-    obj.GlobalImmediateModeRendering = 0
-    results.append(__render(ss, v, title, nframes))
-
-    title = 'no display lists, no triangle strips, color by array'
-    obj.GlobalImmediateModeRendering = 1
-    results.append(__render(ss, v, title, nframes))
-
-    if filename:
-        f = open(filename, "w")
-    else:
-        f = sys.stdout
-    print >>f, 'configuration, %d, %d' % (results[0][1][0], results[0][2][0])
-    for i in results:
-        print >>f, '"%s", %g, %g' % (i[0], i[1][1], i[2][1])
-
-if __name__ == "__main__":
-    run()
diff --git a/uvcdatspt/scripts/ocean.py b/uvcdatspt/scripts/ocean.py
deleted file mode 100644
index 932d4e2de..000000000
--- a/uvcdatspt/scripts/ocean.py
+++ /dev/null
@@ -1,187 +0,0 @@
-
-try: paraview.simple
-except: from paraview.simple import *
-
-import sys
-import os
-import paraview
-
-import benchmark
-
-# trying to import the library where I can specify the global and subcontrollers
-try:
-    import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set
-except ImportError:
-    import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH
-
-
-# global variables
-timeCompartmentSize = 4
-input_files = "/home/boonth/Desktop/spatio/ocean_4/SALT*"
-iso_files = "/home/boonth/Desktop/spatio/ocean/salt_%i.pvtp"
-
-currentTimeStep = -1
-log_lines_per_file = 5
-
-
-# some initial setup
-benchmark.maximize_logs()
-
-pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-timer = paraview.vtk.vtkTimerLog()
-if len(sys.argv) < 1:
-    print 'usage: <num files>'
-else:
-    num_files = int(sys.argv[1])
-numprocs = pm.GetGlobalController().GetNumberOfProcesses()
-timer.SetMaxEntries(log_lines_per_file * num_files * numprocs + 2)
-pm.GetGlobalController().Barrier()
-timer.StartTimer()
-
-paraview.options.batch = True # this may not be necessary
-paraview.simple._DisableFirstRenderCameraReset()
-
-def CreateTimeCompartments(globalController, timeCompartmentSize):
-    if globalController.GetNumberOfProcesses() == 1:
-        print 'single process'
-        return
-    elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0:
-        print 'number of processes must be an integer multiple of time compartment size'
-        return
-    elif timeCompartmentSize == globalController.GetNumberOfProcesses():
-        return globalController
-
-    gid = globalController.GetLocalProcessId()
-    timeCompartmentGroupId = int (gid / timeCompartmentSize )
-    newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize)
-    # must unregister if the reference count is greater than 1
-    if newController.GetReferenceCount() > 1:
-        newController.UnRegister(None)
-
-    #print gid, timeCompartmentGroupId, gid % timeCompartmentSize
-    print gid, ' of global comm is ', newController.GetLocalProcessId()
-    globalController.SetGlobalController(newController)
-    return newController
-
-def CheckReader(reader):
-    if hasattr(reader, "FileName") == False:
-        print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName()
-        sys.exit(-1)
-
-    if hasattr(reader, "TimestepValues") == False:
-        print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information"
-        sys.exit(-1)
-
-def CreateControllers(timeCompartmentSize):
-    pm = paraview.servermanager.vtkProcessModule.GetProcessModule()
-    globalController = pm.GetGlobalController()
-    if timeCompartmentSize > globalController.GetNumberOfProcesses():
-        timeCompartmentSize = globalController.GetNumberOfProcesses()
-
-    temporalController = CreateTimeCompartments(globalController, timeCompartmentSize)
-    return globalController, temporalController, timeCompartmentSize
-
-def UpdateCurrentTimeStep(globalController, timeCompartmentSize):
-    global currentTimeStep
-    if currentTimeStep == -1:
-        currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize
-        return currentTimeStep
-
-    numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize
-    currentTimeStep = currentTimeStep + numTimeStepsPerIteration
-    return currentTimeStep
-
-def WriteImages(currentTimeStep, currentTime, views):
-    for view in views:
-        filename = view.tpFileName.replace("%t", str(currentTimeStep))
-        view.ViewTime = currentTime
-        WriteImage(filename, view, Magnification=view.tpMagnification)
-
-def WriteFiles(currentTimeStep, currentTime, writers, reader, contour):
-    for writer in writers:
-        originalfilename = writer.FileName
-        fname = originalfilename.replace("%t", str(currentTimeStep))
-        #folder = (currentTimeStep % 3) + 1
-        #fname = originalfilename % (folder, currentTimeStep)
-        writer.FileName = fname
-        writer.UpdatePipeline(currentTime)
-        writer.FileName = originalfilename
-
-def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views, reader, contour):
-    currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-    while currentTimeStep < len(timeSteps):
-        print globalController.GetLocalProcessId(), " is working on ", currentTimeStep
-        sys.stdout.flush()
-        WriteImages(currentTimeStep, timeSteps[currentTimeStep], views)
-        WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers, reader, contour)
-        currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize)
-
-def CreateReader(ctor, args, fileInfo):
-    "Creates a reader, checks if it can be used, and sets the filenames"
-    reader = ctor()
-    CheckReader(reader)
-    import glob
-    files = glob.glob(fileInfo)
-    files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering
-    reader.FileName = files
-    for a in args:
-        s = "reader."+a
-        exec (s)
-
-    return reader
-
-def CreateWriter(ctor, filename, tp_writers):
-    writer = ctor()
-    writer.FileName = filename
-    tp_writers.append(writer)
-    return writer
-
-def CreateView(proxy_ctor, filename, magnification, width, height, tp_views):
-    view = proxy_ctor()
-    view.add_attribute("tpFileName", filename)
-    view.add_attribute("tpMagnification", magnification)
-    tp_views.append(view)
-    view.ViewSize = [width, height]
-    return view
-
-def main():
-
-    global timer
-    global timeCompartmentSize
-
-    tp_writers = []
-    tp_views = []
-
-    # ============ end of specialized temporal parallelism sections ==========
-
-    globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize)
-
-    reader = CreateReader( NetCDFReader, ["Dimensions='(depth_t, t_lat, t_lon)'", 'ReplaceFillValueWithNan=0', 'VerticalBias=0.0', "OutputType='Automatic'", 'SphericalCoordinates=1', 'VerticalScale=1.0'], input_files )
-    timeSteps = GetActiveSource().TimestepValues
-    if len(timeSteps) == 0:
-        timeSteps = [0.0]
-    contour = Contour( guiName="contour", Isosurfaces=[0.03], ComputeNormals=1, ComputeGradients=0, ComputeScalars=0, ContourBy=['POINTS', 'SALT'], PointMergeMethod="Uniform Binning" )
-    contour.PointMergeMethod.Numberofpointsperbucket = 8
-    contour.PointMergeMethod.Divisions = [50, 50, 50]
-
-    ParallelPolyDataWriter2 = CreateWriter(XMLPPolyDataWriter,iso_files,tp_writers)
-
-    IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views, reader, contour)
-
-    globalController.Barrier()
-    timer.StopTimer()
-
-    gid = globalController.GetLocalProcessId()
-    if gid == 0:
-        print 'all done! -- total time is', timer.GetElapsedTime(), 'seconds'
-
-    benchmark.get_logs()
-    if gid == 0:
-        benchmark.print_logs()
-
-if __name__ == '__main__':
-    if len(sys.argv) < 1:
-        print 'usage: <num files>'
-    else:
-        main()
-    
-- 
GitLab


From b93d3db6af2fca6e44a4c1d4109cffde29408cc9 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 16:09:49 -0700
Subject: [PATCH 147/196] major cleanup part 2

---
 CMake/ESMP.patch                              |    33 -
 CMake/cdat_modules/basemap_deps.cmake         |     1 -
 CMake/cdat_modules/basemap_external.cmake     |    39 -
 CMake/cdat_modules/basemap_pkg.cmake          |    10 -
 CMake/cdat_modules/cairo_deps.cmake           |     1 -
 CMake/cdat_modules/cairo_external.cmake       |    18 -
 CMake/cdat_modules/cairo_pkg.cmake            |    14 -
 CMake/cdat_modules/cd77_deps.cmake            |     1 -
 CMake/cdat_modules/cd77_external.cmake        |    16 -
 CMake/cdat_modules/cd77_pkg.cmake             |    13 -
 CMake/cdat_modules/cdatlogger_deps.cmake      |     2 -
 CMake/cdat_modules/cdatlogger_external.cmake  |    16 -
 CMake/cdat_modules/cdatlogger_pkg.cmake       |     4 -
 CMake/cdat_modules/cffi_deps.cmake            |     1 -
 CMake/cdat_modules/cffi_external.cmake        |     6 -
 CMake/cdat_modules/cffi_pkg.cmake             |    10 -
 CMake/cdat_modules/clapack_deps.cmake         |     1 -
 CMake/cdat_modules/clapack_external.cmake     |    39 -
 CMake/cdat_modules/clapack_pkg.cmake          |    15 -
 CMake/cdat_modules/click_deps.cmake           |     1 -
 CMake/cdat_modules/click_external.cmake       |     6 -
 CMake/cdat_modules/click_pkg.cmake            |    13 -
 CMake/cdat_modules/cligj_deps.cmake           |     1 -
 CMake/cdat_modules/cligj_external.cmake       |     6 -
 CMake/cdat_modules/cligj_pkg.cmake            |    14 -
 CMake/cdat_modules/cmcurl_external.cmake      |    25 -
 CMake/cdat_modules/cmor_deps.cmake            |     1 -
 CMake/cdat_modules/cmor_external.cmake        |    22 -
 CMake/cdat_modules/cmor_pkg.cmake             |    14 -
 CMake/cdat_modules/configobj_deps.cmake       |     1 -
 CMake/cdat_modules/configobj_external.cmake   |    24 -
 CMake/cdat_modules/configobj_pkg.cmake        |    18 -
 CMake/cdat_modules/coverage_deps.cmake        |     1 -
 CMake/cdat_modules/coverage_external.cmake    |     4 -
 CMake/cdat_modules/coverage_pkg.cmake         |    10 -
 CMake/cdat_modules/cryptography_deps.cmake    |     1 -
 .../cdat_modules/cryptography_external.cmake  |    12 -
 CMake/cdat_modules/cryptography_pkg.cmake     |    10 -
 CMake/cdat_modules/curl_deps.cmake            |     1 -
 CMake/cdat_modules/curl_external.cmake        |    17 -
 CMake/cdat_modules/curl_pkg.cmake             |    13 -
 CMake/cdat_modules/curses_deps.cmake          |     1 -
 CMake/cdat_modules/curses_external.cmake      |    22 -
 CMake/cdat_modules/curses_pkg.cmake           |    16 -
 CMake/cdat_modules/cycler_deps.cmake          |     1 -
 CMake/cdat_modules/cycler_external.cmake      |    16 -
 CMake/cdat_modules/cycler_pkg.cmake           |    10 -
 CMake/cdat_modules/cython_deps.cmake          |     1 -
 CMake/cdat_modules/cython_external.cmake      |    21 -
 CMake/cdat_modules/cython_pkg.cmake           |    10 -
 CMake/cdat_modules/data_deps.cmake            |     1 -
 CMake/cdat_modules/data_pkg.cmake             |    17 -
 CMake/cdat_modules/dateutils_deps.cmake       |     1 -
 CMake/cdat_modules/dateutils_external.cmake   |    12 -
 CMake/cdat_modules/dateutils_pkg.cmake        |    11 -
 CMake/cdat_modules/distribute_deps.cmake      |     1 -
 CMake/cdat_modules/distribute_external.cmake  |    21 -
 CMake/cdat_modules/distribute_pkg.cmake       |    12 -
 CMake/cdat_modules/docutils_deps.cmake        |     1 -
 CMake/cdat_modules/docutils_external.cmake    |     6 -
 CMake/cdat_modules/docutils_pkg.cmake         |    12 -
 CMake/cdat_modules/enum34_deps.cmake          |     1 -
 CMake/cdat_modules/enum34_external.cmake      |     8 -
 CMake/cdat_modules/enum34_pkg.cmake           |    10 -
 CMake/cdat_modules/eof2_deps.cmake            |     1 -
 CMake/cdat_modules/eof2_external.cmake        |    16 -
 CMake/cdat_modules/eof2_pkg.cmake             |     9 -
 CMake/cdat_modules/eofs_deps.cmake            |     1 -
 CMake/cdat_modules/eofs_external.cmake        |    16 -
 CMake/cdat_modules/eofs_pkg.cmake             |    10 -
 CMake/cdat_modules/esmf_deps.cmake            |     5 -
 CMake/cdat_modules/esmf_external.cmake        |    78 -
 CMake/cdat_modules/esmf_pkg.cmake             |    57 -
 CMake/cdat_modules/ezget_deps.cmake           |     1 -
 CMake/cdat_modules/ezget_external.cmake       |    43 -
 CMake/cdat_modules/ezget_pkg.cmake            |    10 -
 CMake/cdat_modules/ffi_deps.cmake             |     1 -
 CMake/cdat_modules/ffi_external.cmake         |    16 -
 CMake/cdat_modules/ffi_pkg.cmake              |    13 -
 CMake/cdat_modules/ffmpeg_deps.cmake          |     1 -
 CMake/cdat_modules/ffmpeg_external.cmake      |    32 -
 CMake/cdat_modules/ffmpeg_pkg.cmake           |    14 -
 CMake/cdat_modules/fiona_deps.cmake           |     1 -
 CMake/cdat_modules/fiona_external.cmake       |     6 -
 CMake/cdat_modules/fiona_pkg.cmake            |    17 -
 CMake/cdat_modules/flake8_deps.cmake          |     1 -
 CMake/cdat_modules/flake8_external.cmake      |    19 -
 CMake/cdat_modules/flake8_pkg.cmake           |    17 -
 CMake/cdat_modules/fontconfig_deps.cmake      |     1 -
 CMake/cdat_modules/fontconfig_external.cmake  |    18 -
 CMake/cdat_modules/fontconfig_pkg.cmake       |    17 -
 CMake/cdat_modules/freetype_deps.cmake        |     1 -
 CMake/cdat_modules/freetype_external.cmake    |    24 -
 CMake/cdat_modules/freetype_pkg.cmake         |    18 -
 CMake/cdat_modules/g2clib_deps.cmake          |     1 -
 CMake/cdat_modules/g2clib_external.cmake      |    18 -
 CMake/cdat_modules/g2clib_pkg.cmake           |    11 -
 CMake/cdat_modules/gdal_deps.cmake            |     1 -
 CMake/cdat_modules/gdal_external.cmake        |    25 -
 CMake/cdat_modules/gdal_pkg.cmake             |    17 -
 CMake/cdat_modules/geos_deps.cmake            |     1 -
 CMake/cdat_modules/geos_external.cmake        |    16 -
 CMake/cdat_modules/geos_pkg.cmake             |    13 -
 CMake/cdat_modules/ghostscript_deps.cmake     |     1 -
 CMake/cdat_modules/ghostscript_external.cmake |    19 -
 CMake/cdat_modules/ghostscript_pkg.cmake      |     2 -
 CMake/cdat_modules/gifsicle_external.cmake    |    16 -
 CMake/cdat_modules/gsw_deps.cmake             |     1 -
 CMake/cdat_modules/gsw_external.cmake         |    24 -
 CMake/cdat_modules/gsw_pkg.cmake              |    18 -
 CMake/cdat_modules/gui_support_deps.cmake     |     1 -
 CMake/cdat_modules/gui_support_external.cmake |    20 -
 CMake/cdat_modules/gui_support_pkg.cmake      |     5 -
 CMake/cdat_modules/h5py_deps.cmake            |     1 -
 CMake/cdat_modules/h5py_external.cmake        |    24 -
 CMake/cdat_modules/h5py_pkg.cmake             |    18 -
 CMake/cdat_modules/hdf4_external.cmake        |    24 -
 CMake/cdat_modules/hdf5_deps.cmake            |     4 -
 CMake/cdat_modules/hdf5_external.cmake        |    40 -
 CMake/cdat_modules/hdf5_pkg.cmake             |    13 -
 CMake/cdat_modules/idna_deps.cmake            |     1 -
 CMake/cdat_modules/idna_external.cmake        |     6 -
 CMake/cdat_modules/idna_pkg.cmake             |    10 -
 CMake/cdat_modules/ipaddress_deps.cmake       |     1 -
 CMake/cdat_modules/ipaddress_external.cmake   |     6 -
 CMake/cdat_modules/ipaddress_pkg.cmake        |    10 -
 CMake/cdat_modules/ipython_deps.cmake         |     5 -
 CMake/cdat_modules/ipython_external.cmake     |     7 -
 CMake/cdat_modules/ipython_pkg.cmake          |    10 -
 CMake/cdat_modules/jasper_deps.cmake          |     1 -
 CMake/cdat_modules/jasper_external.cmake      |    20 -
 CMake/cdat_modules/jasper_pkg.cmake           |    10 -
 CMake/cdat_modules/jinja2_deps.cmake          |     1 -
 CMake/cdat_modules/jinja2_external.cmake      |     6 -
 CMake/cdat_modules/jinja2_pkg.cmake           |     9 -
 CMake/cdat_modules/jpeg_deps.cmake            |     1 -
 CMake/cdat_modules/jpeg_external.cmake        |    24 -
 CMake/cdat_modules/jpeg_pkg.cmake             |    12 -
 CMake/cdat_modules/lapack_deps.cmake          |     1 -
 CMake/cdat_modules/lapack_external.cmake      |    26 -
 CMake/cdat_modules/lapack_pkg.cmake           |    20 -
 CMake/cdat_modules/lats_deps.cmake            |     1 -
 CMake/cdat_modules/lats_external.cmake        |    44 -
 CMake/cdat_modules/lats_pkg.cmake             |    10 -
 CMake/cdat_modules/lepl_deps.cmake            |     1 -
 CMake/cdat_modules/lepl_external.cmake        |     5 -
 CMake/cdat_modules/lepl_pkg.cmake             |    12 -
 CMake/cdat_modules/libcdms_deps.cmake         |     7 -
 CMake/cdat_modules/libcdms_external.cmake     |    56 -
 CMake/cdat_modules/libcdms_pkg.cmake          |    12 -
 CMake/cdat_modules/libcf_deps.cmake           |     1 -
 CMake/cdat_modules/libcf_external.cmake       |    29 -
 CMake/cdat_modules/libcf_pkg.cmake            |    10 -
 CMake/cdat_modules/libdrs_deps.cmake          |     1 -
 CMake/cdat_modules/libdrs_external.cmake      |    49 -
 CMake/cdat_modules/libdrs_pkg.cmake           |    11 -
 CMake/cdat_modules/libdrsfortran_deps.cmake   |     1 -
 .../cdat_modules/libdrsfortran_external.cmake |    46 -
 CMake/cdat_modules/libdrsfortran_pkg.cmake    |    13 -
 CMake/cdat_modules/libxml2_deps.cmake         |     1 -
 CMake/cdat_modules/libxml2_external.cmake     |    17 -
 CMake/cdat_modules/libxml2_pkg.cmake          |    19 -
 CMake/cdat_modules/libxslt_deps.cmake         |     1 -
 CMake/cdat_modules/libxslt_external.cmake     |    20 -
 CMake/cdat_modules/libxslt_pkg.cmake          |    19 -
 CMake/cdat_modules/lxml_deps.cmake            |     1 -
 CMake/cdat_modules/lxml_external.cmake        |    26 -
 CMake/cdat_modules/lxml_pkg.cmake             |    13 -
 CMake/cdat_modules/markupsafe_deps.cmake      |     1 -
 CMake/cdat_modules/markupsafe_external.cmake  |     6 -
 CMake/cdat_modules/markupsafe_pkg.cmake       |     8 -
 CMake/cdat_modules/matplotlib_deps.cmake      |     1 -
 CMake/cdat_modules/matplotlib_external.cmake  |    38 -
 CMake/cdat_modules/matplotlib_pkg.cmake       |    17 -
 CMake/cdat_modules/mccabe_deps.cmake          |     1 -
 CMake/cdat_modules/mccabe_external.cmake      |    12 -
 CMake/cdat_modules/mccabe_pkg.cmake           |    17 -
 CMake/cdat_modules/md5_deps.cmake             |     1 -
 CMake/cdat_modules/md5_external.cmake         |     0
 CMake/cdat_modules/md5_pkg.cmake              |     3 -
 CMake/cdat_modules/mpi4py_deps.cmake          |     1 -
 CMake/cdat_modules/mpi4py_external.cmake      |    50 -
 CMake/cdat_modules/mpi4py_pkg.cmake           |    13 -
 CMake/cdat_modules/mpi_deps.cmake             |     1 -
 CMake/cdat_modules/mpi_external.cmake         |    16 -
 CMake/cdat_modules/mpi_pkg.cmake              |    14 -
 CMake/cdat_modules/myproxyclient_deps.cmake   |     1 -
 .../cdat_modules/myproxyclient_external.cmake |     6 -
 CMake/cdat_modules/myproxyclient_pkg.cmake    |    12 -
 CMake/cdat_modules/netcdf_deps.cmake          |     4 -
 CMake/cdat_modules/netcdf_external.cmake      |    31 -
 CMake/cdat_modules/netcdf_pkg.cmake           |    15 -
 CMake/cdat_modules/netcdfplus_deps.cmake      |     1 -
 CMake/cdat_modules/netcdfplus_external.cmake  |    18 -
 CMake/cdat_modules/netcdfplus_pkg.cmake       |    15 -
 CMake/cdat_modules/numexpr_deps.cmake         |     1 -
 CMake/cdat_modules/numexpr_external.cmake     |     6 -
 CMake/cdat_modules/numexpr_pkg.cmake          |    12 -
 CMake/cdat_modules/numpy_deps.cmake           |     1 -
 CMake/cdat_modules/numpy_external.cmake       |    45 -
 CMake/cdat_modules/numpy_pkg.cmake            |    16 -
 CMake/cdat_modules/ocgis_deps.cmake           |     1 -
 CMake/cdat_modules/ocgis_external.cmake       |    19 -
 CMake/cdat_modules/ocgis_pkg.cmake            |    12 -
 CMake/cdat_modules/openssl_deps.cmake         |     1 -
 CMake/cdat_modules/openssl_external.cmake     |    37 -
 CMake/cdat_modules/openssl_pkg.cmake          |    37 -
 CMake/cdat_modules/osmesa_deps.cmake          |     1 -
 CMake/cdat_modules/osmesa_external.cmake      |    26 -
 CMake/cdat_modules/osmesa_pkg.cmake           |    15 -
 CMake/cdat_modules/paraview_deps.cmake        |    17 -
 CMake/cdat_modules/paraview_external.cmake    |   262 -
 CMake/cdat_modules/paraview_pkg.cmake         |    11 -
 CMake/cdat_modules/pbmplus_external.cmake     |    32 -
 CMake/cdat_modules/pep8_deps.cmake            |     1 -
 CMake/cdat_modules/pep8_external.cmake        |    16 -
 CMake/cdat_modules/pep8_pkg.cmake             |    16 -
 CMake/cdat_modules/pip_deps.cmake             |     1 -
 CMake/cdat_modules/pip_external.cmake         |    21 -
 CMake/cdat_modules/pip_pkg.cmake              |    13 -
 CMake/cdat_modules/pixman_deps.cmake          |     1 -
 CMake/cdat_modules/pixman_external.cmake      |    21 -
 CMake/cdat_modules/pixman_pkg.cmake           |    19 -
 CMake/cdat_modules/pkgconfig_deps.cmake       |     1 -
 CMake/cdat_modules/pkgconfig_external.cmake   |    18 -
 CMake/cdat_modules/pkgconfig_pkg.cmake        |    22 -
 CMake/cdat_modules/pmw_deps.cmake             |     1 -
 CMake/cdat_modules/pmw_external.cmake         |    30 -
 CMake/cdat_modules/pmw_pkg.cmake              |    19 -
 CMake/cdat_modules/pnetcdf_deps.cmake         |     1 -
 CMake/cdat_modules/pnetcdf_external.cmake     |    17 -
 CMake/cdat_modules/pnetcdf_pkg.cmake          |    13 -
 CMake/cdat_modules/png_deps.cmake             |     1 -
 CMake/cdat_modules/png_external.cmake         |    45 -
 CMake/cdat_modules/png_pkg.cmake              |    14 -
 CMake/cdat_modules/proj4_deps.cmake           |     4 -
 CMake/cdat_modules/proj4_external.cmake       |    20 -
 CMake/cdat_modules/proj4_pkg.cmake            |    12 -
 CMake/cdat_modules/pyasn1_deps.cmake          |     1 -
 CMake/cdat_modules/pyasn1_external.cmake      |    12 -
 CMake/cdat_modules/pyasn1_pkg.cmake           |    10 -
 CMake/cdat_modules/pyclimate_deps.cmake       |     2 -
 CMake/cdat_modules/pyclimate_external.cmake   |     6 -
 CMake/cdat_modules/pyclimate_pkg.cmake        |    11 -
 CMake/cdat_modules/pycparser_deps.cmake       |     1 -
 CMake/cdat_modules/pycparser_external.cmake   |     6 -
 CMake/cdat_modules/pycparser_pkg.cmake        |    10 -
 CMake/cdat_modules/pyflakes_deps.cmake        |     1 -
 CMake/cdat_modules/pyflakes_external.cmake    |    12 -
 CMake/cdat_modules/pyflakes_pkg.cmake         |    17 -
 CMake/cdat_modules/pygments_deps.cmake        |     1 -
 CMake/cdat_modules/pygments_external.cmake    |     6 -
 CMake/cdat_modules/pygments_pkg.cmake         |     9 -
 CMake/cdat_modules/pylibxml2_deps.cmake       |     1 -
 CMake/cdat_modules/pylibxml2_external.cmake   |    17 -
 CMake/cdat_modules/pylibxml2_pkg.cmake        |    15 -
 CMake/cdat_modules/pynetcdf4_deps.cmake       |     1 -
 CMake/cdat_modules/pynetcdf4_external.cmake   |    19 -
 CMake/cdat_modules/pynetcdf4_pkg.cmake        |    13 -
 CMake/cdat_modules/pyopengl_external.cmake    |    29 -
 CMake/cdat_modules/pyopenssl_deps.cmake       |     1 -
 CMake/cdat_modules/pyopenssl_external.cmake   |     6 -
 CMake/cdat_modules/pyopenssl_pkg.cmake        |    10 -
 CMake/cdat_modules/pyparsing_deps.cmake       |     1 -
 CMake/cdat_modules/pyparsing_external.cmake   |     7 -
 CMake/cdat_modules/pyparsing_pkg.cmake        |    10 -
 CMake/cdat_modules/pyqt_deps.cmake            |     1 -
 CMake/cdat_modules/pyqt_external.cmake        |    28 -
 CMake/cdat_modules/pyqt_pkg.cmake             |    16 -
 CMake/cdat_modules/pyspharm_deps.cmake        |     1 -
 CMake/cdat_modules/pyspharm_external.cmake    |    25 -
 CMake/cdat_modules/pyspharm_pkg.cmake         |    11 -
 CMake/cdat_modules/pytables_deps.cmake        |     1 -
 CMake/cdat_modules/pytables_external.cmake    |    17 -
 CMake/cdat_modules/pytables_pkg.cmake         |    10 -
 CMake/cdat_modules/python_deps.cmake          |     1 -
 CMake/cdat_modules/python_external.cmake      |    66 -
 CMake/cdat_modules/python_pkg.cmake           |    59 -
 CMake/cdat_modules/pyzmq_deps.cmake           |     1 -
 CMake/cdat_modules/pyzmq_external.cmake       |    50 -
 CMake/cdat_modules/pyzmq_pkg.cmake            |     7 -
 CMake/cdat_modules/qt4_deps.cmake             |     1 -
 CMake/cdat_modules/qt4_pkg.cmake              |    10 -
 CMake/cdat_modules/qt_external.cmake          |    66 -
 CMake/cdat_modules/r_deps.cmake               |     4 -
 CMake/cdat_modules/r_external.cmake           |    51 -
 CMake/cdat_modules/r_pkg.cmake                |    13 -
 CMake/cdat_modules/readline_deps.cmake        |     1 -
 CMake/cdat_modules/readline_external.cmake    |    31 -
 CMake/cdat_modules/readline_pkg.cmake         |    11 -
 CMake/cdat_modules/rpy2_deps.cmake            |     1 -
 CMake/cdat_modules/rpy2_external.cmake        |    11 -
 CMake/cdat_modules/rpy2_pkg.cmake             |    11 -
 CMake/cdat_modules/sampledata_deps.cmake      |     1 -
 CMake/cdat_modules/sampledata_external.cmake  |    17 -
 CMake/cdat_modules/sampledata_pkg.cmake       |     2 -
 .../cdat_modules/scientificpython_deps.cmake  |     2 -
 .../scientificpython_external.cmake           |     5 -
 CMake/cdat_modules/scientificpython_pkg.cmake |     7 -
 CMake/cdat_modules/scikits_deps.cmake         |     1 -
 CMake/cdat_modules/scikits_external.cmake     |     5 -
 CMake/cdat_modules/scikits_pkg.cmake          |    10 -
 CMake/cdat_modules/scipy_deps.cmake           |     1 -
 CMake/cdat_modules/scipy_external.cmake       |    42 -
 CMake/cdat_modules/scipy_pkg.cmake            |    21 -
 CMake/cdat_modules/seawater_deps.cmake        |     1 -
 CMake/cdat_modules/seawater_external.cmake    |    24 -
 CMake/cdat_modules/seawater_pkg.cmake         |    18 -
 CMake/cdat_modules/setuptools_deps.cmake      |     1 -
 CMake/cdat_modules/setuptools_external.cmake  |    38 -
 CMake/cdat_modules/setuptools_pkg.cmake       |    10 -
 CMake/cdat_modules/shapely_deps.cmake         |     1 -
 CMake/cdat_modules/shapely_external.cmake     |     5 -
 CMake/cdat_modules/shapely_pkg.cmake          |    17 -
 CMake/cdat_modules/singledispatch_deps.cmake  |     1 -
 .../singledispatch_external.cmake             |    16 -
 CMake/cdat_modules/singledispatch_pkg.cmake   |    16 -
 CMake/cdat_modules/sip_deps.cmake             |     1 -
 CMake/cdat_modules/sip_external.cmake         |    13 -
 CMake/cdat_modules/sip_pkg.cmake              |    14 -
 CMake/cdat_modules/six_deps.cmake             |     1 -
 CMake/cdat_modules/six_external.cmake         |     6 -
 CMake/cdat_modules/six_pkg.cmake              |    10 -
 CMake/cdat_modules/sphinx_deps.cmake          |     1 -
 CMake/cdat_modules/sphinx_external.cmake      |     5 -
 CMake/cdat_modules/sphinx_pkg.cmake           |    14 -
 CMake/cdat_modules/spyder_deps.cmake          |     1 -
 CMake/cdat_modules/spyder_external.cmake      |     7 -
 CMake/cdat_modules/spyder_pkg.cmake           |     9 -
 CMake/cdat_modules/tcltk_deps.cmake           |     1 -
 CMake/cdat_modules/tcltk_external.cmake       |    62 -
 CMake/cdat_modules/tcltk_pkg.cmake            |    18 -
 CMake/cdat_modules/termcap_deps.cmake         |     1 -
 CMake/cdat_modules/termcap_external.cmake     |    16 -
 CMake/cdat_modules/termcap_pkg.cmake          |    16 -
 CMake/cdat_modules/tiff_deps.cmake            |     1 -
 CMake/cdat_modules/tiff_external.cmake        |    16 -
 CMake/cdat_modules/tiff_pkg.cmake             |    12 -
 CMake/cdat_modules/tornado_deps.cmake         |     1 -
 CMake/cdat_modules/tornado_external.cmake     |     5 -
 CMake/cdat_modules/tornado_pkg.cmake          |     7 -
 CMake/cdat_modules/udunits2_deps.cmake        |     1 -
 CMake/cdat_modules/udunits2_external.cmake    |    25 -
 CMake/cdat_modules/udunits2_pkg.cmake         |    14 -
 CMake/cdat_modules/uuid_deps.cmake            |     1 -
 CMake/cdat_modules/uuid_external.cmake        |    19 -
 CMake/cdat_modules/uuid_pkg.cmake             |    15 -
 CMake/cdat_modules/uvcmetrics_deps.cmake      |     1 -
 CMake/cdat_modules/uvcmetrics_external.cmake  |    42 -
 CMake/cdat_modules/uvcmetrics_pkg.cmake       |    14 -
 CMake/cdat_modules/vacumm_deps.cmake          |     1 -
 CMake/cdat_modules/vacumm_external.cmake      |    24 -
 CMake/cdat_modules/vacumm_pkg.cmake           |    18 -
 CMake/cdat_modules/visit_deps.cmake           |     1 -
 CMake/cdat_modules/visit_external.cmake       |   173 -
 CMake/cdat_modules/visit_pkg.cmake            |    10 -
 CMake/cdat_modules/vistrails_deps.cmake       |     1 -
 CMake/cdat_modules/vistrails_external.cmake   |    92 -
 CMake/cdat_modules/vistrails_pkg.cmake        |     6 -
 CMake/cdat_modules/vtk_deps.cmake             |    13 -
 CMake/cdat_modules/vtk_external.cmake         |   184 -
 CMake/cdat_modules/vtk_pkg.cmake              |     4 -
 CMake/cdat_modules/wget_deps.cmake            |     1 -
 CMake/cdat_modules/wget_external.cmake        |    16 -
 CMake/cdat_modules/wget_pkg.cmake             |    28 -
 CMake/cdat_modules/windfield_deps.cmake       |     1 -
 CMake/cdat_modules/windfield_external.cmake   |    16 -
 CMake/cdat_modules/windfield_pkg.cmake        |     9 -
 CMake/cdat_modules/windspharm_deps.cmake      |     1 -
 CMake/cdat_modules/windspharm_external.cmake  |    16 -
 CMake/cdat_modules/windspharm_pkg.cmake       |     9 -
 CMake/cdat_modules/x264_deps.cmake            |     2 -
 CMake/cdat_modules/x264_external.cmake        |    28 -
 CMake/cdat_modules/x264_pkg.cmake             |    13 -
 CMake/cdat_modules/xgks_external.cmake        |    21 -
 CMake/cdat_modules/yasm_deps.cmake            |     1 -
 CMake/cdat_modules/yasm_external.cmake        |    15 -
 CMake/cdat_modules/yasm_pkg.cmake             |    13 -
 CMake/cdat_modules/zlib_deps.cmake            |     1 -
 CMake/cdat_modules/zlib_external.cmake        |    55 -
 CMake/cdat_modules/zlib_pkg.cmake             |    24 -
 CMake/cdat_modules/zmq_deps.cmake             |     1 -
 CMake/cdat_modules/zmq_external.cmake         |    16 -
 CMake/cdat_modules/zmq_pkg.cmake              |    10 -
 .../CLAPACK_install_step.cmake.in             |    38 -
 .../ESMF_install_step.cmake.in                |    35 -
 .../ESMF_make_step.cmake.in                   |    45 -
 .../ESMP_install_step.cmake.in                |    34 -
 .../ESMP_patch_step.cmake.in                  |     9 -
 .../NUMPY_configure_step.cmake.in             |    42 -
 .../NUMPY_install_step.cmake.in               |    19 -
 .../NUMPY_make_step.cmake.in                  |    21 -
 .../PYLIBXML2_install_step.cmake.in           |    21 -
 .../PYLIBXML2_make_step.cmake.in              |    24 -
 .../cdat_modules_extra/PYLIBXML2_setup.py.in  |   243 -
 .../SCIPY_configure_step.cmake.in             |    62 -
 .../SCIPY_install_step.cmake.in               |    21 -
 .../SCIPY_make_step.cmake.in                  |    22 -
 .../basemap_install_step.cmake.in             |    20 -
 .../basemap_make_step.cmake.in                |    20 -
 CMake/cdat_modules_extra/cdat.in              |     4 -
 .../cdat_cmake_make_step.cmake.in             |    18 -
 .../cdat_common_environment.cmake.in          |    39 -
 .../cdat_configure_step.cmake.in              |    30 -
 .../cdat_download_sample_data.cmake.in        |    10 -
 .../cdat_install_step.cmake.in                |    13 -
 .../cdat_make_step.cmake.in                   |    18 -
 .../cdat_python_install_step.cmake.in         |    30 -
 .../cdat_python_step.cmake.in                 |    18 -
 .../cdatmpi_configure_step.cmake.in           |    22 -
 CMake/cdat_modules_extra/checked_get.sh.in    |    76 -
 .../checkout_testdata.cmake                   |   256 -
 .../cleanenv_configure_step.cmake.in          |    17 -
 .../configobj_build_step.cmake.in             |     6 -
 .../curses_patch_step.cmake.in                |     5 -
 .../ezget_Makefile.gfortran.in                |    78 -
 .../fetch_uvcmetrics_testdata.cmake           |    25 -
 .../ffmpeg_build_step.cmake.in                |    14 -
 CMake/cdat_modules_extra/git_clone.sh.in      |    10 -
 CMake/cdat_modules_extra/git_update.sh.in     |     8 -
 .../gsw_build_step.cmake.in                   |     6 -
 .../h5py_build_step.cmake.in                  |     6 -
 .../hdf5_patch_step.cmake.in                  |    10 -
 CMake/cdat_modules_extra/install.py.in        |   945 -
 .../jasper_configure_step.cmake.in            |    11 -
 .../jpeg_install_step.cmake.in                |    28 -
 .../lats_Makefile.gfortran.in                 |   208 -
 .../libcf_install_step.cmake.in               |    13 -
 .../libcf_make_step.cmake.in                  |    15 -
 .../libdrs_Makefile.Linux.gfortran.in         |    78 -
 .../libdrs_Makefile.Mac.fwrap.gfortran.in     |    85 -
 .../libdrs_Makefile.Mac.gfortran.in           |    89 -
 .../lxml_build_step.cmake.in                  |    19 -
 .../lxml_install_step.cmake.in                |    14 -
 .../matplotlib_build_step.cmake.in            |     6 -
 .../matplotlib_patch_step.cmake.in            |     9 -
 .../matplotlib_setup_cfg.in                   |    76 -
 .../mpi4py_install_step.cmake.in              |    21 -
 .../mpi4py_make_step.cmake.in                 |    20 -
 .../netcdf_patch_step.cmake.in                |     6 -
 .../paraview_download.sh.in                   |    19 -
 .../paraview_install_python_module.cmake.in   |    25 -
 .../pbmplus_configure_step.cmake.in           |     9 -
 .../pmw_install_step.cmake.in                 |    13 -
 .../cdat_modules_extra/pmw_make_step.cmake.in |    15 -
 CMake/cdat_modules_extra/predownload.py.in    |    88 -
 .../cdat_modules_extra/preofflinebuild.sh.in  |    11 -
 .../pyopengl_install_step.cmake.in            |    13 -
 .../pyopengl_make_step.cmake.in               |    13 -
 .../pyspharm_patch_step.cmake.in              |     6 -
 .../python_configure_step.cmake.in            |    42 -
 .../python_install_step.cmake.in              |    51 -
 .../python_make_step.cmake.in                 |    34 -
 .../python_patch_step.cmake.in                |    21 -
 CMake/cdat_modules_extra/python_setup.py.in   |  1918 --
 .../pyzmq_configure_step.cmake.in             |    20 -
 .../pyzmq_install_step.cmake.in               |    21 -
 CMake/cdat_modules_extra/reset_runtime.csh.in |    24 -
 CMake/cdat_modules_extra/reset_runtime.sh.in  |    16 -
 CMake/cdat_modules_extra/runpytest.in         |    26 -
 .../seawater_build_step.cmake.in              |     6 -
 CMake/cdat_modules_extra/setup_runtime.csh.in |   117 -
 CMake/cdat_modules_extra/setup_runtime.sh.in  |   111 -
 .../setuptools_install_step.cmake.in          |    13 -
 .../setuptools_make_step.cmake.in             |    13 -
 CMake/cdat_modules_extra/site.cfg.in          |     4 -
 .../udunits2_apple_configure.in               | 18006 ----------------
 CMake/cdat_modules_extra/uvcdat.in            |    64 -
 CMake/cdat_modules_extra/uvcdat.mac.in        |     6 -
 .../uvcmetrics_test_data_md5s.txt             |   232 -
 .../vacumm_build_step.cmake.in                |     6 -
 .../vtk_install_python_module.cmake.in        |    36 -
 .../xgks_configure_step.cmake.in              |    13 -
 CMake/curses_gcc5.patch                       |    30 -
 CMake/dummy.f90                               |     4 -
 CMake/fixName.py                              |    15 -
 CMake/fix_install_name.py.in                  |    33 -
 CMake/fixlink.py                              |    49 -
 CMake/install.py                              |    10 -
 CMake/netcdf_clang.patch                      |    12 -
 CMake/pyspharm_setup.patch                    |    19 -
 CMake/python_patch_step.cmake.in              |    15 -
 CMake/sqlite3_int64_v2.patch                  |    24 -
 CMake/test_python_ok.py                       |    19 -
 CMake/travis_build.cmake                      |    18 -
 CMake/travis_submit.cmake                     |    10 -
 CMake/uvcdat.plist                            |    38 -
 487 files changed, 29802 deletions(-)
 delete mode 100644 CMake/ESMP.patch
 delete mode 100644 CMake/cdat_modules/basemap_deps.cmake
 delete mode 100644 CMake/cdat_modules/basemap_external.cmake
 delete mode 100644 CMake/cdat_modules/basemap_pkg.cmake
 delete mode 100644 CMake/cdat_modules/cairo_deps.cmake
 delete mode 100644 CMake/cdat_modules/cairo_external.cmake
 delete mode 100644 CMake/cdat_modules/cairo_pkg.cmake
 delete mode 100644 CMake/cdat_modules/cd77_deps.cmake
 delete mode 100644 CMake/cdat_modules/cd77_external.cmake
 delete mode 100644 CMake/cdat_modules/cd77_pkg.cmake
 delete mode 100644 CMake/cdat_modules/cdatlogger_deps.cmake
 delete mode 100644 CMake/cdat_modules/cdatlogger_external.cmake
 delete mode 100644 CMake/cdat_modules/cdatlogger_pkg.cmake
 delete mode 100644 CMake/cdat_modules/cffi_deps.cmake
 delete mode 100644 CMake/cdat_modules/cffi_external.cmake
 delete mode 100644 CMake/cdat_modules/cffi_pkg.cmake
 delete mode 100644 CMake/cdat_modules/clapack_deps.cmake
 delete mode 100644 CMake/cdat_modules/clapack_external.cmake
 delete mode 100644 CMake/cdat_modules/clapack_pkg.cmake
 delete mode 100644 CMake/cdat_modules/click_deps.cmake
 delete mode 100644 CMake/cdat_modules/click_external.cmake
 delete mode 100644 CMake/cdat_modules/click_pkg.cmake
 delete mode 100644 CMake/cdat_modules/cligj_deps.cmake
 delete mode 100644 CMake/cdat_modules/cligj_external.cmake
 delete mode 100644 CMake/cdat_modules/cligj_pkg.cmake
 delete mode 100644 CMake/cdat_modules/cmcurl_external.cmake
 delete mode 100644 CMake/cdat_modules/cmor_deps.cmake
 delete mode 100644 CMake/cdat_modules/cmor_external.cmake
 delete mode 100644 CMake/cdat_modules/cmor_pkg.cmake
 delete mode 100644 CMake/cdat_modules/configobj_deps.cmake
 delete mode 100644 CMake/cdat_modules/configobj_external.cmake
 delete mode 100644 CMake/cdat_modules/configobj_pkg.cmake
 delete mode 100644 CMake/cdat_modules/coverage_deps.cmake
 delete mode 100644 CMake/cdat_modules/coverage_external.cmake
 delete mode 100644 CMake/cdat_modules/coverage_pkg.cmake
 delete mode 100644 CMake/cdat_modules/cryptography_deps.cmake
 delete mode 100644 CMake/cdat_modules/cryptography_external.cmake
 delete mode 100644 CMake/cdat_modules/cryptography_pkg.cmake
 delete mode 100644 CMake/cdat_modules/curl_deps.cmake
 delete mode 100644 CMake/cdat_modules/curl_external.cmake
 delete mode 100644 CMake/cdat_modules/curl_pkg.cmake
 delete mode 100644 CMake/cdat_modules/curses_deps.cmake
 delete mode 100644 CMake/cdat_modules/curses_external.cmake
 delete mode 100644 CMake/cdat_modules/curses_pkg.cmake
 delete mode 100644 CMake/cdat_modules/cycler_deps.cmake
 delete mode 100644 CMake/cdat_modules/cycler_external.cmake
 delete mode 100644 CMake/cdat_modules/cycler_pkg.cmake
 delete mode 100644 CMake/cdat_modules/cython_deps.cmake
 delete mode 100644 CMake/cdat_modules/cython_external.cmake
 delete mode 100644 CMake/cdat_modules/cython_pkg.cmake
 delete mode 100644 CMake/cdat_modules/data_deps.cmake
 delete mode 100644 CMake/cdat_modules/data_pkg.cmake
 delete mode 100644 CMake/cdat_modules/dateutils_deps.cmake
 delete mode 100644 CMake/cdat_modules/dateutils_external.cmake
 delete mode 100644 CMake/cdat_modules/dateutils_pkg.cmake
 delete mode 100644 CMake/cdat_modules/distribute_deps.cmake
 delete mode 100644 CMake/cdat_modules/distribute_external.cmake
 delete mode 100644 CMake/cdat_modules/distribute_pkg.cmake
 delete mode 100644 CMake/cdat_modules/docutils_deps.cmake
 delete mode 100644 CMake/cdat_modules/docutils_external.cmake
 delete mode 100644 CMake/cdat_modules/docutils_pkg.cmake
 delete mode 100644 CMake/cdat_modules/enum34_deps.cmake
 delete mode 100644 CMake/cdat_modules/enum34_external.cmake
 delete mode 100644 CMake/cdat_modules/enum34_pkg.cmake
 delete mode 100644 CMake/cdat_modules/eof2_deps.cmake
 delete mode 100644 CMake/cdat_modules/eof2_external.cmake
 delete mode 100644 CMake/cdat_modules/eof2_pkg.cmake
 delete mode 100644 CMake/cdat_modules/eofs_deps.cmake
 delete mode 100644 CMake/cdat_modules/eofs_external.cmake
 delete mode 100644 CMake/cdat_modules/eofs_pkg.cmake
 delete mode 100644 CMake/cdat_modules/esmf_deps.cmake
 delete mode 100644 CMake/cdat_modules/esmf_external.cmake
 delete mode 100644 CMake/cdat_modules/esmf_pkg.cmake
 delete mode 100644 CMake/cdat_modules/ezget_deps.cmake
 delete mode 100644 CMake/cdat_modules/ezget_external.cmake
 delete mode 100644 CMake/cdat_modules/ezget_pkg.cmake
 delete mode 100644 CMake/cdat_modules/ffi_deps.cmake
 delete mode 100644 CMake/cdat_modules/ffi_external.cmake
 delete mode 100644 CMake/cdat_modules/ffi_pkg.cmake
 delete mode 100644 CMake/cdat_modules/ffmpeg_deps.cmake
 delete mode 100644 CMake/cdat_modules/ffmpeg_external.cmake
 delete mode 100644 CMake/cdat_modules/ffmpeg_pkg.cmake
 delete mode 100644 CMake/cdat_modules/fiona_deps.cmake
 delete mode 100644 CMake/cdat_modules/fiona_external.cmake
 delete mode 100644 CMake/cdat_modules/fiona_pkg.cmake
 delete mode 100644 CMake/cdat_modules/flake8_deps.cmake
 delete mode 100644 CMake/cdat_modules/flake8_external.cmake
 delete mode 100644 CMake/cdat_modules/flake8_pkg.cmake
 delete mode 100644 CMake/cdat_modules/fontconfig_deps.cmake
 delete mode 100644 CMake/cdat_modules/fontconfig_external.cmake
 delete mode 100644 CMake/cdat_modules/fontconfig_pkg.cmake
 delete mode 100644 CMake/cdat_modules/freetype_deps.cmake
 delete mode 100644 CMake/cdat_modules/freetype_external.cmake
 delete mode 100644 CMake/cdat_modules/freetype_pkg.cmake
 delete mode 100644 CMake/cdat_modules/g2clib_deps.cmake
 delete mode 100644 CMake/cdat_modules/g2clib_external.cmake
 delete mode 100644 CMake/cdat_modules/g2clib_pkg.cmake
 delete mode 100644 CMake/cdat_modules/gdal_deps.cmake
 delete mode 100644 CMake/cdat_modules/gdal_external.cmake
 delete mode 100644 CMake/cdat_modules/gdal_pkg.cmake
 delete mode 100644 CMake/cdat_modules/geos_deps.cmake
 delete mode 100644 CMake/cdat_modules/geos_external.cmake
 delete mode 100644 CMake/cdat_modules/geos_pkg.cmake
 delete mode 100644 CMake/cdat_modules/ghostscript_deps.cmake
 delete mode 100644 CMake/cdat_modules/ghostscript_external.cmake
 delete mode 100644 CMake/cdat_modules/ghostscript_pkg.cmake
 delete mode 100644 CMake/cdat_modules/gifsicle_external.cmake
 delete mode 100644 CMake/cdat_modules/gsw_deps.cmake
 delete mode 100644 CMake/cdat_modules/gsw_external.cmake
 delete mode 100644 CMake/cdat_modules/gsw_pkg.cmake
 delete mode 100644 CMake/cdat_modules/gui_support_deps.cmake
 delete mode 100644 CMake/cdat_modules/gui_support_external.cmake
 delete mode 100644 CMake/cdat_modules/gui_support_pkg.cmake
 delete mode 100644 CMake/cdat_modules/h5py_deps.cmake
 delete mode 100644 CMake/cdat_modules/h5py_external.cmake
 delete mode 100644 CMake/cdat_modules/h5py_pkg.cmake
 delete mode 100644 CMake/cdat_modules/hdf4_external.cmake
 delete mode 100644 CMake/cdat_modules/hdf5_deps.cmake
 delete mode 100644 CMake/cdat_modules/hdf5_external.cmake
 delete mode 100644 CMake/cdat_modules/hdf5_pkg.cmake
 delete mode 100644 CMake/cdat_modules/idna_deps.cmake
 delete mode 100644 CMake/cdat_modules/idna_external.cmake
 delete mode 100644 CMake/cdat_modules/idna_pkg.cmake
 delete mode 100644 CMake/cdat_modules/ipaddress_deps.cmake
 delete mode 100644 CMake/cdat_modules/ipaddress_external.cmake
 delete mode 100644 CMake/cdat_modules/ipaddress_pkg.cmake
 delete mode 100644 CMake/cdat_modules/ipython_deps.cmake
 delete mode 100644 CMake/cdat_modules/ipython_external.cmake
 delete mode 100644 CMake/cdat_modules/ipython_pkg.cmake
 delete mode 100644 CMake/cdat_modules/jasper_deps.cmake
 delete mode 100644 CMake/cdat_modules/jasper_external.cmake
 delete mode 100644 CMake/cdat_modules/jasper_pkg.cmake
 delete mode 100644 CMake/cdat_modules/jinja2_deps.cmake
 delete mode 100644 CMake/cdat_modules/jinja2_external.cmake
 delete mode 100644 CMake/cdat_modules/jinja2_pkg.cmake
 delete mode 100644 CMake/cdat_modules/jpeg_deps.cmake
 delete mode 100644 CMake/cdat_modules/jpeg_external.cmake
 delete mode 100644 CMake/cdat_modules/jpeg_pkg.cmake
 delete mode 100644 CMake/cdat_modules/lapack_deps.cmake
 delete mode 100644 CMake/cdat_modules/lapack_external.cmake
 delete mode 100644 CMake/cdat_modules/lapack_pkg.cmake
 delete mode 100644 CMake/cdat_modules/lats_deps.cmake
 delete mode 100644 CMake/cdat_modules/lats_external.cmake
 delete mode 100644 CMake/cdat_modules/lats_pkg.cmake
 delete mode 100644 CMake/cdat_modules/lepl_deps.cmake
 delete mode 100644 CMake/cdat_modules/lepl_external.cmake
 delete mode 100644 CMake/cdat_modules/lepl_pkg.cmake
 delete mode 100644 CMake/cdat_modules/libcdms_deps.cmake
 delete mode 100644 CMake/cdat_modules/libcdms_external.cmake
 delete mode 100644 CMake/cdat_modules/libcdms_pkg.cmake
 delete mode 100644 CMake/cdat_modules/libcf_deps.cmake
 delete mode 100644 CMake/cdat_modules/libcf_external.cmake
 delete mode 100644 CMake/cdat_modules/libcf_pkg.cmake
 delete mode 100644 CMake/cdat_modules/libdrs_deps.cmake
 delete mode 100644 CMake/cdat_modules/libdrs_external.cmake
 delete mode 100644 CMake/cdat_modules/libdrs_pkg.cmake
 delete mode 100644 CMake/cdat_modules/libdrsfortran_deps.cmake
 delete mode 100644 CMake/cdat_modules/libdrsfortran_external.cmake
 delete mode 100644 CMake/cdat_modules/libdrsfortran_pkg.cmake
 delete mode 100644 CMake/cdat_modules/libxml2_deps.cmake
 delete mode 100644 CMake/cdat_modules/libxml2_external.cmake
 delete mode 100644 CMake/cdat_modules/libxml2_pkg.cmake
 delete mode 100644 CMake/cdat_modules/libxslt_deps.cmake
 delete mode 100644 CMake/cdat_modules/libxslt_external.cmake
 delete mode 100644 CMake/cdat_modules/libxslt_pkg.cmake
 delete mode 100644 CMake/cdat_modules/lxml_deps.cmake
 delete mode 100644 CMake/cdat_modules/lxml_external.cmake
 delete mode 100644 CMake/cdat_modules/lxml_pkg.cmake
 delete mode 100644 CMake/cdat_modules/markupsafe_deps.cmake
 delete mode 100644 CMake/cdat_modules/markupsafe_external.cmake
 delete mode 100644 CMake/cdat_modules/markupsafe_pkg.cmake
 delete mode 100644 CMake/cdat_modules/matplotlib_deps.cmake
 delete mode 100644 CMake/cdat_modules/matplotlib_external.cmake
 delete mode 100644 CMake/cdat_modules/matplotlib_pkg.cmake
 delete mode 100644 CMake/cdat_modules/mccabe_deps.cmake
 delete mode 100644 CMake/cdat_modules/mccabe_external.cmake
 delete mode 100644 CMake/cdat_modules/mccabe_pkg.cmake
 delete mode 100644 CMake/cdat_modules/md5_deps.cmake
 delete mode 100644 CMake/cdat_modules/md5_external.cmake
 delete mode 100644 CMake/cdat_modules/md5_pkg.cmake
 delete mode 100644 CMake/cdat_modules/mpi4py_deps.cmake
 delete mode 100644 CMake/cdat_modules/mpi4py_external.cmake
 delete mode 100644 CMake/cdat_modules/mpi4py_pkg.cmake
 delete mode 100644 CMake/cdat_modules/mpi_deps.cmake
 delete mode 100644 CMake/cdat_modules/mpi_external.cmake
 delete mode 100644 CMake/cdat_modules/mpi_pkg.cmake
 delete mode 100644 CMake/cdat_modules/myproxyclient_deps.cmake
 delete mode 100644 CMake/cdat_modules/myproxyclient_external.cmake
 delete mode 100644 CMake/cdat_modules/myproxyclient_pkg.cmake
 delete mode 100644 CMake/cdat_modules/netcdf_deps.cmake
 delete mode 100644 CMake/cdat_modules/netcdf_external.cmake
 delete mode 100644 CMake/cdat_modules/netcdf_pkg.cmake
 delete mode 100644 CMake/cdat_modules/netcdfplus_deps.cmake
 delete mode 100644 CMake/cdat_modules/netcdfplus_external.cmake
 delete mode 100644 CMake/cdat_modules/netcdfplus_pkg.cmake
 delete mode 100644 CMake/cdat_modules/numexpr_deps.cmake
 delete mode 100644 CMake/cdat_modules/numexpr_external.cmake
 delete mode 100644 CMake/cdat_modules/numexpr_pkg.cmake
 delete mode 100644 CMake/cdat_modules/numpy_deps.cmake
 delete mode 100644 CMake/cdat_modules/numpy_external.cmake
 delete mode 100644 CMake/cdat_modules/numpy_pkg.cmake
 delete mode 100644 CMake/cdat_modules/ocgis_deps.cmake
 delete mode 100644 CMake/cdat_modules/ocgis_external.cmake
 delete mode 100644 CMake/cdat_modules/ocgis_pkg.cmake
 delete mode 100644 CMake/cdat_modules/openssl_deps.cmake
 delete mode 100644 CMake/cdat_modules/openssl_external.cmake
 delete mode 100644 CMake/cdat_modules/openssl_pkg.cmake
 delete mode 100644 CMake/cdat_modules/osmesa_deps.cmake
 delete mode 100644 CMake/cdat_modules/osmesa_external.cmake
 delete mode 100644 CMake/cdat_modules/osmesa_pkg.cmake
 delete mode 100644 CMake/cdat_modules/paraview_deps.cmake
 delete mode 100644 CMake/cdat_modules/paraview_external.cmake
 delete mode 100644 CMake/cdat_modules/paraview_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pbmplus_external.cmake
 delete mode 100644 CMake/cdat_modules/pep8_deps.cmake
 delete mode 100644 CMake/cdat_modules/pep8_external.cmake
 delete mode 100644 CMake/cdat_modules/pep8_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pip_deps.cmake
 delete mode 100644 CMake/cdat_modules/pip_external.cmake
 delete mode 100644 CMake/cdat_modules/pip_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pixman_deps.cmake
 delete mode 100644 CMake/cdat_modules/pixman_external.cmake
 delete mode 100644 CMake/cdat_modules/pixman_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pkgconfig_deps.cmake
 delete mode 100644 CMake/cdat_modules/pkgconfig_external.cmake
 delete mode 100644 CMake/cdat_modules/pkgconfig_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pmw_deps.cmake
 delete mode 100644 CMake/cdat_modules/pmw_external.cmake
 delete mode 100644 CMake/cdat_modules/pmw_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pnetcdf_deps.cmake
 delete mode 100644 CMake/cdat_modules/pnetcdf_external.cmake
 delete mode 100644 CMake/cdat_modules/pnetcdf_pkg.cmake
 delete mode 100644 CMake/cdat_modules/png_deps.cmake
 delete mode 100644 CMake/cdat_modules/png_external.cmake
 delete mode 100644 CMake/cdat_modules/png_pkg.cmake
 delete mode 100644 CMake/cdat_modules/proj4_deps.cmake
 delete mode 100644 CMake/cdat_modules/proj4_external.cmake
 delete mode 100644 CMake/cdat_modules/proj4_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pyasn1_deps.cmake
 delete mode 100644 CMake/cdat_modules/pyasn1_external.cmake
 delete mode 100644 CMake/cdat_modules/pyasn1_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pyclimate_deps.cmake
 delete mode 100644 CMake/cdat_modules/pyclimate_external.cmake
 delete mode 100644 CMake/cdat_modules/pyclimate_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pycparser_deps.cmake
 delete mode 100644 CMake/cdat_modules/pycparser_external.cmake
 delete mode 100644 CMake/cdat_modules/pycparser_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pyflakes_deps.cmake
 delete mode 100644 CMake/cdat_modules/pyflakes_external.cmake
 delete mode 100644 CMake/cdat_modules/pyflakes_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pygments_deps.cmake
 delete mode 100644 CMake/cdat_modules/pygments_external.cmake
 delete mode 100644 CMake/cdat_modules/pygments_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pylibxml2_deps.cmake
 delete mode 100644 CMake/cdat_modules/pylibxml2_external.cmake
 delete mode 100644 CMake/cdat_modules/pylibxml2_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pynetcdf4_deps.cmake
 delete mode 100644 CMake/cdat_modules/pynetcdf4_external.cmake
 delete mode 100644 CMake/cdat_modules/pynetcdf4_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pyopengl_external.cmake
 delete mode 100644 CMake/cdat_modules/pyopenssl_deps.cmake
 delete mode 100644 CMake/cdat_modules/pyopenssl_external.cmake
 delete mode 100644 CMake/cdat_modules/pyopenssl_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pyparsing_deps.cmake
 delete mode 100644 CMake/cdat_modules/pyparsing_external.cmake
 delete mode 100644 CMake/cdat_modules/pyparsing_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pyqt_deps.cmake
 delete mode 100644 CMake/cdat_modules/pyqt_external.cmake
 delete mode 100644 CMake/cdat_modules/pyqt_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pyspharm_deps.cmake
 delete mode 100644 CMake/cdat_modules/pyspharm_external.cmake
 delete mode 100644 CMake/cdat_modules/pyspharm_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pytables_deps.cmake
 delete mode 100644 CMake/cdat_modules/pytables_external.cmake
 delete mode 100644 CMake/cdat_modules/pytables_pkg.cmake
 delete mode 100644 CMake/cdat_modules/python_deps.cmake
 delete mode 100644 CMake/cdat_modules/python_external.cmake
 delete mode 100644 CMake/cdat_modules/python_pkg.cmake
 delete mode 100644 CMake/cdat_modules/pyzmq_deps.cmake
 delete mode 100644 CMake/cdat_modules/pyzmq_external.cmake
 delete mode 100644 CMake/cdat_modules/pyzmq_pkg.cmake
 delete mode 100644 CMake/cdat_modules/qt4_deps.cmake
 delete mode 100644 CMake/cdat_modules/qt4_pkg.cmake
 delete mode 100644 CMake/cdat_modules/qt_external.cmake
 delete mode 100644 CMake/cdat_modules/r_deps.cmake
 delete mode 100644 CMake/cdat_modules/r_external.cmake
 delete mode 100644 CMake/cdat_modules/r_pkg.cmake
 delete mode 100644 CMake/cdat_modules/readline_deps.cmake
 delete mode 100644 CMake/cdat_modules/readline_external.cmake
 delete mode 100644 CMake/cdat_modules/readline_pkg.cmake
 delete mode 100644 CMake/cdat_modules/rpy2_deps.cmake
 delete mode 100644 CMake/cdat_modules/rpy2_external.cmake
 delete mode 100644 CMake/cdat_modules/rpy2_pkg.cmake
 delete mode 100644 CMake/cdat_modules/sampledata_deps.cmake
 delete mode 100644 CMake/cdat_modules/sampledata_external.cmake
 delete mode 100644 CMake/cdat_modules/sampledata_pkg.cmake
 delete mode 100644 CMake/cdat_modules/scientificpython_deps.cmake
 delete mode 100644 CMake/cdat_modules/scientificpython_external.cmake
 delete mode 100644 CMake/cdat_modules/scientificpython_pkg.cmake
 delete mode 100644 CMake/cdat_modules/scikits_deps.cmake
 delete mode 100644 CMake/cdat_modules/scikits_external.cmake
 delete mode 100644 CMake/cdat_modules/scikits_pkg.cmake
 delete mode 100644 CMake/cdat_modules/scipy_deps.cmake
 delete mode 100644 CMake/cdat_modules/scipy_external.cmake
 delete mode 100644 CMake/cdat_modules/scipy_pkg.cmake
 delete mode 100644 CMake/cdat_modules/seawater_deps.cmake
 delete mode 100644 CMake/cdat_modules/seawater_external.cmake
 delete mode 100644 CMake/cdat_modules/seawater_pkg.cmake
 delete mode 100644 CMake/cdat_modules/setuptools_deps.cmake
 delete mode 100644 CMake/cdat_modules/setuptools_external.cmake
 delete mode 100644 CMake/cdat_modules/setuptools_pkg.cmake
 delete mode 100644 CMake/cdat_modules/shapely_deps.cmake
 delete mode 100644 CMake/cdat_modules/shapely_external.cmake
 delete mode 100644 CMake/cdat_modules/shapely_pkg.cmake
 delete mode 100644 CMake/cdat_modules/singledispatch_deps.cmake
 delete mode 100644 CMake/cdat_modules/singledispatch_external.cmake
 delete mode 100644 CMake/cdat_modules/singledispatch_pkg.cmake
 delete mode 100644 CMake/cdat_modules/sip_deps.cmake
 delete mode 100644 CMake/cdat_modules/sip_external.cmake
 delete mode 100644 CMake/cdat_modules/sip_pkg.cmake
 delete mode 100644 CMake/cdat_modules/six_deps.cmake
 delete mode 100644 CMake/cdat_modules/six_external.cmake
 delete mode 100644 CMake/cdat_modules/six_pkg.cmake
 delete mode 100644 CMake/cdat_modules/sphinx_deps.cmake
 delete mode 100644 CMake/cdat_modules/sphinx_external.cmake
 delete mode 100644 CMake/cdat_modules/sphinx_pkg.cmake
 delete mode 100644 CMake/cdat_modules/spyder_deps.cmake
 delete mode 100644 CMake/cdat_modules/spyder_external.cmake
 delete mode 100644 CMake/cdat_modules/spyder_pkg.cmake
 delete mode 100644 CMake/cdat_modules/tcltk_deps.cmake
 delete mode 100644 CMake/cdat_modules/tcltk_external.cmake
 delete mode 100644 CMake/cdat_modules/tcltk_pkg.cmake
 delete mode 100644 CMake/cdat_modules/termcap_deps.cmake
 delete mode 100644 CMake/cdat_modules/termcap_external.cmake
 delete mode 100644 CMake/cdat_modules/termcap_pkg.cmake
 delete mode 100644 CMake/cdat_modules/tiff_deps.cmake
 delete mode 100644 CMake/cdat_modules/tiff_external.cmake
 delete mode 100644 CMake/cdat_modules/tiff_pkg.cmake
 delete mode 100644 CMake/cdat_modules/tornado_deps.cmake
 delete mode 100644 CMake/cdat_modules/tornado_external.cmake
 delete mode 100644 CMake/cdat_modules/tornado_pkg.cmake
 delete mode 100644 CMake/cdat_modules/udunits2_deps.cmake
 delete mode 100644 CMake/cdat_modules/udunits2_external.cmake
 delete mode 100644 CMake/cdat_modules/udunits2_pkg.cmake
 delete mode 100644 CMake/cdat_modules/uuid_deps.cmake
 delete mode 100644 CMake/cdat_modules/uuid_external.cmake
 delete mode 100644 CMake/cdat_modules/uuid_pkg.cmake
 delete mode 100644 CMake/cdat_modules/uvcmetrics_deps.cmake
 delete mode 100644 CMake/cdat_modules/uvcmetrics_external.cmake
 delete mode 100644 CMake/cdat_modules/uvcmetrics_pkg.cmake
 delete mode 100644 CMake/cdat_modules/vacumm_deps.cmake
 delete mode 100644 CMake/cdat_modules/vacumm_external.cmake
 delete mode 100644 CMake/cdat_modules/vacumm_pkg.cmake
 delete mode 100644 CMake/cdat_modules/visit_deps.cmake
 delete mode 100644 CMake/cdat_modules/visit_external.cmake
 delete mode 100644 CMake/cdat_modules/visit_pkg.cmake
 delete mode 100644 CMake/cdat_modules/vistrails_deps.cmake
 delete mode 100644 CMake/cdat_modules/vistrails_external.cmake
 delete mode 100644 CMake/cdat_modules/vistrails_pkg.cmake
 delete mode 100644 CMake/cdat_modules/vtk_deps.cmake
 delete mode 100644 CMake/cdat_modules/vtk_external.cmake
 delete mode 100644 CMake/cdat_modules/vtk_pkg.cmake
 delete mode 100644 CMake/cdat_modules/wget_deps.cmake
 delete mode 100644 CMake/cdat_modules/wget_external.cmake
 delete mode 100644 CMake/cdat_modules/wget_pkg.cmake
 delete mode 100644 CMake/cdat_modules/windfield_deps.cmake
 delete mode 100644 CMake/cdat_modules/windfield_external.cmake
 delete mode 100644 CMake/cdat_modules/windfield_pkg.cmake
 delete mode 100644 CMake/cdat_modules/windspharm_deps.cmake
 delete mode 100644 CMake/cdat_modules/windspharm_external.cmake
 delete mode 100644 CMake/cdat_modules/windspharm_pkg.cmake
 delete mode 100644 CMake/cdat_modules/x264_deps.cmake
 delete mode 100644 CMake/cdat_modules/x264_external.cmake
 delete mode 100644 CMake/cdat_modules/x264_pkg.cmake
 delete mode 100644 CMake/cdat_modules/xgks_external.cmake
 delete mode 100644 CMake/cdat_modules/yasm_deps.cmake
 delete mode 100644 CMake/cdat_modules/yasm_external.cmake
 delete mode 100644 CMake/cdat_modules/yasm_pkg.cmake
 delete mode 100644 CMake/cdat_modules/zlib_deps.cmake
 delete mode 100644 CMake/cdat_modules/zlib_external.cmake
 delete mode 100644 CMake/cdat_modules/zlib_pkg.cmake
 delete mode 100644 CMake/cdat_modules/zmq_deps.cmake
 delete mode 100644 CMake/cdat_modules/zmq_external.cmake
 delete mode 100644 CMake/cdat_modules/zmq_pkg.cmake
 delete mode 100644 CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/ESMF_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/ESMF_make_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/ESMP_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/ESMP_patch_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/NUMPY_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/NUMPY_make_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in
 delete mode 100755 CMake/cdat_modules_extra/PYLIBXML2_setup.py.in
 delete mode 100644 CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/SCIPY_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/SCIPY_make_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/basemap_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/basemap_make_step.cmake.in
 delete mode 100755 CMake/cdat_modules_extra/cdat.in
 delete mode 100644 CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/cdat_common_environment.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/cdat_configure_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/cdat_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/cdat_make_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/cdat_python_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/cdat_python_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in
 delete mode 100755 CMake/cdat_modules_extra/checked_get.sh.in
 delete mode 100644 CMake/cdat_modules_extra/checkout_testdata.cmake
 delete mode 100644 CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/configobj_build_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/curses_patch_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/ezget_Makefile.gfortran.in
 delete mode 100644 CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake
 delete mode 100644 CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in
 delete mode 100755 CMake/cdat_modules_extra/git_clone.sh.in
 delete mode 100755 CMake/cdat_modules_extra/git_update.sh.in
 delete mode 100644 CMake/cdat_modules_extra/gsw_build_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/h5py_build_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/hdf5_patch_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/install.py.in
 delete mode 100644 CMake/cdat_modules_extra/jasper_configure_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/jpeg_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/lats_Makefile.gfortran.in
 delete mode 100644 CMake/cdat_modules_extra/libcf_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/libcf_make_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/libdrs_Makefile.Linux.gfortran.in
 delete mode 100644 CMake/cdat_modules_extra/libdrs_Makefile.Mac.fwrap.gfortran.in
 delete mode 100644 CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in
 delete mode 100644 CMake/cdat_modules_extra/lxml_build_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/lxml_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/matplotlib_build_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/matplotlib_setup_cfg.in
 delete mode 100644 CMake/cdat_modules_extra/mpi4py_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/mpi4py_make_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/netcdf_patch_step.cmake.in
 delete mode 100755 CMake/cdat_modules_extra/paraview_download.sh.in
 delete mode 100644 CMake/cdat_modules_extra/paraview_install_python_module.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/pmw_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/pmw_make_step.cmake.in
 delete mode 100755 CMake/cdat_modules_extra/predownload.py.in
 delete mode 100755 CMake/cdat_modules_extra/preofflinebuild.sh.in
 delete mode 100644 CMake/cdat_modules_extra/pyopengl_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/pyopengl_make_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/python_configure_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/python_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/python_make_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/python_patch_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/python_setup.py.in
 delete mode 100644 CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/pyzmq_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/reset_runtime.csh.in
 delete mode 100644 CMake/cdat_modules_extra/reset_runtime.sh.in
 delete mode 100755 CMake/cdat_modules_extra/runpytest.in
 delete mode 100644 CMake/cdat_modules_extra/seawater_build_step.cmake.in
 delete mode 100755 CMake/cdat_modules_extra/setup_runtime.csh.in
 delete mode 100755 CMake/cdat_modules_extra/setup_runtime.sh.in
 delete mode 100644 CMake/cdat_modules_extra/setuptools_install_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/setuptools_make_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/site.cfg.in
 delete mode 100755 CMake/cdat_modules_extra/udunits2_apple_configure.in
 delete mode 100755 CMake/cdat_modules_extra/uvcdat.in
 delete mode 100755 CMake/cdat_modules_extra/uvcdat.mac.in
 delete mode 100644 CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt
 delete mode 100644 CMake/cdat_modules_extra/vacumm_build_step.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/vtk_install_python_module.cmake.in
 delete mode 100644 CMake/cdat_modules_extra/xgks_configure_step.cmake.in
 delete mode 100644 CMake/curses_gcc5.patch
 delete mode 100644 CMake/dummy.f90
 delete mode 100644 CMake/fixName.py
 delete mode 100644 CMake/fix_install_name.py.in
 delete mode 100644 CMake/fixlink.py
 delete mode 100644 CMake/install.py
 delete mode 100644 CMake/netcdf_clang.patch
 delete mode 100644 CMake/pyspharm_setup.patch
 delete mode 100644 CMake/python_patch_step.cmake.in
 delete mode 100644 CMake/sqlite3_int64_v2.patch
 delete mode 100644 CMake/test_python_ok.py
 delete mode 100644 CMake/travis_build.cmake
 delete mode 100644 CMake/travis_submit.cmake
 delete mode 100644 CMake/uvcdat.plist

diff --git a/CMake/ESMP.patch b/CMake/ESMP.patch
deleted file mode 100644
index 9d1eb2c07..000000000
--- a/CMake/ESMP.patch
+++ /dev/null
@@ -1,33 +0,0 @@
---- a/ESMP_LoadESMF.py	2014-01-14 10:00:22.000000000 -0500
-+++ b/ESMP_LoadESMF.py	2014-01-14 10:40:57.000000000 -0500
-@@ -64,6 +64,14 @@
- #      esmfmk = c[2]
- 
-   try:
-+
-+    # If we are not dealing with an absolute path treat it a relative to the
-+    # current Python module.
-+    if not os.path.isabs(esmfmk):
-+      # Get the directory for this module
-+      rel_dir = os.path.dirname(os.path.realpath(__file__))
-+      esmfmk = os.path.abspath(os.path.join(rel_dir, esmfmk))
-+
-     MKFILE = open(esmfmk, 'r')
-   except:
-     raise IOError("File not found\n  %s") % esmfmk
-@@ -72,11 +80,12 @@
-   libsdir = 0
-   esmfos = 0
-   esmfabi = 0
-+
-+  libsdir = os.path.dirname(esmfmk)
-+
- #  MKFILE = open(esmfmk,'r')
-   for line in MKFILE:
--    if 'ESMF_LIBSDIR' in line:
--      libsdir = line.split("=")[1]
--    elif 'ESMF_OS:' in line:
-+    if 'ESMF_OS:' in line:
-       esmfos = line.split(":")[1]
-     elif 'ESMF_ABI:' in line:
-       esmfabi = line.split(":")[1]
diff --git a/CMake/cdat_modules/basemap_deps.cmake b/CMake/cdat_modules/basemap_deps.cmake
deleted file mode 100644
index 98520d1d0..000000000
--- a/CMake/cdat_modules/basemap_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(basemap_deps ${matplotlib_pkg} ${geos_pkg})
diff --git a/CMake/cdat_modules/basemap_external.cmake b/CMake/cdat_modules/basemap_external.cmake
deleted file mode 100644
index 53b3a59a1..000000000
--- a/CMake/cdat_modules/basemap_external.cmake
+++ /dev/null
@@ -1,39 +0,0 @@
-# The basemap external project 
-
-set(basemap_binary "${CMAKE_CURRENT_BINARY_DIR}/build/basemap")
-
-#configure_file(
-#  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_configure_step.cmake.in
-#  ${cdat_CMAKE_BINARY_DIR}/basemap_configure_step.cmake @ONLY)
-# to build we also run a cmake -P script.
-# the script will set LD_LIBRARY_PATH so that 
-# python can run after it is built on linux
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/basemap_make_step.cmake @ONLY)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/basemap_install_step.cmake @ONLY)
-
-#set(basemap_CONFIGURE_COMMAND ${CMAKE_COMMAND}
-#    -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -P ${cdat_CMAKE_BINARY_DIR}/basemap_configure_step.cmake)
-set(basemap_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/basemap_make_step.cmake)
-set(basemap_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/basemap_install_step.cmake)
-
-# create an external project to download numpy,
-# and configure and build it
-ExternalProject_Add(basemap
-  URL ${basemap_URL}/${basemap_GZ}
-  URL_MD5 ${basemap_MD5}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${basemap_binary}
-  BINARY_DIR ${basemap_binary}
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${basemap_BUILD_COMMAND}
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${basemap_INSTALL_COMMAND}
-  DEPENDS
-    ${basemap_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/basemap_pkg.cmake b/CMake/cdat_modules/basemap_pkg.cmake
deleted file mode 100644
index bfcaa6c07..000000000
--- a/CMake/cdat_modules/basemap_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set( basemap_MAJOR 1  )
-set( basemap_MINOR 0  )
-set( basemap_PATCH 5  )
-set(basemap_URL ${LLNL_URL})
-set(basemap_GZ basemap-${basemap_MAJOR}.${basemap_MINOR}.${basemap_PATCH}.tar.gz)
-set(basemap_MD5 089260ea2b3eebb9d63e1783d0b15298 )
-set(BASEMAP_VERSION ${basemap_MAJOR}.${basemap_MINOR}.${basemap_PATCH})
-set(BASEMAP_SOURCE ${basemap_URL}/${basemap_GZ})
-
-add_cdat_package_dependent(basemap "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/cairo_deps.cmake b/CMake/cdat_modules/cairo_deps.cmake
deleted file mode 100644
index 78b7fe0b3..000000000
--- a/CMake/cdat_modules/cairo_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Cairo_deps ${pkgconfig_pkg} ${png_pkg} ${fontconfig_pkg} ${freetype_pkg} ${pixman_pkg} ${libxml2_pkg})
diff --git a/CMake/cdat_modules/cairo_external.cmake b/CMake/cdat_modules/cairo_external.cmake
deleted file mode 100644
index 1826425c1..000000000
--- a/CMake/cdat_modules/cairo_external.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-
-set(Cairo_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cairo")
-set(Cairo_install "${cdat_EXTERNALS}")
-set(Cairo_conf_args --disable-static^^--enable-quartz=no^^--enable-win32=no^^--enable-skia=no^^--enable-os2=no^^--enable-beos=no^^--enable-drm=no^^--enable-gallium=no^^--enable-cogl=no)
-
-ExternalProject_Add(Cairo
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${Cairo_source}
-  INSTALL_DIR ${Cairo_install}
-  URL ${CAIRO_URL}/${CAIRO_GZ}
-  URL_MD5 ${CAIRO_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DSKIP_LDFLAGS=YES -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${Cairo_conf_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${Cairo_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/cairo_pkg.cmake b/CMake/cdat_modules/cairo_pkg.cmake
deleted file mode 100644
index be1bcce3c..000000000
--- a/CMake/cdat_modules/cairo_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(CAIRO_MAJOR 1)
-set(CAIRO_MINOR 10)
-set(CAIRO_PATCH 2)
-set(CAIRO_MAJOR_SRC 1)
-set(CAIRO_MINOR_SRC 12)
-set(CAIRO_PATCH_SRC 14)
-set(CAIRO_URL ${LLNL_URL})
-set(CAIRO_GZ cairo-${CAIRO_MAJOR_SRC}.${CAIRO_MINOR_SRC}.${CAIRO_PATCH_SRC}.tar.gz)
-set(CAIRO_MD5 4a55de6dbbd2d22eee9eea78e6bdbbfd )
-set(CAIRO_SOURCE ${CAIRO_URL}/${CAIRO_GZ})
-set(CAIRO_VERSION ${CAIRO_MAJOR_SRC}.${CAIRO_MINOR_SRC}.${CAIRO_PATCH_SRC})
-
-add_cdat_package_dependent(Cairo "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)
-
diff --git a/CMake/cdat_modules/cd77_deps.cmake b/CMake/cdat_modules/cd77_deps.cmake
deleted file mode 100644
index e18cdbd1d..000000000
--- a/CMake/cdat_modules/cd77_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(cd77_deps ${python_pkg} ${ezget_pkg} ${libcdms_pkg} ${setuptools_pkg} )
diff --git a/CMake/cdat_modules/cd77_external.cmake b/CMake/cdat_modules/cd77_external.cmake
deleted file mode 100644
index 00e3b0833..000000000
--- a/CMake/cdat_modules/cd77_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# create an external project to install cd77
-# and configure and build it
-
-ExternalProject_Add(cd77
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/cd77
-  ${GIT_CMD_STR_cd77}
-  ${GIT_TAG_cd77}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  INSTALL_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} CFLAGS=$ENV{CFLAGS} LDFLAGS=$ENV{LDFLAGS} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} PYTHONPATH=${PYTHONPATH} ${USR_ENVS} ${PYTHON_EXECUTABLE} setup.py install ${USER_INSTALL_OPTIONS} ${PRFX}
-  DEPENDS ${${nm}_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/cd77_pkg.cmake b/CMake/cdat_modules/cd77_pkg.cmake
deleted file mode 100644
index 3dc195aec..000000000
--- a/CMake/cdat_modules/cd77_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(cd77_VERSION 1.0.0)
-set(cd77_BRANCH master)
-set(cd77_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/cd77.git )
-
-
-set(GIT_CMD_STR_cd77 GIT_REPOSITORY ${cd77_REPOSITORY})
-set(GIT_TAG_cd77 GIT_TAG "${cd77_BRANCH}")
-set (nm cd77)
-string(TOUPPER ${nm} uc_nm)
-
-if (CDAT_BUILD_PCMDI)
-  add_cdat_package(cd77 "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/cdatlogger_deps.cmake b/CMake/cdat_modules/cdatlogger_deps.cmake
deleted file mode 100644
index c2cfeeb0b..000000000
--- a/CMake/cdat_modules/cdatlogger_deps.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-set (CDATLogger_deps ${cdat_pkg})
-
diff --git a/CMake/cdat_modules/cdatlogger_external.cmake b/CMake/cdat_modules/cdatlogger_external.cmake
deleted file mode 100644
index 69cb09c1a..000000000
--- a/CMake/cdat_modules/cdatlogger_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cmake_modules/CDATLogger.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/CDATLogger.cmake
-  @ONLY)
-
-ExternalProject_Add(CDATLogger
-  DOWNLOAD_DIR ""
-  SOURCE_DIR ${cdat_SOURCE_DIR}
-  BINARY_DIR ${cdat_build_dir}
-  BUILD_IN_SOURCE 0
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/CDATLogger.cmake
-  DEPENDS ${CDATLogger_deps}
-)
diff --git a/CMake/cdat_modules/cdatlogger_pkg.cmake b/CMake/cdat_modules/cdatlogger_pkg.cmake
deleted file mode 100644
index f9e19bd05..000000000
--- a/CMake/cdat_modules/cdatlogger_pkg.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(CDATLOGGER_VERSION N/A)
-set(CDATLOGGER_SOURCE N/A)
-
-add_cdat_package(CDATLogger "" "" OFF)
diff --git a/CMake/cdat_modules/cffi_deps.cmake b/CMake/cdat_modules/cffi_deps.cmake
deleted file mode 100644
index 3e0620510..000000000
--- a/CMake/cdat_modules/cffi_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CFFI_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} )
diff --git a/CMake/cdat_modules/cffi_external.cmake b/CMake/cdat_modules/cffi_external.cmake
deleted file mode 100644
index 1fdb495c4..000000000
--- a/CMake/cdat_modules/cffi_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm CFFI)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/cffi_pkg.cmake b/CMake/cdat_modules/cffi_pkg.cmake
deleted file mode 100644
index 889da6bb7..000000000
--- a/CMake/cdat_modules/cffi_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(CFFI_MAJOR_SRC 1)
-set(CFFI_MINOR_SRC 5)
-set(CFFI_PATCH_SRC 2)
-
-set(CFFI_VERSION ${CFFI_MAJOR_SRC}.${CFFI_MINOR_SRC}.${CFFI_PATCH_SRC})
-set(CFFI_GZ cffi-${CFFI_VERSION}.tar.gz)
-set(CFFI_SOURCE ${LLNL_URL}/${CFFI_GZ})
-set(CFFI_MD5 fa766133f7299464c8bf857e0c966a82)
-
-add_cdat_package_dependent(CFFI "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/clapack_deps.cmake b/CMake/cdat_modules/clapack_deps.cmake
deleted file mode 100644
index e0f544de8..000000000
--- a/CMake/cdat_modules/clapack_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CLAPACK_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/clapack_external.cmake b/CMake/cdat_modules/clapack_external.cmake
deleted file mode 100644
index bab6e0f94..000000000
--- a/CMake/cdat_modules/clapack_external.cmake
+++ /dev/null
@@ -1,39 +0,0 @@
-# The CLAPACK external project
-
-set(clapack_source "${CMAKE_CURRENT_BINARY_DIR}/LAPACK")
-set(clapack_binary "${CMAKE_CURRENT_BINARY_DIR}/LAPACK-build")
-set(clapack_install "${cdat_EXTERNALS}")
-set(NUMPY_LAPACK_binary ${clapack_binary})
-
-#
-# To fix compilation problem: relocation R_X86_64_32 against `a local symbol' can not be
-# used when making a shared object; recompile with -fPIC
-# See http://www.cmake.org/pipermail/cmake/2007-May/014350.html
-#
-if(UNIX AND CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64")
-  set(cdat_tpl_c_flags_LAPACK "-fPIC ${cdat_tpl_c_flags}")
-endif()
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/CLAPACK_install_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/CLAPACK_install_step.cmake
-    @ONLY)
-
-set(CLAPACK_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/CLAPACK_install_step.cmake)
-
-ExternalProject_Add(CLAPACK
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${clapack_source}
-  BINARY_DIR ${clapack_binary}
-  INSTALL_DIR ${clapack_install}
-  URL ${CLAPACK_URL}/${CLAPACK_GZ}
-  URL_MD5 ${CLAPACK_MD5}
-  CMAKE_CACHE_ARGS
-    -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags}
-    -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags}
-    -DBUILD_SHARED_LIBS:BOOL=ON
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-    ${CLAPACK_EXTRA_ARGS}
-  INSTALL_COMMAND ${CLAPACK_INSTALL_COMMAND}
-  DEPENDS ${CLAPACK_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/clapack_pkg.cmake b/CMake/cdat_modules/clapack_pkg.cmake
deleted file mode 100644
index 38f156a44..000000000
--- a/CMake/cdat_modules/clapack_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(CLAPACK_MAJOR 3)
-set(CLAPACK_MINOR 2)
-set(CLAPACK_PATCH 1)
-set(CLAPACK_VERSION ${CLAPACK_MAJOR}.${CLAPACK_MINOR}.${CLAPACK_PATCH})
-set(CLAPACK_URL http://www.netlib.org/clapack)
-set(CLAPACK_GZ clapack-${CLAPACK_VERSION}-CMAKE.tgz)
-set(CLAPACK_MD5 4fd18eb33f3ff8c5d65a7d43913d661b)
-set(CLAPACK_SOURCE ${CLAPACK_URL}/${CLAPACK_GZ})
-
-if(NOT APPLE)
-  if(NOT CMAKE_Fortran_COMPILER)
-    add_cdat_package(CLAPACK "" "" OFF)
-  endif()
-endif()
-
diff --git a/CMake/cdat_modules/click_deps.cmake b/CMake/cdat_modules/click_deps.cmake
deleted file mode 100644
index ee4e50d5d..000000000
--- a/CMake/cdat_modules/click_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CLICK_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} )
diff --git a/CMake/cdat_modules/click_external.cmake b/CMake/cdat_modules/click_external.cmake
deleted file mode 100644
index 0c89a3bf1..000000000
--- a/CMake/cdat_modules/click_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm CLICK)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/click_pkg.cmake b/CMake/cdat_modules/click_pkg.cmake
deleted file mode 100644
index b0aef777c..000000000
--- a/CMake/cdat_modules/click_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(CLICK_MAJOR_SRC 4)
-set(CLICK_MINOR_SRC 1)
-
-set(CLICK_VERSION ${CLICK_MAJOR_SRC}.${CLICK_MINOR_SRC})
-set(CLICK_GZ click-${CLICK_VERSION}.tar.gz)
-set(CLICK_SOURCE ${LLNL_URL}/${CLICK_GZ})
-set(CLICK_MD5 6a3fa88c738f2f775ec6de126feb99a4)
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(CLICK "" "" ON)
-else()
-  add_cdat_package(CLICK "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/cligj_deps.cmake b/CMake/cdat_modules/cligj_deps.cmake
deleted file mode 100644
index ce62bdb2f..000000000
--- a/CMake/cdat_modules/cligj_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CLIGJ_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} ${click_pkg} )
diff --git a/CMake/cdat_modules/cligj_external.cmake b/CMake/cdat_modules/cligj_external.cmake
deleted file mode 100644
index 805117570..000000000
--- a/CMake/cdat_modules/cligj_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm CLIGJ)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/cligj_pkg.cmake b/CMake/cdat_modules/cligj_pkg.cmake
deleted file mode 100644
index 06adad506..000000000
--- a/CMake/cdat_modules/cligj_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(CLIGJ_MAJOR_SRC 0)
-set(CLIGJ_MINOR_SRC 3)
-set(CLIGJ_PATCH_SRC 0)
-
-set(CLIGJ_VERSION ${CLIGJ_MAJOR_SRC}.${CLIGJ_MINOR_SRC}.${CLIGJ_PATCH_SRC})
-set(CLIGJ_GZ cligj-${CLIGJ_VERSION}.tar.gz)
-set(CLIGJ_SOURCE ${LLNL_URL}/${CLIGJ_GZ})
-set(CLIGJ_MD5 cd135f171b4ef2c07ebd34731ccf09a5)
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(CLIGJ "" "" ON)
-else()
-  add_cdat_package(CLIGJ "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/cmcurl_external.cmake b/CMake/cdat_modules/cmcurl_external.cmake
deleted file mode 100644
index 8a6033f35..000000000
--- a/CMake/cdat_modules/cmcurl_external.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-# The cmCurl external project for Titan
-
-set(curl_source "${CMAKE_CURRENT_SOURCE_DIR}/cmcurl")
-set(curl_binary "${CMAKE_CURRENT_BINARY_DIR}/cmcurl")
-
-ExternalProject_Add(cmcurl
-  DOWNLOAD_COMMAND ""
-  SOURCE_DIR "${curl_source}"
-  BINARY_DIR "${curl_binary}"
-  CMAKE_GENERATOR ${gen}
-  CMAKE_ARGS
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-    -DBUILD_SHARED_LIBS:BOOL=ON
-    -DBUILD_TESTING:BOOL=OFF
-    -DBUILD_CURL_TESTS:BOOL=OFF
-    -DBUILD_CURL_EXE:BOOL=OFF
-    -DCURL_DISABLE_LDAP:BOOL=ON
-    -DCURL_DISABLE_LDAPS:BOOL=ON
-    ${titan_compiler_args}
-    ${titan_binary_args}
-    ${cmcurl_EXTRA_ARGS}
-    -DTRIGGER_REBUILD:STRING=0
-  INSTALL_COMMAND ""
-  DEPENDS ${cmcurl_deps}
-)
diff --git a/CMake/cdat_modules/cmor_deps.cmake b/CMake/cdat_modules/cmor_deps.cmake
deleted file mode 100644
index 719a3c001..000000000
--- a/CMake/cdat_modules/cmor_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CMOR_deps ${pkgconfig_pkg} ${python_pkg} ${hdf5_pkg} ${netcdf_pkg} ${zlib_pkg} ${uuid_pkg} ${udunits2_pkg} ${cdat_pkg})
diff --git a/CMake/cdat_modules/cmor_external.cmake b/CMake/cdat_modules/cmor_external.cmake
deleted file mode 100644
index 5cf405322..000000000
--- a/CMake/cdat_modules/cmor_external.cmake
+++ /dev/null
@@ -1,22 +0,0 @@
-set(cmor_source_dir  "${CMAKE_CURRENT_BINARY_DIR}/build/cmor")
-set(cmor_binary_dir  "${CMAKE_CURRENT_BINARY_DIR}/build/cmor")
-set(cmor_install_dir "${cdat_EXTERNALS}")
-
-set(cmor_configure_args --with-netcdf=${netcdf_install} --with-udunits2=${udunits_install} --with-uuid=${uuid_install} --enable-fortran=yes --with-python=${CMAKE_INSTALL_PREFIX} --prefix=${CMAKE_INSTALL_PREFIX})
-
-# it appears currently we only configure cmor but not build it.
-ExternalProject_Add(CMOR
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${cmor_source_dir}
-  BUILD_IN_SOURCE 1
-  ${GIT_CMD_STR_CMOR}
-  ${GIT_TAG}
-  INSTALL_DIR ${cmor_install_dir}
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND sh ${cmor_source_dir}/configure ${cmor_configure_args}
-  DEPENDS ${CMOR_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_CMOR)
-  unset(GIT_CMD_STR_CMOR)
-endif()
diff --git a/CMake/cdat_modules/cmor_pkg.cmake b/CMake/cdat_modules/cmor_pkg.cmake
deleted file mode 100644
index e3b785ac3..000000000
--- a/CMake/cdat_modules/cmor_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(CMOR_VERSION 2.9.2)
-set(CMOR_BRANCH uvcdat-2.4.0)
-set(CMOR_REPOSITORY ${GIT_PROTOCOL}github.com/PCMDI/cmor.git )
-
-set(GIT_CMD_STR_CMOR GIT_REPOSITORY ${CMOR_REPOSITORY})
-set(GIT_TAG GIT_TAG "${CMOR_BRANCH}")
-set (nm CMOR)
-string(TOUPPER ${nm} uc_nm)
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(CMOR "" "" ON)
-else()
-  add_cdat_package_dependent(CMOR "" "" ON "CDAT_BUILD_CMOR" OFF)
-endif()
diff --git a/CMake/cdat_modules/configobj_deps.cmake b/CMake/cdat_modules/configobj_deps.cmake
deleted file mode 100644
index 1835fd3b4..000000000
--- a/CMake/cdat_modules/configobj_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(configobj_deps ${python_pkg} ${six_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/configobj_external.cmake b/CMake/cdat_modules/configobj_external.cmake
deleted file mode 100644
index ce77c8f03..000000000
--- a/CMake/cdat_modules/configobj_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-# configobj
-#
-set(configobj_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/configobj")
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/configobj_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/configobj_build_step.cmake"
-  @ONLY
-)
-
-set(configobj_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/configobj_build_step.cmake)
-
-ExternalProject_Add(configobj
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${configobj_source_dir}
-  URL ${CONFIGOBJ_URL}/${CONFIGOBJ_GZ}
-  URL_MD5 ${CONFIGOBJ_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${configobj_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${configobj_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/configobj_pkg.cmake b/CMake/cdat_modules/configobj_pkg.cmake
deleted file mode 100644
index c236b8694..000000000
--- a/CMake/cdat_modules/configobj_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set( CONFIGOBJ_MAJOR 5 )
-set( CONFIGOBJ_MINOR 0 )
-set( CONFIGOBJ_PATCH 6 )
-set( CONFIGOBJ_VERSION ${CONFIGOBJ_MAJOR}.${CONFIGOBJ_MINOR}.${CONFIGOBJ_PATCH} )
-set( CONFIGOBJ_URL ${LLNL_URL} )
-set( CONFIGOBJ_GZ configobj-${CONFIGOBJ_VERSION}.tar.gz )
-set( CONFIGOBJ_MD5 e472a3a1c2a67bb0ec9b5d54c13a47d6 )
-
-set (nm CONFIGOBJ)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(CONFIGOBJ_SOURCE ${CONFIGOBJ_URL}/${CONFIGOBJ_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(configobj "" "" ON)
-else()
-  add_cdat_package(configobj "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/coverage_deps.cmake b/CMake/cdat_modules/coverage_deps.cmake
deleted file mode 100644
index d2744141d..000000000
--- a/CMake/cdat_modules/coverage_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(coverage_deps ${python_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/coverage_external.cmake b/CMake/cdat_modules/coverage_external.cmake
deleted file mode 100644
index e09cfb7fb..000000000
--- a/CMake/cdat_modules/coverage_external.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-# External coverage.py package
-set(nm COVERAGE)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/coverage_pkg.cmake b/CMake/cdat_modules/coverage_pkg.cmake
deleted file mode 100644
index 7e32eaa61..000000000
--- a/CMake/cdat_modules/coverage_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(COVERAGE_MAJOR_SRC 4)
-set(COVERAGE_MINOR_SRC 0)
-set(COVERAGE_PATCH_SRC 3)
-
-set(COVERAGE_VERSION ${COVERAGE_MAJOR_SRC}.${COVERAGE_MINOR_SRC}.${COVERAGE_PATCH_SRC})
-set(COVERAGE_GZ coverage-${COVERAGE_VERSION}.tar.gz)
-set(COVERAGE_SOURCE ${LLNL_URL}/${COVERAGE_GZ})
-set(COVERAGE_MD5 c7d3db1882484022c81bf619be7b6365)
-
-add_cdat_package_dependent(COVERAGE "" "" ON "CDAT_MEASURE_COVERAGE" OFF)
diff --git a/CMake/cdat_modules/cryptography_deps.cmake b/CMake/cdat_modules/cryptography_deps.cmake
deleted file mode 100644
index cad6e0ddd..000000000
--- a/CMake/cdat_modules/cryptography_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CRYPTOGRAPHY_deps ${cffi_pkg} ${enum34_pkg} ${idna_pkg} ${ipaddress_pkg} ${openssl_pkg} ${pip_pkg} ${pyasn1_pkg} ${python_pkg})
diff --git a/CMake/cdat_modules/cryptography_external.cmake b/CMake/cdat_modules/cryptography_external.cmake
deleted file mode 100644
index 9e10cb4bb..000000000
--- a/CMake/cdat_modules/cryptography_external.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm CRYPTOGRAPHY)
-
-# Set LDFlags and CFlags to make it easier to find OpenSSL
-list(APPEND USR_ENVS
-  "LDFLAGS=-L${OPENSSL_LIBRARY_DIR} $ENV{LDFLAGS}"
-  "CFLAGS=-I${OPENSSL_INCLUDE_DIR} $ENV{CFLAGS}"
-  )
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/cryptography_pkg.cmake b/CMake/cdat_modules/cryptography_pkg.cmake
deleted file mode 100644
index 0b5671da1..000000000
--- a/CMake/cdat_modules/cryptography_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(CRYPTOGRAPHY_MAJOR_SRC 1)
-set(CRYPTOGRAPHY_MINOR_SRC 3)
-set(CRYPTOGRAPHY_PATCH_SRC 2)
-
-set(CRYPTOGRAPHY_VERSION ${CRYPTOGRAPHY_MAJOR_SRC}.${CRYPTOGRAPHY_MINOR_SRC}.${CRYPTOGRAPHY_PATCH_SRC})
-set(CRYPTOGRAPHY_GZ cryptography-${CRYPTOGRAPHY_VERSION}.tar.gz)
-set(CRYPTOGRAPHY_SOURCE ${LLNL_URL}/${CRYPTOGRAPHY_GZ})
-set(CRYPTOGRAPHY_MD5 0359190f291824dc8ad9e6d477a607b2)
-
-add_cdat_package_dependent(CRYPTOGRAPHY "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/curl_deps.cmake b/CMake/cdat_modules/curl_deps.cmake
deleted file mode 100644
index 432b4d319..000000000
--- a/CMake/cdat_modules/curl_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(CURL_deps ${pkgconfig_pkg} ${libXML2_pkg} ${zlib_pkg})
diff --git a/CMake/cdat_modules/curl_external.cmake b/CMake/cdat_modules/curl_external.cmake
deleted file mode 100644
index 6dd77c373..000000000
--- a/CMake/cdat_modules/curl_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(curl_source "${CMAKE_CURRENT_BINARY_DIR}/build/CURL")
-set(curl_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(CURL
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${curl_source}
-  INSTALL_DIR ${curl_install}
-  URL ${CURL_URL}/${CURL_GZ}
-  URL_MD5 ${CURL_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${CURL_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/curl_pkg.cmake b/CMake/cdat_modules/curl_pkg.cmake
deleted file mode 100644
index 6946ad58d..000000000
--- a/CMake/cdat_modules/curl_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(CURL_MAJOR 7)
-set(CURL_MINOR 22)
-set(CURL_PATCH 0)
-set(CURL_MAJOR_SRC 7)
-set(CURL_MINOR_SRC 33)
-set(CURL_PATCH_SRC 0)
-set(CURL_URL ${LLNL_URL})
-set(CURL_GZ curl-${CURL_MAJOR_SRC}.${CURL_MINOR_SRC}.${CURL_PATCH_SRC}.tar.gz)
-set(CURL_MD5 c8a4eaac7ce7b0d1bf458d62ccd4ef93 )
-set(CURL_VERSION ${CURL_MAJOR_SRC}.${CURL_MINOR_SRC}.${CURL_PATCH_SRC})
-set(CURL_SOURCE ${CURL_URL}/${CURL_GZ})
-
-add_cdat_package(CURL "" "" OFF)
diff --git a/CMake/cdat_modules/curses_deps.cmake b/CMake/cdat_modules/curses_deps.cmake
deleted file mode 100644
index 1926beb7c..000000000
--- a/CMake/cdat_modules/curses_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(curses_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/curses_external.cmake b/CMake/cdat_modules/curses_external.cmake
deleted file mode 100644
index 2fe0ea547..000000000
--- a/CMake/cdat_modules/curses_external.cmake
+++ /dev/null
@@ -1,22 +0,0 @@
-set(curses_source "${CMAKE_CURRENT_BINARY_DIR}/build/curses")
-set(curses_install "${cdat_EXTERNALS}")
-set(curses_conf_args)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/curses_patch_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/curses_patch_step.cmake
-  @ONLY)
-
-set(curses_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/curses_patch_step.cmake)
-
-ExternalProject_Add(Curses
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${curses_source}
-  INSTALL_DIR ${curses_install}
-  URL ${CURSES_URL}/${CURSES_GZ}
-  URL_MD5 ${CURSES_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${curses_PATCH_COMMAND}
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${curses_conf_args} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${curses_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/curses_pkg.cmake b/CMake/cdat_modules/curses_pkg.cmake
deleted file mode 100644
index 29b6d5240..000000000
--- a/CMake/cdat_modules/curses_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set (package Curses)
-string(TOUPPER ${package} package_uc)
-
-set(${package_uc}_MAJOR_SRC 6)
-set(${package_uc}_MINOR_SRC 0)
-set(${package_uc}_PATCH_SRC 0)
-set(${package_uc}_URL ${LLNL_URL})
-#set(${package_uc}_GZ ncurses-${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.${${package_uc}_PATCH_SRC}.tar.gz)
-set(${package_uc}_GZ ncurses-${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.tar.gz)
-set(${pacakge_uc}_MD5 931959c0e1a9949999407b025cf44d3d)
-#set(${package_uc}_MD5 8cb9c412e5f2d96bc6f459aa8c6282a1)
-set(${package_uc}_SOURCE ${${package_uc}_URL}/${${package_uc}_GZ})
-set(${package_uc}_MD5 ${${package_uc}_MD5})
-
-set(${package_uc}_VERSION ${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC})
-add_cdat_package(${package} "" "" OFF)
diff --git a/CMake/cdat_modules/cycler_deps.cmake b/CMake/cdat_modules/cycler_deps.cmake
deleted file mode 100644
index 6c4db4535..000000000
--- a/CMake/cdat_modules/cycler_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(cycler_deps ${python_pkg} ${setuptools_pkg} ${six_pkg})
diff --git a/CMake/cdat_modules/cycler_external.cmake b/CMake/cdat_modules/cycler_external.cmake
deleted file mode 100644
index 5cd06b6e8..000000000
--- a/CMake/cdat_modules/cycler_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Cycler
-
-set(Cycler_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cycler")
-
-ExternalProject_Add(Cycler
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${Cycler_source}
-  URL ${CYCLER_URL}/${CYCLER_GZ}
-  URL_MD5 ${CYCLER_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install --old-and-unmanageable ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${cycler_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/cycler_pkg.cmake b/CMake/cdat_modules/cycler_pkg.cmake
deleted file mode 100644
index b2310801a..000000000
--- a/CMake/cdat_modules/cycler_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(CYCLER_MAJOR 0)
-set(CYCLER_MINOR 9)
-set(CYCLER_PATCH 0)
-set(CYCLER_VERSION ${CYCLER_MAJOR}.${CYCLER_MINOR}.${CYCLER_PATCH})
-set(CYCLER_URL ${LLNL_URL})
-set(CYCLER_GZ cycler-${CYCLER_VERSION}.tar.gz)
-set(CYCLER_MD5 c10ade5ca3f0aadf575eb25203b225a5)
-set(CYCLER_SOURCE ${CYCLER_URL}/${CYCLER_GZ})
-
-add_cdat_package_dependent(Cycler "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/cython_deps.cmake b/CMake/cdat_modules/cython_deps.cmake
deleted file mode 100644
index eab0a78be..000000000
--- a/CMake/cdat_modules/cython_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Cython_deps ${pkgconfig_pkg} ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/cython_external.cmake b/CMake/cdat_modules/cython_external.cmake
deleted file mode 100644
index a059bdd30..000000000
--- a/CMake/cdat_modules/cython_external.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-# Cython
-#
-# --old-and-unmangeable solution avoids the use of eggs
-# and  forces to create a directory.
-# this seems to fix issues of the type encountered in 
-# bug #1192 and #1486
-
-set(Cython_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cython")
-
-ExternalProject_Add(Cython
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${Cython_source}
-  URL ${CYTHON_URL}/${CYTHON_GZ}
-  URL_MD5 ${CYTHON_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install --old-and-unmanageable ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${Cython_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/cython_pkg.cmake b/CMake/cdat_modules/cython_pkg.cmake
deleted file mode 100644
index 3d1fe53d5..000000000
--- a/CMake/cdat_modules/cython_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(CYTHON_MAJOR 0)
-set(CYTHON_MINOR 23)
-set(CYTHON_PATCH 4)
-set(CYTHON_VERSION ${CYTHON_MAJOR}.${CYTHON_MINOR}.${CYTHON_PATCH})
-set(CYTHON_URL ${LLNL_URL} )
-set(CYTHON_GZ Cython-${CYTHON_VERSION}.tar.gz)
-set(CYTHON_MD5 157df1f69bcec6b56fd97e0f2e057f6e)
-set(CYTHON_SOURCE ${CYTHON_URL}/${CYTHON_GZ})
-
-add_cdat_package_dependent(Cython "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/data_deps.cmake b/CMake/cdat_modules/data_deps.cmake
deleted file mode 100644
index 8b1378917..000000000
--- a/CMake/cdat_modules/data_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/CMake/cdat_modules/data_pkg.cmake b/CMake/cdat_modules/data_pkg.cmake
deleted file mode 100644
index d4be977a8..000000000
--- a/CMake/cdat_modules/data_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-# Do we download the data ?
-option(CDAT_DOWNLOAD_SAMPLE_DATA "Download sample data" ON)
-if (CDAT_BUILD_LEAN)
-    message("[INFO] Disabling download data for ESGF")
-    set(CDAT_DOWNLOAD_SAMPLE_DATA OFF)
-endif()
-
-if (OFFLINE_BUILD)
-    message("[INFO] Disabling download data for offline build")
-    set(CDAT_DOWNLOAD_SAMPLE_DATA OFF)
-endif()
-
-if (CDAT_DOWNLOAD_SAMPLE_DATA)
-  set(SAMPLE_DATA "")
-else()
-  set(SAMPLE_DATA --disable-sampledata)
-endif()
diff --git a/CMake/cdat_modules/dateutils_deps.cmake b/CMake/cdat_modules/dateutils_deps.cmake
deleted file mode 100644
index 08ee1bda7..000000000
--- a/CMake/cdat_modules/dateutils_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(DATEUTILS_deps ${python_pkg} ${pip_pkg} ${six_pkg})
diff --git a/CMake/cdat_modules/dateutils_external.cmake b/CMake/cdat_modules/dateutils_external.cmake
deleted file mode 100644
index e157432d1..000000000
--- a/CMake/cdat_modules/dateutils_external.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-# --old-and-unmangeable solution avoids the use of eggs
-# and  forces to create a directory.
-# this seems to fix issues of the type encountered in 
-# bug #1192 and #1486
-
-set(nm DATEUTILS)
-set(USER_INSTALL_OPTIONS --old-and-unmanageable)
-include(pipinstaller)
-unset(USER_INSTALL_OPTIONS)
diff --git a/CMake/cdat_modules/dateutils_pkg.cmake b/CMake/cdat_modules/dateutils_pkg.cmake
deleted file mode 100644
index 9b1fe3fd5..000000000
--- a/CMake/cdat_modules/dateutils_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(DATEUTILS_MAJOR_SRC 2)
-set(DATEUTILS_MINOR_SRC 2)
-set(DATEUTILS_PATCH_SRC -)
-
-set(DATEUTILS_VERSION ${DATEUTILS_MAJOR_SRC}.${DATEUTILS_MINOR_SRC}.${DATEUTILS_PATCH_SRC})
-set(DATEUTILS_VERSION ${DATEUTILS_MAJOR_SRC}.${DATEUTILS_MINOR_SRC})
-set(DATEUTILS_GZ python-dateutil-${DATEUTILS_VERSION}.tar.gz)
-set(DATEUTILS_SOURCE ${LLNL_URL}/${DATEUTILS_GZ})
-set(DATEUTILS_MD5 c1f654d0ff7e33999380a8ba9783fd5c)
-
-add_cdat_package_dependent(DATEUTILS "" "" OFF "NOT CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/distribute_deps.cmake b/CMake/cdat_modules/distribute_deps.cmake
deleted file mode 100644
index d6313c1c9..000000000
--- a/CMake/cdat_modules/distribute_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(distribute_deps  ${setuptools_pkg})
diff --git a/CMake/cdat_modules/distribute_external.cmake b/CMake/cdat_modules/distribute_external.cmake
deleted file mode 100644
index c8f536fa0..000000000
--- a/CMake/cdat_modules/distribute_external.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-# create an external project to install MyProxyClient,
-# and configure and build it
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-if (NOT OFFLINE_BUILD) 
-    set(EGG_GZ "distribute==${DISTRIBUTE_VERSION}")
-else ()
-    set(EGG_GZ ${CDAT_PACKAGE_CACHE_DIR}/${DISTRIBUTE_GZ})
-endif()
-
-ExternalProject_Add(distribute
-  DOWNLOAD_COMMAND ""
-  WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  INSTALL_COMMAND ${EASY_INSTALL_BINARY} ${EGG_GZ}
-  DEPENDS ${distribute_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/distribute_pkg.cmake b/CMake/cdat_modules/distribute_pkg.cmake
deleted file mode 100644
index 2a0415f6b..000000000
--- a/CMake/cdat_modules/distribute_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(DISTRIBUTE_MAJOR_SRC 0)
-set(DISTRIBUTE_MINOR_SRC 6)
-set(DISTRIBUTE_PATCH_SRC 45)
-
-set (nm DISTRIBUTE)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(DISTRIBUTE_GZ distribute-${DISTRIBUTE_VERSION}.tar.gz)
-set(DISTRIBUTE_SOURCE ${LLNL_URL}/${DISTRIBUTE_GZ})
-set(DISTRIBUTE_MD5 8953f2c07e6700dabf2ec150129b8c31 )
-
-add_cdat_package(distribute "" "" OFF)
diff --git a/CMake/cdat_modules/docutils_deps.cmake b/CMake/cdat_modules/docutils_deps.cmake
deleted file mode 100644
index ef9fc3c52..000000000
--- a/CMake/cdat_modules/docutils_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(docutils_deps ${pip_pkg} ${jinja2_pkg})
diff --git a/CMake/cdat_modules/docutils_external.cmake b/CMake/cdat_modules/docutils_external.cmake
deleted file mode 100644
index 36bdaedb1..000000000
--- a/CMake/cdat_modules/docutils_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-# create an external project to install docutils,
-# and configure and build it
-
-set(nm docutils)
-set(OLD "OFF")
-include(pipinstaller)
diff --git a/CMake/cdat_modules/docutils_pkg.cmake b/CMake/cdat_modules/docutils_pkg.cmake
deleted file mode 100644
index 1aaa2505d..000000000
--- a/CMake/cdat_modules/docutils_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(docutils_MAJOR_SRC 0)
-set(docutils_MINOR_SRC 10)
-set(docutils_PATCH_SRC )
-
-set (nm docutils)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC})
-set(DOCUTILS_GZ docutils-${DOCUTILS_VERSION}.tar.gz)
-set(DOCUTILS_SOURCE ${LLNL_URL}/${DOCUTILS_GZ})
-set(DOCUTILS_MD5 d8d4660c08302c791b2d71a155a2f4bc )
-
-add_cdat_package_dependent(docutils "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/enum34_deps.cmake b/CMake/cdat_modules/enum34_deps.cmake
deleted file mode 100644
index ed6c021a4..000000000
--- a/CMake/cdat_modules/enum34_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(ENUM34_deps ${python_pkg})
diff --git a/CMake/cdat_modules/enum34_external.cmake b/CMake/cdat_modules/enum34_external.cmake
deleted file mode 100644
index 2edf14978..000000000
--- a/CMake/cdat_modules/enum34_external.cmake
+++ /dev/null
@@ -1,8 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm ENUM34)
-
-set(OLD OFF)
-include(pipinstaller)
-unset(OLD)
diff --git a/CMake/cdat_modules/enum34_pkg.cmake b/CMake/cdat_modules/enum34_pkg.cmake
deleted file mode 100644
index b4a57ec13..000000000
--- a/CMake/cdat_modules/enum34_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(ENUM34_MAJOR_SRC 1)
-set(ENUM34_MINOR_SRC 1)
-set(ENUM34_PATCH_SRC 2)
-
-set(ENUM34_VERSION ${ENUM34_MAJOR_SRC}.${ENUM34_MINOR_SRC}.${ENUM34_PATCH_SRC})
-set(ENUM34_GZ enum34-${ENUM34_VERSION}.tar.gz)
-set(ENUM34_SOURCE ${LLNL_URL}/${ENUM34_GZ})
-set(ENUM34_MD5 025bb71b3f9d2fad15d0ee53e48dc873)
-
-add_cdat_package_dependent(ENUM34 "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/eof2_deps.cmake b/CMake/cdat_modules/eof2_deps.cmake
deleted file mode 100644
index fc79a9356..000000000
--- a/CMake/cdat_modules/eof2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(eof2_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/eof2_external.cmake b/CMake/cdat_modules/eof2_external.cmake
deleted file mode 100644
index d1d98ee83..000000000
--- a/CMake/cdat_modules/eof2_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# EOF2
-#
-set(eof2_source "${CMAKE_CURRENT_BINARY_DIR}/build/eof2")
-
-ExternalProject_Add(eof2
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${eof2_source}
-  URL ${eof2_URL}/${eof2_GZ}
-  URL_MD5 ${eof2_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${eof2_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/eof2_pkg.cmake b/CMake/cdat_modules/eof2_pkg.cmake
deleted file mode 100644
index 89c374067..000000000
--- a/CMake/cdat_modules/eof2_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(eof2_MAJOR )
-set(eof2_MINOR )
-set(eof2_VERSION 620a921b46b)
-set(eof2_URL ${LLNL_URL} )
-set(eof2_GZ eof2-${eof2_VERSION}.zip)
-set(eof2_MD5 39e21a8633f272dc8dc748adb4c7f0e8)
-set(eof2_SOURCE ${eof2_URL}/${eof2_GZ})
-
-add_cdat_package_dependent(eof2 "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/eofs_deps.cmake b/CMake/cdat_modules/eofs_deps.cmake
deleted file mode 100644
index 2746e30df..000000000
--- a/CMake/cdat_modules/eofs_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(eofs_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/eofs_external.cmake b/CMake/cdat_modules/eofs_external.cmake
deleted file mode 100644
index 23c9f26fd..000000000
--- a/CMake/cdat_modules/eofs_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Windfield`
-#
-set(eofs_source "${CMAKE_CURRENT_BINARY_DIR}/build/eofs")
-
-ExternalProject_Add(eofs
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${eofs_source}
-  URL ${eofs_URL}/${eofs_GZ}
-  URL_MD5 ${eofs_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${eofs_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/eofs_pkg.cmake b/CMake/cdat_modules/eofs_pkg.cmake
deleted file mode 100644
index 7fbe79aeb..000000000
--- a/CMake/cdat_modules/eofs_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(eofs_MAJOR_SRC 1)
-set(eofs_MINOR_SRC 1)
-set(eofs_PATCH_SRC 0)
-set(eofs_VERSION ${eofs_MAJOR_SRC}.${eofs_MINOR_SRC}.${eofs_PATCH_SRC})
-set(eofs_URL ${LLNL_URL})
-set(eofs_GZ eofs-${eofs_VERSION}.tar.gz)
-set(eofs_MD5 52fce9f666d540069c90a6c109fcb3b4)
-set(eofs_SOURCE ${eofs_URL}/${eofs_GZ})
-
-add_cdat_package_dependent(eofs "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/esmf_deps.cmake b/CMake/cdat_modules/esmf_deps.cmake
deleted file mode 100644
index 631f2a9e4..000000000
--- a/CMake/cdat_modules/esmf_deps.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-set(ESMF_deps ${pkgconfig_pkg} ${python_pkg})
-
-if(CDAT_BUILD_ESMF_PARALLEL)
-  set(ESMF_deps ${mpi_pkg} ${ESMF_deps})
-endif()
diff --git a/CMake/cdat_modules/esmf_external.cmake b/CMake/cdat_modules/esmf_external.cmake
deleted file mode 100644
index eaf9518ad..000000000
--- a/CMake/cdat_modules/esmf_external.cmake
+++ /dev/null
@@ -1,78 +0,0 @@
-set(ESMF_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/ESMF" CACHE INTERNAL "")
-set(ESMF_source "${CMAKE_CURRENT_BINARY_DIR}/build/ESMF" CACHE INTERNAL "")
-set(ESMF_install "${cdat_EXTERNALS}" CACHE INTERNAL "")
-set(ESMF_pthreads "OFF")
-set(ESMF_os "${CDAT_BUILD_ESMF_OS}")
-set(ESMF_compiler "${CDAT_BUILD_ESMF_COMPILER}")
-set(ESMF_abi "${CDAT_BUILD_ESMF_ABI}")
-set(ESMF_openmp "ON")
-
-if(APPLE)
-  if("${CMAKE_C_COMPILER_ID}" STREQUAL "Clang" AND ${CMAKE_C_COMPILER_VERSION} VERSION_GREATER 4.2)
-    # xcode 5 clang does not support openmp
-    set(ESMF_openmp "OFF")
-  endif()
-endif()
-
-# Check if ESMF should be built in parallel
-set(emsf_enable_mpi FALSE)
-if(CDAT_BUILD_ESMF_PARALLEL)
-  set(emsf_enable_mpi TRUE)
-endif()
-
-if("${emsf_enable_mpi}")
-  set(ESMF_comm "${CDAT_BUILD_ESMF_COMM}")
-else()
-  message("[INFO] CDAT will build ESMF serial")
-  set(ESMF_comm "mpiuni")
-endif()
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMF_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/ESMF_make_step.cmake
-  @ONLY
-)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMF_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/ESMF_install_step.cmake
-  @ONLY
-)
-
-set(ESMF_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMF_make_step.cmake)
-set(ESMF_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMF_install_step.cmake)
-
-# ESMF Python interface. Install after ESMF is done.
-set(ESMP_source "${ESMF_source_dir}/ESMP")
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMP_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/ESMP_install_step.cmake
-  @ONLY
-)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMP_patch_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/ESMP_patch_step.cmake
-  @ONLY
-)
-
-set(ESMP_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMP_install_step.cmake)
-set(ESMP_patch_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMP_patch_step.cmake)
-
-ExternalProject_Add(ESMF
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ESMF_source_dir}
-  INSTALL_DIR ${ESMF_install}
-  URL ${ESMF_URL}/${ESMF_GZ}
-  URL_MD5 ${ESMF_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${ESMF_build_command}
-  INSTALL_COMMAND ${ESMF_install_command}
-  INSTALL_COMMAND ${ESMP_install_command}
-  PATCH_COMMAND ${ESMP_patch_command}
-  DEPENDS ${ESMF_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/esmf_pkg.cmake b/CMake/cdat_modules/esmf_pkg.cmake
deleted file mode 100644
index 0fde09269..000000000
--- a/CMake/cdat_modules/esmf_pkg.cmake
+++ /dev/null
@@ -1,57 +0,0 @@
-include(CMakeDependentOption)
-
-set(ESMF_MAJOR 6)
-set(ESMF_MINOR 3)
-set(ESMF_PATCH 0rp1)
-set(ESMP_MAJOR 01)
-set(ESMF_VERSION ${ESMF_MAJOR}_${ESMF_MINOR}_${ESMF_PATCH})
-set(ESMF_URL ${LLNL_URL})
-set(ESMF_GZ esmp.ESMF_${ESMF_VERSION}_ESMP_${ESMP_MAJOR}.tar.bz2)
-set(ESMF_MD5  a9be4fb51da1bc1fab027137297c5030 )
-set(ESMF_SOURCE ${ESMF_URL}/${ESMF_GZ})
-
-if (CDAT_BUILD_LEAN)
-    option(CDAT_BUILD_ESMF_ESMP "Build python version Earth System Modeling Framework" OFF)
-else ()
-    option(CDAT_BUILD_ESMF_ESMP "Build python version Earth System Modeling Framework" ON)
-endif()
-
-cmake_dependent_option(CDAT_BUILD_ESMF_PARALLEL
-  "Build parallel version of Earth System Modeling Framework library" ON
-  "CDAT_BUILD_PARALLEL" OFF
-)
-
-set(TXCMAKE_DIR ${cdat_SOURCE_DIR}/contrib/sciMake)
-include(${TXCMAKE_DIR}/sciFuncsMacros.cmake)
-include(${TXCMAKE_DIR}/sciFortranChecks.cmake)
-
-if("${CMAKE_Fortran_COMPILER_ID}" STREQUAL GNU)
-  string(REGEX MATCHALL "[0-9]+\\." test_version_list ${Fortran_VERSION})
-  string(SUBSTRING ${Fortran_VERSION} 0 3 Fortran_MAJOR_VERSION)
-  LIST(GET test_version_list 0 Fortran_MAJOR_VERSION)
-  LIST(GET test_version_list 1 Fortran_MINOR_VERSION)
-else()
-  set(Fortran_MINOR_VERSION "")
-endif()
-
-if(CDAT_BUILD_ESMF_ESMP)
-    if("${CMAKE_Fortran_COMPILER_ID}" STREQUAL GNU)
-      # GNU gfortran must be >= 4.3 last 4.2 gfortran release was 4.2.4
-      if(${Fortran_VERSION} VERSION_GREATER "4.2.9" AND ${Fortran_VERSION} VERSION_LESS "5.2.2")
-        ## On APPLE need to test for -arch as well!
-        add_cdat_package(ESMF "" "Build ESMF" ON)
-      else()
-        message(FATAL_ERROR "[ERROR] gfortran must be 4.3 <= version < 5.2.2; you have ${Fortran_VERSION}")
-      endif()
-    else()
-      add_cdat_package(ESMF "" "Build ESMF" ON)
-      message("[INFO] Fortran Compiler is: ${CMAKE_Fortran_COMPILER}")
-    endif()
-
-     # the following may need to be adjusted on Crays, otherwise the defaults will likely apply
-     set(CDAT_BUILD_ESMF_OS "${CMAKE_SYSTEM_NAME}" CACHE STRING "ESMF_OS env variable, may need to change to Unicos on Crays")
-     set(CDAT_BUILD_ESMF_COMPILER "gfortran" CACHE STRING "ESMF_COMPILER env variable, choices are gfortran, intel, pgi, g95, or nag")
-     set(CDAT_BUILD_ESMF_COMM "openmpi" CACHE STRING "ESMF_COMM env variable, choices are openmpi, mpiuni, mpi, mpich2, or mvapich2")
-     set(CDAT_BUILD_ESMF_ABI "64" CACHE STRING "ESMF_ABI env variable, choices are 32 or 64")
-endif()
-
diff --git a/CMake/cdat_modules/ezget_deps.cmake b/CMake/cdat_modules/ezget_deps.cmake
deleted file mode 100644
index e859d355d..000000000
--- a/CMake/cdat_modules/ezget_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(ezget_deps ${netcdf_pkg} ${libdrs_pkg} ${libcdms_pkg})
diff --git a/CMake/cdat_modules/ezget_external.cmake b/CMake/cdat_modules/ezget_external.cmake
deleted file mode 100644
index 078bebda0..000000000
--- a/CMake/cdat_modules/ezget_external.cmake
+++ /dev/null
@@ -1,43 +0,0 @@
-set(ezget_source "${CMAKE_CURRENT_BINARY_DIR}/build/ezget")
-set(ezget_install "${cdat_EXTERNALS}")
-
-configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ezget_Makefile.gfortran.in
-    ${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile
-    )
-
-if(DEFINED GIT_CMD_STR_EZGET )
-    message("[INFO] [ezget] Installing ${nm} from ${GIT_CMD_STR_EZGET}")
-    include(GetGitRevisionDescription)
-    set(URL_STR )
-    set(URL_MD5_STR )
-else()
-    message("[INFO] [ezget] Installed ${nm} from tarball ${EZGET_GZ}")
-    set(URL_STR URL ${EZGET_URL}/${EZGET_GZ})
-    set(URL_MD5_STR URL_MD5 ${EZGET_MD5})
-    set(GIT_CMD_STR_EZGET )
-    set(GIT_TAG )
-endif()
-set(EZGET_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile)
-set(EZGET_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile^^install)
-set(EZGET_BUILD_ARGS -fPIC)
-
-ExternalProject_Add(ezget
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ezget_source}
-  INSTALL_DIR ${ezget_install}
-  ${URL_STR}
-  ${URL_MD5_STR}
-  ${GIT_CMD_STR_EZGET}
-  ${GIT_TAG}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${EZGET_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${EZGET_MAKE_ARGS} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${EZGET_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${EZGET_MAKE_INSTALL_ARGS} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${ezget_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_EZGET)
-  unset(GIT_CMD_STR_EZGET)
-endif()
diff --git a/CMake/cdat_modules/ezget_pkg.cmake b/CMake/cdat_modules/ezget_pkg.cmake
deleted file mode 100644
index a18d67cfd..000000000
--- a/CMake/cdat_modules/ezget_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(EZGET_VERSION 1.0.0)
-set(EZGET_BRANCH master)
-set(EZGET_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/EzGet.git )
-
-set(GIT_CMD_STR_EZGET GIT_REPOSITORY ${EZGET_REPOSITORY})
-set(GIT_TAG GIT_TAG "${EZGET_BRANCH}")
-
-if (CDAT_BUILD_PCMDI)
-  add_cdat_package(ezget "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/ffi_deps.cmake b/CMake/cdat_modules/ffi_deps.cmake
deleted file mode 100644
index 548c543fe..000000000
--- a/CMake/cdat_modules/ffi_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(FFI_deps ${pip_pkg})
diff --git a/CMake/cdat_modules/ffi_external.cmake b/CMake/cdat_modules/ffi_external.cmake
deleted file mode 100644
index df33e73ac..000000000
--- a/CMake/cdat_modules/ffi_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(ffi_source "${CMAKE_CURRENT_BINARY_DIR}/build/ffi-${FFI_MAJOR}.${FFI_MINOR}.${FFI_PATCH}")
-set(ffi_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(FFI
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ffi_source}
-  INSTALL_DIR ${ffi_install}
-  URL ${FFI_URL}/${FFI_BZ2}
-  URL_MD5 ${FFI_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${FFI_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/ffi_pkg.cmake b/CMake/cdat_modules/ffi_pkg.cmake
deleted file mode 100644
index e7f0152d2..000000000
--- a/CMake/cdat_modules/ffi_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set( FFI_MAJOR 3  )
-set( FFI_MINOR 1  )
-set( FFI_PATCH 5  )
-set(FFI_URL ${LLNL_URL})
-set(FFI_BZ2 libffi-${FFI_MAJOR}.${FFI_MINOR}.tar.gz)
-set(FFI_MD5 f5898b29bbfd70502831a212d9249d10)
-
-set (nm FFI)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR})
-set(FFI_SOURCE ${FFI_URL}/${FFI_BZ2})
-
-add_cdat_package_dependent(FFI "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/ffmpeg_deps.cmake b/CMake/cdat_modules/ffmpeg_deps.cmake
deleted file mode 100644
index b92781684..000000000
--- a/CMake/cdat_modules/ffmpeg_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(FFMPEG_deps ${pkgconfig_pkg} ${x264_pkg})
diff --git a/CMake/cdat_modules/ffmpeg_external.cmake b/CMake/cdat_modules/ffmpeg_external.cmake
deleted file mode 100644
index 1a2fe723a..000000000
--- a/CMake/cdat_modules/ffmpeg_external.cmake
+++ /dev/null
@@ -1,32 +0,0 @@
-# The FFMPEG external project for ParaView
-set(ffmpeg_source "${CMAKE_CURRENT_BINARY_DIR}/build/FFMPEG")
-set(ffmpeg_install "${cdat_EXTERNALS}")
-set(ENV{PATH} $ENV{PATH}:${cdat_EXTERNALS}/bin)
-
-find_program(YASM_BIN "yasm")
-
-if (NOT YASM_BIN)
-  set(ffmpeg_conf_args --disable-yasm^^--enable-gpl^^--enable-libx264^^--extra-cxxflags=${ffmpeg_source}^^--enable-shared^^--enable-zlib)
-else()
-  set(ffmpeg_conf_args --enable-gpl^^--enable-libx264^^--extra-cxxflags=${ffmpeg_source}^^--enable-shared^^--enable-zlib)
-endif()
-
-ExternalProject_Add(FFMPEG
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ffmpeg_source}
-  INSTALL_DIR ${ffmpeg_install}
-  URL ${FFMPEG_URL}/${FFMPEG_GZ}
-  URL_MD5 ${FFMPEG_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${ffmpeg_conf_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${FFMPEG_deps}
-  ${ep_log_options}
-  )
-
-set(FFMPEG_INCLUDE_DIR ${ffmpeg_install}/include)
-set(FFMPEG_avcodec_LIBRARY ${ffmpeg_install}/lib/libavcodec${_LINK_LIBRARY_SUFFIX})
-set(FFMPEG_avformat_LIBRARY ${ffmpeg_install}/lib/libavformat${_LINK_LIBRARY_SUFFIX})
-set(FFMPEG_avutil_LIBRARY ${ffmpeg_install}/lib/libavutil${_LINK_LIBRARY_SUFFIX})
-set(FFMPEG_swscale_LIBRARY ${ffmpeg_install}/lib/libswscale${_LINK_LIBRARY_SUFFIX})
diff --git a/CMake/cdat_modules/ffmpeg_pkg.cmake b/CMake/cdat_modules/ffmpeg_pkg.cmake
deleted file mode 100644
index 65db29865..000000000
--- a/CMake/cdat_modules/ffmpeg_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(FFMPEG_MAJOR_SRC 2)
-set(FFMPEG_MINOR_SRC 7)
-set(FFMPEG_PATCH_SRC 0)
-set(FFMPEG_URL ${LLNL_URL})
-set(FFMPEG_GZ ffmpeg-${FFMPEG_MAJOR_SRC}.${FFMPEG_MINOR_SRC}.tar.gz)
-set(FFMPEG_MD5 3ad0554981faf2c6deef23a1cd4c8c57)
-
-set (nm FFMPEG)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC})
-set(FFMPEG_SOURCE ${FFMPEG_URL}/${FFMPEG_GZ})
-set(FFMPEG_ROOT ${cdat_EXTERNALS} CACHE PATH "Path to FFMPEG root directory")
-
-add_cdat_package_dependent(FFMPEG "" "" ON "CDAT_BUILD_GRAPHICS" OFF) 
diff --git a/CMake/cdat_modules/fiona_deps.cmake b/CMake/cdat_modules/fiona_deps.cmake
deleted file mode 100644
index 624113df2..000000000
--- a/CMake/cdat_modules/fiona_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Fiona_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${shapely_pkg} ${gdal_pkg} ${cligj_pkg})
diff --git a/CMake/cdat_modules/fiona_external.cmake b/CMake/cdat_modules/fiona_external.cmake
deleted file mode 100644
index 4d7e45c75..000000000
--- a/CMake/cdat_modules/fiona_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-# create an external project to install Fiona
-# and configure and build it
-set(nm Fiona)
-set(USR_ENVS "GDAL_CONFIG=${cdat_EXTERNALS}/bin/gdal-config")
-#set(USER_BUILD_EXT_OPTS "build_ext -I${cdat_EXTERNALS}/include -L${cdat_EXTERNALS}/lib -lgdal")
-include(pipinstaller)
diff --git a/CMake/cdat_modules/fiona_pkg.cmake b/CMake/cdat_modules/fiona_pkg.cmake
deleted file mode 100644
index 1cd902434..000000000
--- a/CMake/cdat_modules/fiona_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set( FIONA_MAJOR_SRC 1  )
-set( FIONA_MINOR_SRC 6 )
-set( FIONA_PATCH_SRC 0  )
-set(FIONA_URL ${LLNL_URL})
-set(FIONA_GZ
-    Fiona-${FIONA_MAJOR_SRC}.${FIONA_MINOR_SRC}.${FIONA_PATCH_SRC}.tar.gz)
-set(FIONA_MD5 40f945898c550721db715f69658cf7e9 )
-set(FIONA_SOURCE ${FIONA_URL}/${FIONA_GZ})
-
-set (nm FIONA)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-if (CDAT_BUILD_ALL)
-  add_cdat_package(Fiona "" "" ON)
-else()
-  add_cdat_package(Fiona "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/flake8_deps.cmake b/CMake/cdat_modules/flake8_deps.cmake
deleted file mode 100644
index 490185ec0..000000000
--- a/CMake/cdat_modules/flake8_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(flake8_deps ${python_pkg} ${setuptools_pkg} ${pyflakes_pkg} ${pep8_pkg} ${mccabe_pkg})
diff --git a/CMake/cdat_modules/flake8_external.cmake b/CMake/cdat_modules/flake8_external.cmake
deleted file mode 100644
index 5f05cb3f8..000000000
--- a/CMake/cdat_modules/flake8_external.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-ExternalProject_Add(flake8
-  DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}"
-  SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/flake8"
-  URL "${FLAKE8_SOURCE}"
-  URL_MD5 ${FLAKE8_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build
-  INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${flake8_deps}
-  ${ep_log_options}
-  )
-
-if (APPLE)
-  set(FLAKE8_EXECUTABLE
-    "${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/flake8")
-else()
-  set(FLAKE8_EXECUTABLE "${CMAKE_INSTALL_PREFIX}/bin/flake8")
-endif()
diff --git a/CMake/cdat_modules/flake8_pkg.cmake b/CMake/cdat_modules/flake8_pkg.cmake
deleted file mode 100644
index f10ebf053..000000000
--- a/CMake/cdat_modules/flake8_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(nm flake8)
-string(TOUPPER ${nm} uc_nm)
-
-set(${uc_nm}_MAJOR 2)
-set(${uc_nm}_MINOR 4)
-set(${uc_nm}_PATCH 1)
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_URL ${LLNL_URL})
-set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz)
-set(${uc_nm}_MD5 ed45d3db81a3b7c88bd63c6e37ca1d65)
-
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ})
-
-if(BUILD_TESTING)
-  add_cdat_package(${nm} "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/fontconfig_deps.cmake b/CMake/cdat_modules/fontconfig_deps.cmake
deleted file mode 100644
index 87455d135..000000000
--- a/CMake/cdat_modules/fontconfig_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(fontconfig_deps ${pkgconfig_pkg} ${libxml2_pkg} ${freetype_pkg})
diff --git a/CMake/cdat_modules/fontconfig_external.cmake b/CMake/cdat_modules/fontconfig_external.cmake
deleted file mode 100644
index fa57bc888..000000000
--- a/CMake/cdat_modules/fontconfig_external.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(fontconfig_source "${CMAKE_CURRENT_BINARY_DIR}/build/fontconfig")
-set(fontconfig_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(fontconfig
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${fontconfig_source}
-  INSTALL_DIR ${fontconfig_install}
-  URL ${FTCFG_URL}/${FTCFG_GZ}
-  URL_MD5 ${FTCFG_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=--disable-docs^^--enable-libxml2 -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${fontconfig_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/fontconfig_pkg.cmake b/CMake/cdat_modules/fontconfig_pkg.cmake
deleted file mode 100644
index 959811582..000000000
--- a/CMake/cdat_modules/fontconfig_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(FTCFG_MAJOR 2)
-set(FTCFG_MINOR 4)
-set(FTCFG_PATCH 2)
-set(FTCFG_MAJOR_SRC 2)
-set(FTCFG_MINOR_SRC 10)
-set(FTCFG_PATCH_SRC 1)
-set(FTCFG_URL ${LLNL_URL})
-set(FTCFG_GZ fontconfig-${FTCFG_MAJOR_SRC}.${FTCFG_MINOR_SRC}.${FTCFG_PATCH_SRC}.tar.gz)
-set(FTCFG_MD5 43808dd9153cff1c3ac302e94e024814)
-
-set (nm FTCFG)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(FONTCONFIG_VERSION ${FTCFG_VERSION})
-set(FONTCONFIG_SOURCE ${FTCFG_URL}/${FTCFG_GZ})
-
-add_cdat_package_dependent(fontconfig "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/freetype_deps.cmake b/CMake/cdat_modules/freetype_deps.cmake
deleted file mode 100644
index 6d451c65f..000000000
--- a/CMake/cdat_modules/freetype_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(freetype_deps ${pkgconfig_pkg} ${png_pkg})
diff --git a/CMake/cdat_modules/freetype_external.cmake b/CMake/cdat_modules/freetype_external.cmake
deleted file mode 100644
index 3af294399..000000000
--- a/CMake/cdat_modules/freetype_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-
-set(freetype_source "${CMAKE_CURRENT_BINARY_DIR}/build/freetype")
-set(freetype_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(freetype
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${freetype_source}
-  INSTALL_DIR ${freetype_install}
-  URL ${FT_URL}/${FT_GZ}
-  URL_MD5 ${FT_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${freetype_deps}
-  ${ep_log_options}
-)
-
-#ln -sf @EXTERNALS@/include/freetype2/freetype @EXTERNALS@/include/freetype
-
-ExternalProject_Add_Step(freetype symlink
-  COMMAND ${CMAKE_COMMAND} -E create_symlink ${cdat_EXTERNALS}/include/freetype2/freetype ${cdat_EXTERNALS}/include/freetype
-  COMMENT "Symlink include/freetype2/freetype include directory as include/freetype"
-  DEPENDEES install
-)
diff --git a/CMake/cdat_modules/freetype_pkg.cmake b/CMake/cdat_modules/freetype_pkg.cmake
deleted file mode 100644
index 596ce205a..000000000
--- a/CMake/cdat_modules/freetype_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(FT_MAJOR 9)
-set(FT_MINOR 7)
-set(FT_PATCH 3)
-set(FT_MAJOR_SRC 2)
-set(FT_MINOR_SRC 4)
-set(FT_PATCH_SRC 10)
-set(FT_URL ${LLNL_URL})
-set(FT_GZ freetype-${FT_MAJOR_SRC}.${FT_MINOR_SRC}.${FT_PATCH_SRC}.tar.gz)
-set(FT_MD5 4b1887901730ff2e12562ef30fa521d5)
-
-set (nm FT)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(FREETYPE_VERSION ${FT_VERSION})
-set(FREETYPE_SOURCE ${FT_URL}/${FT_GZ})
-
-
-add_cdat_package_dependent(freetype "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/g2clib_deps.cmake b/CMake/cdat_modules/g2clib_deps.cmake
deleted file mode 100644
index a2994c832..000000000
--- a/CMake/cdat_modules/g2clib_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(g2clib_deps ${pkgconfig_pkg} ${png_pkg} ${zlib_pkg} ${jasper_pkg})
diff --git a/CMake/cdat_modules/g2clib_external.cmake b/CMake/cdat_modules/g2clib_external.cmake
deleted file mode 100644
index 5a1406979..000000000
--- a/CMake/cdat_modules/g2clib_external.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-
-set(g2clib_source "${CMAKE_CURRENT_BINARY_DIR}/build/g2clib")
-set(g2clib_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(g2clib
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${g2clib_source}
-  INSTALL_DIR ${g2clib_install}
-  URL ${G2CLIB_URL}/${G2CLIB_GZ}
-  URL_MD5 ${G2CLIB_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS ${g2clib_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/g2clib_pkg.cmake b/CMake/cdat_modules/g2clib_pkg.cmake
deleted file mode 100644
index 41580b8eb..000000000
--- a/CMake/cdat_modules/g2clib_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(G2CLIB_MAJOR 1)
-set(G2CLIB_MINOR 4)
-set(G2CLIB_PATCH 0b)
-set(G2CLIB_VERSION ${G2CLIB_MAJOR}.${G2CLIB_MINOR}.${G2CLIB_PATCH})
-set(G2CLIB_URL ${LLNL_URL})
-set(G2CLIB_GZ g2clib-${G2CLIB_VERSION}.tar.gz)
-set(G2CLIB_MD5 72378d980b2f4d6b09fd86e23e884a4b)
-set(G2CLIB_SOURCE ${G2CLIB_URL}/${G2CLIB_GZ})
-
-
-add_cdat_package(g2clib "" "" ON)
diff --git a/CMake/cdat_modules/gdal_deps.cmake b/CMake/cdat_modules/gdal_deps.cmake
deleted file mode 100644
index 3fbc8ce4d..000000000
--- a/CMake/cdat_modules/gdal_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(gdal_deps ${pkgconfig_pkg} ${python_pkg} ${uuid_pkg} ${netcdf_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${jasper_pkg} ${geos_pkg} ${setuptools_pkg} ${proj4_pkg})
diff --git a/CMake/cdat_modules/gdal_external.cmake b/CMake/cdat_modules/gdal_external.cmake
deleted file mode 100644
index 33e4c8e60..000000000
--- a/CMake/cdat_modules/gdal_external.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-set(gdal_source "${CMAKE_CURRENT_BINARY_DIR}/build/gdal")
-set(gdal_install "${cdat_EXTERNALS}")
-set(gdal_configure_args "--prefix=${cdat_EXTERNALS}^^--with-hdf5=${cdat_EXTERNALS}^^--with-netcdf=${cdat_EXTERNALS}^^--with-curl=${cdat_EXTERNALS}^^--with-geos=${cdat_EXTERNALS}/bin/geos-config^^--with-python=${PYTHON_EXECUTABLE}^^--with-jpeg=no^^--with-libtiff=internal^^--without-jpeg12^^--with-geotiff=internal^^--with-static-proj4=${cdat_EXTERNALS}/proj4")
-
-if (CDAT_BUILD_PARALLEL)
-  set(configure_file "cdatmpi_configure_step.cmake")
-else()
-  set(configure_file "cdat_configure_step.cmake")
-endif()
-message("[GDAL] CONF FILE IS:"${configure_file})
-ExternalProject_Add(gdal
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${gdal_source}
-  INSTALL_DIR ${gdal_install}
-  URL ${GDAL_URL}/${GDAL_GZ}
-  URL_MD5 ${GDAL_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${gdal_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file}
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS "${gdal_deps}"
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/gdal_pkg.cmake b/CMake/cdat_modules/gdal_pkg.cmake
deleted file mode 100644
index d8756b2bb..000000000
--- a/CMake/cdat_modules/gdal_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set( GDAL_MAJOR 1  )
-set( GDAL_MINOR 11  )
-set( GDAL_PATCH 2  )
-set(GDAL_URL ${LLNL_URL})
-set(GDAL_GZ gdal-${GDAL_MAJOR}.${GDAL_MINOR}.${GDAL_PATCH}.tar.gz)
-set(GDAL_MD5 866a46f72b1feadd60310206439c1a76 )
-
-set (nm GDAL)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(GDAL_SOURCE ${GDAL_URL}/${GDAL_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(gdal "" "Build the Geospatial Data Abstraction Library (GDAL) and python osgeo module" ON)
-else()
-  add_cdat_package(gdal "" "Build the Geospatial Data Abstraction Library (GDAL) and python osgeo module" OFF)
-endif()
diff --git a/CMake/cdat_modules/geos_deps.cmake b/CMake/cdat_modules/geos_deps.cmake
deleted file mode 100644
index 3b1cbf81b..000000000
--- a/CMake/cdat_modules/geos_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(GEOS_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/geos_external.cmake b/CMake/cdat_modules/geos_external.cmake
deleted file mode 100644
index d7f8e6567..000000000
--- a/CMake/cdat_modules/geos_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(geos_source "${CMAKE_CURRENT_BINARY_DIR}/build/geos-${GEOS_MAJOR}.${GEOS_MINOR}.${GEOS_PATCH}")
-set(geos_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(GEOS
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${geos_source}
-  INSTALL_DIR ${geos_install}
-  URL ${GEOS_URL}/${GEOS_BZ2}
-  URL_MD5 ${GEOS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${GEOS_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/geos_pkg.cmake b/CMake/cdat_modules/geos_pkg.cmake
deleted file mode 100644
index d2927b020..000000000
--- a/CMake/cdat_modules/geos_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set( GEOS_MAJOR 3  )
-set( GEOS_MINOR 3  )
-set( GEOS_PATCH 5  )
-set(GEOS_URL ${LLNL_URL})
-set(GEOS_BZ2 geos-${GEOS_MAJOR}.${GEOS_MINOR}.${GEOS_PATCH}.tar.bz2)
-set(GEOS_MD5 2ba61afb7fe2c5ddf642d82d7b16e75b)
-
-set (nm GEOS)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(GEOS_SOURCE ${GEOS_URL}/${GEOS_BZ2})
-
-add_cdat_package_dependent(GEOS "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/ghostscript_deps.cmake b/CMake/cdat_modules/ghostscript_deps.cmake
deleted file mode 100644
index 0f4bedf96..000000000
--- a/CMake/cdat_modules/ghostscript_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(ghostscript_deps ${pkgconfig_pkg} ${zlib_pkg} ${jpeg_pkg} ${png_pkg} ${fontconfig_pkg} ${freetype_pkg} ${pixman_pkg} ${libXSLT_pkg} ${libXML2_pkg})
diff --git a/CMake/cdat_modules/ghostscript_external.cmake b/CMake/cdat_modules/ghostscript_external.cmake
deleted file mode 100644
index fc322b66a..000000000
--- a/CMake/cdat_modules/ghostscript_external.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-
-set(ghostscript_source "${CMAKE_CURRENT_BINARY_DIR}/build/ghostscript")
-set(ghostscript_install "${cdat_EXTERNALS}")
-
-set(ghostscripts_args "--with-drivers=PS,BMP --disable-cups")
-
-ExternalProject_Add(ghostscript
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ghostscript_source}
-  INSTALL_DIR ${ghostscript_install}
-  URL ${GS_URL}/${GS_GZ}
-  URL_MD5 ${GS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${ghostscripts_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DBUILD_ARGS=${ghostscript_source} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${ghostscript_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/ghostscript_pkg.cmake b/CMake/cdat_modules/ghostscript_pkg.cmake
deleted file mode 100644
index d56e90ece..000000000
--- a/CMake/cdat_modules/ghostscript_pkg.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-add_cdat_package(ghostscript "" "" OFF)
-
diff --git a/CMake/cdat_modules/gifsicle_external.cmake b/CMake/cdat_modules/gifsicle_external.cmake
deleted file mode 100644
index 853f5d55a..000000000
--- a/CMake/cdat_modules/gifsicle_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(gifsicle_source "${CMAKE_CURRENT_BINARY_DIR}/build/gifsicle")
-set(gifsicle_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(gifsicle
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${gifsicle_source}
-  INSTALL_DIR ${gifsicle_install}
-  URL ${GIFSICLE_URL}/${GIFSICLE_GZ}
-  URL_MD5 ${GIFSICLE_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${gifsicle_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/gsw_deps.cmake b/CMake/cdat_modules/gsw_deps.cmake
deleted file mode 100644
index 9d0b19879..000000000
--- a/CMake/cdat_modules/gsw_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(gsw_deps ${python_pkg} ${numpy_pkg})
diff --git a/CMake/cdat_modules/gsw_external.cmake b/CMake/cdat_modules/gsw_external.cmake
deleted file mode 100644
index 24c3c0e58..000000000
--- a/CMake/cdat_modules/gsw_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-# gsw (Gibbs Seawater)
-#
-set(gsw_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/gsw")
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/gsw_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/gsw_build_step.cmake"
-  @ONLY
-)
-
-set(gsw_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/gsw_build_step.cmake)
-
-ExternalProject_Add(gsw
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${gsw_source_dir}
-  URL ${GSW_URL}/${GSW_GZ}
-  URL_MD5 ${GSW_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${gsw_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${gsw_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/gsw_pkg.cmake b/CMake/cdat_modules/gsw_pkg.cmake
deleted file mode 100644
index 127a403c0..000000000
--- a/CMake/cdat_modules/gsw_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set( GSW_MAJOR 3 )
-set( GSW_MINOR 0 )
-set( GSW_PATCH 3 )
-set( GSW_VERSION ${GSW_MAJOR}.${GSW_MINOR}.${GSW_PATCH} )
-set( GSW_URL ${LLNL_URL} )
-set( GSW_GZ python-gsw-${GSW_VERSION}.tar.gz )
-set( GSW_MD5 a522a9ab6ab41fb70064e0378e904ffd )
-
-set (nm GSW)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(GSW_SOURCE ${GSW_URL}/${GSW_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(gsw "" "" ON)
-else()
-  add_cdat_package(gsw "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/gui_support_deps.cmake b/CMake/cdat_modules/gui_support_deps.cmake
deleted file mode 100644
index 3c7bc7379..000000000
--- a/CMake/cdat_modules/gui_support_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(gui_support_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/gui_support_external.cmake b/CMake/cdat_modules/gui_support_external.cmake
deleted file mode 100644
index 5d10b82e7..000000000
--- a/CMake/cdat_modules/gui_support_external.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-set(gui_support_source_dir "${cdat_SOURCE_DIR}/Packages/gui_support")
-set(gui_support_binary_dir "${CMAKE_CURRENT_BINARY_DIR}/build/gui_support-build")
-set(runtime_library_path ${CMAKE_INSTALL_PREFIX}/lib:${cdat_EXTERNALS}/lib)
-
-#  BUILD_COMMAND env EXTERNALS=${cdat_EXTERNALS} ${LIBRARY_PATH}=${runtime_library_path} ${PYTHON_EXECUTABLE} setup.py build
-#  INSTALL_COMMAND env EXTERNALS=${cdat_EXTERNALS} ${LIBRARY_PATH}=${runtime_library_path} ${PYTHON_EXECUTABLE} setup.py install --prefix=${CMAKE_INSTALL_PREFIX}
-ExternalProject_Add(gui_support
-  DOWNLOAD_DIR ""
-  SOURCE_DIR ${gui_support_source_dir}
-  BINARY_DIR ${gui_support_binary_dir}
-  BUILD_IN_SOURCE 0
-  BUILD_COMMAND ""
-#  BUILD_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} EXTERNALS=${cdat_EXTERNALS}  ${PYTHON_EXECUTABLE} ${gui_support_source_dir}/setup.py build -b ${gui_support_binary_dir}
-  INSTALL_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} EXTERNALS=${cdat_EXTERNALS} ${PYTHON_EXECUTABLE} ${gui_support_source_dir}/setup.py build -b ${gui_support_binary_dir} install ${PYTHON_EXTRA_PREFIX}
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  DEPENDS ${gui_support_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/gui_support_pkg.cmake b/CMake/cdat_modules/gui_support_pkg.cmake
deleted file mode 100644
index f4fcee7d6..000000000
--- a/CMake/cdat_modules/gui_support_pkg.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-set(GUI_SUPPORT_SOURCE N/A)
-set(GUI_SUPPORT_VERSION N/A)
-set(GUI_SUPPORT_MD5 N/A)
-
-add_cdat_package_dependent(gui_support "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/h5py_deps.cmake b/CMake/cdat_modules/h5py_deps.cmake
deleted file mode 100644
index f1ce2f917..000000000
--- a/CMake/cdat_modules/h5py_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(h5py_deps ${python_pkg} ${hdf5_pkg} ${numpy_pkg} ${cython_pkg})
diff --git a/CMake/cdat_modules/h5py_external.cmake b/CMake/cdat_modules/h5py_external.cmake
deleted file mode 100644
index 83762f1e6..000000000
--- a/CMake/cdat_modules/h5py_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-# h5py
-#
-set(h5py_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/h5py")
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/h5py_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/h5py_build_step.cmake"
-  @ONLY
-)
-
-set(h5py_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/h5py_build_step.cmake)
-
-ExternalProject_Add(h5py
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${h5py_source_dir}
-  URL ${H5PY_URL}/${H5PY_GZ}
-  URL_MD5 ${H5PY_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${h5py_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${h5py_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/h5py_pkg.cmake b/CMake/cdat_modules/h5py_pkg.cmake
deleted file mode 100644
index 3a753fc2b..000000000
--- a/CMake/cdat_modules/h5py_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(H5PY_MAJOR 2)
-set(H5PY_MINOR 5)
-set(H5PY_PATCH 0)
-set(H5PY_VERSION ${H5PY_MAJOR}.${H5PY_MINOR}.${H5PY_PATCH})
-set(H5PY_URL ${LLNL_URL})
-set(H5PY_GZ h5py-${H5PY_VERSION}.tar.gz)
-set(H5PY_MD5 969c78e366e8e86dcd0376d945a72dd0)
-
-set (nm H5PY)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(H5PY_SOURCE ${H5PY_URL}/${H5PY_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(h5py "" "" ON)
-else()
-  add_cdat_package(h5py "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/hdf4_external.cmake b/CMake/cdat_modules/hdf4_external.cmake
deleted file mode 100644
index 7b34bef0b..000000000
--- a/CMake/cdat_modules/hdf4_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-
-set(HDF4_source "${CMAKE_CURRENT_BINARY_DIR}/HDF4")
-set(HDF4_install "${cdat_EXTERNALS}")
-
-if(NOT CMAKE_Fortran_COMPILER)
-  set(hdf4_configure_args --disable-fortran)
-else()
-  set(hdf4_configure_args --enable-fortran)
-endif()
-
-ExternalProject_Add(HDF4
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${HDF4_source}
-  INSTALL_DIR ${HDF4_install}
-  URL ${HDF4_URL}/${HDF4_GZ}
-  URL_MD5 ${HDF4_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${hdf4_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${HDF4_deps}
-)
-
-set(HDF4_DIR "${HDF4_binary}" CACHE PATH "HDF4 binary directory" FORCE)
-mark_as_advanced(HDF4_DIR)
diff --git a/CMake/cdat_modules/hdf5_deps.cmake b/CMake/cdat_modules/hdf5_deps.cmake
deleted file mode 100644
index 45a66d741..000000000
--- a/CMake/cdat_modules/hdf5_deps.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(HDF5_deps ${pkgconfig_pkg} ${libxml2_pkg} ${libxslt_pkg} ${zlib_pkg})
-if (CDAT_BUILD_PARALLEL)
-  list(APPEND HDF5_deps ${mpi_pkg})
-endif()
diff --git a/CMake/cdat_modules/hdf5_external.cmake b/CMake/cdat_modules/hdf5_external.cmake
deleted file mode 100644
index 7f20675f7..000000000
--- a/CMake/cdat_modules/hdf5_external.cmake
+++ /dev/null
@@ -1,40 +0,0 @@
-
-set(HDF5_source "${CMAKE_CURRENT_BINARY_DIR}/build/HDF5")
-set(HDF5_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/hdf5_patch_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/hdf5_patch_step.cmake
-  @ONLY)
-if (CDAT_BUILD_PARALLEL)
-  set(hdf5_configure_args "--enable-parallel")
-  set(hdf5_additional_cflags "-w -fPIC")
-  set(configure_file "cdatmpi_configure_step.cmake")
-else()
-  set(hdf5_configure_args "")
-  set(hdf5_additional_cflags "-w")
-  set(configure_file "cdat_configure_step.cmake")
-endif()
-# we disable HDF5 warnings because it has way too many of them.
-ExternalProject_Add(HDF5
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${HDF5_source}
-  INSTALL_DIR ${HDF5_install}
-  URL ${HDF5_URL}/${HDF5_GZ}
-  URL_MD5 ${HDF5_MD5}
-  BUILD_IN_SOURCE 1
-  #PATCH_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/hdf5_patch_step.cmake
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${hdf5_configure_args} -DADDITIONAL_CFLAGS=${hdf5_additional_cflags} -DADDITIONAL_CPPFPAGS=-w -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/${configure_file}
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  LOG_BUILD 1
-  DEPENDS ${HDF5_deps}
-  ${ep_log_options}
-)
-
-if(WIN32)
-  set(HDF5_INCLUDE_DIR ${HDF5_install}/include)
-  set(HDF5_LIBRARY ${HDF5_install}/lib/hdf5dll${_LINK_LIBRARY_SUFFIX})
-else()
-  set(HDF5_INCLUDE_DIR ${HDF5_install}/include)
-  set(HDF5_LIBRARY ${HDF5_install}/lib/libhdf5${_LINK_LIBRARY_SUFFIX})
-endif()
diff --git a/CMake/cdat_modules/hdf5_pkg.cmake b/CMake/cdat_modules/hdf5_pkg.cmake
deleted file mode 100644
index 4599c9c95..000000000
--- a/CMake/cdat_modules/hdf5_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(HDF5_MAJOR_SRC 1)
-set(HDF5_MINOR_SRC 8)
-set(HDF5_PATCH_SRC 15)
-set(HDF5_URL ${LLNL_URL})
-set(HDF5_GZ hdf5-${HDF5_MAJOR_SRC}.${HDF5_MINOR_SRC}.${HDF5_PATCH_SRC}.tar.gz)
-set(HDF5_MD5 03cccb5b33dbe975fdcd8ae9dc021f24 )
-
-set (nm HDF5)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(HDF5_SOURCE ${HDF5_URL}/${HDF5_GZ})
-
-add_cdat_package(HDF5 "" "" ON)
diff --git a/CMake/cdat_modules/idna_deps.cmake b/CMake/cdat_modules/idna_deps.cmake
deleted file mode 100644
index e2aa851a8..000000000
--- a/CMake/cdat_modules/idna_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(IDNA_deps ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/idna_external.cmake b/CMake/cdat_modules/idna_external.cmake
deleted file mode 100644
index a987e968f..000000000
--- a/CMake/cdat_modules/idna_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm IDNA)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/idna_pkg.cmake b/CMake/cdat_modules/idna_pkg.cmake
deleted file mode 100644
index 5bf853929..000000000
--- a/CMake/cdat_modules/idna_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(IDNA_MAJOR_SRC 2)
-set(IDNA_MINOR_SRC 0)
-set(IDNA_PATCH_SRC 0)
-
-set(IDNA_VERSION ${IDNA_MAJOR_SRC}.${IDNA_MINOR_SRC})
-set(IDNA_GZ idna-${IDNA_VERSION}.tar.gz)
-set(IDNA_SOURCE ${LLNL_URL}/${IDNA_GZ})
-set(IDNA_MD5 9ef51e6e51ea91b6c62426856c8a5b7c)
-
-add_cdat_package_dependent(IDNA "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/ipaddress_deps.cmake b/CMake/cdat_modules/ipaddress_deps.cmake
deleted file mode 100644
index ca515655f..000000000
--- a/CMake/cdat_modules/ipaddress_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(IPADDRESS_deps ${pip_pkg} ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/ipaddress_external.cmake b/CMake/cdat_modules/ipaddress_external.cmake
deleted file mode 100644
index 4773cea4c..000000000
--- a/CMake/cdat_modules/ipaddress_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm IPADDRESS)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/ipaddress_pkg.cmake b/CMake/cdat_modules/ipaddress_pkg.cmake
deleted file mode 100644
index 68ce4f629..000000000
--- a/CMake/cdat_modules/ipaddress_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(IPADDRESS_MAJOR_SRC 1)
-set(IPADDRESS_MINOR_SRC 0)
-set(IPADDRESS_PATCH_SRC 16)
-
-set(IPADDRESS_VERSION ${IPADDRESS_MAJOR_SRC}.${IPADDRESS_MINOR_SRC}.${IPADDRESS_PATCH_SRC})
-set(IPADDRESS_GZ ipaddress-${IPADDRESS_VERSION}.tar.gz)
-set(IPADDRESS_SOURCE ${LLNL_URL}/${IPADDRESS_GZ})
-set(IPADDRESS_MD5 1e27b62aa20f5b6fc200b2bdbf0d0847)
-
-add_cdat_package_dependent(IPADDRESS "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/ipython_deps.cmake b/CMake/cdat_modules/ipython_deps.cmake
deleted file mode 100644
index 0828bdbd9..000000000
--- a/CMake/cdat_modules/ipython_deps.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-if (CDAT_BUILD_ALL)
-  set(IPYTHON_deps ${pip_pkg} ${tornado_pkg} ${numpy_pkg} ${numexpr_pkg})
-else ()
-  set(IPYTHON_deps ${pip_pkg} ${numpy_pkg})
-endif()
diff --git a/CMake/cdat_modules/ipython_external.cmake b/CMake/cdat_modules/ipython_external.cmake
deleted file mode 100644
index eab083a8e..000000000
--- a/CMake/cdat_modules/ipython_external.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm IPYTHON)
-set(OLD OFF)
-include(pipinstaller)
-unset(OLD)
diff --git a/CMake/cdat_modules/ipython_pkg.cmake b/CMake/cdat_modules/ipython_pkg.cmake
deleted file mode 100644
index ce9193f5c..000000000
--- a/CMake/cdat_modules/ipython_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(IPYTHON_MAJOR 3)
-set(IPYTHON_MINOR 0)
-set(IPYTHON_PATCH 0)
-set(IPYTHON_VERSION ${IPYTHON_MAJOR}.${IPYTHON_MINOR}.${IPYTHON_PATCH})
-set(IPYTHON_URL ${LLNL_URL})
-set(IPYTHON_GZ ipython-${IPYTHON_VERSION}.tar.gz)
-set(IPYTHON_MD5 b3f00f3c0be036fafef3b0b9d663f27e)
-set(IPYTHON_SOURCE ${IPYTHON_URL}/${IPYTHON_GZ})
-
-add_cdat_package(IPYTHON "" "" ON)
diff --git a/CMake/cdat_modules/jasper_deps.cmake b/CMake/cdat_modules/jasper_deps.cmake
deleted file mode 100644
index 4e5186952..000000000
--- a/CMake/cdat_modules/jasper_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(jasper_deps ${pkgconfig_pkg} ${jpeg_pkg} ${zlib_pkg})
diff --git a/CMake/cdat_modules/jasper_external.cmake b/CMake/cdat_modules/jasper_external.cmake
deleted file mode 100644
index 81c9f5f8d..000000000
--- a/CMake/cdat_modules/jasper_external.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-
-set(jasper_source "${CMAKE_CURRENT_BINARY_DIR}/build/jasper")
-set(jasper_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/jasper_configure_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/jasper_configure_step.cmake
-    @ONLY)
-
-ExternalProject_Add(jasper
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${jasper_source}
-  INSTALL_DIR ${jasper_install}
-  URL ${JASPER_URL}/${JASPER_GZ}
-  URL_MD5 ${JASPER_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/jasper_configure_step.cmake
-  DEPENDS ${jasper_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/jasper_pkg.cmake b/CMake/cdat_modules/jasper_pkg.cmake
deleted file mode 100644
index a4f898723..000000000
--- a/CMake/cdat_modules/jasper_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(JASPER_MAJOR 1)
-set(JASPER_MINOR 900)
-set(JASPER_PATCH 1)
-set(JASPER_VERSION ${JASPER_MAJOR}.${JASPER_MINOR}.${JASPER_PATCH})
-set(JASPER_URL ${LLNL_URL})
-set(JASPER_GZ jasper-${JASPER_VERSION}.tgz)
-set(JASPER_MD5 b5ae85050d034555790a3ccbc2522860)
-set(JASPER_SOURCE ${JASPER_URL}/${JASPER_GZ})
-
-add_cdat_package(jasper "" "" ON)
diff --git a/CMake/cdat_modules/jinja2_deps.cmake b/CMake/cdat_modules/jinja2_deps.cmake
deleted file mode 100644
index a8047b98a..000000000
--- a/CMake/cdat_modules/jinja2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(jinja2_deps ${pip_pkg} ${markupsafe_pkg})
diff --git a/CMake/cdat_modules/jinja2_external.cmake b/CMake/cdat_modules/jinja2_external.cmake
deleted file mode 100644
index a50b6c79f..000000000
--- a/CMake/cdat_modules/jinja2_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm jinja2)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/jinja2_pkg.cmake b/CMake/cdat_modules/jinja2_pkg.cmake
deleted file mode 100644
index ffabe31e3..000000000
--- a/CMake/cdat_modules/jinja2_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(jinja2_MAJOR_SRC 2)
-set(jinja2_MINOR_SRC 7)
-set(jinja2_PATCH_SRC )
-set(JINJA2_VERSION ${jinja2_MAJOR_SRC}.${jinja2_MINOR_SRC})
-set(JINJA2_GZ Jinja2-${JINJA2_VERSION}.tar.gz)
-set(JINJA2_SOURCE ${LLNL_URL}/${JINJA2_GZ})
-set(JINJA2_MD5 c2fb12cbbb523c57d3d15bfe4dc0e8fe )
-
-add_cdat_package_dependent(jinja2 "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/jpeg_deps.cmake b/CMake/cdat_modules/jpeg_deps.cmake
deleted file mode 100644
index e7e6b16ba..000000000
--- a/CMake/cdat_modules/jpeg_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(jpeg_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/jpeg_external.cmake b/CMake/cdat_modules/jpeg_external.cmake
deleted file mode 100644
index e5a6f62bf..000000000
--- a/CMake/cdat_modules/jpeg_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-
-set(jpeg_source "${CMAKE_CURRENT_BINARY_DIR}/build/jpeg")
-set(jpeg_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/jpeg_install_step.cmake.in
-    ${cdat_CMAKE_BINARY_DIR}/jpeg_install_step.cmake
-    @ONLY)
-
-set(jpeg_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/jpeg_install_step.cmake)
-
-ExternalProject_Add(jpeg
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${jpeg_source}
-  INSTALL_DIR ${jpeg_install}
-  URL ${JPEG_URL}/${JPEG_GZ}
-  URL_MD5 ${JPEG_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  INSTALL_COMMAND ${jpeg_INSTALL_COMMAND}
-  DEPENDS ${jpeg_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/jpeg_pkg.cmake b/CMake/cdat_modules/jpeg_pkg.cmake
deleted file mode 100644
index c30e433f7..000000000
--- a/CMake/cdat_modules/jpeg_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(JPEG_URL ${LLNL_URL})
-set(JPEG_GZ jpegsrc.v8c.tar.gz)
-set(JPEG_MD5 a2c10c04f396a9ce72894beb18b4e1f9)
-
-set(JPEG_VERSION v8c)
-set(JPEG_SOURCE ${JPEG_URL}/${JPEG_GZ})
-
-#grib2/jasper need this therefore cdms2 can't turn off
-#if (CDAT_BUILD_GRAPHICS)
-add_cdat_package(jpeg "" "" OFF)
-#endif()
-
diff --git a/CMake/cdat_modules/lapack_deps.cmake b/CMake/cdat_modules/lapack_deps.cmake
deleted file mode 100644
index cc8174699..000000000
--- a/CMake/cdat_modules/lapack_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(LAPACK_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/lapack_external.cmake b/CMake/cdat_modules/lapack_external.cmake
deleted file mode 100644
index a8b3f6139..000000000
--- a/CMake/cdat_modules/lapack_external.cmake
+++ /dev/null
@@ -1,26 +0,0 @@
-# The LAPACK external project
-
-set(lapack_source "${CMAKE_CURRENT_BINARY_DIR}/build/LAPACK")
-set(lapack_binary "${CMAKE_CURRENT_BINARY_DIR}/build/LAPACK-build")
-set(lapack_install "${cdat_EXTERNALS}")
-set(NUMPY_LAPACK_binary ${lapack_binary})
-
-ExternalProject_Add(LAPACK
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${lapack_source}
-  BINARY_DIR ${lapack_binary}
-  INSTALL_DIR ${lapack_install}
-  URL ${LAPACK_URL}/${LAPACK_GZ}
-  URL_MD5 ${LAPACK_MD5}
-  CMAKE_ARGS
-    -DCMAKE_Fortran_COMPILER:FILEPATH=${CMAKE_Fortran_COMPILER}
-    -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags}
-    -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags}
-    -DBUILD_SHARED_LIBS:BOOL=ON
-    -DENABLE_TESTING:BOOL=OFF
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE}
-  CMAKE_ARGS
-    -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-  DEPENDS ${LAPACK_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/lapack_pkg.cmake b/CMake/cdat_modules/lapack_pkg.cmake
deleted file mode 100644
index 3f5b9b81e..000000000
--- a/CMake/cdat_modules/lapack_pkg.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-set(LAPACK_MAJOR_SRC 3)
-set(LAPACK_MINOR_SRC 4)
-set(LAPACK_PATCH_SRC 2)
-
-set(LAPACK_URL ${LLNL_URL})
-set(LAPACK_GZ lapack-${LAPACK_MAJOR_SRC}.${LAPACK_MINOR_SRC}.${LAPACK_PATCH_SRC}.tgz)
-set(LAPACK_MD5 61bf1a8a4469d4bdb7604f5897179478 )
-
-set (nm LAPACK)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-
-#Removing apple exclusion for now
-set(LAPACK_SOURCE ${LAPACK_URL}/${LAPACK_GZ})
-
-if(NOT APPLE)
-  if(CMAKE_Fortran_COMPILER)
-    add_cdat_package(LAPACK "" "" OFF)
-  endif()
-endif()
diff --git a/CMake/cdat_modules/lats_deps.cmake b/CMake/cdat_modules/lats_deps.cmake
deleted file mode 100644
index 4f7aee7aa..000000000
--- a/CMake/cdat_modules/lats_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(lats_deps ${netcdf_pkg})
diff --git a/CMake/cdat_modules/lats_external.cmake b/CMake/cdat_modules/lats_external.cmake
deleted file mode 100644
index 519fc3a2e..000000000
--- a/CMake/cdat_modules/lats_external.cmake
+++ /dev/null
@@ -1,44 +0,0 @@
-
-set(lats_source "${CMAKE_CURRENT_BINARY_DIR}/build/lats")
-set(lats_install "${cdat_EXTERNALS}")
-
-configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lats_Makefile.gfortran.in
-    ${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile
-    )
-
-if(DEFINED GIT_CMD_STR_LATS )
-    message("[INFO] [lats] Installing ${nm} from ${GIT_CMD_STR_LATS}")
-    include(GetGitRevisionDescription)
-    set(URL_STR )
-    set(URL_MD5_STR )
-else()
-    message("[INFO] [lats] Installed ${nm} from tarball ${LATS_GZ}")
-    set(URL_STR URL ${LATS_URL}/${LATS_GZ})
-    set(URL_MD5_STR URL_MD5 ${LATS_MD5})
-    set(GIT_CMD_STR_LATS )
-    set(GIT_TAG )
-endif()
-set(LATS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile)
-set(LATS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile^^install)
-set(LATS_BUILD_ARGS -fPIC)
-
-ExternalProject_Add(lats
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${lats_source}
-  INSTALL_DIR ${lats_install}
-  ${URL_STR}
-  ${URL_MD5_STR}
-  ${GIT_CMD_STR_LATS}
-  ${GIT_TAG}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LATS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LATS_MAKE_ARGS} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LATS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LATS_MAKE_INSTALL_ARGS} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${lats_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_LATS)
-  unset(GIT_CMD_STR_LATS)
-endif()
diff --git a/CMake/cdat_modules/lats_pkg.cmake b/CMake/cdat_modules/lats_pkg.cmake
deleted file mode 100644
index 545c0fe09..000000000
--- a/CMake/cdat_modules/lats_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(LATS_VERSION 1.0.0)
-set(LATS_BRANCH master)
-set(LATS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/lats.git )
-
-set(GIT_CMD_STR_LATS GIT_REPOSITORY ${LATS_REPOSITORY})
-set(GIT_TAG GIT_TAG "${LATS_BRANCH}")
-
-if (CDAT_BUILD_PCMDI)
-  add_cdat_package(lats "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/lepl_deps.cmake b/CMake/cdat_modules/lepl_deps.cmake
deleted file mode 100644
index 0643a8540..000000000
--- a/CMake/cdat_modules/lepl_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(lepl_deps ${cdat_pkg} ${numexpr_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/lepl_external.cmake b/CMake/cdat_modules/lepl_external.cmake
deleted file mode 100644
index 80b680e29..000000000
--- a/CMake/cdat_modules/lepl_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm lepl)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/lepl_pkg.cmake b/CMake/cdat_modules/lepl_pkg.cmake
deleted file mode 100644
index 9551ef522..000000000
--- a/CMake/cdat_modules/lepl_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(lepl_MAJOR_SRC 5)
-set(lepl_MINOR_SRC 1)
-set(lepl_PATCH_SRC 3)
-
-set (nm lepl)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(LEPL_GZ LEPL-${LEPL_VERSION}.tar.gz)
-set(LEPL_SOURCE ${LLNL_URL}/${LEPL_GZ})
-set(LEPL_MD5 5f653984c57ad8efad828c5153660743 )
-
-add_cdat_package_dependent(lepl "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/libcdms_deps.cmake b/CMake/cdat_modules/libcdms_deps.cmake
deleted file mode 100644
index 904227d3b..000000000
--- a/CMake/cdat_modules/libcdms_deps.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-set(libcdms_deps ${netcdf_pkg}
-              ${jasper_pkg} ${g2clib_pkg} ${tiff_pkg}
-              ${png_pkg} ${jpeg_pkg} )
-if (CDAT_BUILD_LIBDRS)
-    message("[INFO] ADDING LIBDRS TO LIBCDMS DEPNDENCIES")
-    LIST(APPEND libcdms_deps ${libdrs_pkg})
-endif()
diff --git a/CMake/cdat_modules/libcdms_external.cmake b/CMake/cdat_modules/libcdms_external.cmake
deleted file mode 100644
index 2ed64475c..000000000
--- a/CMake/cdat_modules/libcdms_external.cmake
+++ /dev/null
@@ -1,56 +0,0 @@
-set(libcdms_source "${CMAKE_CURRENT_BINARY_DIR}/build/libcdms")
-set(libcdms_install "${cdat_EXTERNALS}")
-
-if(APPLE)
-    set(WITHPNGLIB "/usr/X11R6/lib")
-else()
-    set(WITHPNGLIB "no")
-endif()
-
-if (CDAT_BUILD_LIBDRS)
-    message("[INFO] ENBLING DRS IN CDMS")
-    set(drs_opt --enable-drs^^--with-drslib=${cdat_EXTERNALS}/lib^^--with-drsinc=${cdat_EXTERNALS}/include^^--with-drsincf=${cdat_EXTERNALS}/include)
-else()
-    set(drs_opt "")
-endif()
-
-set(CONFIGURE_ARGS --srcdir=${libcdms_source}^^--enable-dap^^${drs_opt}^^--enable-hdf=no^^--enable-pp=yes^^--enable-ql=no^^--cache-file=/dev/null^^--prefix=${libcdms_install}^^--with-nclib=${cdat_EXTERNALS}/lib^^--with-ncinc=${cdat_EXTERNALS}/include^^--with-daplib=/lib^^--with-dapinc=/include^^--with-hdfinc=./include^^--with-hdflib=./lib^^--with-hdf5lib=${cdat_EXTERNALS}/lib^^--with-pnglib=${WITHPNGLIB}^^--with-grib2lib=${cdat_EXTERNALS}/lib^^--with-jasperlib=${cdat_EXTERNALS}/lib^^--with-grib2inc=${cdat_EXTERNALS}/include^^--enable-grib2)
-file(MAKE_DIRECTORY ${cdat_EXTERNALS}/man/man3)
-    
-
-if(DEFINED GIT_CMD_STR_LIBCDMS )
-    message("[INFO] [libcdms] Installing ${nm} from ${GIT_CMD_STR_LIBCDMS}")
-    include(GetGitRevisionDescription)
-    set(URL_STR )
-    set(URL_MD5_STR )
-else()
-    message("[INFO] [libcdms] Installed ${nm} from tarball ${LIBCDMS_GZ}")
-    set(URL_STR URL ${LIBCDMS_URL}/${LIBCDMS_GZ})
-    set(URL_MD5_STR URL_MD5 ${LIBCDMS_MD5})
-    set(GIT_CMD_STR_LIBCDMS )
-    set(GIT_TAG_LIBCDMS )
-endif()
-set(LIBCDMS_MAKE_ARGS -j1)
-set(LIBCDMS_BUILD_ARGS -fPIC)
-ExternalProject_Add(libcdms
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libcdms_source}
-  INSTALL_DIR ${libcdms_install}
-  ${URL_STR}
-  ${URL_MD5_STR}
-  ${GIT_CMD_STR_LIBCDMS}
-  ${GIT_TAG_LIBCDMS}
-  PATCH_COMMAND ${CMAKE_COMMAND} -E remove <SOURCE_DIR>/zconf.h
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBCDMS_BUILD_ARGS}  -DCONFIGURE_ARGS=${CONFIGURE_ARGS} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBCDMS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBCDMS_MAKE_ARGS} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${libcdms_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR)
-  unset(GIT_CMD_STR)
-endif()
-if (DEFINED GIT_CMD_STR_LIBCDMS)
-  unset(GIT_CMD_STR_LIBCDMS)
-endif()
diff --git a/CMake/cdat_modules/libcdms_pkg.cmake b/CMake/cdat_modules/libcdms_pkg.cmake
deleted file mode 100644
index eb7a722eb..000000000
--- a/CMake/cdat_modules/libcdms_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(LIBCDMS_VERSION 1.0.0)
-set(LIBCDMS_URL ${LLNL_URL})
-set(LIBCDMS_GZ libcdms-${LIBCDMS_VERSION}.tar.gz)
-set(LIBCDMS_SOURCE ${LIBCDMS_URL}/${LIBCDMS_GZ})
-set(LIBCDMS_MD5 ce71f54616f755d67fbbb6c81ca4fd62)
-set(LIBCDMS_BRANCH master)
-set(LIBCDMS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libcdms.git )
-
-set(GIT_CMD_STR_LIBCDMS GIT_REPOSITORY ${LIBCDMS_REPOSITORY})
-set(GIT_TAG_LIBCDMS GIT_TAG "${LIBCDMS_BRANCH}")
-
-add_cdat_package(libcdms "" "" OFF)
diff --git a/CMake/cdat_modules/libcf_deps.cmake b/CMake/cdat_modules/libcf_deps.cmake
deleted file mode 100644
index 5673f4b88..000000000
--- a/CMake/cdat_modules/libcf_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(libcf_deps ${pkgconfig_pkg} ${python_pkg} ${netcdf_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${uuid_pkg} ${clapack_pkg} ${lapack_pkg} )
diff --git a/CMake/cdat_modules/libcf_external.cmake b/CMake/cdat_modules/libcf_external.cmake
deleted file mode 100644
index 9eeca6839..000000000
--- a/CMake/cdat_modules/libcf_external.cmake
+++ /dev/null
@@ -1,29 +0,0 @@
-
-set(libcf_source "${CMAKE_CURRENT_BINARY_DIR}/build/libcf")
-set(libcf_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/libcf_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/libcf_make_step.cmake
-  @ONLY)
-  
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/libcf_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/libcf_install_step.cmake
-  @ONLY)
-
-set(libcf_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/libcf_make_step.cmake)
-set(libcf_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/libcf_install_step.cmake)
-
-ExternalProject_Add(libcf
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libcf_source}
-  INSTALL_DIR ${libcf_install}
-  URL ${LIBCF_URL}/${LIBCF_GZ}
-  URL_MD5 ${LIBCF_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  PATCH_COMMAND ""
-  BUILD_COMMAND ${libcf_build_command}
-  INSTALL_COMMAND ${libcf_install_command}
-  DEPENDS ${libcf_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/libcf_pkg.cmake b/CMake/cdat_modules/libcf_pkg.cmake
deleted file mode 100644
index eed8c3497..000000000
--- a/CMake/cdat_modules/libcf_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(LIBCF_MAJOR 1)
-set(LIBCF_MINOR 0)
-set(LIBCF_PATCH beta11)
-set(LIBCF_VERSION ${LIBCF_MAJOR}.${LIBCF_MINOR}-${LIBCF_PATCH})
-set(LIBCF_URL ${LLNL_URL})
-set(LIBCF_GZ libcf-${LIBCF_VERSION}.tar.gz)
-set(LIBCF_MD5 aba4896eab79d36c7283fc7b75fb16ee)
-set(LIBCF_SOURCE ${LIBCF_URL}/${LIBCF_GZ})
-
-add_cdat_package_dependent(libcf "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/libdrs_deps.cmake b/CMake/cdat_modules/libdrs_deps.cmake
deleted file mode 100644
index 1958c1f35..000000000
--- a/CMake/cdat_modules/libdrs_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(libdrs_deps ${netcdf_pkg} ${g2clib_pkg})
diff --git a/CMake/cdat_modules/libdrs_external.cmake b/CMake/cdat_modules/libdrs_external.cmake
deleted file mode 100644
index f1b289678..000000000
--- a/CMake/cdat_modules/libdrs_external.cmake
+++ /dev/null
@@ -1,49 +0,0 @@
-set(libdrs_source "${CMAKE_CURRENT_BINARY_DIR}/build/libdrs")
-set(libdrs_install "${cdat_EXTERNALS}")
-
-if (APPLE)
-    set(libdrs_make_file libdrs_Makefile.Mac.gfortran.in)
-else ()
-    set(libdrs_make_file libdrs_Makefile.Linux.gfortran.in)
-endif ()
-
-configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/${libdrs_make_file}
-    ${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile
-    )
-
-if(DEFINED GIT_CMD_STR_LIBDRS )
-    message("[INFO] [libdrs] Installing ${nm} from ${GIT_CMD_STR_LIBDRS}")
-    include(GetGitRevisionDescription)
-    set(URL_STR )
-    set(URL_MD5_STR )
-else()
-    message("[INFO] [libdrs] Installed ${nm} from tarball ${LIBDRS_GZ}")
-    set(URL_STR URL ${LIBDRS_URL}/${LIBDRS_GZ})
-    set(URL_MD5_STR URL_MD5 ${LIBDRS_MD5})
-    set(GIT_CMD_STR_LIBDRS )
-    set(GIT_TAG )
-endif()
-set(LIBDRS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile)
-set(LIBDRS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile^^install)
-set(LIBDRS_BUILD_ARGS -fPIC)
-
-ExternalProject_Add(libdrs
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libdrs_source}
-  INSTALL_DIR ${libdrs_install}
-  ${URL_STR}
-  ${URL_MD5_STR}
-  ${GIT_CMD_STR_LIBDRS}
-  ${GIT_TAG}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_ARGS} -DWORKING_DIR=<SOURCE_DIR>/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_INSTALL_ARGS} -DWORKING_DIR=<SOURCE_DIR>/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${libdrs_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_LIBDRS)
-  unset(GIT_CMD_STR_LIBDRS)
-endif()
diff --git a/CMake/cdat_modules/libdrs_pkg.cmake b/CMake/cdat_modules/libdrs_pkg.cmake
deleted file mode 100644
index 6258a08d7..000000000
--- a/CMake/cdat_modules/libdrs_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(LIBDRS_VERSION 1.0.0)
-set(LIBDRS_URL ${LLNL_URL})
-set(LIBDRS_BRANCH master)
-set(LIBDRS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libdrs.git )
-
-set(GIT_CMD_STR_LIBDRS GIT_REPOSITORY ${LIBDRS_REPOSITORY})
-set(GIT_TAG GIT_TAG "${LIBDRS_BRANCH}")
-if (CDAT_BUILD_PCMDI)
-  set(CDAT_BUILD_LIBDRS ON)
-  add_cdat_package(libdrs "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/libdrsfortran_deps.cmake b/CMake/cdat_modules/libdrsfortran_deps.cmake
deleted file mode 100644
index c5db76f4b..000000000
--- a/CMake/cdat_modules/libdrsfortran_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(libdrsfortran_deps ${netcdf_pkg} ${g2clib_pkg} ${libcdms_pkg} ${libdrs_pkg})
diff --git a/CMake/cdat_modules/libdrsfortran_external.cmake b/CMake/cdat_modules/libdrsfortran_external.cmake
deleted file mode 100644
index ba6d738a4..000000000
--- a/CMake/cdat_modules/libdrsfortran_external.cmake
+++ /dev/null
@@ -1,46 +0,0 @@
-set(libdrs_source "${CMAKE_CURRENT_BINARY_DIR}/build/libdrs")
-set(libdrs_install "${cdat_EXTERNALS}")
-
-set(libdrsfortran_make_file libdrs_Makefile.Mac.fwrap.gfortran.in)
-
-configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/${libdrsfortran_make_file}
-    ${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile
-    )
-
-if(DEFINED GIT_CMD_STR_LIBDRSFORTRAN )
-    message("[INFO] [libdrs] Installing ${nm} from ${GIT_CMD_STR_LIBDRSFORTRAN}")
-    include(GetGitRevisionDescription)
-    set(URL_STR )
-    set(URL_MD5_STR )
-else()
-    message("[INFO] [libdrs] Installed ${nm} from tarball ${LIBDRSFORTRAN_GZ}")
-    set(URL_STR URL ${LIBDRSFORTRAN_URL}/${LIBDRSFORTRAN_GZ})
-    set(URL_MD5_STR URL_MD5 ${LIBDRSFORTRAN_MD5})
-    set(GIT_CMD_STR_LIBDRS )
-    set(GIT_TAG )
-endif()
-
-set(LIBDRS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile)
-set(LIBDRS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile^^install)
-set(LIBDRS_BUILD_ARGS -fPIC)
-
-ExternalProject_Add(libdrsfortran
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libdrs_source}
-  INSTALL_DIR ${libdrs_install}
-  ${URL_STR}
-  ${URL_MD5_STR}
-  ${GIT_CMD_STR_LIBDRSFORTRAN}
-  ${GIT_TAG}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_ARGS} -DWORKING_DIR=<SOURCE_DIR>/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_INSTALL_ARGS} -DWORKING_DIR=<SOURCE_DIR>/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  DEPENDS ${libdrsfortran_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_LIBDRS)
-  unset(GIT_CMD_STR_LIBDRS)
-endif()
diff --git a/CMake/cdat_modules/libdrsfortran_pkg.cmake b/CMake/cdat_modules/libdrsfortran_pkg.cmake
deleted file mode 100644
index 23e8e34a4..000000000
--- a/CMake/cdat_modules/libdrsfortran_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(LIBDRSFORTRAN_VERSION 1.0.0)
-set(LIBDRSFORTRAN_URL ${LLNL_URL})
-set(LIBDRSFORTRAN_BRANCH master)
-set(LIBDRSFORTRAN_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libdrs.git )
-
-set(GIT_CMD_STR_LIBDRSFORTRAN GIT_REPOSITORY ${LIBDRSFORTRAN_REPOSITORY})
-set(GIT_TAG GIT_TAG "${LIBDRSFORTRAN_BRANCH}")
-if (CDAT_BUILD_PCMDI)
-    if (APPLE)
-        set(CDAT_BUILD_LIBDRSFORTRAN ON)
-        add_cdat_package(libdrsfortran "" "" ON)
-    endif()
-endif()
diff --git a/CMake/cdat_modules/libxml2_deps.cmake b/CMake/cdat_modules/libxml2_deps.cmake
deleted file mode 100644
index cd79834e8..000000000
--- a/CMake/cdat_modules/libxml2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(libXML2_deps ${pkgconfig_pkg} ${readline_pkg})
diff --git a/CMake/cdat_modules/libxml2_external.cmake b/CMake/cdat_modules/libxml2_external.cmake
deleted file mode 100644
index 59216b6b5..000000000
--- a/CMake/cdat_modules/libxml2_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-
-set(libXML2_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXML2")
-set(libXML2_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(libXML2
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libXML2_source}
-  INSTALL_DIR ${libXML2_install}
-  URL ${XML_URL}/${XML_GZ}
-  URL_MD5 ${XML_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${libXML2_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/libxml2_pkg.cmake b/CMake/cdat_modules/libxml2_pkg.cmake
deleted file mode 100644
index fd2f57ad2..000000000
--- a/CMake/cdat_modules/libxml2_pkg.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-set(XML_MAJOR 2)
-set(XML_MINOR 7)
-set(XML_PATCH 8)
-set(XML_MAJOR_SRC 2)
-set(XML_MINOR_SRC 7)
-set(XML_PATCH_SRC 8)
-set(XML_URL ${LLNL_URL})
-set(XML_GZ libxml2-${XML_MAJOR_SRC}.${XML_MINOR_SRC}.${XML_PATCH_SRC}.tar.gz)
-set(XML_MD5 8127a65e8c3b08856093099b52599c86)
-
-set (nm XML)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(LIBXML2_VERSION ${XML_VERSION})
-set(LIBXML2_SOURCE ${XML_URL}/${XML_GZ} )
-set(LIBXML2_MD5 ${XML_MD5})
-
-add_cdat_package(libXML2 "" "Bulid libxml2" OFF)
-
diff --git a/CMake/cdat_modules/libxslt_deps.cmake b/CMake/cdat_modules/libxslt_deps.cmake
deleted file mode 100644
index 31ab3ff7f..000000000
--- a/CMake/cdat_modules/libxslt_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(libXSLT_deps ${pkgconfig_pkg} ${readline_pkg} ${libxml2_pkg})
diff --git a/CMake/cdat_modules/libxslt_external.cmake b/CMake/cdat_modules/libxslt_external.cmake
deleted file mode 100644
index 2064cf209..000000000
--- a/CMake/cdat_modules/libxslt_external.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-
-set(libXSLT_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXSLT")
-set(libXSLT_install "${cdat_EXTERNALS}")
-
-if(NOT LIBXML2_FOUND)
-  set(libXSLT_configure_args --with-libxml-prefix=${libXSLT_install})
-endif()
-
-ExternalProject_Add(libXSLT
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libXSLT_source}
-  INSTALL_DIR ${libXSLT_install}
-  URL ${XSLT_URL}/${XSLT_GZ}
-  URL_MD5 ${XSLT_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${libXSLT_configure_args} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${libXSLT_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/libxslt_pkg.cmake b/CMake/cdat_modules/libxslt_pkg.cmake
deleted file mode 100644
index d763d76ba..000000000
--- a/CMake/cdat_modules/libxslt_pkg.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-set(XSLT_MAJOR 1)
-set(XSLT_MINOR 1)
-set(XSLT_PATCH 22)
-set(XSLT_MAJOR_SRC 1)
-set(XSLT_MINOR_SRC 1)
-set(XSLT_PATCH_SRC 26)
-set(XSLT_URL ${LLNL_URL})
-set(XSLT_GZ libxslt-${XSLT_MAJOR_SRC}.${XSLT_MINOR_SRC}.${XSLT_PATCH_SRC}.tar.gz)
-set(XSLT_MD5 e61d0364a30146aaa3001296f853b2b9)
-
-set (nm XSLT)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(LIBXSLT_VERSION ${XSLT_VERSION})
-set(LIBXSLT_SOURCE ${XSLT_URL}/${XSLT_GZ})
-set(LIBXSLT_MD5 ${XSLT_MD5})
-
-add_cdat_package(libXSLT "" "Build xslt" OFF)
-
diff --git a/CMake/cdat_modules/lxml_deps.cmake b/CMake/cdat_modules/lxml_deps.cmake
deleted file mode 100644
index 52670d8f9..000000000
--- a/CMake/cdat_modules/lxml_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(lxml_deps ${cython_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/lxml_external.cmake b/CMake/cdat_modules/lxml_external.cmake
deleted file mode 100644
index 3b8a91e15..000000000
--- a/CMake/cdat_modules/lxml_external.cmake
+++ /dev/null
@@ -1,26 +0,0 @@
-# create an external project to install lxml,
-# and configure and build it
-set(LXML_SOURCE_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/lxml)
-set(LXML_BINARY_DIR ${LXML_SOURCE_DIR})
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lxml_build_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/lxml_build_step.cmake @ONLY)
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lxml_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/lxml_install_step.cmake @ONLY)
-
-set(LXML_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/lxml_build_step.cmake)
-set(LXML_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/lxml_install_step.cmake)
-
-ExternalProject_Add(lxml
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${LXML_SOURCE_DIR}
-  URL ${LXML_URL}/${LXML_GZ}
-  URL_MD5 ${LXML_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${LXML_BUILD_COMMAND}
-  INSTALL_COMMAND ${LXML_INSTALL_COMMAND}
- # INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${lxml_deps}
-  ${ep_log_options}
- )
diff --git a/CMake/cdat_modules/lxml_pkg.cmake b/CMake/cdat_modules/lxml_pkg.cmake
deleted file mode 100644
index df4fb236d..000000000
--- a/CMake/cdat_modules/lxml_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(LXML_MAJOR_SRC 2)
-set(LXML_MINOR_SRC 3)
-set(LXML_PATCH_SRC 5)
-set(LXML_URL ${LLNL_URL})
-set(LXML_GZ lxml-${LXML_MAJOR_SRC}.${LXML_MINOR_SRC}.${LXML_PATCH_SRC}.tar.gz)
-set(LXML_MD5 730bb63383528b65eaa099d64ce276cf)
-
-set (nm LXML)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(LXML_SOURCE ${LXML_URL}/${LXML_GZ})
-
-add_cdat_package_dependent(lxml "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/markupsafe_deps.cmake b/CMake/cdat_modules/markupsafe_deps.cmake
deleted file mode 100644
index 2b76bd653..000000000
--- a/CMake/cdat_modules/markupsafe_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(markupsafe_deps ${pip_pkg} ${pygments_pkg})
diff --git a/CMake/cdat_modules/markupsafe_external.cmake b/CMake/cdat_modules/markupsafe_external.cmake
deleted file mode 100644
index 9ea130d01..000000000
--- a/CMake/cdat_modules/markupsafe_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm markupsafe)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/markupsafe_pkg.cmake b/CMake/cdat_modules/markupsafe_pkg.cmake
deleted file mode 100644
index b4e664655..000000000
--- a/CMake/cdat_modules/markupsafe_pkg.cmake
+++ /dev/null
@@ -1,8 +0,0 @@
-set(markupsafe_MAJOR_SRC 0)
-set(markupsafe_MINOR_SRC 18)
-set(markupsafe_PATCH_SRC )
-set(MARKUPSAFE_VERSION ${markupsafe_MAJOR_SRC}.${markupsafe_MINOR_SRC})
-set(MARKUPSAFE_GZ MarkupSafe-${MARKUPSAFE_VERSION}.tar.gz)
-set(MARKUPSAFE_SOURCE ${LLNL_URL}/${MARKUPSAFE_GZ})
-set(MARKUPSAFE_MD5 f8d252fd05371e51dec2fe9a36890687)
-add_cdat_package_dependent(markupsafe "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/matplotlib_deps.cmake b/CMake/cdat_modules/matplotlib_deps.cmake
deleted file mode 100644
index 794a6a476..000000000
--- a/CMake/cdat_modules/matplotlib_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Matplotlib_deps ${pyqt_pkg} ${freetype_pkg} ${cairo_pkg} ${numpy_pkg} ${png_pkg} ${six_pkg} ${dateutils_pkg} ${pyparsing_pkg} ${cycler_pkg})
diff --git a/CMake/cdat_modules/matplotlib_external.cmake b/CMake/cdat_modules/matplotlib_external.cmake
deleted file mode 100644
index 8cbbd53f6..000000000
--- a/CMake/cdat_modules/matplotlib_external.cmake
+++ /dev/null
@@ -1,38 +0,0 @@
-# Matplotlib
-#
-set(matplotlib_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/Matplotlib")
-
-if(CDAT_BUILD_GUI)
-  set(MATPLOTLIB_BACKEND "Qt4Agg")
-else()
-  set(MATPLOTLIB_BACKEND "Agg")
-endif()
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/matplotlib_patch_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/matplotlib_patch_step.cmake"
-  @ONLY
-)
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/matplotlib_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/matplotlib_build_step.cmake"
-  @ONLY
-)
-
-set(matplotlib_patch_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/matplotlib_patch_step.cmake)
-set(matplotlib_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/matplotlib_build_step.cmake)
-
-ExternalProject_Add(Matplotlib
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${matplotlib_source_dir}
-  URL ${MATPLOTLIB_URL}/${MATPLOTLIB_GZ}
-  URL_MD5 ${MATPLOTLIB_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  PATCH_COMMAND ${matplotlib_patch_command}
-  BUILD_COMMAND ${matplotlib_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${Matplotlib_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/matplotlib_pkg.cmake b/CMake/cdat_modules/matplotlib_pkg.cmake
deleted file mode 100644
index 365a67c93..000000000
--- a/CMake/cdat_modules/matplotlib_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(MATPLOTLIB_MAJOR_MIN 1)
-set(MATPLOTLIB_MINOR_MIN 1)
-set(MATPLOTLIB_PATCH_MIN 0)
-set(MATPLOTLIB_MAJOR 1)
-set(MATPLOTLIB_MINOR 5)
-set(MATPLOTLIB_PATCH 1)
-set(MATPLOTLIB_VERSION ${MATPLOTLIB_MAJOR}.${MATPLOTLIB_MINOR}.${MATPLOTLIB_PATCH})
-set(MATPLOTLIB_URL ${LLNL_URL})
-set(MATPLOTLIB_GZ matplotlib-${MATPLOTLIB_VERSION}.tar.gz)
-set(MATPLOTLIB_MD5 b22dc4962f36aab919a7125b3b35953b)
-
-set(nm MATPLOTLIB)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(MATPLOTLIB_SOURCE ${MATPLOTLIB_URL}/${MATPLOTLIB_GZ})
-
-add_cdat_package_dependent(Matplotlib "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/mccabe_deps.cmake b/CMake/cdat_modules/mccabe_deps.cmake
deleted file mode 100644
index 1d322a353..000000000
--- a/CMake/cdat_modules/mccabe_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(mccabe_deps ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/mccabe_external.cmake b/CMake/cdat_modules/mccabe_external.cmake
deleted file mode 100644
index 79e6561e5..000000000
--- a/CMake/cdat_modules/mccabe_external.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-ExternalProject_Add(mccabe
-  DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}"
-  SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/mccabe"
-  URL "${MCCABE_SOURCE}"
-  URL_MD5 ${MCCABE_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build
-  INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${mccabe_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/mccabe_pkg.cmake b/CMake/cdat_modules/mccabe_pkg.cmake
deleted file mode 100644
index e2e3795a4..000000000
--- a/CMake/cdat_modules/mccabe_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(nm mccabe)
-string(TOUPPER ${nm} uc_nm)
-
-set(${uc_nm}_MAJOR 0)
-set(${uc_nm}_MINOR 3)
-set(${uc_nm}_PATCH 1)
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_URL ${LLNL_URL})
-set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz)
-set(${uc_nm}_MD5 9a1570c470ff5db678cc0c03d5c0c237 )
-
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ})
-
-if(BUILD_TESTING)
-  add_cdat_package(${nm} "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/md5_deps.cmake b/CMake/cdat_modules/md5_deps.cmake
deleted file mode 100644
index 3ba1ef597..000000000
--- a/CMake/cdat_modules/md5_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(MD5_deps)
diff --git a/CMake/cdat_modules/md5_external.cmake b/CMake/cdat_modules/md5_external.cmake
deleted file mode 100644
index e69de29bb..000000000
diff --git a/CMake/cdat_modules/md5_pkg.cmake b/CMake/cdat_modules/md5_pkg.cmake
deleted file mode 100644
index a4ac90144..000000000
--- a/CMake/cdat_modules/md5_pkg.cmake
+++ /dev/null
@@ -1,3 +0,0 @@
-find_package(MD5)
-set(MD5PRG ${MD5_EXECUTABLE})
-set(MD5CNT 1)
diff --git a/CMake/cdat_modules/mpi4py_deps.cmake b/CMake/cdat_modules/mpi4py_deps.cmake
deleted file mode 100644
index cbba65f4c..000000000
--- a/CMake/cdat_modules/mpi4py_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Mpi4py_deps ${numpy_pkg} ${mpi_pkg})
diff --git a/CMake/cdat_modules/mpi4py_external.cmake b/CMake/cdat_modules/mpi4py_external.cmake
deleted file mode 100644
index 4c1484d29..000000000
--- a/CMake/cdat_modules/mpi4py_external.cmake
+++ /dev/null
@@ -1,50 +0,0 @@
-# The Mpi4py project 
-
-set(mpi4py_binary "${CMAKE_CURRENT_BINARY_DIR}/build/Mpi4py")
-
-# python can run after it is built on linux
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/mpi4py_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/mpi4py_make_step.cmake @ONLY)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/mpi4py_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/mpi4py_install_step.cmake @ONLY)
-
-set(mpi4py_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/mpi4py_make_step.cmake)
-set(mpi4py_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/mpi4py_install_step.cmake)
-
-set(Mpi4py_source "${CMAKE_CURRENT_BINARY_DIR}/build/Mpi4py")
-
-# create an external project to download numpy,
-# and configure and build it
-ExternalProject_Add(Mpi4py
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${Mpi4py_source}
-  URL ${MPI4PY_URL}/${MPI4PY_GZ}
-  URL_MD5 ${MPI4PY_MD5}
-  BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}/mpi4py
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${mpi4py_BUILD_COMMAND}
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${mpi4py_INSTALL_COMMAND}
-  DEPENDS 
-    ${Mpi4py_deps}
-  ${ep_log_options}
-  )
-
-# Mpi4py
-#
-
-#ExternalProject_Add(Mpi4py
-#  DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}
-#  SOURCE_DIR ${Mpi4py_source}
-#  URL ${MPI4PY_URL}/${MPI4PY_GZ}
-#  URL_MD5 ${MPI4PY_MD5}
-#  BUILD_IN_SOURCE 1
-#  CONFIGURE_COMMAND ""
-#  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-#  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-#  DEPENDS ${Mpi4py_deps}
-#  ${ep_log_options}
-#  )
diff --git a/CMake/cdat_modules/mpi4py_pkg.cmake b/CMake/cdat_modules/mpi4py_pkg.cmake
deleted file mode 100644
index e87d6be26..000000000
--- a/CMake/cdat_modules/mpi4py_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(MPI4PY_MAJOR 1)
-set(MPI4PY_MINOR 3)
-set(MPI4PY_VERSION ${MPI4PY_MAJOR}.${MPI4PY_MINOR})
-set(MPI4PY_URL http://uv-cdat.llnl.gov/cdat/resources)
-set(MPI4PY_GZ mpi4py-${MPI4PY_VERSION}.tar.gz)
-set(MPI4PY_MD5 978472a1a71f3142c866c9463dec7103)
-set(MPI4PY_SOURCE ${MPI4PY_URL}/${MPI4PY_GZ})
-
-add_cdat_package(Mpi4py "" "Bulid Mpi4py" OFF)
-if (CDAT_BUILD_PARALLEL)
-    set_property(CACHE CDAT_BUILD_MPI4PY PROPERTY VALUE ON)
-endif()
-
diff --git a/CMake/cdat_modules/mpi_deps.cmake b/CMake/cdat_modules/mpi_deps.cmake
deleted file mode 100644
index e134e5d1f..000000000
--- a/CMake/cdat_modules/mpi_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(MPI_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/mpi_external.cmake b/CMake/cdat_modules/mpi_external.cmake
deleted file mode 100644
index 8fbe6a66f..000000000
--- a/CMake/cdat_modules/mpi_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(MPI_source "${CMAKE_CURRENT_BINARY_DIR}/build/MPI")
-set(MPI_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(MPI
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${MPI_source}
-  INSTALL_DIR ${MPI_install}
-  URL ${MPI_URL}/${MPI_GZ}
-  URL_MD5 ${MPI_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND <SOURCE_DIR>/configure --prefix=<INSTALL_DIR> --disable-vt
-  DEPENDS ${MPI_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/mpi_pkg.cmake b/CMake/cdat_modules/mpi_pkg.cmake
deleted file mode 100644
index c3397cd0c..000000000
--- a/CMake/cdat_modules/mpi_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(MPI_MAJOR 1)
-set(MPI_MINOR 6)
-set(MPI_PATCH 4)
-set(MPI_URL ${LLNL_URL})
-set(MPI_GZ openmpi-${MPI_MAJOR}.${MPI_MINOR}.${MPI_PATCH}.tar.gz)
-set(MPI_MD5 70aa9b6271d904c6b337ca326e6613d1)
-set(MPI_SOURCE ${MPI_URL}/${MPI_GZ})
-set(MPI_VERSION ${MPI_MAJOR}.${MPI_MINOR}.${MPI_PATCH})
-
-add_cdat_package(MPI "" "Bulid MPI" OFF)
-
-if (CDAT_BUILD_PARALLEL)
-    set_property(CACHE CDAT_BUILD_MPI PROPERTY VALUE ON)
-endif()
diff --git a/CMake/cdat_modules/myproxyclient_deps.cmake b/CMake/cdat_modules/myproxyclient_deps.cmake
deleted file mode 100644
index a94e7aba7..000000000
--- a/CMake/cdat_modules/myproxyclient_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(MyProxyClient_deps  ${cryptography_pkg} ${pyopenssl_pkg} ${pip_pkg} ${pyasn1_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/myproxyclient_external.cmake b/CMake/cdat_modules/myproxyclient_external.cmake
deleted file mode 100644
index eae57a9c7..000000000
--- a/CMake/cdat_modules/myproxyclient_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm MyProxyClient)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/myproxyclient_pkg.cmake b/CMake/cdat_modules/myproxyclient_pkg.cmake
deleted file mode 100644
index 036b1bd0a..000000000
--- a/CMake/cdat_modules/myproxyclient_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(MYPROXYCLIENT_MAJOR_SRC 1)
-set(MYPROXYCLIENT_MINOR_SRC 3)
-set(MYPROXYCLIENT_PATCH_SRC 0)
-
-set (nm MYPROXYCLIENT)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(MYPROXYCLIENT_GZ MyProxyClient-${MYPROXYCLIENT_VERSION}.tar.gz)
-set(MYPROXYCLIENT_SOURCE ${LLNL_URL}/${MYPROXYCLIENT_GZ})
-set(MYPROXYCLIENT_MD5 829a299157f91f8ff8a6e5bc8ec1c09c )
-
-add_cdat_package_dependent(MyProxyClient "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/netcdf_deps.cmake b/CMake/cdat_modules/netcdf_deps.cmake
deleted file mode 100644
index c8da9fa7b..000000000
--- a/CMake/cdat_modules/netcdf_deps.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(NetCDF_deps ${pkgconfig_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${jpeg_pkg} )
-if (CDAT_BUILD_PARALLEL)
-  list(APPEND NetCDF_deps ${mpi_pkg} )
-endif()
diff --git a/CMake/cdat_modules/netcdf_external.cmake b/CMake/cdat_modules/netcdf_external.cmake
deleted file mode 100644
index 3135cff49..000000000
--- a/CMake/cdat_modules/netcdf_external.cmake
+++ /dev/null
@@ -1,31 +0,0 @@
-set(netcdf_source "${CMAKE_CURRENT_BINARY_DIR}/build/netcdf")
-set(netcdf_install "${cdat_EXTERNALS}")
-set(netcdf_configure_args "--enable-netcdf-4")
-if (CDAT_BUILD_PARALLEL)
-  set(configure_file "cdatmpi_configure_step.cmake")
-else()
-  set(configure_file "cdat_configure_step.cmake")
-endif()
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/netcdf_patch_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/netcdf_patch_step.cmake
-  @ONLY)
-  
-set(netcdf_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/netcdf_patch_step.cmake)
-
-ExternalProject_Add(NetCDF
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${netcdf_source}
-  INSTALL_DIR ${netcdf_install}
-  URL ${NC4_URL}/${NC4_GZ}
-  URL_MD5 ${NC4_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${netcdf_PATCH_COMMAND}
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${netcdf_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file}
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS ${NetCDF_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/netcdf_pkg.cmake b/CMake/cdat_modules/netcdf_pkg.cmake
deleted file mode 100644
index 9ea111ad8..000000000
--- a/CMake/cdat_modules/netcdf_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(NC4_MAJOR_SRC 4)
-set(NC4_MINOR_SRC 3)
-set(NC4_PATCH_SRC 3.1)
-set(NC4_URL ${LLNL_URL})
-set(NC4_GZ netcdf-${NC4_MAJOR_SRC}.${NC4_MINOR_SRC}.${NC4_PATCH_SRC}.tar.gz)
-set(NC4_MD5 5c9dad3705a3408d27f696e5b31fb88c )
-
-set (nm NC4)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(NETCDF_VERSION ${NC4_VERSION})
-set(NETCDF_SOURCE ${NC4_URL}/${NC4_GZ})
-set(NETCDF_MD5 ${NC4_MD5})
-
-add_cdat_package(NetCDF "" "" ON)
diff --git a/CMake/cdat_modules/netcdfplus_deps.cmake b/CMake/cdat_modules/netcdfplus_deps.cmake
deleted file mode 100644
index 7efe4f6ce..000000000
--- a/CMake/cdat_modules/netcdfplus_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(NetCDFPLUS_deps ${netcdf_pkg})
diff --git a/CMake/cdat_modules/netcdfplus_external.cmake b/CMake/cdat_modules/netcdfplus_external.cmake
deleted file mode 100644
index 130b82298..000000000
--- a/CMake/cdat_modules/netcdfplus_external.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(netcdfplus_source "${CMAKE_CURRENT_BINARY_DIR}/build/netcdf-c++")
-set(netcdfplus_install "${cdat_EXTERNALS}")
-set(netcdfplus_configure_args "")
-
-ExternalProject_Add(NetCDFPLUS
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${netcdfplus_source}
-  INSTALL_DIR ${netcdfplus_install}
-  URL ${NC4PLUS_URL}/${NC4PLUS_GZ}
-  URL_MD5 ${NC4PLUS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${NetCDFPLUS_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/netcdfplus_pkg.cmake b/CMake/cdat_modules/netcdfplus_pkg.cmake
deleted file mode 100644
index cec5f82ec..000000000
--- a/CMake/cdat_modules/netcdfplus_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(NC4PLUS_MAJOR_SRC 4)
-set(NC4PLUS_MINOR_SRC 2)
-set(NC4PLUS_PATCH_SRC 1.1)
-set(NC4PLUS_URL ${LLNL_URL})
-set(NC4PLUS_GZ netcdf-cxx-${NC4PLUS_MAJOR_SRC}.${NC4PLUS_MINOR_SRC}.tar.gz)
-set(NC4PLUS_MD5 0b09655cf977d768ced6c0d327dde176)
-
-set (nm NC4PLUS)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(NETCDFPLUS_VERSION ${NC4PLUS_VERSION})
-set(NETCDFPLUS_SOURCE ${NC4PLUS_URL}/${NC4PLUS_GZ})
-set(NETCDFPLUS_MD5 ${NC4PLUS_MD5})
-
-add_cdat_package_dependent(NetCDFPLUS "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/numexpr_deps.cmake b/CMake/cdat_modules/numexpr_deps.cmake
deleted file mode 100644
index 5ba77a20d..000000000
--- a/CMake/cdat_modules/numexpr_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Numexpr_deps ${pkgconfig_pkg} ${numpy_pkg} ${myproxyclient_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/numexpr_external.cmake b/CMake/cdat_modules/numexpr_external.cmake
deleted file mode 100644
index a87913eb3..000000000
--- a/CMake/cdat_modules/numexpr_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm Numexpr)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/numexpr_pkg.cmake b/CMake/cdat_modules/numexpr_pkg.cmake
deleted file mode 100644
index f8b18ab5c..000000000
--- a/CMake/cdat_modules/numexpr_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(NUMEXPR_MAJOR 2)
-set(NUMEXPR_MINOR 2)
-set(NUMEXPR_PATCH 2)
-#set(NUMEXPR_VERSION ${NUMEXPR_MAJOR}.${NUMEXPR_MINOR})
-set(NUMEXPR_VERSION ${NUMEXPR_MAJOR}.${NUMEXPR_MINOR}.${NUMEXPR_PATCH})
-# Following not needed any longer using easy_install
-set(NUMEXPR_URL ${LLNL_URL})
-set(NUMEXPR_GZ numexpr-${NUMEXPR_VERSION}.tar.gz)
-set(NUMEXPR_MD5 18103954044b3039c0a74a6006c8e0a7)
-set(NUMEXPR_SOURCE ${NUMEXPR_URL}/${NUMEXPR_GZ})
-
-add_cdat_package(Numexpr "" "" OFF)
diff --git a/CMake/cdat_modules/numpy_deps.cmake b/CMake/cdat_modules/numpy_deps.cmake
deleted file mode 100644
index 551192596..000000000
--- a/CMake/cdat_modules/numpy_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(NUMPY_deps ${pkgconfig_pkg} ${python_pkg} ${clapack_pkg} ${lapack_pkg})
diff --git a/CMake/cdat_modules/numpy_external.cmake b/CMake/cdat_modules/numpy_external.cmake
deleted file mode 100644
index 1e4b31349..000000000
--- a/CMake/cdat_modules/numpy_external.cmake
+++ /dev/null
@@ -1,45 +0,0 @@
-# The Numpy external project 
-
-set(NUMPY_binary "${CMAKE_CURRENT_BINARY_DIR}/build/NUMPY")
-
-# to configure numpy we run a cmake -P script
-# the script will create a site.cfg file
-# then run python setup.py config to verify setup
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_configure_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/NUMPY_configure_step.cmake @ONLY
-)
-
-# to build numpy we also run a cmake -P script.
-# the script will set LD_LIBRARY_PATH so that 
-# python can run after it is built on linux
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/NUMPY_make_step.cmake @ONLY
-)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/NUMPY_install_step.cmake @ONLY
-)
-
-set(NUMPY_CONFIGURE_COMMAND ${CMAKE_COMMAND}
-    -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_configure_step.cmake)
-set(NUMPY_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_make_step.cmake)
-set(NUMPY_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_install_step.cmake)
-
-# create an external project to download numpy,
-# and configure and build it
-ExternalProject_Add(NUMPY
-  URL ${NUMPY_URL}/${NUMPY_GZ}
-  URL_MD5 ${NUMPY_MD5}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${NUMPY_binary}
-  BINARY_DIR ${NUMPY_binary}
-  CONFIGURE_COMMAND ${NUMPY_CONFIGURE_COMMAND}
-  BUILD_COMMAND ${NUMPY_BUILD_COMMAND}
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${NUMPY_INSTALL_COMMAND}
-  DEPENDS ${NUMPY_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/numpy_pkg.cmake b/CMake/cdat_modules/numpy_pkg.cmake
deleted file mode 100644
index bd67f5633..000000000
--- a/CMake/cdat_modules/numpy_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set(NUMPY_MAJOR 1)
-set(NUMPY_MINOR 9)
-set(NUMPY_PATCH 0)
-set(NUMPY_MAJOR_SRC 1)
-set(NUMPY_MINOR_SRC 9)
-set(NUMPY_PATCH_SRC 0)
-set(NUMPY_URL ${LLNL_URL})
-set(NUMPY_GZ numpy-${NUMPY_MAJOR_SRC}.${NUMPY_MINOR_SRC}.${NUMPY_PATCH_SRC}.tar.gz)
-set(NUMPY_MD5 a93dfc447f3ef749b31447084839930b)
-set(NUMPY_SOURCE ${NUMPY_URL}/${NUMPY_GZ})
-
-set (nm NUMPY)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-add_cdat_package(NUMPY "" "" ON)
-set(NUMPY ${pkgconfig_pkg} ${python_pkg} ${clapack_pkg} ${lapack_pkg})
diff --git a/CMake/cdat_modules/ocgis_deps.cmake b/CMake/cdat_modules/ocgis_deps.cmake
deleted file mode 100644
index 4968421a1..000000000
--- a/CMake/cdat_modules/ocgis_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(ocgis_deps ${shapely_pkg} ${gdal_pkg} ${fiona_pkg} ${pynetcdf4_pkg})
diff --git a/CMake/cdat_modules/ocgis_external.cmake b/CMake/cdat_modules/ocgis_external.cmake
deleted file mode 100644
index db51295ba..000000000
--- a/CMake/cdat_modules/ocgis_external.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-set(ocgis_source "${CMAKE_CURRENT_BINARY_DIR}/build/ocgis")
-set(ocgis_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(ocgis
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ocgis_source}
-  INSTALL_DIR ${ocgis_install}
-  BUILD_IN_SOURCE 1
-  ${GIT_CMD_STR_OCGIS}
-  ${GIT_TAG}
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${ocgis_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_OCGIS)
-  unset(GIT_CMD_STR_OCGIS)
-endif()
diff --git a/CMake/cdat_modules/ocgis_pkg.cmake b/CMake/cdat_modules/ocgis_pkg.cmake
deleted file mode 100644
index ad6d852ff..000000000
--- a/CMake/cdat_modules/ocgis_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(OCGIS_SOURCE ${OCGIS_URL}/${OCGIS_GZ})
-set(OCGIS_BRANCH next)
-set(OCGIS_REPOSITORY ${GIT_PROTOCOL}github.com/NCPP/ocgis.git )
-
-set(GIT_CMD_STR_OCGIS GIT_REPOSITORY ${OCGIS_REPOSITORY})
-set(GIT_TAG GIT_TAG "${OCGIS_BRANCH}")
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(ocgis "" "" ON)
-else()
-  add_cdat_package(ocgis "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/openssl_deps.cmake b/CMake/cdat_modules/openssl_deps.cmake
deleted file mode 100644
index 22b675b47..000000000
--- a/CMake/cdat_modules/openssl_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(OPENSSL_deps )
diff --git a/CMake/cdat_modules/openssl_external.cmake b/CMake/cdat_modules/openssl_external.cmake
deleted file mode 100644
index 752d3395c..000000000
--- a/CMake/cdat_modules/openssl_external.cmake
+++ /dev/null
@@ -1,37 +0,0 @@
-set (OPENSSL_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/openssl")
-set (OPENSSL_INSTALL_DIR "${cdat_EXTERNALS}")
-
-execute_process (COMMAND uname -s COMMAND tr -d '\n'
-                 OUTPUT_VARIABLE HOST)
-STRING (TOLOWER ${HOST} HOST)
-execute_process (COMMAND uname -m COMMAND tr -d '\n'
-                 OUTPUT_VARIABLE ARCHITECTURE)
-
-get_filename_component (COMPILER "${CMAKE_C_COMPILER}" NAME_WE)
-
-if (APPLE)
-  if (ARCHITECTURE MATCHES "64$")
-    set (HOST "${HOST}64")
-  endif ()
-  set (COMPILER "cc")
-endif ()
-
-set (OPENSSL_CONF_ARGS "${HOST}-${ARCHITECTURE}-${COMPILER}")
-set (OPENSSL_CONF_ARGS
-    ${OPENSSL_CONF_ARGS}
-    "--prefix=${OPENSSL_INSTALL_DIR}")
-
-ExternalProject_Add (openssl
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${OPENSSL_SOURCE_DIR}
-  INSTALL_DIR ${OPENSSL_INSTALL_DIR}
-  URL ${OPENSSL_SOURCE_URL}
-  URL_MD5 ${OPENSSL_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${OPENSSL_SOURCE_DIR}/Configure ${OPENSSL_CONF_ARGS}
-  DEPENDS ${OPENSSL_DEPS}
-  ${ep_log_options}
-  )
-
-set (OPENSSL_INCLUDE_DIR "${OPENSSL_INSTALL_DIR}/include")
-set (OPENSSL_LIBRARY_DIR "${OPENSSL_INSTALL_DIR}/lib")
diff --git a/CMake/cdat_modules/openssl_pkg.cmake b/CMake/cdat_modules/openssl_pkg.cmake
deleted file mode 100644
index 440d0f532..000000000
--- a/CMake/cdat_modules/openssl_pkg.cmake
+++ /dev/null
@@ -1,37 +0,0 @@
-option(CDAT_USE_SYSTEM_OPENSSL "Use system OpenSSL, if found." ON)
-mark_as_advanced(CDAT_USE_SYSTEM_OPENSSL)
-if(CDAT_USE_SYSTEM_OPENSSL)
-  find_package(OpenSSL QUIET)
-  if(OPENSSL_FOUND)
-    set(FILENAME_PATH_ARG "DIRECTORY")
-    if(CMAKE_VERSION VERSION_LESS 2.8.12)
-      # Support older version of GET_FILENAME_COMPONENT macro
-      # with legacy PATH argument
-      set(FILENAME_PATH_ARG "PATH")
-    endif(CMAKE_VERSION VERSION_LESS 2.8.12)
-    get_filename_component(OPENSSL_LIBRARY_DIR
-      "${OPENSSL_SSL_LIBRARY}" ${FILENAME_PATH_ARG})
-    message(STATUS "System OpenSSL found. "
-      "OpenSSL library directory: ${OPENSSL_LIBRARY_DIR}. "
-      "OpenSSL Version: ${OPENSSL_VERSION}")
-  endif(OPENSSL_FOUND)
-endif(CDAT_USE_SYSTEM_OPENSSL)
-
-if(NOT CDAT_USE_SYSTEM_OPENSSL OR NOT OPENSSL_FOUND)
-  set(OPENSSL_MAJOR_SRC 1)
-  set(OPENSSL_MINOR_SRC 0)
-  set(OPENSSL_PATCH_SRC 2e)
-  set(OPENSSL_VERSION
-    ${OPENSSL_MAJOR_SRC}.${OPENSSL_MINOR_SRC}.${OPENSSL_PATCH_SRC})
-  
-  message(STATUS "Compiling OpenSSL from source. Version: ${OPENSSL_VERSION}")
-
-  set(OPENSSL_URL ${LLNL_URL})
-  set(OPENSSL_GZ "openssl-${OPENSSL_VERSION}.tar.gz")
-  set(OPENSSL_MD5 5262bfa25b60ed9de9f28d5d52d77fc5)
-  set(OPENSSL_SOURCE_URL ${OPENSSL_URL}/${OPENSSL_GZ})
-  
-  # We've reached here because we need OpenSSL.
-  # Hence, defaulting to ON
-  add_cdat_package(openssl "" "" ON)
-endif(NOT CDAT_USE_SYSTEM_OPENSSL OR NOT OPENSSL_FOUND)
diff --git a/CMake/cdat_modules/osmesa_deps.cmake b/CMake/cdat_modules/osmesa_deps.cmake
deleted file mode 100644
index 2ee8b1857..000000000
--- a/CMake/cdat_modules/osmesa_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(osmesa_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/osmesa_external.cmake b/CMake/cdat_modules/osmesa_external.cmake
deleted file mode 100644
index 23f4870a1..000000000
--- a/CMake/cdat_modules/osmesa_external.cmake
+++ /dev/null
@@ -1,26 +0,0 @@
-set(osmesa_source "${CMAKE_CURRENT_BINARY_DIR}/build/osmesa")
-set(osmesa_install "${cdat_EXTERNALS}")
-
-set(osmesa_conf_args "--with-driver=osmesa")
-set(osmesa_conf_args "${osmesa_conf_args}^^--disable-gallium")
-set(osmesa_conf_args "${osmesa_conf_args}^^--disable-gallium-intel")
-set(osmesa_conf_args "${osmesa_conf_args}^^--disable-egl")
-
-ExternalProject_Add(OSMesa
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${osmesa_source}
-  INSTALL_DIR ${osmesa_install}
-  URL ${OSMESA_URL}/${OSMESA_GZ}
-  URL_MD5 ${OSMESA_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND
-    "${CMAKE_COMMAND}"
-      "-DCONFIGURE_ARGS=${osmesa_conf_args}"
-      "-DINSTALL_DIR=<INSTALL_DIR>"
-      "-DWORKING_DIR=<SOURCE_DIR>"
-      -P "${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake"
-  DEPENDS ${osmesa_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/osmesa_pkg.cmake b/CMake/cdat_modules/osmesa_pkg.cmake
deleted file mode 100644
index 1080dfb35..000000000
--- a/CMake/cdat_modules/osmesa_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(package OSMesa)
-string(TOUPPER ${package} package_uc)
-
-# We're using an older mesa (7.6.1) as it is known to work well in many
-# supercomputing environments.
-set(${package_uc}_MAJOR_SRC 7)
-set(${package_uc}_MINOR_SRC 6)
-set(${package_uc}_PATCH_SRC 1)
-set(${package_uc}_VERSION "${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.${${package_uc}_PATCH_SRC}")
-set(${package_uc}_URL ${LLNL_URL})
-set(${package_uc}_GZ "MesaLib-${${package_uc}_VERSION}.tar.gz")
-set(${package_uc}_MD5 e80fabad2e3eb7990adae773d6aeacba)
-set(${package_uc}_SOURCE "${${package_uc}_URL}/${${package_uc}_GZ}")
-
-add_cdat_package(${package} "7.6.1" "" OFF)
diff --git a/CMake/cdat_modules/paraview_deps.cmake b/CMake/cdat_modules/paraview_deps.cmake
deleted file mode 100644
index 6868b8da7..000000000
--- a/CMake/cdat_modules/paraview_deps.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(ParaView_deps ${pkgconfig_pkg} ${python_pkg} ${hdf5_pkg} ${png_pkg} ${jpeg_pkg} ${libxml2_pkg} ${freetype_pkg} ${netcdfplus_pkg} ${zlib_pkg})
-
-if (NOT CDAT_BUILD_GUI)
-  list(APPEND ParaView_deps ${qt_pkg})
-endif()
-
-if(CDAT_BUILD_PARALLEL)
-  list(APPEND ParaView_deps "${mpi_pkg}")
-endif()
-
-if(NOT CDAT_BUILD_LEAN)
-  list(APPEND ParaView_deps "${ffmpeg_pkg}")
-endif()
-
-if(CDAT_BUILD_OFFSCREEN)
-  list(APPEND ParaView_deps "${osmesa_pkg}")
-endif()
diff --git a/CMake/cdat_modules/paraview_external.cmake b/CMake/cdat_modules/paraview_external.cmake
deleted file mode 100644
index 5c20dbc2a..000000000
--- a/CMake/cdat_modules/paraview_external.cmake
+++ /dev/null
@@ -1,262 +0,0 @@
-set(ParaView_source "${CMAKE_CURRENT_BINARY_DIR}/build/ParaView")
-set(ParaView_binary "${CMAKE_CURRENT_BINARY_DIR}/build/ParaView-build")
-set(ParaView_install "${cdat_EXTERNALS}")
-
-if(QT_QMAKE_EXECUTABLE)
-  get_filename_component(QT_BINARY_DIR ${QT_QMAKE_EXECUTABLE} PATH)
-  get_filename_component(QT_ROOT ${QT_BINARY_DIR} PATH)
-endif()
-
-if(APPLE)
-  set(MACOSX_APP_INSTALL_PREFIX "${SB_EXTERNALS_DIR}") 
-endif()
-
-# Initialize
-set(ParaView_tpl_args)
-
-# VCS needs projections from GeoVis
-list(APPEND ParaView_tpl_args
-  -DModule_vtkViewsGeovis:BOOL=ON
-)
-list(APPEND ParaView_tpl_args
-  -DModule_vtklibproj4:BOOL=ON
-)
-
-# We would like to see CGM as well
-list(APPEND ParaView_tpl_args
-  -DModule_vtkIOCGM:BOOL=ON
-  )
-
-if(NOT CDAT_BUILD_LEAN)
-  list(APPEND ParaView_tpl_args -DPARAVIEW_ENABLE_FFMPEG:BOOL=ON)
-endif()
-
-if (CDAT_BUILD_PARALLEL)
-  list(APPEND ParaView_tpl_args
-    -DPARAVIEW_USE_MPI:BOOL=ON)
-  # Mac has issues with MPI4PY of ParaView. Also I don't know if we really need to build it
-  # See this bug: paraview.org/bug/view.php?id=13587
-  list(APPEND ParaView_tpl_args -DENABLE_MPI4PY:BOOL=OFF)
-
-  if(CDAT_BUILD_MPI)
-    if(UNIX)
-      set(ENV{LD_LIBRARY_PATH} "${cdat_EXTERNALS}/lib:$ENV{LD_LIBRARY_PATH}")
-    elseif(APPLE)
-      set(ENV{DYLD_FALLBACK_LIBRARY_PATH} "${cdat_EXTERNALS}/lib:$ENV{DYLD_FALLBACK_LIBRARY_PATH}")
-    endif()
-    list(APPEND ParaView_tpl_args
-      -DMPIEXEC:FILEPATH=${cdat_EXTERNALS}/bin/mpiexec
-      -DMPI_CXX_COMPILER:FILEPATH=${cdat_EXTERNALS}/bin/mpicxx
-      -DMPI_C_COMPILER:FILEPATH=${cdat_EXTERNALS}/bin/mpicc
-      -DMPI_C_INCLUDE_PATH:PATH=${cdat_EXTERNALS}/include
-      -DMPI_CXX_INCLUDE_PATH:PATH=${cdat_EXTERNALS}/include
-      -DMACOSX_APP_INSTALL_PREFIX:PATH=${MACOSX_APP_INSTALL_PREFIX}
-      -DVTK_MPIRUN_EXE:FILEPATH=${cdat_EXTERNALS}/bin/mpiexec)
-  endif()
-endif()
-
-set(_vtk_modules "vtkRenderingImage;vtkRenderingVolume;vtkRenderingLabel;vtkRenderingFreeType;vtkRenderingFreeTypeOpenGL;vtkRenderingVolumeOpenGL;vtkRenderingCore;vtkRenderingOpenGL;vtkGeovisCore;vtkViewsCore;vtkViewsGeovis;vtkInteractionImage;vtkInteractionStyle;vtkInteractionWidgets;vtkCommonTransforms;vtkCommonCore;vtkCommonComputationalGeometry;vtkCommonExecutionModel;vtkCommonSystem;vtkCommonMisc;vtkFiltersFlowPaths;vtkFiltersStatistics;vtkFiltersAMR;vtkFiltersGeneric;vtkFiltersSources;vtkFiltersModeling;vtkFiltersExtraction;vtkFiltersSelection;vtkFiltersSMP;vtkFiltersCore;vtkFiltersHybrid;vtkFiltersTexture;vtkFiltersGeneral;vtkFiltersImaging;vtkFiltersGeometry;vtkIOImage;vtkIOCore;vtkIOExport;vtkIOImport;vtkIOGeometry;vtkImagingColor;vtkImagingSources;vtkImagingCore;vtkImagingGeneral;vtkImagingMath")
-
-if(NOT CDAT_BUILD_LEAN)
-  list(APPEND _vtk_modules "vtkIOFFMPEG")
-endif()
-# Either we use cdat zlib and libxml or system zlib and libxml
-list(APPEND ParaView_tpl_args
-  -DVTK_USE_SYSTEM_ZLIB:BOOL=ON
-  -DVTK_USE_SYSTEM_LIBXML2:BOOL=ON
-  -DVTK_USE_SYSTEM_HDF5:BOOL=ON
-  -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON
-  -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON
-)
-
-# Turn off testing and other non essential featues
-list(APPEND ParaView_tpl_args
-  -DBUILD_TESTING:BOOL=OFF
-  -DPARAVIEW_BUILD_PLUGIN_MobileRemoteControl:BOOL=OFF
-  -DPQWIDGETS_DISABLE_QTWEBKIT:BOOL=ON
-  -DModule_vtkIOGeoJSON:BOOL=ON
-  -DCMAKE_PREFIX_PATH:PATH=${cdat_EXTERNALS}
-)
-
-# Use cdat zlib
-#if(NOT CDAT_USE_SYSTEM_ZLIB)
-#  list(APPEND ParaView_tpl_args
-#    -DZLIB_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-#    -DZLIB_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#  )
-#endif()
-
-# Use cdat libxml
-#if(NOT CDAT_USE_SYSTEM_LIBXML2)
-#  list(APPEND ParaView_tpl_args
-#    -DLIBXML2_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include/libxml2
-#    -DLIBXML2_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/lib/libxml2${_LINK_LIBRARY_SUFFIX}
-#    -DLIBXML2_XMLLINT_EXECUTABLE:FILEPATH=${cdat_EXTERNALS}/bin/xmllint
-#  )
-#endif()
-
-# Use cdat hdf5
-if(NOT CDAT_USE_SYSTEM_HDF5)
-  list(APPEND ParaView_tpl_args
-    -DHDF5_DIR:PATH=${cdat_EXTERNALS}/
-    -DHDF5_C_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-    -DHDF5_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-    -DHDF5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-    -DHDF5_hdf5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-    -DHDF5_hdf5_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-  )
-
-#  if(NOT CDAT_USE_SYSTEM_ZLIB)
-#    list(APPEND ParaView_tpl_args
-#      -DHDF5_z_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#      -DHDF5_z_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#    )
-#  endif()
-endif()
-
-# Check if should build GUI
-if(CDAT_BUILD_GUI)
-  list(APPEND ParaView_tpl_args
-    -DPARAVIEW_BUILD_QT_GUI:BOOL=ON
-    -DVTK_QT_USE_WEBKIT:BOOL=OFF
-    -DQT_QMAKE_EXECUTABLE:FILEPATH=${QT_QMAKE_EXECUTABLE}
-    -DQT_QTUITOOLS_INCLUDE_DIR:PATH=${QT_ROOT}/include/QtUiTools
-    -DQT_BINARY_DIR:FILEPATH=${QT_BINARY_DIR})
-else()
-  list(APPEND ParaView_tpl_args
-    -DPARAVIEW_BUILD_QT_GUI:BOOL=OFF)
-endif()
-
-# Check if using R then only enable R support
-if (CDAT_BUILD_R OR CDAT_USE_SYSTEM_R)
-  list(APPEND ParaView_tpl_args
-    -DPARAVIEW_USE_GNU_R:BOOL=ON
-    -DR_COMMAND:PATH=${R_install}/bin/R
-    -DR_DIR:PATH=${R_install}/lib/R
-    -DR_INCLUDE_DIR:PATH=${R_install}/lib/R/include
-    -DR_LIBRARY_BASE:PATH=${R_install}/lib/R/lib/libR${_LINK_LIBRARY_SUFFIX}
-    -DR_LIBRARY_BLAS:PATH=${R_install}/lib/R/lib/libRblas${_LINK_LIBRARY_SUFFIX}
-    -DR_LIBRARY_LAPACK:PATH=${R_install}/lib/R/lib/libRlapack${_LINK_LIBRARY_SUFFIX}
-    -DR_LIBRARY_READLINE:PATH=)
-endif()
-
-if(UVCDAT_TESTDATA_LOCATION)
-  list(APPEND ParaView_tpl_args
-    -DUVCDAT_TestData:PATH=${UVCDAT_TESTDATA_LOCATION})
-endif()
-
-if(CDAT_BUILD_OFFSCREEN)
-  list(APPEND ParaView_tpl_args
-    "-DVTK_USE_X:BOOL=OFF"
-    "-DVTK_OPENGL_HAS_OSMESA:BOOL=ON"
-    "-DOPENGL_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include"
-    "-DOPENGL_gl_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}"
-    "-DOPENGL_glu_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libGLU${_LINK_LIBRARY_SUFFIX}"
-    "-DOSMESA_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include"
-    "-DOSMESA_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}"
-  )
-endif()
-
-include(GetGitRevisionDescription)
-set(paraview_branch ${PARAVIEW_BRANCH})
-
-get_git_head_revision(refspec sha)
-#if("${refspec}" STREQUAL "refs/heads/devel-master")
-#  set(paraview_branch uvcdat-next)
-#endif()
-
-string(REPLACE "//" "" GIT_PROTOCOL_PREFIX ${GIT_PROTOCOL})
-
-if (${GIT_PROTOCOL} STREQUAL "git://")
-  set(REPLACE_GIT_PROTOCOL_PREFIX "http:")
-else()
-  set(REPLACE_GIT_PROTOCOL_PREFIX "git:")
-endif()
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/paraview_download.sh.in
-  ${cdat_CMAKE_BINARY_DIR}/paraview_download.sh @ONLY
-  )
-
-if (NOT OFFLINE_BUILD)
-    set(DOWNLOAD_CMD_STR  DOWNLOAD_COMMAND ${cdat_CMAKE_BINARY_DIR}/paraview_download.sh)
-else ()
-    set(DOWNLOAD_CMD_STR)
-endif()
-
-set(_vtk_module_options)
-foreach(_module ${_vtk_modules})
-  list(APPEND _vtk_module_options "-DModule_${_module}:BOOL=ON")
-endforeach()
-ExternalProject_Add(ParaView
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${ParaView_source}
-  BINARY_DIR ${ParaView_binary}
-  INSTALL_DIR ${ParaView_install}
-  ${DOWNLOAD_CMD_STR}
-  GIT_TAG ${paraview_branch}
-  UPDATE_COMMAND ""
-  PATCH_COMMAND ""
-  CMAKE_CACHE_ARGS
-    -DBUILD_SHARED_LIBS:BOOL=ON
-    -DBUILD_TESTING:BOOL=${BUILD_TESTING}
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR}
-    -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags}
-    -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags}
-#    -DPARAVIEW_BUILD_AS_APPLICATION_BUNDLE:BOOL=OFF
-#    -DPARAVIEW_DISABLE_VTK_TESTING:BOOL=ON
-#    -DPARAVIEW_INSTALL_THIRD_PARTY_LIBRARIES:BOOL=OFF
- #   -DPARAVIEW_TESTING_WITH_PYTHON:BOOL=OFF
-    -DINCLUDE_PYTHONHOME_PATHS:BOOL=OFF
-    ${cdat_compiler_args}
-    ${ParaView_tpl_args}
-    # Python
-    -DPARAVIEW_ENABLE_PYTHON:BOOL=ON
-    -DPYTHON_EXECUTABLE:FILEPATH=${PYTHON_EXECUTABLE}
-    -DPYTHON_INCLUDE_DIR:PATH=${PYTHON_INCLUDE}
-    -DPYTHON_LIBRARY:FILEPATH=${PYTHON_LIBRARY}
-    -DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=ON
-    -DVTK_LEGACY_SILENT:BOOL=ON
-    ${_vtk_module_options}
-    -DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON
-  CMAKE_ARGS
-    -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-  DEPENDS ${ParaView_deps}
-  ${ep_log_options}
-)
-
-# Install ParaView and VTK python modules via their setup.py files.
-
-#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/vtk_install_python_module.cmake.in
-#  ${cdat_CMAKE_BINARY_DIR}/vtk_install_python_module.cmake
-#  @ONLY)
-
-#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/paraview_install_python_module.cmake.in
-#  ${cdat_CMAKE_BINARY_DIR}/paraview_install_python_module.cmake
-#  @ONLY)
-
-#ExternalProject_Add_Step(ParaView InstallParaViewPythonModule
-#  COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/paraview_install_python_module.cmake
-#  DEPENDEES install
-#  WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}
-#  )
-
-#ExternalProject_Add_Step(ParaView InstallVTKPythonModule
-#  COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/vtk_install_python_module.cmake
-#  DEPENDEES install
-#  WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}
-#  )
-
-# symlinks of Externals/bin get placed in prefix/bin so we need to symlink paraview
-# libs into prefix/lib as well for pvserver to work.
-if(NOT EXISTS ${CMAKE_INSTALL_PREFIX}/lib)
-  message("making ${ParaView_install}/lib")
-  file(MAKE_DIRECTORY ${CMAKE_INSTALL_PREFIX}/lib)
-endif()
-
-#ExternalProject_Add_Step(ParaView InstallParaViewLibSymlink
-#  COMMAND ${CMAKE_COMMAND} -E create_symlink ${ParaView_install}/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR} ${CMAKE_INSTALL_PREFIX}/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR}
-#  DEPENDEES install
-#  WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}
-#)
-unset(GIT_CMD_STR)
-
diff --git a/CMake/cdat_modules/paraview_pkg.cmake b/CMake/cdat_modules/paraview_pkg.cmake
deleted file mode 100644
index c5fe1743b..000000000
--- a/CMake/cdat_modules/paraview_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(PARAVIEW_MAJOR 4)
-set(PARAVIEW_MINOR 1)
-set(PARAVIEW_PATCH 0)
-set(PARAVIEW_VERSION ${PARAVIEW_MAJOR}.${PARAVIEW_MINOR}.${PARAVIEW_PATCH})
-set(PARAVIEW_URL ${LLNL_URL})
-set(PARAVIEW_GZ ParaView-${PARAVIEW_VERSION}c.tar.gz)
-set(PARAVIEW_MD5)
-set(PARAVIEW_BRANCH uvcdat-master)
-set(PARAVIEW_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/ParaView.git )
-
-add_cdat_package_dependent(ParaView "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/pbmplus_external.cmake b/CMake/cdat_modules/pbmplus_external.cmake
deleted file mode 100644
index 03743c74c..000000000
--- a/CMake/cdat_modules/pbmplus_external.cmake
+++ /dev/null
@@ -1,32 +0,0 @@
-
-set(pbmplus_source "${CMAKE_CURRENT_BINARY_DIR}/build/pbmplus")
-set(pbmplus_install "${cdat_EXTERNALS}")
-
-#cp ../../exsrc/src/pbmplus/pbmplus.h . ; cp ../../exsrc/src/pbmplus/libpbm1.c pbm  ;cp ../../exsrc/src/pbmplus/Makefile .
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pbmplus_configure_step.cmake.in
-    ${CMAKE_CURRENT_BINARY_DIR}/pbmplus_configure_step.cmake
-    @ONLY)
-
-ExternalProject_Add(pbmplus
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pbmplus_source}
-  INSTALL_DIR ${pbmplus_install}
-  URL ${PBMPLUS_URL}/${PBMPLUS_GZ}
-  URL_MD5 ${PBMPLUS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/pbmplus/libpbm1.c ${pbmplus_source}/pbm/
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/pbmplus_configure_step.cmake
-  DEPENDS ${pbmplus_deps}
-  ${ep_log_options}
-)
-
-ExternalProject_Add_Step(pbmplus CopyPbmplusHeader
-  COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/pbmplus/pbmplus.h ${pbmplus_source}/
-  DEPENDEES patch
-  DEPENDERS configure
-  )
-
-#pbmplus install fails if this directory doesnt already exist.
-file(MAKE_DIRECTORY ${pbmplus_install}/man/mann)
diff --git a/CMake/cdat_modules/pep8_deps.cmake b/CMake/cdat_modules/pep8_deps.cmake
deleted file mode 100644
index e57f7cf7e..000000000
--- a/CMake/cdat_modules/pep8_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pep8_deps ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/pep8_external.cmake b/CMake/cdat_modules/pep8_external.cmake
deleted file mode 100644
index c6dc541c7..000000000
--- a/CMake/cdat_modules/pep8_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# The pep8 project
-
-set(pep8_binary "${CMAKE_CURRENT_BINARY_DIR}/build/pep8")
-
-ExternalProject_Add(pep8
-  DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}
-  SOURCE_DIR ${pep8_binary}
-  URL ${PEP8_SOURCE}
-  URL_MD5 ${PEP8_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${pep8_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/pep8_pkg.cmake b/CMake/cdat_modules/pep8_pkg.cmake
deleted file mode 100644
index 723e4b8d3..000000000
--- a/CMake/cdat_modules/pep8_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set( PEP8_MAJOR 1 )
-set( PEP8_MINOR 5 )
-set( PEP8_PATCH 7)
-set( PEP8_VERSION ${PEP8_MAJOR}.${PEP8_MINOR}.${PEP8_PATCH} )
-set( PEP8_URL ${LLNL_URL} )
-set( PEP8_GZ pep8-${PEP8_VERSION}.tar.gz )
-set( PEP8_MD5 f6adbdd69365ecca20513c709f9b7c93 )
-
-set (nm PEP8)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(PEP8_SOURCE ${PEP8_URL}/${PEP8_GZ})
-
-if (BUILD_TESTING)
-  add_cdat_package(pep8 "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/pip_deps.cmake b/CMake/cdat_modules/pip_deps.cmake
deleted file mode 100644
index 35c1383e5..000000000
--- a/CMake/cdat_modules/pip_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pip_deps  ${setuptools_pkg})
diff --git a/CMake/cdat_modules/pip_external.cmake b/CMake/cdat_modules/pip_external.cmake
deleted file mode 100644
index 4c21cd6d3..000000000
--- a/CMake/cdat_modules/pip_external.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm pip)
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-
-include(${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake)
-string(TOUPPER ${nm} uc_nm)
-
-ExternalProject_Add(${nm}
-    DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-    URL ${${uc_nm}_SOURCE}
-    URL_MD5 ${${uc_nm}_MD5} 
-    BUILD_IN_SOURCE 1
-    CONFIGURE_COMMAND ""
-    BUILD_COMMAND ""
-    INSTALL_COMMAND ${EASY_INSTALL_BINARY} ${CDAT_PACKAGE_CACHE_DIR}/${${uc_nm}_GZ}
-    DEPENDS ${${nm}_deps}
-    ${ep_log_options}
-    )
diff --git a/CMake/cdat_modules/pip_pkg.cmake b/CMake/cdat_modules/pip_pkg.cmake
deleted file mode 100644
index 7e442f2f2..000000000
--- a/CMake/cdat_modules/pip_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(PIP_MAJOR_SRC 7)
-set(PIP_MINOR_SRC 1)
-set(PIP_PATCH_SRC 2)
-
-set (nm PIP)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(PIP_URL ${LLNL_URL})
-set(PIP_GZ pip-${PIP_VERSION}.tar.gz)
-set(PIP_SOURCE ${PIP_URL}/${PIP_GZ})
-set(PIP_MD5 3823d2343d9f3aaab21cf9c917710196)
-
-add_cdat_package(pip "" "" OFF)
diff --git a/CMake/cdat_modules/pixman_deps.cmake b/CMake/cdat_modules/pixman_deps.cmake
deleted file mode 100644
index 276a88585..000000000
--- a/CMake/cdat_modules/pixman_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pixman_deps ${pkgconfig_pkg} ${zlib_pkg} ${freetype_pkg})
diff --git a/CMake/cdat_modules/pixman_external.cmake b/CMake/cdat_modules/pixman_external.cmake
deleted file mode 100644
index bd043c7a0..000000000
--- a/CMake/cdat_modules/pixman_external.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-
-set(pixman_source "${CMAKE_CURRENT_BINARY_DIR}/build/pixman")
-set(pixman_install "${cdat_EXTERNALS}")
-set(pixman_configure_args "--disable-gtk")
-
-ExternalProject_Add(pixman
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pixman_source}
-  INSTALL_DIR ${pixman_install}
-  URL ${PIX_URL}/${PIX_GZ}
-  URL_MD5 ${PIX_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${pixman_configure_args} -DINSTALL_DIR=${pixman_install} -DWORKING_DIR=${pixman_source} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${pixman_deps}
-  ${ep_log_options}
-)
-
-set(pixman_DIR "${pixman_binary}" CACHE PATH "pixman binary directory" FORCE)
-mark_as_advanced(pixman_DIR)
diff --git a/CMake/cdat_modules/pixman_pkg.cmake b/CMake/cdat_modules/pixman_pkg.cmake
deleted file mode 100644
index 10590199f..000000000
--- a/CMake/cdat_modules/pixman_pkg.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-set(PIX_MAJOR 0)
-set(PIX_MINOR 22)
-set(PIX_PATCH 2)
-set(PIX_MAJOR_SRC 0)
-set(PIX_MINOR_SRC 30)
-set(PIX_PATCH_SRC 0)
-set(PIX_URL ${LLNL_URL})
-set(PIX_GZ pixman-${PIX_MAJOR_SRC}.${PIX_MINOR_SRC}.${PIX_PATCH_SRC}.tar.gz)
-set(PIX_MD5 ae7ac97921dfa59086ca2231621a79c7 )
-
-
-set (nm PIX)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(PIXMAN_VERSION ${PIX_VERSION})
-set(PIXMAN_SOURCE ${PIX_URL}/${PIX_GZ})
-set(PIXMAN_MD5 ${PIX_MD5})
-
-add_cdat_package_dependent(pixman "" "" OFF "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/pkgconfig_deps.cmake b/CMake/cdat_modules/pkgconfig_deps.cmake
deleted file mode 100644
index 106cfb074..000000000
--- a/CMake/cdat_modules/pkgconfig_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pkgconfig_deps ${wget_pkg})
diff --git a/CMake/cdat_modules/pkgconfig_external.cmake b/CMake/cdat_modules/pkgconfig_external.cmake
deleted file mode 100644
index 2b8bd158b..000000000
--- a/CMake/cdat_modules/pkgconfig_external.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-
-set(pkgconfig_source "${CMAKE_CURRENT_BINARY_DIR}/build/pkgconfig")
-set(pkgconfig_install "${cdat_EXTERNALS}")
-set(pkgconfig_config_args "--with-internal-glib")
-
-ExternalProject_Add(pkgconfig
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  URL ${PKG_URL}/${PKG_GZ}
-  URL_MD5 ${PKG_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  SOURCE_DIR ${pkgconfig_source}
-  INSTALL_DIR ${pkgconfig_install}
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${pkgconfig_config_args} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${pkgconfig_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/pkgconfig_pkg.cmake b/CMake/cdat_modules/pkgconfig_pkg.cmake
deleted file mode 100644
index ca3927731..000000000
--- a/CMake/cdat_modules/pkgconfig_pkg.cmake
+++ /dev/null
@@ -1,22 +0,0 @@
-set(PKG_MAJOR 0)
-set(PKG_MINOR 9)
-set(PKG_PATCH 0)
-set(PKG_MAJOR_SRC 0)
-set(PKG_MINOR_SRC 28)
-set(PKG_PATCH_SRC 0)
-set(PKG_VERSION ${PKG_MAJOR_SRC}.${PKG_MINOR_SRC}.${PKG_PATCH_SRC})
-set(PKG_URL ${LLNL_URL})
-set(PKG_GZ pkg-config-${PKG_MAJOR_SRC}.${PKG_MINOR_SRC}.tar.gz)
-set(PKG_MD5 aa3c86e67551adc3ac865160e34a2a0d)
-set(PKGCONFIG_VERSION ${PKG_VERSION})
-set(PKGCONFIG_SOURCE ${PKG_URL}/${PKG_GZ})
-
-add_cdat_package(pkgconfig "" "" OFF)
-
-if(NOT CDAT_USE_SYSTEM_PKGCONFIG)
-  set(cdat_PKG_CONFIG_EXECUTABLE ${cdat_EXTERNALS}/bin/pkg-config)
-  set(ENV{PKG_CONFIG} "${cdat_PKG_CONFIG_EXECUTABLE}")
-  set(ENV{PKG_CONFIG_PATH} "${cdat_EXTERNALS}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-  set(ENV{PKG_CONFIG} ${cdat_PKG_CONFIG_EXECUTABLE})
-endif()
-
diff --git a/CMake/cdat_modules/pmw_deps.cmake b/CMake/cdat_modules/pmw_deps.cmake
deleted file mode 100644
index 8e1435b25..000000000
--- a/CMake/cdat_modules/pmw_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Pmw_deps ${pkgconfig_pkg} ${python_pkg})
diff --git a/CMake/cdat_modules/pmw_external.cmake b/CMake/cdat_modules/pmw_external.cmake
deleted file mode 100644
index 202ed810e..000000000
--- a/CMake/cdat_modules/pmw_external.cmake
+++ /dev/null
@@ -1,30 +0,0 @@
-
-set(Pmw_source "${CMAKE_CURRENT_BINARY_DIR}/build/Pmw")
-set(Pmw_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pmw_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pmw_make_step.cmake
-  @ONLY)
-  
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pmw_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pmw_install_step.cmake
-  @ONLY)
-
-set(Pmw_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pmw_make_step.cmake)
-set(Pmw_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pmw_install_step.cmake)
-
-ExternalProject_Add(Pmw
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${Pmw_source}
-  INSTALL_DIR ${Pmw_install}
-  URL ${PMW_URL}/${PMW_GZ}
-  URL_MD5 ${PMW_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${Pmw_build_command}
-  INSTALL_COMMAND ${Pmw_install_command}
-  DEPENDS ${Pmw_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/pmw_pkg.cmake b/CMake/cdat_modules/pmw_pkg.cmake
deleted file mode 100644
index f0a0031b5..000000000
--- a/CMake/cdat_modules/pmw_pkg.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-set(PMW_MAJOR 1)
-set(PMW_MINOR 3)
-set(PMW_MAJOR_SRC 1)
-set(PMW_MINOR_SRC 3)
-set(PMW_PATCH_SRC 2)
-set(PMW_URL ${LLNL_URL})
-set(PMW_GZ Pmw.${PMW_MAJOR_SRC}.${PMW_MINOR_SRC}.${PMW_PATCH_SRC}.tar.gz)
-set(PMW_MD5 7f30886fe9885ab3cf85dac6ce1fbda5)
-set(PMW_SOURCE ${PMW_URL}/${PMW_GZ})
-
-
-set (nm PMW)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-if (CDAT_BUILD_LEAN)
-    add_cdat_package_dependent(Pmw "" "" OFF "CDAT_BUILD_GUI" OFF)
-else()
-    add_cdat_package(Pmw "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/pnetcdf_deps.cmake b/CMake/cdat_modules/pnetcdf_deps.cmake
deleted file mode 100644
index 9b1966cce..000000000
--- a/CMake/cdat_modules/pnetcdf_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PNETCDF_deps ${pkgconfig_pkg} ${mpi_pkg})
diff --git a/CMake/cdat_modules/pnetcdf_external.cmake b/CMake/cdat_modules/pnetcdf_external.cmake
deleted file mode 100644
index 431348b85..000000000
--- a/CMake/cdat_modules/pnetcdf_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(pnetcdf_source "${CMAKE_CURRENT_BINARY_DIR}/build/pnetcdf")
-set(pnetcdf_install "${cdat_EXTERNALS}")
-set(pnetcdf_configure_args "--with-mpi=${cdat_EXTERNALS}")
-set(pnetcdf_additional_cflags "-fPIC")
-
-ExternalProject_Add(PNETCDF
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pnetcdf_source}
-  INSTALL_DIR ${pnetcdf_install}
-  URL ${PNETCDF_URL}/${PNETCDF_GZ}
-  URL_MD5 ${PNETCDF_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${pnetcdf_additional_cflags} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -D CONFIGURE_ARGS=${pnetcdf_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${PNETCDF_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/pnetcdf_pkg.cmake b/CMake/cdat_modules/pnetcdf_pkg.cmake
deleted file mode 100644
index 02cf48e19..000000000
--- a/CMake/cdat_modules/pnetcdf_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(PNETCDF_MAJOR_SRC 1)
-set(PNETCDF_MINOR_SRC 6)
-set(PNETCDF_PATCH_SRC 0)
-set(PNETCDF_URL ${LLNL_URL})
-set(PNETCDF_GZ parallel-netcdf-${PNETCDF_MAJOR_SRC}.${PNETCDF_MINOR_SRC}.${PNETCDF_PATCH_SRC}.tar.gz)
-set(PNETCDF_MD5 4893a50ddcd487a312c64383bdeb2631)
-
-set (nm PNETCDF)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(PNETCDF_SOURCE ${PNETCDF_URL}/${PNETCDF_GZ})
-
-add_cdat_package(PNETCDF "" "" OFF)
diff --git a/CMake/cdat_modules/png_deps.cmake b/CMake/cdat_modules/png_deps.cmake
deleted file mode 100644
index 43bad1a48..000000000
--- a/CMake/cdat_modules/png_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(png_deps ${pkgconfig_pkg} ${zlib_pkg})
diff --git a/CMake/cdat_modules/png_external.cmake b/CMake/cdat_modules/png_external.cmake
deleted file mode 100644
index 3ba0b81a6..000000000
--- a/CMake/cdat_modules/png_external.cmake
+++ /dev/null
@@ -1,45 +0,0 @@
-# If Windows we use CMake otherwise ./configure
-if(WIN32)
-
-  set(png_source "${CMAKE_CURRENT_BINARY_DIR}/png")
-  set(png_binary "${CMAKE_CURRENT_BINARY_DIR}/png-build")
-  set(png_install "${cdat_EXTERNALS}")
-
-  ExternalProject_Add(png
-  URL ${PNG_URL}/${PNG_GZ}
-  URL_MD5 ${PNG_MD5}
-  UPDATE_COMMAND ""
-  SOURCE_DIR ${png_source}
-  BINARY_DIR ${png_binary}
-  CMAKE_CACHE_ARGS
-    -DCMAKE_CXX_FLAGS:STRING=${pv_tpl_cxx_flags}
-    -DCMAKE_C_FLAGS:STRING=${pv_tpl_c_flags}
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR}
-    ${pv_tpl_compiler_args}
-    -DZLIB_INCLUDE_DIR:STRING=${ZLIB_INCLUDE_DIR}
-    -DZLIB_LIBRARY:STRING=${ZLIB_LIBRARY}
-  CMAKE_ARGS
-    -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-  DEPENDS ${png_dependencies}
-  ${ep_log_options}
-  )
-
-else()
-
-  set(png_source "${CMAKE_CURRENT_BINARY_DIR}/build/png")
-  set(png_install "${cdat_EXTERNALS}")
-
-  ExternalProject_Add(png
-    DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-    SOURCE_DIR ${png_source}
-    INSTALL_DIR ${png_install}
-    URL ${PNG_URL}/${PNG_GZ}
-    URL_MD5 ${PNG_MD5}
-    BUILD_IN_SOURCE 1
-    PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/png/pngconf.h ${png_source}/pngconf.h
-    CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-    DEPENDS ${png_deps}
-    ${ep_log_options}
-  )
-
-endif()
diff --git a/CMake/cdat_modules/png_pkg.cmake b/CMake/cdat_modules/png_pkg.cmake
deleted file mode 100644
index 5a9f1e1f4..000000000
--- a/CMake/cdat_modules/png_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(PNG_MAJOR 1)
-set(PNG_MINOR 4)
-set(PNG_PATCH 1)
-set(PNG_MAJOR_SRC 1)
-set(PNG_MINOR_SRC 5)
-set(PNG_PATCH_SRC 1)
-set(PNG_VERSION ${PNG_MAJOR_SRC}.${PNG_MINOR_SRC}.${PNG_PATCH_SRC})
-set(PNG_URL ${LLNL_URL})
-set(PNG_GZ libpng-${PNG_VERSION}.tar.gz)
-set(PNG_MD5 220035f111ea045a51e290906025e8b5)
-set(PNG_SOURCE ${PNG_URL}/${PNG_GZ})
-
-# Turns out grib2 (therefore cdms2 needs it so dont turn this off
-add_cdat_package(png "" "" ON)
diff --git a/CMake/cdat_modules/proj4_deps.cmake b/CMake/cdat_modules/proj4_deps.cmake
deleted file mode 100644
index ec110453c..000000000
--- a/CMake/cdat_modules/proj4_deps.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(proj4_deps ${pkgconfig_pkg})
-if (CDAT_BUILD_PARALLEL)
-      list(APPEND proj4_deps ${mpi_pkg})
-endif()
diff --git a/CMake/cdat_modules/proj4_external.cmake b/CMake/cdat_modules/proj4_external.cmake
deleted file mode 100644
index 9bd122f5b..000000000
--- a/CMake/cdat_modules/proj4_external.cmake
+++ /dev/null
@@ -1,20 +0,0 @@
-set(proj4_source "${CMAKE_CURRENT_BINARY_DIR}/build/proj4")
-set(proj4_install "${cdat_EXTERNALS}/proj4")
-set(proj4_configure_args "")
-
-ExternalProject_Add(proj4
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${proj4_source}
-  INSTALL_DIR ${proj4_install}
-  BUILD_IN_SOURCE 1
-  URL ${PROJ4_SOURCE}
-  URL_MD5 ${PROJ4_MD5}
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${proj4_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file}
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS ${proj4_deps}
-  ${ep_log_options}
-)
-if (DEFINED GIT_CMD_STR_PROJ4)
-  unset(GIT_CMD_STR_PROJ4)
-endif()
diff --git a/CMake/cdat_modules/proj4_pkg.cmake b/CMake/cdat_modules/proj4_pkg.cmake
deleted file mode 100644
index 8bf542f52..000000000
--- a/CMake/cdat_modules/proj4_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(PROJ4_URL ${LLNL_URL})
-set(PROJ4_MAJOR_SRC 4)
-set(PROJ4_MINOR_SRC 9)
-set(PROJ4_PATCH_SRC 2)
-set(PROJ4_GZ proj.4-${PROJ4_MAJOR_SRC}.${PROJ4_MINOR_SRC}.${PROJ4_PATCH_SRC}.tar.gz)
-set(PROJ4_SOURCE ${PROJ4_URL}/${PROJ4_GZ})
-set(PROJ4_MD5 a6059d05592948d5f205ba432e359bd7)
-if (CDAT_BUILD_ALL)
-  add_cdat_package(proj4 "" "" ON)
-else()
-  add_cdat_package_dependent(proj4 "" "" ON "CDAT_BUILD_PROJ4" OFF)
-endif()
diff --git a/CMake/cdat_modules/pyasn1_deps.cmake b/CMake/cdat_modules/pyasn1_deps.cmake
deleted file mode 100644
index bf438928f..000000000
--- a/CMake/cdat_modules/pyasn1_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PYASN1_deps ${pip_pkg} ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/pyasn1_external.cmake b/CMake/cdat_modules/pyasn1_external.cmake
deleted file mode 100644
index dd35ee111..000000000
--- a/CMake/cdat_modules/pyasn1_external.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm PYASN1)
-
-# Set LDFlags and CFlags to make it easier to find OpenSSL
-list(APPEND USR_ENVS
-  "LDFLAGS=-L${OPENSSL_LIBRARY_DIR} $ENV{LDFLAGS}"
-  "CFLAGS=-I${OPENSSL_INCLUDE_DIR} $ENV{CFLAGS}"
-  )
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/pyasn1_pkg.cmake b/CMake/cdat_modules/pyasn1_pkg.cmake
deleted file mode 100644
index ff69f7c51..000000000
--- a/CMake/cdat_modules/pyasn1_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYASN1_MAJOR_SRC 0)
-set(PYASN1_MINOR_SRC 1)
-set(PYASN1_PATCH_SRC 9)
-
-set(PYASN1_VERSION ${PYASN1_MAJOR_SRC}.${PYASN1_MINOR_SRC}.${PYASN1_PATCH_SRC})
-set(PYASN1_GZ pyasn1-${PYASN1_VERSION}.tar.gz)
-set(PYASN1_SOURCE ${LLNL_URL}/${PYASN1_GZ})
-set(PYASN1_MD5 f00a02a631d4016818659d1cc38d229a)
-
-add_cdat_package_dependent(PYASN1 "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/pyclimate_deps.cmake b/CMake/cdat_modules/pyclimate_deps.cmake
deleted file mode 100644
index ee5768752..000000000
--- a/CMake/cdat_modules/pyclimate_deps.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-set(pyclimate_deps ${numpy_pkg} ${pip_pkg})
-
diff --git a/CMake/cdat_modules/pyclimate_external.cmake b/CMake/cdat_modules/pyclimate_external.cmake
deleted file mode 100644
index 4fe52288f..000000000
--- a/CMake/cdat_modules/pyclimate_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-# create an external project to install PyClimate
-# and configure and build it
-set(nm pyclimate)
-set(OLD OFF)
-include(pipinstaller)
-unset(OLD)
diff --git a/CMake/cdat_modules/pyclimate_pkg.cmake b/CMake/cdat_modules/pyclimate_pkg.cmake
deleted file mode 100644
index e151f3cff..000000000
--- a/CMake/cdat_modules/pyclimate_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(PYCLIMATE_VERSION 1.2.3)
-set(PYCLIMATE_URL ${LLNL_URL})
-set(PYCLIMATE_GZ PyClimate-${PYCLIMATE_VERSION}.tar.gz)
-set(PYCLIMATE_SOURCE ${PYCLIMATE_URL}/${PYCLIMATE_GZ})
-set(PYCLIMATE_MD5 094ffd0adedc3ede24736e0c0ff1699f)
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(pyclimate "" "" ON)
-else()
-  add_cdat_package(pyclimate "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/pycparser_deps.cmake b/CMake/cdat_modules/pycparser_deps.cmake
deleted file mode 100644
index 3efd2d4ee..000000000
--- a/CMake/cdat_modules/pycparser_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PYCPARSER_deps ${python_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/pycparser_external.cmake b/CMake/cdat_modules/pycparser_external.cmake
deleted file mode 100644
index f9b317f4f..000000000
--- a/CMake/cdat_modules/pycparser_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm PYCPARSER)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/pycparser_pkg.cmake b/CMake/cdat_modules/pycparser_pkg.cmake
deleted file mode 100644
index 873a293dd..000000000
--- a/CMake/cdat_modules/pycparser_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYCPARSER_MAJOR_SRC 2)
-set(PYCPARSER_MINOR_SRC 13)
-set(PYCPARSER_PATCH_SRC )
-
-set(PYCPARSER_VERSION ${PYCPARSER_MAJOR_SRC}.${PYCPARSER_MINOR_SRC})
-set(PYCPARSER_GZ pycparser-${PYCPARSER_VERSION}.tar.gz)
-set(PYCPARSER_SOURCE ${LLNL_URL}/${PYCPARSER_GZ})
-set(PYCPARSER_MD5 e4fe1a2d341b22e25da0d22f034ef32f )
-
-add_cdat_package_dependent(PYCPARSER "" "" ON "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/pyflakes_deps.cmake b/CMake/cdat_modules/pyflakes_deps.cmake
deleted file mode 100644
index 14a40726d..000000000
--- a/CMake/cdat_modules/pyflakes_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pyflakes_deps ${python_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/pyflakes_external.cmake b/CMake/cdat_modules/pyflakes_external.cmake
deleted file mode 100644
index 40a4774f7..000000000
--- a/CMake/cdat_modules/pyflakes_external.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-ExternalProject_Add(pyflakes
-  DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}"
-  SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/pyflakes"
-  URL "${PYFLAKES_SOURCE}"
-  URL_MD5 ${PYFLAKES_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build
-  INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${pyflakes_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/pyflakes_pkg.cmake b/CMake/cdat_modules/pyflakes_pkg.cmake
deleted file mode 100644
index a83f881c7..000000000
--- a/CMake/cdat_modules/pyflakes_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set(nm pyflakes)
-string(TOUPPER ${nm} uc_nm)
-
-set(${uc_nm}_MAJOR 0)
-set(${uc_nm}_MINOR 8)
-set(${uc_nm}_PATCH 1)
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_URL ${LLNL_URL})
-set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz)
-set(${uc_nm}_MD5 905fe91ad14b912807e8fdc2ac2e2c23 )
-
-set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH})
-set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ})
-
-if(BUILD_TESTING)
-  add_cdat_package(${nm} "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/pygments_deps.cmake b/CMake/cdat_modules/pygments_deps.cmake
deleted file mode 100644
index 8da947cd9..000000000
--- a/CMake/cdat_modules/pygments_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pygments_deps ${pip_pkg} ${lepl_pkg})
diff --git a/CMake/cdat_modules/pygments_external.cmake b/CMake/cdat_modules/pygments_external.cmake
deleted file mode 100644
index 225a8b160..000000000
--- a/CMake/cdat_modules/pygments_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm pygments)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/pygments_pkg.cmake b/CMake/cdat_modules/pygments_pkg.cmake
deleted file mode 100644
index 7bd13b2b5..000000000
--- a/CMake/cdat_modules/pygments_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(pygments_MAJOR_SRC 1)
-set(pygments_MINOR_SRC 6)
-set(pygments_PATCH_SRC )
-set(PYGMENTS_VERSION ${pygments_MAJOR_SRC}.${pygments_MINOR_SRC})
-set(PYGMENTS_GZ Pygments-${PYGMENTS_VERSION}.tar.gz)
-set(PYGMENTS_SOURCE ${LLNL_URL}/${PYGMENTS_GZ})
-set(PYGMENTS_MD5 a18feedf6ffd0b0cc8c8b0fbdb2027b1  )
-
-add_cdat_package_dependent(pygments "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/pylibxml2_deps.cmake b/CMake/cdat_modules/pylibxml2_deps.cmake
deleted file mode 100644
index 8ff91e246..000000000
--- a/CMake/cdat_modules/pylibxml2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PYLIBXML2_deps ${pkgconfig_pkg} ${python_pkg} ${libxml2_pkg} ${libxslt_pkg})
diff --git a/CMake/cdat_modules/pylibxml2_external.cmake b/CMake/cdat_modules/pylibxml2_external.cmake
deleted file mode 100644
index f3d77fd17..000000000
--- a/CMake/cdat_modules/pylibxml2_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-
-set(libXML2_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXML2")
-set(libXML2_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(PYLIBXML2
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${libXML2_source}
-  INSTALL_DIR ${libXML2_install}
-  URL ${XML_URL}/${XML_GZ}
-  URL_MD5 ${XML_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${PYLIBXML2_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/pylibxml2_pkg.cmake b/CMake/cdat_modules/pylibxml2_pkg.cmake
deleted file mode 100644
index e374227f7..000000000
--- a/CMake/cdat_modules/pylibxml2_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(PYLIBXML2_MAJOR 2)
-set(PYLIBXML2_MINOR 7)
-set(PYLIBXML2_PATCH 8)
-set(PYLIBXML2_MAJOR_SRC 2)
-set(PYLIBXML2_MINOR_SRC 7)
-set(PYLIBXML2_PATCH_SRC 8)
-set(PYLIBXML2_URL ${LLNL_URL})
-set(PYLIBXML2_GZ libxml2-${PYLIBXML2_MAJOR_SRC}.${PYLIBXML2_MINOR_SRC}.${PYLIBXML2_PATCH_SRC}.tar.gz)
-set(PYLIBXML2_MD5 8127a65e8c3b08856093099b52599c86)
-set(PYLIBXML2_SOURCE ${PYLIBXML2_URL}/${PYLIBXML2_GZ})
-
-set (nm PYLIBXML2)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-add_cdat_package(PYLIBXML2 "" "" OFF)
diff --git a/CMake/cdat_modules/pynetcdf4_deps.cmake b/CMake/cdat_modules/pynetcdf4_deps.cmake
deleted file mode 100644
index 176f1cd99..000000000
--- a/CMake/cdat_modules/pynetcdf4_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pynetcdf4_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${netcdf_pkg} ${numpy_pkg})
diff --git a/CMake/cdat_modules/pynetcdf4_external.cmake b/CMake/cdat_modules/pynetcdf4_external.cmake
deleted file mode 100644
index f430b9e16..000000000
--- a/CMake/cdat_modules/pynetcdf4_external.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-# create an external project to install pynetcdf
-# and configure and build it
-
-# pynetcdf4
-#
-set(pynetcdf4_source "${CMAKE_CURRENT_BINARY_DIR}/build/pynetcdf4")
-
-ExternalProject_Add(pynetcdf4
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pynetcdf4_source}
-  URL ${PYNETCDF4_URL}/${PYNETCDF4_GZ}
-  URL_MD5 ${PYNETCDF4_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND env "NETCDF4_DIR=${cdat_EXTERNALS}" "PYTHONPATH=$ENV{PYTHONPATH}" ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND env "NETCDF4_DIR=${cdat_EXTERNALS}" "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${pynetcdf4_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/pynetcdf4_pkg.cmake b/CMake/cdat_modules/pynetcdf4_pkg.cmake
deleted file mode 100644
index ebefb167b..000000000
--- a/CMake/cdat_modules/pynetcdf4_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set( PYNETCDF4_MAJOR_SRC 1  )
-set( PYNETCDF4_MINOR_SRC 1 )
-set( PYNETCDF4_PATCH_SRC 9  )
-set(PYNETCDF4_URL ${LLNL_URL})
-set(PYNETCDF4_GZ
-  netCDF4-${PYNETCDF4_MAJOR_SRC}.${PYNETCDF4_MINOR_SRC}.${PYNETCDF4_PATCH_SRC}.tar.gz)
-set(PYNETCDF4_MD5 4ee7399e547f8b906e89da5529fa5ef4)
-set(PYNETCDF4_SOURCE ${PYNETCDF4_URL}/${PYNETCDF4_GZ})
-
-set (nm pynetcdf4)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-add_cdat_package(pynetcdf4 "" "" ON)
diff --git a/CMake/cdat_modules/pyopengl_external.cmake b/CMake/cdat_modules/pyopengl_external.cmake
deleted file mode 100644
index daf68bf40..000000000
--- a/CMake/cdat_modules/pyopengl_external.cmake
+++ /dev/null
@@ -1,29 +0,0 @@
-
-set(PyOpenGL_source "${CMAKE_CURRENT_BINARY_DIR}/PyOpenGL")
-set(PyOpenGL_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyopengl_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pyopengl_make_step.cmake
-  @ONLY)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyopengl_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pyopengl_install_step.cmake
-  @ONLY)
-
-set(PyOpenGL_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyopengl_make_step.cmake)
-set(PyOpenGL_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyopengl_install_step.cmake)
-
-ExternalProject_Add(PyOpenGL
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${PyOpenGL_source}
-  URL ${PYOPENGL_URL}/${PYOPENGL_GZ}
-  URL_MD5 ${PYOPENGL_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PyOpenGL_build_command}
-  INSTALL_COMMAND ${PyOpenGL_install_command}
-  DEPENDS ${PyOpenGL_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/pyopenssl_deps.cmake b/CMake/cdat_modules/pyopenssl_deps.cmake
deleted file mode 100644
index 6ab54642e..000000000
--- a/CMake/cdat_modules/pyopenssl_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PYOPENSSL_deps ${python_pkg} ${pip_pkg} ${six_pkg} ${cryptography_pkg} ${cffi_pkg} ${ffi_pkg} ${pycparser_pkg} )
diff --git a/CMake/cdat_modules/pyopenssl_external.cmake b/CMake/cdat_modules/pyopenssl_external.cmake
deleted file mode 100644
index c0ed4c238..000000000
--- a/CMake/cdat_modules/pyopenssl_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm PYOPENSSL)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/pyopenssl_pkg.cmake b/CMake/cdat_modules/pyopenssl_pkg.cmake
deleted file mode 100644
index d9e4d4bc4..000000000
--- a/CMake/cdat_modules/pyopenssl_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYOPENSSL_MAJOR_SRC 0)
-set(PYOPENSSL_MINOR_SRC 14)
-set(PYOPENSSL_PATCH_SRC 0)
-
-set(PYOPENSSL_VERSION ${PYOPENSSL_MAJOR_SRC}.${PYOPENSSL_MINOR_SRC})
-set(PYOPENSSL_GZ pyOpenSSL-${PYOPENSSL_VERSION}.tar.gz)
-set(PYOPENSSL_SOURCE ${LLNL_URL}/${PYOPENSSL_GZ})
-set(PYOPENSSL_MD5 8579ff3a1d858858acfba5f046a4ddf7)
-
-add_cdat_package_dependent(PYOPENSSL "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/pyparsing_deps.cmake b/CMake/cdat_modules/pyparsing_deps.cmake
deleted file mode 100644
index 79eea7913..000000000
--- a/CMake/cdat_modules/pyparsing_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PYPARSING_deps ${python_pkg} ${pip_pkg} ${dateutils_pkg} ${six_pkg})
diff --git a/CMake/cdat_modules/pyparsing_external.cmake b/CMake/cdat_modules/pyparsing_external.cmake
deleted file mode 100644
index b72862852..000000000
--- a/CMake/cdat_modules/pyparsing_external.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm PYPARSING)
-set(OLD OFF)
-include(pipinstaller)
-unset(OLD)
diff --git a/CMake/cdat_modules/pyparsing_pkg.cmake b/CMake/cdat_modules/pyparsing_pkg.cmake
deleted file mode 100644
index 8c6e265e0..000000000
--- a/CMake/cdat_modules/pyparsing_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYPARSING_MAJOR_SRC 2)
-set(PYPARSING_MINOR_SRC 0)
-set(PYPARSING_PATCH_SRC 2)
-
-set(PYPARSING_VERSION ${PYPARSING_MAJOR_SRC}.${PYPARSING_MINOR_SRC}.${PYPARSING_PATCH_SRC})
-set(PYPARSING_GZ pyparsing-${PYPARSING_VERSION}.tar.gz)
-set(PYPARSING_SOURCE ${LLNL_URL}/${PYPARSING_GZ})
-set(PYPARSING_MD5 b170c5d153d190df1a536988d88e95c1)
-
-add_cdat_package_dependent(PYPARSING "" "" OFF "NOT CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/pyqt_deps.cmake b/CMake/cdat_modules/pyqt_deps.cmake
deleted file mode 100644
index 023e6753a..000000000
--- a/CMake/cdat_modules/pyqt_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PyQt_deps ${pkgconfig_pkg} ${qt_pkg} ${sip_pkg} ${python_pkg})
diff --git a/CMake/cdat_modules/pyqt_external.cmake b/CMake/cdat_modules/pyqt_external.cmake
deleted file mode 100644
index 5a00060e9..000000000
--- a/CMake/cdat_modules/pyqt_external.cmake
+++ /dev/null
@@ -1,28 +0,0 @@
-set(PyQt_source "${CMAKE_CURRENT_BINARY_DIR}/build/PyQt")
-set(PyQt_configure_command
-  env PYTHONPATH=${PYTHONPATH}
-  "${PYTHON_EXECUTABLE}" configure.py
-    -q "${QT_QMAKE_EXECUTABLE}"
-    --confirm-license
-    -b "${CMAKE_INSTALL_PREFIX}/bin"
-    -d "${PYTHON_SITE_PACKAGES}"
-    -v "${CMAKE_INSTALL_PREFIX}/include"
-    -v "${CMAKE_INSTALL_PREFIX}/share"
-    -p "${CMAKE_INSTALL_PREFIX}/share/plugins"
-    -n "${CMAKE_INSTALL_PREFIX}/share/qsci"
-    --assume-shared
-    -e QtGui -e QtHelp -e QtMultimedia -e QtNetwork -e QtDeclarative -e QtOpenGL
-    -e QtScript -e QtScriptTools -e QtSql -e QtSvg -e QtTest -e QtWebKit
-    -e QtXml -e QtXmlPatterns -e QtCore
-)
-
-ExternalProject_Add(PyQt
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${PyQt_source}
-  URL ${PYQT_URL}/${PYQT_GZ_${CMAKE_PLATFORM}}
-  URL_MD5 ${PYQT_MD5_${CMAKE_PLATFORM}}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${PyQt_configure_command}
-  DEPENDS ${PyQt_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/pyqt_pkg.cmake b/CMake/cdat_modules/pyqt_pkg.cmake
deleted file mode 100644
index a049bc728..000000000
--- a/CMake/cdat_modules/pyqt_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set(PYQT_MAJOR 4)
-set(PYQT_MINOR 8)
-set(PYQT_PATCH 3)
-set(PYQT_MAJOR_SRC 4)
-set(PYQT_MINOR_SRC 11)
-set(PYQT_PATCH_SRC 3)
-set(PYQT_VERSION ${PYQT_MAJOR_SRC}.${PYQT_MINOR_SRC}.${PYQT_PATCH_SRC})
-set(PYQT_URL ${LLNL_URL})
-set(PYQT_GZ_APPLE PyQt-mac-gpl-${PYQT_VERSION}.tar.gz)
-set(PYQT_GZ_UNIX PyQt-x11-gpl-${PYQT_VERSION}.tar.gz)
-set(PYQT_MD5_APPLE 9bd050f1d0c91510ea8be9f41878144c )
-set(PYQT_MD5_UNIX 997c3e443165a89a559e0d96b061bf70 )
-set(PYQT_SOURCE ${PYQT_URL}/${PYQT_GZ_${CMAKE_PLATFORM}})
-set(PYQT_MD5 ${PYQT_MD5_${CMAKE_PLATFORM}})
-
-add_cdat_package_dependent(PyQt "" "" ON "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/pyspharm_deps.cmake b/CMake/cdat_modules/pyspharm_deps.cmake
deleted file mode 100644
index 181e0c271..000000000
--- a/CMake/cdat_modules/pyspharm_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pyspharm_deps ${numpy_pkg})
diff --git a/CMake/cdat_modules/pyspharm_external.cmake b/CMake/cdat_modules/pyspharm_external.cmake
deleted file mode 100644
index 2c1de4f91..000000000
--- a/CMake/cdat_modules/pyspharm_external.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-
-# Pyspharm
-#
-set(pyspharm_source "${CMAKE_CURRENT_BINARY_DIR}/build/pyspharm")
-
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyspharm_patch_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pyspharm_patch_step.cmake
-  @ONLY)
-  
-set(pyspharm_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyspharm_patch_step.cmake)
-
-ExternalProject_Add(pyspharm
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pyspharm_source}
-  URL ${PYSPHARM_URL}/${PYSPHARM_GZ}
-  URL_MD5 ${PYSPHARM_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${pyspharm_PATCH_COMMAND}
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py build 
-  INSTALL_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py install --prefix=${PYTHON_SITE_PACKAGES_PREFIX}
-  DEPENDS ${pyspharm_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/pyspharm_pkg.cmake b/CMake/cdat_modules/pyspharm_pkg.cmake
deleted file mode 100644
index c7e8eb166..000000000
--- a/CMake/cdat_modules/pyspharm_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(PYSPHARM_MAJOR 1)
-set(PYSPHARM_MINOR 0)
-set(PYSPHARM_PATCH 8)
-set(PYSPHARM_VERSION ${PYSPHARM_MAJOR}.${PYSPHARM_MINOR}.${PYSPHARM_PATCH})
-set(PYSPHARM_URL ${LLNL_URL})
-set(PYSPHARM_GZ pyspharm-${PYSPHARM_VERSION}.tar.gz)
-set(PYSPHARM_MD5 7b3a33dd3cbeaa4b8bf67ed5bd210931)
-set(PYSPHARM_SOURCE ${PYSPHARM_URL}/${PYSPHARM_GZ})
-
-add_cdat_package_dependent(pyspharm "" "" ${CDAT_BUILD_ALL}
-                           "NOT CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/pytables_deps.cmake b/CMake/cdat_modules/pytables_deps.cmake
deleted file mode 100644
index d44617773..000000000
--- a/CMake/cdat_modules/pytables_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(PyTables_deps ${pkgconfig_pkg} ${python_pkg} ${cython_pkg} ${numexpr_pkg} ${numpy_pkg} ${hdf5_pkg} ${libxml2_pkg} ${libxslt_pkg} ${zlib_pkg})
diff --git a/CMake/cdat_modules/pytables_external.cmake b/CMake/cdat_modules/pytables_external.cmake
deleted file mode 100644
index 42e7c6074..000000000
--- a/CMake/cdat_modules/pytables_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-include(${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake)
-
-# PyTables
-#
-set(PyTables_source "${CMAKE_CURRENT_BINARY_DIR}/build/PyTables")
-ExternalProject_Add(PyTables
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${PyTables_source}
-  URL ${PYTABLES_URL}/${PYTABLES_GZ}
-  URL_MD5 ${PYTABLES_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND env "LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}" PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py build --hdf5=${cdat_EXTERNALS}
-  INSTALL_COMMAND env "LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}" PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py install --hdf5=${cdat_EXTERNALS} ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${PyTables_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/pytables_pkg.cmake b/CMake/cdat_modules/pytables_pkg.cmake
deleted file mode 100644
index 22faad22a..000000000
--- a/CMake/cdat_modules/pytables_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(PYTABLES_MAJOR 3)
-set(PYTABLES_MINOR 1)
-set(PYTABLES_PATCH 1)
-set(PYTABLES_VERSION ${PYTABLES_MAJOR}.${PYTABLES_MINOR}.${PYTABLES_PATCH})
-set(PYTABLES_URL ${LLNL_URL} )
-set(PYTABLES_GZ tables-${PYTABLES_VERSION}.tar.gz)
-set(PYTABLES_MD5 38d917f0c6dfb0bc28ce9ea0c3492524)
-set(PYTABLES_SOURCE ${PYTABLES_URL}/${PYTABLES_GZ})
-
-add_cdat_package_dependent(PyTables "" "" OFF "NOT CDAT_BUILD_LEAN" ${CDAT_BUILD_ALL})
diff --git a/CMake/cdat_modules/python_deps.cmake b/CMake/cdat_modules/python_deps.cmake
deleted file mode 100644
index 04864b10c..000000000
--- a/CMake/cdat_modules/python_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Python_deps ${pkgconfig_pkg} ${readline_pkg} ${libxml2_pkg} ${libxslt_pkg} ${jpeg_pkg} ${png_pkg} ${tiff_pkg})
diff --git a/CMake/cdat_modules/python_external.cmake b/CMake/cdat_modules/python_external.cmake
deleted file mode 100644
index 0710a0663..000000000
--- a/CMake/cdat_modules/python_external.cmake
+++ /dev/null
@@ -1,66 +0,0 @@
-#-----------------------------------------------------------------------------
-set(proj Python)
-
-set(python_SOURCE_DIR ${cdat_BINARY_DIR}/build/Python)
-set(python_BUILD_IN_SOURCE 1)
-
-set(python_aqua_cdat no)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_configure_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake
-  @ONLY)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake
-  @ONLY)
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake
-  @ONLY)
-
-set(python_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_patch_step.cmake)
-
-if(APPLE)
-  set(python_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake)
-  set(python_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake)
-  set(python_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake)
-else()
-  set(python_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake)
-  set(python_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake)
-  set(python_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake)
-endif()
-
-ExternalProject_Add(${proj}
-  URL ${PYTHON_URL}/${PYTHON_GZ}
-  URL_MD5 ${PYTHON_MD5}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${python_SOURCE_DIR}
-  BUILD_IN_SOURCE ${python_BUILD_IN_SOURCE}
-  UPDATE_COMMAND pwd
-  CONFIGURE_COMMAND ${python_CONFIGURE_COMMAND}
-  BUILD_COMMAND ${python_BUILD_COMMAND}
-  INSTALL_COMMAND ${python_INSTALL_COMMAND}
-  DEPENDS ${Python_deps}
-  ${ep_log_options}
-)
-
-#-----------------------------------------------------------------------------
-# Set PYTHON_INCLUDE and PYTHON_LIBRARY variables
-#
-set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages)
-
-if(APPLE)
-  ExternalProject_Add_Step(${proj} change_plist_name
-    COMMAND ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/python ${cdat_CMAKE_SOURCE_DIR}/fixName.py
-    DEPENDEES install
-  )
-  set(PYTHON_INCLUDE ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/Headers)
-  set(PYTHON_LIBRARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/Python)
-  set(PYTHON_LIBRARY_DIR ${CMAKE_INSTALL_PREFIX}/lib)
-  set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/bin/python)
-  #set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}/bin/python)
-  set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/lib/python${PYVER}/site-packages)
-else()
-  set(PYTHON_INCLUDE ${CMAKE_INSTALL_PREFIX}/include/python${PYVER})
-  set(PYTHON_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libpython${PYVER}.so)
-endif()
diff --git a/CMake/cdat_modules/python_pkg.cmake b/CMake/cdat_modules/python_pkg.cmake
deleted file mode 100644
index 36c97d702..000000000
--- a/CMake/cdat_modules/python_pkg.cmake
+++ /dev/null
@@ -1,59 +0,0 @@
-set(PYTHON_MAJOR_SRC 2)
-set(PYTHON_MINOR_SRC 7)
-set(PYTHON_PATCH_SRC 11)
-set(PYTHON_VERSION ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}.${PYTHON_PATCH_SRC})
-set(PYTHON_URL ${LLNL_URL})
-set(PYTHON_GZ Python-${PYTHON_VERSION}.tgz)
-set(PYTHON_MD5 6b6076ec9e93f05dd63e47eb9c15728b )
-set(PYVER ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC})
-set(PYTHON_SOURCE ${PYTHON_URL}/${PYTHON_GZ})
-
-add_cdat_package(Python ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}.${PYTHON_PATCH_SRC} "" "")
-
-# FIXME: Name style
-set(CDAT_OS_XTRA_PATH "")
-
-set(PYTHON_SITE_PACKAGES_PREFIX ${CMAKE_INSTALL_PREFIX})
-if (APPLE)
-  set(PYTHON_SITE_PACKAGES_PREFIX ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER})
-endif()
-set(PYTHON_SITE_PACKAGES ${PYTHON_SITE_PACKAGES_PREFIX}/lib/python${PYVER}/site-packages)
-set(PYTHONPATH ${PYTHON_SITE_PACKAGES})
-
-if (CDAT_USE_SYSTEM_PYTHON)
-   find_package(PythonInterp)
-   set(PYVER ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR})
-   # \NOTE This is required or else FindPythonLibs may find whatever version is
-   # listed first internally and if that version exists on the system. For example
-   # a system might have python version 2.6 and 2.7 both installed.
-   set(Python_ADDITIONAL_VERSIONS ${PYVER})
-   find_package(PythonLibs)
-   set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages)
-   message("[INFO] Using system python ${PYTHON_EXECUTABLE}")
-   message("[INFO] Putting packages in directory ${PYTHON_SITE_PACKAGES}")
-   set(PYTHON_EXTRA_PREFIX "--prefix=${CMAKE_INSTALL_PREFIX}")
-   message("[INFO] Setting up prefix for installing python packages into: ${PYTHON_EXTRA_PREFIX}")
-   set(ENV{LD_LIBRARY_PATH} $ENV{LD_LIBRARY_PATH})
-   set(PYTHONPATH "${PYTHON_SITE_PACKAGES}:$ENV{PYTHONPATH}")
-   set(ENV{PYTHONPATH} "${PYTHONPATH}")
-   message("[INFO] Set PYTHONPATH to $ENV{PYTHONPATH}")
-   get_filename_component(PYTHON_EXECUTABLE_PATH ${PYTHON_EXECUTABLE} PATH)
-   set(PYTHON_LIBRARY ${PYTHON_LIBRARIES})
-   message("[INFO] set PYTHON_LIBRARY TO" ${PYTHON_LIBRARY})
-   set(PYTHON_INCLUDE ${PYTHON_INCLUDE_DIRS})
-   if(APPLE)
-     set(CDAT_OS_XTRA_PATH ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin)
-   endif()
-else ()
-   set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/bin/python)
-   message("[INFO] Building python at ${PYTHON_EXECUTABLE}")
-   set(PYTHON_EXTRA_PREFIX "")
-   set(PYVER 2.7)
-   if (NOT APPLE)
-     set(EASY_INSTALL_BINARY ${CMAKE_INSTALL_PREFIX}/bin/easy_install)
-     set(PIP_BINARY ${CMAKE_INSTALL_PREFIX}/bin/pip)
-   else ()
-     set(EASY_INSTALL_BINARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/easy_install)
-     set(PIP_BINARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/pip)
-   endif()
-endif()
diff --git a/CMake/cdat_modules/pyzmq_deps.cmake b/CMake/cdat_modules/pyzmq_deps.cmake
deleted file mode 100644
index 507fc1180..000000000
--- a/CMake/cdat_modules/pyzmq_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(pyzmq_deps ${zmq_pkg} ${cython_pkg})
diff --git a/CMake/cdat_modules/pyzmq_external.cmake b/CMake/cdat_modules/pyzmq_external.cmake
deleted file mode 100644
index e931ce77e..000000000
--- a/CMake/cdat_modules/pyzmq_external.cmake
+++ /dev/null
@@ -1,50 +0,0 @@
-# The pyzmq project 
-
-set(pyzmq_binary "${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq")
-
-# python can run after it is built on linux
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyzmq_configure_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pyzmq_configure_step.cmake @ONLY)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyzmq_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/pyzmq_install_step.cmake @ONLY)
-
-set(pyzmq_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyzmq_configure_step.cmake)
-set(pyzmq_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyzmq_install_step.cmake)
-
-set(pyzmq_source "${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq")
-
-# create an external project to download numpy,
-# and configure and build it
-ExternalProject_Add(pyzmq
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${pyzmq_source}
-  BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq
-  URL ${PYZMQ_SOURCE}
-  URL_MD5 ${PYZMQ_MD5}
-  CONFIGURE_COMMAND ${pyzmq_CONFIGURE_COMMAND}
-  BUILD_COMMAND ""
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${pyzmq_INSTALL_COMMAND}
-  DEPENDS
-    ${pyzmq_deps}
-  ${ep_log_options}
-  )
-
-# pyzmq
-#
-
-#ExternalProject_Add(pyzmq
-#  DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}
-#  SOURCE_DIR ${pyzmq_source}
-#  URL ${PYZMQ_URL}/${PYZMQ_GZ}
-#  URL_MD5 ${PYZMQ_MD5}
-#  BUILD_IN_SOURCE 1
-#  CONFIGURE_COMMAND ""
-#  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-#  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-#  DEPENDS ${pyzmq_deps}
-#  ${ep_log_options}
-#  )
diff --git a/CMake/cdat_modules/pyzmq_pkg.cmake b/CMake/cdat_modules/pyzmq_pkg.cmake
deleted file mode 100644
index dd5f0fa46..000000000
--- a/CMake/cdat_modules/pyzmq_pkg.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-set(PYZMQ_VERSION 14.3.1)
-set(PYZMQ_URL ${LLNL_URL})
-set(PYZMQ_GZ pyzmq-${PYZMQ_VERSION}.tar.gz)
-set(PYZMQ_MD5 7196b4a6fbf98022f17ffa924be3d68d)
-set(PYZMQ_SOURCE ${PYZMQ_URL}/${PYZMQ_GZ})
-
-add_cdat_package(pyzmq "" "" OFF)
diff --git a/CMake/cdat_modules/qt4_deps.cmake b/CMake/cdat_modules/qt4_deps.cmake
deleted file mode 100644
index 8b1378917..000000000
--- a/CMake/cdat_modules/qt4_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/CMake/cdat_modules/qt4_pkg.cmake b/CMake/cdat_modules/qt4_pkg.cmake
deleted file mode 100644
index ee6057e89..000000000
--- a/CMake/cdat_modules/qt4_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-if (CDAT_BUILD_GRAPHICS)
-  find_package(Qt4 4.7.2 REQUIRED)
-
-  if (CDAT_BUILD_GUI)
-    if (NOT DEFINED QT_QTOPENGL_INCLUDE_DIR)
-      message(FATAL_ERROR "QT_QTOPENGL_INCLUDE_DIR is not set but required")
-    endif()
-  endif()
-endif()
-
diff --git a/CMake/cdat_modules/qt_external.cmake b/CMake/cdat_modules/qt_external.cmake
deleted file mode 100644
index 86085efc6..000000000
--- a/CMake/cdat_modules/qt_external.cmake
+++ /dev/null
@@ -1,66 +0,0 @@
-
-set(qt_source "${CMAKE_CURRENT_BINARY_DIR}/build/Qt")
-set(qt_install_dir "${cdat_EXTERNALS}")
-
-if(WIN32)
-  # if jom is in the path use it as it will be faster
-  find_program(JOM jom)
-  mark_as_advanced(JOM)
-  if(JOM)
-    set(qt_build_program "${JOM}")
-  else()
-    set(qt_build_program nmake)
-  endif()
-  set(qt_install_dir ${qt_source})
-  configure_file(${Titan_CMAKE_DIR}/win_config_qt.cmake.in
-    ${CMAKE_CURRENT_BINARY_DIR}/win_config_qt.cmake )
-  set(qt_configure ${CMAKE_COMMAND}
-    -P ${CMAKE_CURRENT_BINARY_DIR}/win_config_qt.cmake)
-  set(qt_build ${qt_build_program})
-  set(qt_install "")
-else()
-  set(qt_configure echo yes | sh configure --prefix=${qt_install_dir} -release
-    -nomake examples -nomake demos -no-audio-backend -no-multimedia 
-    -phonon -opensource)
-  if ("-m32" STREQUAL "${CMAKE_CXX_FLAGS}")
-    set(qt_configure echo yes | sh ./configure -release
-      -nomake examples -nomake demos -no-audio-backend -no-multimedia 
-      --prefix=${qt_install_dir} -opensource
-      -platform linux-g++-32)
-  endif ()
-  set(qt_build ${MAKE})
-  set(qt_install make install)
-  if(APPLE)
-    exec_program(${CMAKE_C_COMPILER} ARGS --version OUTPUT_VARIABLE
-        _gcc_version_info)
-    string (REGEX MATCH "[345]\\.[0-9]\\.[0-9]"
-        _gcc_version "${_gcc_version_info}")
-    if(NOT _gcc_version)
-      string (REGEX REPLACE ".*\\(GCC\\).* ([34]\\.[0-9]) .*" "\\1.0"
-        _gcc_version "${_gcc_version_info}")
-    endif()
-    if(${_gcc_version} VERSION_GREATER 4.2.0)
-      # Then Qt should be built 64 bit
-      message(STATUS "Building 64 bit Qt using cocoa.")
-      set(qt_configure ${qt_configure} -arch x86_64 -cocoa)
-    else()
-      # Then Qt should be built 32 bit
-      message(STATUS "Building 32 bit Qt using carbon.")
-      set(qt_configure ${qt_configure} -arch x86 -carbon)
-    endif()
-  endif()
-endif()
-
-ExternalProject_Add(Qt
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  URL ${QT_URL}/${QT_GZ}
-  URL_MD5 ${QT_MD5}
-  SOURCE_DIR ${qt_source}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${qt_configure}
-  DEPENDS ${Qt_deps}
-  )
-
-set(QT_QMAKE_EXECUTABLE "${qt_install_dir}/bin/qmake"
-    CACHE FILEPATH "Path to qmake executable" FORCE)
-
diff --git a/CMake/cdat_modules/r_deps.cmake b/CMake/cdat_modules/r_deps.cmake
deleted file mode 100644
index a7016962f..000000000
--- a/CMake/cdat_modules/r_deps.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(R_deps ${readline_pkg})
-if (CDAT_BUILD_PARALLEL)
-  list(APPEND R_deps ${mpi_pkg})
-endif()
diff --git a/CMake/cdat_modules/r_external.cmake b/CMake/cdat_modules/r_external.cmake
deleted file mode 100644
index af1d2d311..000000000
--- a/CMake/cdat_modules/r_external.cmake
+++ /dev/null
@@ -1,51 +0,0 @@
-
-set(R_source "${CMAKE_CURRENT_BINARY_DIR}/build/R")
-set(R_install "${cdat_EXTERNALS}")
-if (APPLE)
-    message("[INFO] Building R without X support for MacOS")
-    set(WITHX "no")
-    set(WITH_AQUA "yes")
-else ()
-    set(WITHX "yes")
-    set(WITH_AQUA "no")
-endif()
-
-if (CDAT_BUILD_PARALLEL)
-  message([INFO] Enabling openmp for R)
-  set(R_OPENMP "--enable-openmp")
-else ()
-  message([INFO] Disabling openmp for R)
-  set(R_OPENMP "--disable-openmp")
-endif ()
-
-list(APPEND USR_ENVS
-  "CPPFLAGS=-I${cdat_EXTERNALS}/include $ENV{CPPFLAGS}"
-  "LDFLAGS=-L${cdat_EXTERNALS}/lib"
-  )
-ExternalProject_Add(R
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${R_source}
-  INSTALL_DIR ${R_install}
-  URL ${R_URL}/${R_GZ}
-  URL_MD5 ${R_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  DEPENDS ${R_deps}
-  CONFIGURE_COMMAND env ${USR_ENVS} <SOURCE_DIR>/configure --prefix=<INSTALL_DIR> LIBnn=lib --without-jpeglib --disable-R-framework --enable-R-shlib ${R_OPENMP} --without-cairo --without-ICU --without-system-xz --with-aqua=${WITH_AQUA} --without-tcltk --with-x=${WITHX}
-  INSTALL_COMMAND ${CMAKE_MAKE_PROGRAM}  install
-  ${ep_log_options}
-)
-if(APPLE)
-    #change id and then change dependencies.. 
-    ExternalProject_Add_Step(R InstallNameToolR 
-        COMMAND install_name_tool -id ${R_install}/lib/R/lib/libR.dylib ${R_install}/lib/R/lib/libR.dylib 
-        COMMAND install_name_tool -id ${R_install}/lib/R/lib/libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib 
-        COMMAND install_name_tool -id ${R_install}/lib/R/lib/libRlapack.dylib ${R_install}/lib/R/lib/libRlapack.dylib 
-        COMMAND install_name_tool -change libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib ${R_install}/lib/R/lib/libR.dylib 
-        COMMAND install_name_tool -change libR.dylib ${R_install}/lib/R/lib/libR.dylib -change libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib ${R_install}//lib/R/lib/libRlapack.dylib 
-        DEPENDEES install 
-        WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}) 
-endif(APPLE)
-
-set(R_DIR "${R_binary}" CACHE PATH "R binary directory" FORCE)
-mark_as_advanced(R_DIR)
diff --git a/CMake/cdat_modules/r_pkg.cmake b/CMake/cdat_modules/r_pkg.cmake
deleted file mode 100644
index 8f7e53eb4..000000000
--- a/CMake/cdat_modules/r_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(R_MAJOR_SRC 3)
-set(R_MINOR_SRC 2)
-set(R_PATCH_SRC 2)
-set(R_URL ${LLNL_URL})
-set(R_GZ R-${R_MAJOR_SRC}.${R_MINOR_SRC}.${R_PATCH_SRC}.tar.gz)
-set(R_MD5 57cef5c2e210a5454da1979562a10e5b)
-set(R_SOURCE ${R_URL}/${R_GZ})
-
-set (nm R)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-add_cdat_package_dependent(R "" "Build R" ${CDAT_BUILD_ALL}
-                           "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/readline_deps.cmake b/CMake/cdat_modules/readline_deps.cmake
deleted file mode 100644
index e347b6dfb..000000000
--- a/CMake/cdat_modules/readline_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(readline_deps ${pkgconfig_pkg} ${curses_pkg})
diff --git a/CMake/cdat_modules/readline_external.cmake b/CMake/cdat_modules/readline_external.cmake
deleted file mode 100644
index 212f96171..000000000
--- a/CMake/cdat_modules/readline_external.cmake
+++ /dev/null
@@ -1,31 +0,0 @@
-set(readline_source "${CMAKE_CURRENT_BINARY_DIR}/build/readline")
-set(readline_install "${cdat_EXTERNALS}")
-set(readline_conf_args)
-
-set(readline_conf_args "--with-curses;--disable-static;--enable-shared")
-# with -fPIC
-IF(UNIX AND NOT WIN32)
-  FIND_PROGRAM(CMAKE_UNAME uname /bin /usr/bin /usr/local/bin )
-  IF(CMAKE_UNAME)
-    EXEC_PROGRAM(uname ARGS -m OUTPUT_VARIABLE CMAKE_SYSTEM_PROCESSOR)
-    SET(CMAKE_SYSTEM_PROCESSOR ${CMAKE_SYSTEM_PROCESSOR} CACHE INTERNAL
-"processor type (i386 and x86_64)")
-    IF(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64")
-      set(readline_conf_args "CFLAGS=-fPIC" ${readline_conf_args})
-    ENDIF(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64")
-  ENDIF(CMAKE_UNAME)
-ENDIF(UNIX AND NOT WIN32)
-
-ExternalProject_Add(readline
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${readline_source}
-  INSTALL_DIR ${readline_install}
-  URL ${READLINE_URL}/${READLINE_GZ}
-  URL_MD5 ${READLINE_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/readline/shobj-conf ${readline_source}/support/shobj-conf
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${readline_conf_args} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${readline_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/readline_pkg.cmake b/CMake/cdat_modules/readline_pkg.cmake
deleted file mode 100644
index 86eb2679c..000000000
--- a/CMake/cdat_modules/readline_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(RL_MAJOR 5)
-set(RL_MINOR 2)
-set(RL_MAJOR_SRC 6)
-set(RL_MINOR_SRC 2)
-set(READLINE_URL ${LLNL_URL})
-set(READLINE_GZ readline-${RL_MAJOR_SRC}.${RL_MINOR_SRC}.tar.gz)
-set(READLINE_MD5 67948acb2ca081f23359d0256e9a271c)
-set(READLINE_VERSION ${RL_MAJOR_SRC}.${RL_MINOR_SRC})
-set(READLINE_SOURCE ${READLINE_URL}/${READLINE_GZ})
-
-add_cdat_package(readline "" "" OFF)
diff --git a/CMake/cdat_modules/rpy2_deps.cmake b/CMake/cdat_modules/rpy2_deps.cmake
deleted file mode 100644
index 3c3d4d90c..000000000
--- a/CMake/cdat_modules/rpy2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(RPY2_deps ${python_pkg} ${pip_pkg} ${r_pkg} ${six_pkg} ${singledispatch_pkg} ${windfield_pkg})
diff --git a/CMake/cdat_modules/rpy2_external.cmake b/CMake/cdat_modules/rpy2_external.cmake
deleted file mode 100644
index d408ae22c..000000000
--- a/CMake/cdat_modules/rpy2_external.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-# create an external project to install RPY2,
-# and configure and build it
-set(nm RPY2)
-
-# Set PATH and R_HOME to find R
-list(APPEND USR_ENVS
-  "R_HOME=${cdat_EXTERNALS}/lib/R"
-  "PATH=${cdat_EXTERNALS}/bin:$ENV{PATH}"
-  )
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/rpy2_pkg.cmake b/CMake/cdat_modules/rpy2_pkg.cmake
deleted file mode 100644
index 5447bd3fb..000000000
--- a/CMake/cdat_modules/rpy2_pkg.cmake
+++ /dev/null
@@ -1,11 +0,0 @@
-set(RPY2_MAJOR_SRC 2)
-set(RPY2_MINOR_SRC 6)
-set(RPY2_PATCH_SRC 0)
-
-set(RPY2_VERSION ${RPY2_MAJOR_SRC}.${RPY2_MINOR_SRC}.${RPY2_PATCH_SRC})
-set(RPY2_GZ rpy2-${RPY2_VERSION}.tar.gz)
-set(RPY2_SOURCE ${LLNL_URL}/${RPY2_GZ})
-set(RPY2_MD5 679898fbc832d4f05a5efcf1a7eb1a68)
-
-add_cdat_package_dependent(RPY2 "" "" ${CDAT_BUILD_ALL}
-                           "NOT CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/sampledata_deps.cmake b/CMake/cdat_modules/sampledata_deps.cmake
deleted file mode 100644
index 785ca373e..000000000
--- a/CMake/cdat_modules/sampledata_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(sampledata_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/sampledata_external.cmake b/CMake/cdat_modules/sampledata_external.cmake
deleted file mode 100644
index 7b9a7027f..000000000
--- a/CMake/cdat_modules/sampledata_external.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-if (CDAT_DOWNLOAD_SAMPLE_DATA)
-  message("[INFO] ------------------------------------------------------------------------------------------------------------------------------")
-  configure_file(
-    "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_download_sample_data.cmake.in"
-    "${cdat_CMAKE_BINARY_DIR}/cdat_download_sample_data.cmake"
-    @ONLY
-    )
-  set(sampledata_cmd ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_download_sample_data.cmake)
-  ExternalProject_Add(sampledata
-    SOURCE_DIR ${cdat_SOURCE_DIR}/Packages/dat
-    CONFIGURE_COMMAND ${sampledata_cmd}
-    BUILD_COMMAND ""
-    INSTALL_COMMAND ""
-    DEPENDS ${sampledata_deps}
-    ${ep_log_options}
-    )
-endif()
diff --git a/CMake/cdat_modules/sampledata_pkg.cmake b/CMake/cdat_modules/sampledata_pkg.cmake
deleted file mode 100644
index 821414e96..000000000
--- a/CMake/cdat_modules/sampledata_pkg.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-
-add_cdat_package(sampledata "" "" ON)
diff --git a/CMake/cdat_modules/scientificpython_deps.cmake b/CMake/cdat_modules/scientificpython_deps.cmake
deleted file mode 100644
index 8116fccd0..000000000
--- a/CMake/cdat_modules/scientificpython_deps.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-set(scientificpython_deps ${numpy_pkg} ${netcdf_pkg} ${cdat_pkg} ${pip_pkg})
-
diff --git a/CMake/cdat_modules/scientificpython_external.cmake b/CMake/cdat_modules/scientificpython_external.cmake
deleted file mode 100644
index ecd5c2c9a..000000000
--- a/CMake/cdat_modules/scientificpython_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install ScientificPython
-# and configure and build it
-set(nm scientificpython)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/scientificpython_pkg.cmake b/CMake/cdat_modules/scientificpython_pkg.cmake
deleted file mode 100644
index 206cdd7ca..000000000
--- a/CMake/cdat_modules/scientificpython_pkg.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-set(SCIENTIFICPYTHON_VERSION 2.8)
-set(SCIENTIFICPYTHON_URL ${LLNL_URL})
-set(SCIENTIFICPYTHON_GZ ScientificPython-${SCIENTIFICPYTHON_VERSION}.tar.gz)
-set(SCIENTIFICPYTHON_SOURCE ${SCIENTIFICPYTHON_URL}/${SCIENTIFICPYTHON_GZ})
-set(SCIENTIFICPYTHON_MD5 b87dd2b2c4be6b5421d906d39bcc59a7 )
-
-add_cdat_package_dependent(scientificpython "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/scikits_deps.cmake b/CMake/cdat_modules/scikits_deps.cmake
deleted file mode 100644
index 858e900f7..000000000
--- a/CMake/cdat_modules/scikits_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(scikits_deps ${pip_pkg} ${scipy_pkg})
diff --git a/CMake/cdat_modules/scikits_external.cmake b/CMake/cdat_modules/scikits_external.cmake
deleted file mode 100644
index eeff0fa01..000000000
--- a/CMake/cdat_modules/scikits_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm scikits)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/scikits_pkg.cmake b/CMake/cdat_modules/scikits_pkg.cmake
deleted file mode 100644
index 83d79a250..000000000
--- a/CMake/cdat_modules/scikits_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(SCIKITS_MAJOR_SRC 0)
-set(SCIKITS_MINOR_SRC 12)
-set(SCIKITS_URL ${LLNL_URL})
-set(SCIKITS_GZ scikit-learn-${SCIKITS_MAJOR_SRC}.${SCIKITS_MINOR_SRC}.tar.gz)
-set(SCIKITS_MD5 0e1f6c60b43a4f447bf363583c1fc204 )
-set(SCIKITS_VERSION ${SCIKITS_MAJOR_SRC}.${SCIKITS_MINOR_SRC})
-set(SCIKITS_SOURCE ${SCIKITS_URL}/${SCIKITS_GZ})
-
-
-add_cdat_package_dependent(scikits "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/scipy_deps.cmake b/CMake/cdat_modules/scipy_deps.cmake
deleted file mode 100644
index f7ca69d03..000000000
--- a/CMake/cdat_modules/scipy_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(SCIPY_deps ${numpy_pkg} ${cython_pkg})
diff --git a/CMake/cdat_modules/scipy_external.cmake b/CMake/cdat_modules/scipy_external.cmake
deleted file mode 100644
index ebd0ca9e7..000000000
--- a/CMake/cdat_modules/scipy_external.cmake
+++ /dev/null
@@ -1,42 +0,0 @@
-# The Scipy external project 
-
-set(SCIPY_binary "${CMAKE_CURRENT_BINARY_DIR}/build/SCIPY")
-
-# to configure scipy we run a cmake -P script
-# the script will create a site.cfg file
-# then run python setup.py config to verify setup
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_configure_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/SCIPY_configure_step.cmake @ONLY)
-# to build scipy we also run a cmake -P script.
-# the script will set LD_LIBRARY_PATH so that 
-# python can run after it is built on linux
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_make_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/SCIPY_make_step.cmake @ONLY)
-
-configure_file(
-  ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_install_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/SCIPY_install_step.cmake @ONLY)
-
-set(SCIPY_CONFIGURE_COMMAND ${CMAKE_COMMAND}
-    -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -DCDAT_USE_SYSTEM_LAPACK:STRING=${CDAT_USE_SYSTEM_LAPACK} -DLAPACK_LIBRARIES:STRING=${LAPACK_LIBRARIES} -DBLAS_LIBRARIES:STRING=${BLAS_LIBRARIES} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_configure_step.cmake)
-set(SCIPY_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_make_step.cmake)
-set(SCIPY_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_install_step.cmake)
-
-# create an external project to download scipy,
-# and configure and build it
-ExternalProject_Add(SCIPY
-  URL ${SCIPY_URL}/${SCIPY_GZ}
-  URL_MD5 ${SCIPY_MD5}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${SCIPY_binary}
-  BINARY_DIR ${SCIPY_binary}
-  CONFIGURE_COMMAND ${SCIPY_CONFIGURE_COMMAND}
-  BUILD_COMMAND ${SCIPY_BUILD_COMMAND}
-  UPDATE_COMMAND ""
-  INSTALL_COMMAND ${SCIPY_INSTALL_COMMAND}
-  DEPENDS
-    ${SCIPY_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/scipy_pkg.cmake b/CMake/cdat_modules/scipy_pkg.cmake
deleted file mode 100644
index e582aecb6..000000000
--- a/CMake/cdat_modules/scipy_pkg.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-set(SCIPY_MAJOR 0)
-set(SCIPY_MINOR 17)
-set(SCIPY_PATCH 0)
-set(SCIPY_MAJOR_SRC 0)
-set(SCIPY_MINOR_SRC 17)
-set(SCIPY_PATCH_SRC 0)
-set(SCIPY_URL ${LLNL_URL})
-set(SCIPY_GZ scipy-${SCIPY_MAJOR_SRC}.${SCIPY_MINOR_SRC}.${SCIPY_PATCH_SRC}.tar.gz)
-set(SCIPY_MD5 298ca04ade82814b17f5cd2d9d4c7b70)
-set(SCIPY_SOURCE ${SCIPY_URL}/${SCIPY_GZ})
-
-set (nm SCIPY)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-
-add_cdat_package_dependent(SCIPY "" "" OFF "CDAT_BUILD_LEAN" ON)
-#if (CDAT_BUILD_ALL)
-#  add_cdat_package(scipy "" "" ON)
-#else()
-#  add_cdat_package(scipy "" "" OFF)
-#endif()
diff --git a/CMake/cdat_modules/seawater_deps.cmake b/CMake/cdat_modules/seawater_deps.cmake
deleted file mode 100644
index d8ca10270..000000000
--- a/CMake/cdat_modules/seawater_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(seawater_deps ${python_pkg} ${numpy_pkg})
diff --git a/CMake/cdat_modules/seawater_external.cmake b/CMake/cdat_modules/seawater_external.cmake
deleted file mode 100644
index a92c31447..000000000
--- a/CMake/cdat_modules/seawater_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-# seawater
-#
-set(seawater_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/seawater")
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/seawater_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/seawater_build_step.cmake"
-  @ONLY
-)
-
-set(seawater_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/seawater_build_step.cmake)
-
-ExternalProject_Add(seawater
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${seawater_source_dir}
-  URL ${SEAWATER_URL}/${SEAWATER_GZ}
-  URL_MD5 ${SEAWATER_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${seawater_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${seawater_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/seawater_pkg.cmake b/CMake/cdat_modules/seawater_pkg.cmake
deleted file mode 100644
index 81bde3ba7..000000000
--- a/CMake/cdat_modules/seawater_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(SEAWATER_MAJOR 3)
-set(SEAWATER_MINOR 3)
-set(SEAWATER_PATCH 4)
-set(SEAWATER_VERSION ${SEAWATER_MAJOR}.${SEAWATER_MINOR}.${SEAWATER_PATCH})
-set(SEAWATER_URL ${LLNL_URL})
-set(SEAWATER_GZ python-seawater-${SEAWATER_VERSION}.tar.gz)
-set(SEAWATER_MD5 0932193350f42c055e7f523578ec1b7c)
-
-set (nm SEAWATER)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(SEAWATER_SOURCE ${SEAWATER_URL}/${SEAWATER_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(seawater "" "" ON)
-else()
-  add_cdat_package(seawater "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/setuptools_deps.cmake b/CMake/cdat_modules/setuptools_deps.cmake
deleted file mode 100644
index 9e3879e6b..000000000
--- a/CMake/cdat_modules/setuptools_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(setuptools_deps ${pkgconfig_pkg} ${python_pkg})
diff --git a/CMake/cdat_modules/setuptools_external.cmake b/CMake/cdat_modules/setuptools_external.cmake
deleted file mode 100644
index cbea071a4..000000000
--- a/CMake/cdat_modules/setuptools_external.cmake
+++ /dev/null
@@ -1,38 +0,0 @@
-set(setuptools_source "${CMAKE_CURRENT_BINARY_DIR}/build/setuptools")
-set(setuptools_install "${cdat_EXTERNALS}")
-
-# 2012-03-19 C. Doutriaux Commented this out seems to not be able to pick pythonpath and ldlibrarypath
-# Seems to be way too complicated for what's  really needed
-#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setuptools_make_step.cmake.in
-#  ${cdat_CMAKE_BINARY_DIR}/setuptools_make_step.cmake
-#  @ONLY)
-
-#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setuptools_install_step.cmake.in
-#  ${cdat_CMAKE_BINARY_DIR}/setuptools_install_step.cmake
-#  @ONLY)
-
-#set(setuptools_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/setuptools_make_step.cmake)
-#set(setuptools_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/setuptools_install_step.cmake)
-  
-
-# old cmnd
-#  BUILD_COMMAND 
-#  INSTALL_COMMAND ${setuptools_install_command}
-
-ExternalProject_Add(setuptools
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${setuptools_source}
-  INSTALL_DIR ${setuptools_install}
-  URL ${SETUPTOOLS_URL}/${SETUPTOOLS_GZ}
-  URL_MD5 ${SETUPTOOLS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${setuptools_deps}
-  ${ep_log_options}
-)
-
-set(setuptools_DIR "${setuptools_binary}" CACHE PATH "setuptools binary directory" FORCE)
-mark_as_advanced(setuptools_DIR)
diff --git a/CMake/cdat_modules/setuptools_pkg.cmake b/CMake/cdat_modules/setuptools_pkg.cmake
deleted file mode 100644
index 97c8e93f7..000000000
--- a/CMake/cdat_modules/setuptools_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(SETUPTOOLS_MAJOR_SRC 19)
-set(SETUPTOOLS_MINOR_SRC 2)
-set(SETUPTOOLS_PATCH_SRC '')
-set(SETUPTOOLS_URL ${LLNL_URL})
-set(SETUPTOOLS_GZ setuptools-${SETUPTOOLS_MAJOR_SRC}.${SETUPTOOLS_MINOR_SRC}.tar.gz)
-set(SETUPTOOLS_MD5 78353b1f80375ca5e088f4b4627ffe03)
-set(SETUPTOOLS_VERSION ${SETUPTOOLS_MAJOR_SRC}.${SETUPTOOLS_MINOR_SRC})
-set(SETUPTOOLS_SOURCE ${SETUPTOOLS_URL}/${SETUPTOOLS_GZ})
-
-add_cdat_package(setuptools "" "" OFF)
diff --git a/CMake/cdat_modules/shapely_deps.cmake b/CMake/cdat_modules/shapely_deps.cmake
deleted file mode 100644
index e4cf1bcff..000000000
--- a/CMake/cdat_modules/shapely_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Shapely_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${geos_pkg})
diff --git a/CMake/cdat_modules/shapely_external.cmake b/CMake/cdat_modules/shapely_external.cmake
deleted file mode 100644
index a04192050..000000000
--- a/CMake/cdat_modules/shapely_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm Shapely)
-set(USR_ENVS "GEOS_CONFIG=${cdat_EXTERNALS}/bin/geos-config")
-include(pipinstaller)
diff --git a/CMake/cdat_modules/shapely_pkg.cmake b/CMake/cdat_modules/shapely_pkg.cmake
deleted file mode 100644
index 115520652..000000000
--- a/CMake/cdat_modules/shapely_pkg.cmake
+++ /dev/null
@@ -1,17 +0,0 @@
-set( SHAPELY_MAJOR_SRC 1  )
-set( SHAPELY_MINOR_SRC 5 )
-set( SHAPELY_PATCH_SRC 13  )
-set(SHAPELY_URL ${LLNL_URL})
-set(SHAPELY_GZ
-    Shapely-${SHAPELY_MAJOR_SRC}.${SHAPELY_MINOR_SRC}.${SHAPELY_PATCH_SRC}.tar.gz)
-set(SHAPELY_MD5 5ee549862ae84326f5f5525bbd0b8a50)
-set(SHAPELY_SOURCE ${SHAPELY_URL}/${SHAPELY_GZ})
-
-set (nm SHAPELY)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-if (CDAT_BUILD_ALL)
-  add_cdat_package(Shapely "" "" ON)
-else()
-  add_cdat_package(Shapely "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/singledispatch_deps.cmake b/CMake/cdat_modules/singledispatch_deps.cmake
deleted file mode 100644
index 5ad0c5ed4..000000000
--- a/CMake/cdat_modules/singledispatch_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(singledispatch_deps ${python_pkg} ${setuptools_pkg} ${six_pkg})
diff --git a/CMake/cdat_modules/singledispatch_external.cmake b/CMake/cdat_modules/singledispatch_external.cmake
deleted file mode 100644
index 893edf6ae..000000000
--- a/CMake/cdat_modules/singledispatch_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# The singledispatch project
-
-set(singledispatch_binary "${CMAKE_CURRENT_BINARY_DIR}/build/singledispatch")
-
-ExternalProject_Add(singledispatch
-  DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR}
-  SOURCE_DIR ${singledispatch_binary}
-  URL ${SINGLEDISPATCH_SOURCE}
-  URL_MD5 ${SINGLEDISPATCH_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${singledispatch_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/singledispatch_pkg.cmake b/CMake/cdat_modules/singledispatch_pkg.cmake
deleted file mode 100644
index c5eb273ac..000000000
--- a/CMake/cdat_modules/singledispatch_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set( SINGLEDISPATCH_MAJOR 3 )
-set( SINGLEDISPATCH_MINOR 4 )
-set( SINGLEDISPATCH_PATCH 0.3)
-set( SINGLEDISPATCH_VERSION ${SINGLEDISPATCH_MAJOR}.${SINGLEDISPATCH_MINOR}.${SINGLEDISPATCH_PATCH} )
-set( SINGLEDISPATCH_URL ${LLNL_URL} )
-set( SINGLEDISPATCH_GZ singledispatch-${SINGLEDISPATCH_VERSION}.tar.gz )
-set( SINGLEDISPATCH_MD5 af2fc6a3d6cc5a02d0bf54d909785fcb )
-
-set (nm SINGLEDISPATCH)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(SINGLEDISPATCH_SOURCE ${SINGLEDISPATCH_URL}/${SINGLEDISPATCH_GZ})
-
-if (BUILD_TESTING)
-  add_cdat_package(singledispatch "" "" ON)
-endif()
diff --git a/CMake/cdat_modules/sip_deps.cmake b/CMake/cdat_modules/sip_deps.cmake
deleted file mode 100644
index ee888d435..000000000
--- a/CMake/cdat_modules/sip_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(SIP_deps ${pkgconfig_pkg} ${python_pkg})
diff --git a/CMake/cdat_modules/sip_external.cmake b/CMake/cdat_modules/sip_external.cmake
deleted file mode 100644
index bbf4f461f..000000000
--- a/CMake/cdat_modules/sip_external.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(SIP_configure_command ${PYTHON_EXECUTABLE} ${cdat_BINARY_DIR}/build/SIP/configure.py -b ${CMAKE_INSTALL_PREFIX}/bin -d ${PYTHON_SITE_PACKAGES} -e ${CMAKE_INSTALL_PREFIX}/include -v ${CMAKE_INSTALL_PREFIX}/share CC=${CMAKE_C_COMPILER} CXX=${CMAKE_CXX_COMPILER})
-
-ExternalProject_Add(SIP
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  URL ${SIP_URL}/${SIP_GZ}
-  URL_MD5 ${SIP_MD5}
-  SOURCE_DIR ${cdat_BINARY_DIR}/build/SIP
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${SIP_configure_command}
-  DEPENDS ${SIP_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/sip_pkg.cmake b/CMake/cdat_modules/sip_pkg.cmake
deleted file mode 100644
index c2beefbd3..000000000
--- a/CMake/cdat_modules/sip_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(SIP_MAJOR 4)
-set(SIP_MINOR 12)
-set(SIP_PATCH 1)
-set(SIP_MAJOR_SRC 4)
-set(SIP_MINOR_SRC 16)
-set(SIP_PATCH_SRC 4)
-set(SIP_VERSION ${SIP_MAJOR_SRC}.${SIP_MINOR_SRC}.${SIP_PATCH_SRC})
-set(SIP_URL http://www.riverbankcomputing.com/static/Downloads/sip${SIP_MAJOR_SRC})
-set(SIP_URL ${LLNL_URL})
-set(SIP_GZ sip-${SIP_MAJOR_SRC}.${SIP_MINOR_SRC}.${SIP_PATCH_SRC}.tar.gz)
-set(SIP_MD5 a9840670a064dbf8f63a8f653776fec9 )
-set(SIP_SOURCE ${SIP_URL}/${SIP_GZ})
-
-add_cdat_package_dependent(SIP "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/six_deps.cmake b/CMake/cdat_modules/six_deps.cmake
deleted file mode 100644
index 20fb4f54f..000000000
--- a/CMake/cdat_modules/six_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(SIX_deps ${python_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/six_external.cmake b/CMake/cdat_modules/six_external.cmake
deleted file mode 100644
index 5a1ae27de..000000000
--- a/CMake/cdat_modules/six_external.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm SIX)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/six_pkg.cmake b/CMake/cdat_modules/six_pkg.cmake
deleted file mode 100644
index e8daac58a..000000000
--- a/CMake/cdat_modules/six_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(SIX_MAJOR_SRC 1)
-set(SIX_MINOR_SRC 9)
-set(SIX_PATCH_SRC 0)
-
-set(SIX_VERSION ${SIX_MAJOR_SRC}.${SIX_MINOR_SRC}.${SIX_PATCH_SRC})
-set(SIX_GZ six-${SIX_VERSION}.tar.gz)
-set(SIX_SOURCE ${LLNL_URL}/${SIX_GZ})
-set(SIX_MD5 476881ef4012262dfc8adc645ee786c4)
-
-add_cdat_package_dependent(SIX "" "" ON "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/sphinx_deps.cmake b/CMake/cdat_modules/sphinx_deps.cmake
deleted file mode 100644
index 8e0e9f2a1..000000000
--- a/CMake/cdat_modules/sphinx_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(Sphinx_deps ${pip_pkg} ${docutils_pkg})
diff --git a/CMake/cdat_modules/sphinx_external.cmake b/CMake/cdat_modules/sphinx_external.cmake
deleted file mode 100644
index 41cf3d2c1..000000000
--- a/CMake/cdat_modules/sphinx_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm Sphinx)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/sphinx_pkg.cmake b/CMake/cdat_modules/sphinx_pkg.cmake
deleted file mode 100644
index 536d6e042..000000000
--- a/CMake/cdat_modules/sphinx_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(SPHINX_MAJOR_SRC 1)
-set(SPHINX_MINOR_SRC 2)
-set(SPHINX_PATCH_SRC 2)
-
-set (nm SPHINX)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(SPHINX_URL ${LLNL_URL})
-set(SPHINX_GZ Sphinx-${SPHINX_VERSION}.tar.gz)
-set(SPHINX_SOURCE ${SPHINX_URL}/${SPHINX_GZ})
-set(SPHINX_MD5 3dc73ccaa8d0bfb2d62fb671b1f7e8a4)
-
-add_cdat_package_dependent(Sphinx "" "" OFF "CDAT_BUILD_GUI" OFF)
-
diff --git a/CMake/cdat_modules/spyder_deps.cmake b/CMake/cdat_modules/spyder_deps.cmake
deleted file mode 100644
index b543e68ad..000000000
--- a/CMake/cdat_modules/spyder_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(spyder_deps ${pyzmq_pkg} ${pyqt_pkg} ${python_pkg} ${pip_pkg} ${numpy_pkg} ${scipy_pkg} ${sphinx_pkg} ${matplotlib_pkg})
diff --git a/CMake/cdat_modules/spyder_external.cmake b/CMake/cdat_modules/spyder_external.cmake
deleted file mode 100644
index dede73c00..000000000
--- a/CMake/cdat_modules/spyder_external.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-# create an external project to install spyder,
-# and configure and build it
-
-set (nm spyder)
-set(OLD "OFF")
-include(pipinstaller)
-
diff --git a/CMake/cdat_modules/spyder_pkg.cmake b/CMake/cdat_modules/spyder_pkg.cmake
deleted file mode 100644
index 664f2c319..000000000
--- a/CMake/cdat_modules/spyder_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(SPYDER_MAJOR_SRC 2)
-set(SPYDER_MINOR_SRC 3)
-set(SPYDER_PATCH_SRC 8)
-set(SPYDER_URL ${LLNL_URL})
-set(SPYDER_ZIP spyder-${SPYDER_MAJOR_SRC}.${SPYDER_MINOR_SRC}.${SPYDER_PATCH_SRC}.zip)
-set(SPYDER_SOURCE ${SPYDER_URL}/${SPYDER_ZIP})
-set(SPYDER_MD5 fb890dc956f606c43d560558159f3491)
-
-add_cdat_package_dependent(spyder "" "" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/tcltk_deps.cmake b/CMake/cdat_modules/tcltk_deps.cmake
deleted file mode 100644
index 4f4bf38e9..000000000
--- a/CMake/cdat_modules/tcltk_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(TclTk_deps ${pkgconfig_pkg} ${libxml2_pkg} ${libxslt_pkg} ${jpeg_pkg} ${png_pkg} ${tiff_pkg})
diff --git a/CMake/cdat_modules/tcltk_external.cmake b/CMake/cdat_modules/tcltk_external.cmake
deleted file mode 100644
index 9c8baa5f6..000000000
--- a/CMake/cdat_modules/tcltk_external.cmake
+++ /dev/null
@@ -1,62 +0,0 @@
-
-set(tcl_source "${CMAKE_CURRENT_BINARY_DIR}/build/tcl")
-set(tk_source "${CMAKE_CURRENT_BINARY_DIR}/build/tk")
-set(tcltk_install "${cdat_EXTERNALS}")
-
-set(tcltk_configure_args --enable-shared)
-
-# tcl
-#
-set(proj tcl-${TCLTK_MAJOR}.${TCLTK_MINOR})
-
-ExternalProject_Add(${proj}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${tcl_source}
-  INSTALL_DIR ${tcltk_install}
-  URL ${TCLTK_URL}/${TCL_GZ}
-  URL_MD5 ${TCL_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR>/unix -DCONFIGURE_ARGS=${tcltk_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR>/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR>/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS ${TclTk_deps}
-  ${ep_log_options}
-)
-
-# tk
-#
-set(proj tk-${TCLTK_MAJOR}.${TCLTK_MINOR})
-
-ExternalProject_Add(${proj}
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${tk_source}
-  INSTALL_DIR ${tcltk_install}
-  URL ${TCLTK_URL}/${TK_GZ}
-  URL_MD5 ${TK_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR>/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=<SOURCE_DIR>/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake
-  INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=<SOURCE_DIR>/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake
-  DEPENDS tcl-${TCLTK_MAJOR}.${TCLTK_MINOR}
-  ${ep_log_options}
-)
-
-ExternalProject_Add_Step(${proj} symlink
-  COMMAND ${CMAKE_COMMAND} -E create_symlink "wish${TCLTK_MAJOR}.${TCLTK_MINOR}" wish
-  WORKING_DIRECTORY ${tcltk_install}/bin
-  COMMENT "Linking wish${TCLTK_MAJOR}.${TCLTK_MINOR} to wish"
-  DEPENDEES install
-)
-
-# tcltk
-#
-
-ExternalProject_Add(TclTk
-  DOWNLOAD_COMMAND ""
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  INSTALL_COMMAND ""
-  DEPENDS tk-${TCLTK_MAJOR}.${TCLTK_MINOR}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/tcltk_pkg.cmake b/CMake/cdat_modules/tcltk_pkg.cmake
deleted file mode 100644
index 1296043e2..000000000
--- a/CMake/cdat_modules/tcltk_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(TCLTK_MAJOR 8)
-set(TCLTK_MINOR 5)
-set(TCLTK_PATCH 9)
-set(TCLTK_VERSION ${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH})
-set(TCLTK_URL ${LLNL_URL})
-set(TCL_GZ tcl${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH}-src.tar.gz)
-set(TK_GZ tk${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH}-src.tar.gz)
-set(TCL_MD5 8512d8db3233041dd68a81476906012a)
-set(TK_MD5 7cdeb9feb61593f58a0ae61f2497580e)
-# Two sources here, need to fake it
-set(TCLTK_SOURCE "${TCLTK_URL}/${TCL_GZ} ${TCL_MD5}")
-set(TCLTK_MD5 "${TCLTK_URL}/${TK_GZ} ${TK_MD5}")
-
-if (CDAT_BUILD_ESGF)
-    add_cdat_package(TclTk "" "" OFF)
-else()
-    add_cdat_package_dependent(TclTk "" "" OFF "CDAT_BUILD_GUI" OFF)
-endif()
diff --git a/CMake/cdat_modules/termcap_deps.cmake b/CMake/cdat_modules/termcap_deps.cmake
deleted file mode 100644
index 3c9a6f3af..000000000
--- a/CMake/cdat_modules/termcap_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(termcap_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/termcap_external.cmake b/CMake/cdat_modules/termcap_external.cmake
deleted file mode 100644
index cf57c940a..000000000
--- a/CMake/cdat_modules/termcap_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set(termcap_source "${CMAKE_CURRENT_BINARY_DIR}/build/termcap")
-set(termcap_install "${cdat_EXTERNALS}")
-set(termcap_conf_args)
-
-ExternalProject_Add(termcap
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${termcap_source}
-  INSTALL_DIR ${termcap_install}
-  URL ${TCAP_URL}/${TCAP_GZ}
-  URL_MD5 ${TCAP_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${termcap_conf_args} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${termcap_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/termcap_pkg.cmake b/CMake/cdat_modules/termcap_pkg.cmake
deleted file mode 100644
index 11e6a0e92..000000000
--- a/CMake/cdat_modules/termcap_pkg.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set(TCAP_MAJOR_SRC 1)
-set(TCAP_MINOR_SRC 3)
-set(TCAP_PATCH_SRC 1)
-set(TCAP_URL ${LLNL_URL})
-set(TCAP_GZ termcap-${TCAP_MAJOR_SRC}.${TCAP_MINOR_SRC}.${TCAP_PATCH_SRC}.tar.gz)
-set(TCAP_MD5 ffe6f86e63a3a29fa53ac645faaabdfa)
-set(TERMCAP_SOURCE ${TCAP_URL}/${TCAP_GZ})
-set(TERMCAP_MD5 ${TCAP_MD5})
-
-set (nm TCAP)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-set(TERMCAP_VERSION ${TCAP_VERSION})
-
-add_cdat_package(termcap "" "" OFF)
-
diff --git a/CMake/cdat_modules/tiff_deps.cmake b/CMake/cdat_modules/tiff_deps.cmake
deleted file mode 100644
index 3a05e71e9..000000000
--- a/CMake/cdat_modules/tiff_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(tiff_deps ${pkgconfig_pkg} ${jpeg_pkg} ${zlib_pkg})
diff --git a/CMake/cdat_modules/tiff_external.cmake b/CMake/cdat_modules/tiff_external.cmake
deleted file mode 100644
index 248a9929d..000000000
--- a/CMake/cdat_modules/tiff_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(tiff_source "${CMAKE_CURRENT_BINARY_DIR}/build/tiff")
-set(tiff_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(tiff
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${tiff_source}
-  INSTALL_DIR ${tiff_install}
-  URL ${TIFF_URL}/${TIFF_GZ}
-  URL_MD5 ${TIFF_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${tiff_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/tiff_pkg.cmake b/CMake/cdat_modules/tiff_pkg.cmake
deleted file mode 100644
index 09a6a191a..000000000
--- a/CMake/cdat_modules/tiff_pkg.cmake
+++ /dev/null
@@ -1,12 +0,0 @@
-set(TIFF_MAJOR 3)
-set(TIFF_MINOR 9)
-set(TIFF_PATCH 4)
-set(TIFF_URL ${LLNL_URL})
-set(TIFF_GZ tiff-${TIFF_MAJOR}.${TIFF_MINOR}.${TIFF_PATCH}.tar.gz)
-set(TIFF_MD5 2006c1bdd12644dbf02956955175afd6)
-set(TIFF_SOURCE ${TIFF_URL}/${TIFF_GZ})
-
-set (nm TIFF)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-add_cdat_package_dependent(tiff "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/tornado_deps.cmake b/CMake/cdat_modules/tornado_deps.cmake
deleted file mode 100644
index 6c8e9f67d..000000000
--- a/CMake/cdat_modules/tornado_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(tornado_deps ${spyder_pkg} ${pyzmq_deps} ${pip_pkg})
diff --git a/CMake/cdat_modules/tornado_external.cmake b/CMake/cdat_modules/tornado_external.cmake
deleted file mode 100644
index 3531582b0..000000000
--- a/CMake/cdat_modules/tornado_external.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-# create an external project to install MyProxyClient,
-# and configure and build it
-set(nm tornado)
-
-include(pipinstaller)
diff --git a/CMake/cdat_modules/tornado_pkg.cmake b/CMake/cdat_modules/tornado_pkg.cmake
deleted file mode 100644
index a40c77381..000000000
--- a/CMake/cdat_modules/tornado_pkg.cmake
+++ /dev/null
@@ -1,7 +0,0 @@
-set(TORNADO_VERSION 3.1)
-set(TORNADO_URL ${LLNL_URL})
-set(TORNADO_GZ tornado-${TORNADO_VERSION}.tar.gz)
-set(TORNADO_SOURCE ${TORNADO_URL}/${TORNADO_GZ})
-set(TORNADO_MD5 2348d626095c5675753287e9af0c321f )
-
-add_cdat_package(tornado "" "" OFF)
diff --git a/CMake/cdat_modules/udunits2_deps.cmake b/CMake/cdat_modules/udunits2_deps.cmake
deleted file mode 100644
index b032ce41d..000000000
--- a/CMake/cdat_modules/udunits2_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(udunits2_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/udunits2_external.cmake b/CMake/cdat_modules/udunits2_external.cmake
deleted file mode 100644
index c70b20fd9..000000000
--- a/CMake/cdat_modules/udunits2_external.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-set(udunits_source "${CMAKE_CURRENT_BINARY_DIR}/build/udunits2")
-set(udunits_install "${cdat_EXTERNALS}")
-
-set(udunits_patch_command "")
-if(APPLE)
-  # Modified configure file to workaround random flex failures
-  set(udunits_patch_command
-    ${CMAKE_COMMAND} -E copy_if_different
-      "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/udunits2_apple_configure.in"
-      "${udunits_source}/configure")
-endif()
-
-ExternalProject_Add(udunits2
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${udunits_source}
-  INSTALL_DIR ${udunits_install}
-  URL ${UDUNITS2_URL}/${UDUNITS2_GZ}
-  URL_MD5 ${UDUNITS2_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ${udunits_patch_command}
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${udunits2_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/udunits2_pkg.cmake b/CMake/cdat_modules/udunits2_pkg.cmake
deleted file mode 100644
index b114ac770..000000000
--- a/CMake/cdat_modules/udunits2_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set(UDUNITS2_URL ${LLNL_URL})
-set(UDUNITS2_MAJOR_SRC 2)
-set(UDUNITS2_MINOR_SRC 2)
-set(UDUNITS2_PATCH_SRC 17)
-set(UDUNITS2_URL ${LLNL_URL})
-set(UDUNITS2_GZ udunits-${UDUNITS2_MAJOR_SRC}.${UDUNITS2_MINOR_SRC}.${UDUNITS2_PATCH_SRC}.tar.gz)
-set(UDUNITS2_MD5 b81ab8f24125ce18702ab7b3ca4d566f )
-set(UDUNITS2_SOURCE ${UDUNITS2_URL}/${UDUNITS2_GZ})
-
-set (nm UDUNITS2)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-
-add_cdat_package_dependent(udunits2 "" "" OFF "CDAT_BUILD_LEAN" OFF)
diff --git a/CMake/cdat_modules/uuid_deps.cmake b/CMake/cdat_modules/uuid_deps.cmake
deleted file mode 100644
index 2f2b9e4ba..000000000
--- a/CMake/cdat_modules/uuid_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(uuid_deps ${pkgconfig_pkg} )
diff --git a/CMake/cdat_modules/uuid_external.cmake b/CMake/cdat_modules/uuid_external.cmake
deleted file mode 100644
index a53deeb79..000000000
--- a/CMake/cdat_modules/uuid_external.cmake
+++ /dev/null
@@ -1,19 +0,0 @@
-
-set(uuid_source "${CMAKE_CURRENT_BINARY_DIR}/build/uuid")
-set(uuid_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(uuid
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${uuid_source}
-  INSTALL_DIR ${uuid_install}
-  URL ${UUID_URL}/${UUID_GZ}
-  URL_MD5 ${UUID_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${uuid_deps}
-  ${ep_log_options}
-)
-
-set(uuid_DIR "${uuid_binary}" CACHE PATH "uuid binary directory" FORCE)
-mark_as_advanced(uuid_DIR)
diff --git a/CMake/cdat_modules/uuid_pkg.cmake b/CMake/cdat_modules/uuid_pkg.cmake
deleted file mode 100644
index d05bfb620..000000000
--- a/CMake/cdat_modules/uuid_pkg.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(UUID_MAJOR_SRC 1)
-set(UUID_MINOR_SRC 6)
-set(UUID_PATCH_SRC 2)
-set(UUID_URL ${LLNL_URL})
-set(UUID_GZ uuid-${UUID_MAJOR_SRC}.${UUID_MINOR_SRC}.${UUID_PATCH_SRC}.tar.gz)
-set(UUID_MD5 5db0d43a9022a6ebbbc25337ae28942f)
-set(UUID_SOURCE ${UUID_URL}/${UUID_GZ})
-
-set (nm UUID)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-#apparently libcf needs it
-add_cdat_package_dependent(uuid "" "" OFF "CDAT_BUILD_LEAN" ON)
-#add_cdat_package(uuid "" "" OFF)
-
diff --git a/CMake/cdat_modules/uvcmetrics_deps.cmake b/CMake/cdat_modules/uvcmetrics_deps.cmake
deleted file mode 100644
index a01e906ae..000000000
--- a/CMake/cdat_modules/uvcmetrics_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(UVCMETRICS_deps ${cdat_pkg} )
diff --git a/CMake/cdat_modules/uvcmetrics_external.cmake b/CMake/cdat_modules/uvcmetrics_external.cmake
deleted file mode 100644
index 4a9ad2d1e..000000000
--- a/CMake/cdat_modules/uvcmetrics_external.cmake
+++ /dev/null
@@ -1,42 +0,0 @@
-
-if (CDAT_DOWNLOAD_UVCMETRICS_TESTDATA)
-  set(UVCMETRICS_DOWNLOAD_FILES "")
-
-  file(READ "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcmetrics_test_data_md5s.txt" UVCMETRICS_FILES)
-  string(REGEX REPLACE ";" "\\\\;" UVCMETRICS_FILES "${UVCMETRICS_FILES}")
-  string(REGEX REPLACE "\n" ";" UVCMETRICS_FILES "${UVCMETRICS_FILES}")
-
-  foreach(line ${UVCMETRICS_FILES})
-    string(REGEX REPLACE " +" ";" line "${line}")
-    list(GET line 1 base_file_path)
-    list(GET line 0 FILE_MD5)
-
-    string(STRIP "${base_file_path}" base_file_path)
-    string(STRIP "${FILE_MD5}" FILE_MD5)
-
-    set(FILE_PATH "${UVCMETRICS_TEST_DATA_DIRECTORY}/${base_file_path}")
-    list(APPEND UVCMETRICS_DOWNLOAD_FILES "${FILE_PATH}")
-
-    set(FILE_URL "${LLNL_URL}/../sample_data/uvcmetrics_2.4.1/${base_file_path}")
-
-    add_custom_command(
-      OUTPUT "${FILE_PATH}"
-      COMMAND "${CMAKE_COMMAND}"
-        -D FILE_URL="${FILE_URL}"
-        -D FILE_MD5="${FILE_MD5}"
-        -D FILE_PATH="${FILE_PATH}"
-        -P "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake"
-      DEPENDS "${uvcmetrics_data_keyfile}"
-      COMMENT "Downloading ${base_file_path}"
-    )
-  endforeach()
-
-  add_custom_target(uvcmetrics_test_data ALL DEPENDS ${UVCMETRICS_DOWNLOAD_FILES})
-endif()
-
-set(GIT_CMD_STR GIT_REPOSITORY "${UVCMETRICS_SOURCE}")
-set(GIT_TAG GIT_TAG "${UVCMETRICS_BRANCH}")
-set(nm UVCMETRICS)
-set(OLD OFF)
-include(pipinstaller)
-unset(OLD)
diff --git a/CMake/cdat_modules/uvcmetrics_pkg.cmake b/CMake/cdat_modules/uvcmetrics_pkg.cmake
deleted file mode 100644
index 2f8294042..000000000
--- a/CMake/cdat_modules/uvcmetrics_pkg.cmake
+++ /dev/null
@@ -1,14 +0,0 @@
-set (nm UVCMETRICS)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_TAG})
-set(UVCMETRICS_URL ${LLNL_URL})
-set(UVCMETRICS_ZIP uvcmetrics-${UVCMETRICS_VERSION}.zip)
-#set(UVCMETRICS_SOURCE ${UVCMETRICS_URL}/${UVCMETRICS_ZIP})
-set(UVCMETRICS_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/uvcmetrics.git )
-set(UVCMETRICS_MD5)
-set(UVCMETRICS_BRANCH master)
-
-if (NOT CDAT_BUILD_LEAN)
-  add_cdat_package(UVCMETRICS "" "" ON)
-endif()
-
diff --git a/CMake/cdat_modules/vacumm_deps.cmake b/CMake/cdat_modules/vacumm_deps.cmake
deleted file mode 100644
index 9472871dd..000000000
--- a/CMake/cdat_modules/vacumm_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(vacumm_deps ${python_pkg} ${numpy_pkg} ${scipy_pkg} ${matplotlib_pkg} ${basemap_pkg} ${configobj_pkg} ${setuptools_pkg})
diff --git a/CMake/cdat_modules/vacumm_external.cmake b/CMake/cdat_modules/vacumm_external.cmake
deleted file mode 100644
index 0cf4556ff..000000000
--- a/CMake/cdat_modules/vacumm_external.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-# vacumm
-#
-set(vacumm_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/vacumm")
-
-configure_file(
-  "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/vacumm_build_step.cmake.in"
-  "${cdat_CMAKE_BINARY_DIR}/vacumm_build_step.cmake"
-  @ONLY
-  )
-
-set(vacumm_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/vacumm_build_step.cmake)
-
-ExternalProject_Add(vacumm
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${vacumm_source_dir}
-  URL ${VACUMM_URL}/${VACUMM_GZ}
-  URL_MD5 ${VACUMM_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${vacumm_build_command}
-  INSTALL_COMMAND ""
-  DEPENDS ${vacumm_deps}
-  ${ep_log_options}
-  )
diff --git a/CMake/cdat_modules/vacumm_pkg.cmake b/CMake/cdat_modules/vacumm_pkg.cmake
deleted file mode 100644
index 7dea0632e..000000000
--- a/CMake/cdat_modules/vacumm_pkg.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(VACUMM_MAJOR 3)
-set(VACUMM_MINOR 0)
-set(VACUMM_PATCH 0)
-set(VACUMM_VERSION ${VACUMM_MAJOR}.${VACUMM_MINOR}.${VACUMM_PATCH})
-set(VACUMM_URL ${LLNL_URL} )
-set(VACUMM_GZ vacumm-${VACUMM_VERSION}.tar.gz)
-set(VACUMM_MD5 b468fa72ddba9d0cd39d51164bef1dd4)
-
-set (nm VACUMM)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH})
-set(VACUMM_SOURCE ${VACUMM_URL}/${VACUMM_GZ})
-
-if (CDAT_BUILD_ALL)
-  add_cdat_package(vacumm "" "" ON)
-else()
-  add_cdat_package(vacumm "" "" OFF)
-endif()
diff --git a/CMake/cdat_modules/visit_deps.cmake b/CMake/cdat_modules/visit_deps.cmake
deleted file mode 100644
index 023429df2..000000000
--- a/CMake/cdat_modules/visit_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(VisIt_deps ${pkgconfig_pkg} ${sip_pkg} ${pyqt_pkg} ${paraview_pkg} ${r_pkg})
diff --git a/CMake/cdat_modules/visit_external.cmake b/CMake/cdat_modules/visit_external.cmake
deleted file mode 100644
index 7fbdb404c..000000000
--- a/CMake/cdat_modules/visit_external.cmake
+++ /dev/null
@@ -1,173 +0,0 @@
-set(VisIt_source "${CMAKE_CURRENT_BINARY_DIR}/build/VisIt")
-set(VisIt_binary "${CMAKE_CURRENT_BINARY_DIR}/build/VisIt")
-set(VisIt_install "${CMAKE_INSTALL_PREFIX}")
-
-if(QT_QMAKE_EXECUTABLE)
-  get_filename_component(QT_BINARY_DIR ${QT_QMAKE_EXECUTABLE} PATH)
-  get_filename_component(QT_ROOT ${QT_BINARY_DIR} PATH)
-endif()
-
-GET_FILENAME_COMPONENT(CMAKE_PATH_VAR ${CMAKE_COMMAND} PATH)
-SET(VISIT_C_FLAGS "${CMAKE_C_FLAGS} -I${cdat_EXTERNALS}/include")
-GET_FILENAME_COMPONENT(VISIT_C_COMPILER ${CMAKE_C_COMPILER} NAME)
-SET(VISIT_CXX_FLAGS "${CMAKE_CXX_FLAGS} -I${cdat_EXTERNALS}/include")
-GET_FILENAME_COMPONENT(VISIT_CXX_COMPILER ${CMAKE_CXX_COMPILER} NAME)
-SET(VISIT_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${cdat_EXTERNALS}/lib")
-
-MACRO(DETERMINE_VISIT_ARCHITECTURE ARCH)
-    IF(${CMAKE_SYSTEM_NAME} STREQUAL "Linux")
-        IF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc")
-            SET(${ARCH} linux-ppc)
-        ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc64")
-            SET(${ARCH} linux-ppc64)
-        ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "x86_64")
-            SET(${ARCH} linux-x86_64)
-        ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ia64")
-            SET(${ARCH} linux-ia64)
-        ELSE(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc")
-            SET(${ARCH} linux-intel)
-        ENDIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc")
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "AIX")
-        IF($ENV{OBJECT_MODE} STREQUAL "32")
-            SET(${ARCH} "ibm-aix-pwr")
-        ELSE($ENV{OBJECT_MODE} STREQUAL "32")
-            SET(${ARCH} "ibm-aix-pwr64")
-        ENDIF($ENV{OBJECT_MODE} STREQUAL "32")
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "Darwin")
-        IF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386")
-            EXECUTE_PROCESS(COMMAND uname -r
-               OUTPUT_STRIP_TRAILING_WHITESPACE
-               OUTPUT_VARIABLE _OSX_VERSION)
-            STRING(SUBSTRING ${_OSX_VERSION} 0 1 _OSX_MAJOR_VERSION)
-            IF(${_OSX_MAJOR_VERSION} STREQUAL "1")
-                # This will match 10, 11, 12, ...
-                SET(${ARCH} darwin-x86_64)
-            ELSE(${_OSX_MAJOR_VERSION} STREQUAL "1")
-                SET(${ARCH} darwin-i386)
-            ENDIF(${_OSX_MAJOR_VERSION} STREQUAL "1")
-        ELSE(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386")
-            SET(${ARCH} darwin-x86_64)
-        ENDIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386")
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "FreeBSD")
-        SET(${ARCH} "freebsd-${CMAKE_SYSTEM_VERSION}")
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "IRIX")
-        SET(${ARCH} sgi-irix6-mips2)
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "SunOS")
-        SET(${ARCH} "sun4-${CMAKE_SYSTEM_VERSION}-sparc")
-    ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "Tru64")
-        SET(${ARCH} dec-osf1-alpha)
-    ELSE(${CMAKE_SYSTEM_NAME} STREQUAL "Linux")
-        # Unhandled case. Make up a string.
-        SET(VISITARCHTMP "${CMAKE_SYSTEM_NAME}-${CMAKE_SYSTEM_PROCESSOR}")
-        STRING(TOLOWER ${VISITARCHTMP} ${ARCH})
-    ENDIF(${CMAKE_SYSTEM_NAME} STREQUAL "Linux")
-ENDMACRO(DETERMINE_VISIT_ARCHITECTURE ARCH)
-
-# Note this is a workaround to handle build on APPLE
-IF(APPLE)
-  SET(VISIT_INSTALL_PLATFORM "darwin-x86_64")
-ELSE(APPLE)
-  DETERMINE_VISIT_ARCHITECTURE(VISIT_INSTALL_PLATFORM)
-ENDIF(APPLE)
-
-SET(VISIT_HOSTNAME "visit-uvcdat-build")
-
-
-#Add VisIt to ExternalProject
-ExternalProject_Add(VisIt
-  #DOWNLOAD_DIR ${VisIt_source} #${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${VisIt_source}
-  BINARY_DIR ${VisIt_binary}
-  INSTALL_DIR ${VisIt_install}
-  #SVN_REPOSITORY ${VISIT_SVN}
-  URL ${VISIT_URL}/${VISIT_GZ}
-  #URL_MD5 ${VISIT_MD5}
-  PATCH_COMMAND ""
-  #CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${VisIt_install} -DCMAKE_INSTALL_NAME_DIR=${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib -DVISIT_CONFIG_SITE:FILEPATH=${VisIt_source}/${VISIT_HOSTNAME}.cmake
-  DEPENDS ${VisIt_deps}
-  ${ep_log_options}
-)
-
-if(NOT EXISTS ${CMAKE_INSTALL_PREFIX}/lib)
-  file(MAKE_DIRECTORY ${CMAKE_INSTALL_PREFIX}/lib)
-endif()
-
-#add references to VisIt's cmake
-SET(TMP_STR1 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_OSX_USE_RPATH TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR2 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_QT_SKIP_INSTALL TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR3 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_PYTHON_SKIP_INSTALL TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR4 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_HEADERS_SKIP_INSTALL TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR5 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_VTK_SKIP_INSTALL TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR6 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_R_SKIP_INSTALL TYPE BOOL ON)\\n\")\n")
-SET(TMP_STR7 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"add_definitions(-DEXTERNAL_VTK_BUILD)\\n\")\n")
-SET(TMP_STR8 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(CMAKE_EXE_LINKER_FLAGS \\\"\\\${CMAKE_EXE_LINKER_FLAGS} ${VISIT_LINKER_FLAGS}\\\")\\n\")\n")
-SET(TMP_STR9 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_C_FLAGS \\\"\\\${VISIT_C_FLAGS} ${VISIT_C_FLAGS}\\\")\\n\")\n")
-SET(TMP_STR10 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_CXX_FLAGS \\\"\\\${VISIT_CXX_FLAGS} ${VISIT_CXX_FLAGS}\\\")\\n\")\n")
-
-FILE(WRITE   ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR1})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR2})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR3})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR4})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR5})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR6})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR7})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR8})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR9})
-FILE(APPEND  ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR10})
-
-# Before install step
-#load VisIt installation 
-ExternalProject_Add_Step(VisIt BuildVisItPatch_Step1
- COMMAND sed -e s/<object.h>/"object.h"/g ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C > ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C_tmp
- COMMAND mv ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C_tmp ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C
-  COMMAND echo yes | svn_bin/build_visit --gpl --console --cc ${VISIT_C_COMPILER} --cxx ${VISIT_CXX_COMPILER} --alt-vtk-dir ${ParaView_binary}/VTK --alt-pyqt-dir ${CMAKE_INSTALL_PREFIX} --alt-R-dir ${cdat_EXTERNALS} --alt-netcdf-dir ${cdat_EXTERNALS} --alt-hdf5-dir ${cdat_EXTERNALS} --thirdparty-path ${CMAKE_CURRENT_BINARY_DIR}/visit-thirdparty --cmake-bin-dir ${CMAKE_PATH_VAR} --alt-python-dir ${CMAKE_INSTALL_PREFIX} --alt-qt-dir ${QT_ROOT} --no-visit --makeflags -j${VISIT_PARALLEL_PROCESSORS} --log-file ${CMAKE_BINARY_DIR}/logs/VisIt-build-out.log --no-mesa --visit-build-hostname ${VisIt_source}/${VISIT_HOSTNAME}.cmake
-  COMMAND ${CMAKE_COMMAND} -P ${CMAKE_BINARY_DIR}/visit.cmake 
-  DEPENDEES patch
-  DEPENDERS configure
-  WORKING_DIRECTORY ${VisIt_source})
-
-#After installation
-#Make symlinks of VisIt's lib, plugins, 
-#move pyqt_pyqtviewer.so and plugin into python site-packages
-message("COMMAND1: ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}")
-
-message("COMMAND2: ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}-plugins")
-
-ExternalProject_Add_Step(VisIt InstallVisItLibSymLink
-  COMMAND ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}
-  COMMAND ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}-plugins
-  DEPENDEES install
-  WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR})
-
-FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "MESSAGE(STATUS \"Executing VisIt post installation steps\")\n")
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB hdf5_files ${HDF5_install}/lib/libhdf5*${_LINK_LIBRARY_SUFFIX}*)\n")
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${hdf5_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n")
-
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB netcdf_files ${netcdf_install}/lib/libnetcdf*${_LINK_LIBRARY_SUFFIX}*)\n")
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${netcdf_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n")
-
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB z_files ${zlib_install}/lib/libz*${_LINK_LIBRARY_SUFFIX}*)\n")
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${z_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n")
-
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB curl_files ${curl_install}/lib/libcurl*${_LINK_LIBRARY_SUFFIX}*)\n")
-FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${curl_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n")
-
-ExternalProject_Add_Step(VisIt InstallVisItExternalLibraries
-  COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch
-  DEPENDEES InstallVisItLibSymLink
-  WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}
-  )
-
-# clean up un-necessary database readers
-ExternalProject_Add_Step(VisIt RemoveUnnecessaryDatabaseReaders
-  COMMAND find . ! \( -iname "*netcdf*" -o -iname "*image*" -o -iname "*hdf5*" -o -iname "*pixie*" -o -iname "*vtk*" -o -iname "*mtk*" -o -iname "*xdmf*" \) -type f -delete
-  DEPENDEES install
-  WORKING_DIRECTORY ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins/databases)
-
-FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/r_ismev_package "r = getOption('repos'); r['CRAN'] = 'http://cran.us.r-project.org'; options(repos = r); rm(r); install.packages('ismev')")
-
-ExternalProject_Add_Step(VisIt AddRDependencies
-  COMMAND ${cdat_EXTERNALS}/bin/Rscript ${CMAKE_CURRENT_BINARY_DIR}/r_ismev_package
-  DEPENDEES install)
diff --git a/CMake/cdat_modules/visit_pkg.cmake b/CMake/cdat_modules/visit_pkg.cmake
deleted file mode 100644
index df8c7fab1..000000000
--- a/CMake/cdat_modules/visit_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(VISIT_MAJOR 2)
-set(VISIT_MINOR 6)
-set(VISIT_PATCH 0)
-set(VISIT_VERSION ${VISIT_MAJOR}.${VISIT_MINOR}.${VISIT_PATCH})
-set(VISIT_URL http://vis.lbl.gov/~visit)
-set(VISIT_GZ visit${VISIT_VERSION}.tar.gz)
-set(VISIT_MD5 cb7ff3e7d6e487a11786644a3b49331e )
-set(VISIT_SOURCE ${VISIT_URL}/${VISIT_GZ})
-
-add_cdat_package_dependent(VisIt "" "Build VisIt" OFF "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/vistrails_deps.cmake b/CMake/cdat_modules/vistrails_deps.cmake
deleted file mode 100644
index 98ae7150f..000000000
--- a/CMake/cdat_modules/vistrails_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(vistrails_deps ${python_pkg} ${cython_pkg} ${scipy_pkg})
diff --git a/CMake/cdat_modules/vistrails_external.cmake b/CMake/cdat_modules/vistrails_external.cmake
deleted file mode 100644
index ae8027c26..000000000
--- a/CMake/cdat_modules/vistrails_external.cmake
+++ /dev/null
@@ -1,92 +0,0 @@
-# Create an external project to clone vistrails,
-# and configure and build it
-
-include(GetGitRevisionDescription)
-set(vistrails_branch ${VISTRAILS_BRANCH})
-
-get_git_head_revision(refspec sha)
-
-string(REGEX REPLACE ".+/(.+)" "\\1" _branch "${refspec}")
-
-# Did we extract out the branch?
-if (NOT _branch STREQUAL "${refspec}")
-    # Get the remote the branh if from
-    get_git_remote_for_branch(${_branch} _remote)
-
-    if (_remote)
-        git_remote_url(${_remote} _url)
-
-        if (_url)
-            if(_url MATCHES "^.*uvcdat.git")
-              if(_branch STREQUAL "master")
-                set(vistrails_branch ${VISTRAILS_BRANCH})
-              elseif(_branch STREQUAL "release")
-                set(vistrails_branch ${VISTRAILS_BRANCH})
-              endif()
-            elseif(_url MATCHES "^.*uvcdat-devel.git")
-              set(vistrails_branch uvcdat-next)
-            endif()
-        endif()
-    endif()
-else()
-    message(WARNING "Unable to branch from '${refspec}' using default VisTrails branch")
-endif()
-
-if("${refspec}" STREQUAL "refs/heads/devel-master")
-  set(vistrails_branch uvcdat-next)
-endif()
-
-message("[INFO] Using vistrails branch: ${vistrails_branch}")
-
-set(vistrails_tag_point_message "Specify branch of vistrails to be used for UVCDAT")
-set(VISTRAILS_TAG_POINT ${vistrails_branch} CACHE STRING "${vistrails_tag_point_message}")
-set(vistrails_url "${VISTRAILS_SOURCE}")
-
-if(CDAT_AUTO_UPDATE_VISTRAILS_TAG_POINT)
-  set(VISTRAILS_TAG_POINT ${vistrails_branch} CACHE STRING "${vistrails_tag_point_message}" FORCE)
-endif()
-
-# For configure purposes
-set(SOURCE_DIR "${CMAKE_INSTALL_PREFIX}/vistrails")
-set(BRANCH ${VISTRAILS_TAG_POINT})
-set(GIT_URL "${vistrails_url}")
-set(GIT_TARGET "vistrails")
-
-option(CDAT_DELETE_VISTRAILS_HISTORY "Delete GIT history of vistrails" OFF)
-option(CDAT_AUTO_UPDATE_VISTRAILS_TAG_POINT "Delete GIT history of vistrails" ON)
-
-set(vistrails_install_command ${cdat_BINARY_DIR}/git_clone_vistrails.sh)
-if(EXISTS "${SOURCE_DIR}")
-  configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/git_update.sh.in
-    ${cdat_BINARY_DIR}/git_update_vistrails.sh
-    @ONLY
-  )
-  set(vistrails_install_command ${cdat_BINARY_DIR}/git_update_vistrails.sh)
-else()
-  configure_file(
-    ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/git_clone.sh.in
-    ${cdat_BINARY_DIR}/git_clone_vistrails.sh
-    @ONLY
-  )
-endif()
-
-ExternalProject_Add(vistrails
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${CMAKE_INSTALL_PREFIX}
-  BUILD_IN_SOURCE 0
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ""
-  INSTALL_COMMAND ${vistrails_install_command}
-  DEPENDS ${vistrails_DEPENDENCIES}
-  ${EP_LOG_OPTIONS}
-)
-
-if(CDAT_DELETE_VISTRAILS_HISTORY)
-  ExternalProject_Add_Step(vistrails after_install
-    COMMAND ${CMAKE_COMMAND} -E remove_directory ${CMAKE_INSTALL_PREFIX}/vistrails/.git
-    DEPENDEES install
-    WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/vistrails
-  )
-endif()
-
diff --git a/CMake/cdat_modules/vistrails_pkg.cmake b/CMake/cdat_modules/vistrails_pkg.cmake
deleted file mode 100644
index de4704436..000000000
--- a/CMake/cdat_modules/vistrails_pkg.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-set(VISTRAILS_VERSION ${VISTRAILS_TAG_POINT})
-set(VISTRAILS_SOURCE "${GIT_PROTOCOL}github.com/UV-CDAT/VisTrails.git")
-set(VISTRAILS_VERSION uvcdat-2.4.0)
-set(VISTRAILS_MD5)
-set(VISTRAILS_BRANCH uvcdat-2.4.0)
-add_cdat_package_dependent(vistrails "" "" ON "CDAT_BUILD_GUI" OFF)
diff --git a/CMake/cdat_modules/vtk_deps.cmake b/CMake/cdat_modules/vtk_deps.cmake
deleted file mode 100644
index 015636f1f..000000000
--- a/CMake/cdat_modules/vtk_deps.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(VTK_deps ${pkgconfig_pkg} ${python_pkg} ${tiff_pkg} ${hdf5_pkg} ${freetype_pkg} ${netcdfplus_pkg} ${netcdf_pkg} ${proj4_pkg})
-
-if (NOT CDAT_BUILD_GUI)
-  list(APPEND VTK_deps ${qt_pkg})
-endif()
-
-if(NOT CDAT_BUILD_LEAN)
-  list(APPEND VTK_deps ${ffmpeg_pkg})
-endif()
-
-if(CDAT_BUILD_OFFSCREEN)
-  list(APPEND VTK_deps ${osmesa_pkg})
-endif()
diff --git a/CMake/cdat_modules/vtk_external.cmake b/CMake/cdat_modules/vtk_external.cmake
deleted file mode 100644
index 8e926a66c..000000000
--- a/CMake/cdat_modules/vtk_external.cmake
+++ /dev/null
@@ -1,184 +0,0 @@
-set(vtk_source "${CMAKE_CURRENT_BINARY_DIR}/build/VTK")
-set(vtk_binary "${CMAKE_CURRENT_BINARY_DIR}/build/VTK-build")
-set(vtk_install "${cdat_EXTERNALS}")
-
-set(GIT_CMD_STR GIT_REPOSITORY "${VTK_SOURCE}")
-
-set(_vtk_modules
-  vtkCommonComputationalGeometry
-  vtkCommonCore
-  vtkCommonExecutionModel
-  vtkCommonMisc
-  vtkCommonSystem
-  vtkCommonTransforms
-  vtkFiltersAMR
-  vtkFiltersCore
-  vtkFiltersExtraction
-  vtkFiltersFlowPaths
-  vtkFiltersGeneral
-  vtkFiltersGeneric
-  vtkFiltersGeometry
-  vtkFiltersHybrid
-  vtkFiltersImaging
-  vtkFiltersModeling
-  vtkFiltersSelection
-  vtkFiltersSMP
-  vtkFiltersSources
-  vtkFiltersStatistics
-  vtkFiltersTexture
-  vtkGeovisCore
-  vtkImagingColor
-  vtkImagingCore
-  vtkImagingGeneral
-  vtkImagingMath
-  vtkImagingSources
-  vtkImagingStencil
-  vtkInteractionImage
-  vtkInteractionStyle
-  vtkInteractionWidgets
-  vtkIOCore
-  vtkIOExport
-  vtkIOExportOpenGL
-  vtkIOGeometry
-  vtkIOImage
-  vtkIOImport
-  vtkRenderingCore
-  vtkRenderingFreeType
-  vtkRenderingFreeTypeOpenGL
-  vtkRenderingImage
-  vtkRenderingLabel
-  vtkRenderingOpenGL
-  vtkRenderingVolume
-  vtkRenderingVolumeOpenGL
-  vtkViewsCore
-  vtkViewsGeovis
-)
-
-if(NOT CDAT_BUILD_LEAN)
-  list(APPEND _vtk_modules "vtkIOFFMPEG")
-endif()
-
-# Either we use cdat zlib and libxml or system zlib and libxml
-list(APPEND vtk_build_args
-  -DVTK_USE_SYSTEM_ZLIB:BOOL=ON
-  -DVTK_USE_SYSTEM_LIBXML2:BOOL=ON
-  -DVTK_USE_SYSTEM_HDF5:BOOL=ON
-  -DVTK_USE_SYSTEM_NETCDF:BOOL=ON
-  -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON
-  -DVTK_USE_SYSTEM_LIBPROJ4:BOOL=ON
-  -DVTK_RENDERING_BACKEND:STRING=OpenGL
-  -DLIBPROJ4_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/proj4/include
-  -DLIBPROJ4_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/proj4/lib/libproj${_LINK_LIBRARY_SUFFIX}
-#
-)
-if (APPLE)
-  list(APPEND vtk_build_args
-    -DVTK_USE_SYSTEM_PNG:BOOL=OFF
-    -DVTK_USE_SYSTEM_JPEG:BOOL=OFF
-    )
-else()
-  list(APPEND vtk_build_args
-    -DVTK_USE_SYSTEM_PNG:BOOL=ON
-    -DVTK_USE_SYSTEM_JPEG:BOOL=ON
-  )
-endif()
-
-# Turn off testing and other non essential featues
-list(APPEND vtk_build_args
-  -DBUILD_TESTING:BOOL=OFF
-  -DCMAKE_PREFIX_PATH:PATH=${cdat_EXTERNALS}
-)
-
-# Use cdat zlib
-#if(NOT CDAT_USE_SYSTEM_ZLIB)
-#  list(APPEND vtk_build_args
-#    -DZLIB_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-#       -DZLIB_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#  )
-#endif()
-
-# Use cdat libxml
-#if(NOT CDAT_USE_SYSTEM_LIBXML2)
-#  list(APPEND vtk_build_args
-#    -DLIBXML2_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include/libxml2
-#    -DLIBXML2_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/lib/libxml2${_LINK_LIBRARY_SUFFIX}
-#    -DLIBXML2_XMLLINT_EXECUTABLE:FILEPATH=${cdat_EXTERNALS}/bin/xmllint
-#  )
-#endif()
-
-# Use cdat hdf5
-if(NOT CDAT_USE_SYSTEM_HDF5)
-  list(APPEND vtk_build_args
-    -DHDF5_DIR:PATH=${cdat_EXTERNALS}/
-    -DHDF5_C_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-    -DHDF5_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include
-    -DHDF5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-    -DHDF5_hdf5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-    -DHDF5_hdf5_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}
-  )
-
-#  if(NOT CDAT_USE_SYSTEM_ZLIB)
-#    list(APPEND vtk_build_args
-#      -DHDF5_z_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#      -DHDF5_z_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX}
-#    )
-#  endif()
-endif()
-
-if(CDAT_BUILD_OFFSCREEN)
-  list(APPEND vtk_build_args
-    "-DVTK_USE_X:BOOL=OFF"
-    "-DVTK_OPENGL_HAS_OSMESA:BOOL=ON"
-    "-DOPENGL_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include"
-    "-DOPENGL_gl_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}"
-    "-DOPENGL_glu_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libGLU${_LINK_LIBRARY_SUFFIX}"
-    "-DOSMESA_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include"
-    "-DOSMESA_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}"
-  )
-endif()
-
-if(CDAT_BUILD_WEB)
-  list(APPEND vtk_build_args
-    "-DVTK_Group_Web:BOOL=ON"
-  )
-endif()
-
-set(_vtk_module_options)
-foreach(_module ${_vtk_modules})
-  list(APPEND _vtk_module_options "-DModule_${_module}:BOOL=ON")
-endforeach()
-
-ExternalProject_Add(VTK
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${vtk_source}
-  BINARY_DIR ${vtk_binary}
-  INSTALL_DIR ${vtk_install}
-  ${GIT_CMD_STR}
-  GIT_TAG ${VTK_BRANCH}
-  UPDATE_COMMAND ""
-  PATCH_COMMAND ""
-  CMAKE_CACHE_ARGS
-    -DBUILD_SHARED_LIBS:BOOL=ON
-    -DBUILD_TESTING:BOOL=OFF
-    -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags}
-    -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags}
-    -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR}
-    ${cdat_compiler_args}
-    ${vtk_build_args}
-    -DVTK_WRAP_PYTHON:BOOL=ON
-    -DPYTHON_EXECUTABLE:FILEPATH=${PYTHON_EXECUTABLE}
-    -DPYTHON_INCLUDE_DIR:PATH=${PYTHON_INCLUDE}
-    -DPYTHON_LIBRARY:FILEPATH=${PYTHON_LIBRARY}
-    -DPYTHON_MAJOR_VERSION:STRING=${PYTHON_MAJOR}
-    -DPYTHON_MINOR_VERSION:STRING=${PYTHON_MINOR}
-    -DVTK_Group_Rendering:BOOL=OFF
-    -DVTK_Group_StandAlone:BOOL=OFF
-    -DVTK_LEGACY_SILENT:BOOL=ON
-    ${_vtk_module_options}
-  CMAKE_ARGS
-    -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-  DEPENDS ${VTK_deps}
-  ${ep_log_options}
-)
-
-unset(GIT_CMD_STR)
diff --git a/CMake/cdat_modules/vtk_pkg.cmake b/CMake/cdat_modules/vtk_pkg.cmake
deleted file mode 100644
index 35504cbac..000000000
--- a/CMake/cdat_modules/vtk_pkg.cmake
+++ /dev/null
@@ -1,4 +0,0 @@
-set(VTK_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/VTK.git )
-set(VTK_MD5)
-set(VTK_BRANCH uvcdat-master)
-add_cdat_package_dependent(VTK "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/wget_deps.cmake b/CMake/cdat_modules/wget_deps.cmake
deleted file mode 100644
index 5c0406531..000000000
--- a/CMake/cdat_modules/wget_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(wget_deps)
diff --git a/CMake/cdat_modules/wget_external.cmake b/CMake/cdat_modules/wget_external.cmake
deleted file mode 100644
index 157c00038..000000000
--- a/CMake/cdat_modules/wget_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-set(wget_source "${CMAKE_CURRENT_BINARY_DIR}/build/wget")
-set(wget_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(Wget
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${wget_source}
-  INSTALL_DIR ${wget_install}
-  URL ${WGET_URL}/${WGET_GZ}
-  URL_MD5 ${WGET_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${wget_deps}
-  ${ep_log_options}
-)
-
diff --git a/CMake/cdat_modules/wget_pkg.cmake b/CMake/cdat_modules/wget_pkg.cmake
deleted file mode 100644
index 879dfc87d..000000000
--- a/CMake/cdat_modules/wget_pkg.cmake
+++ /dev/null
@@ -1,28 +0,0 @@
-set(LLNL_URL http://uv-cdat.llnl.gov/cdat/resources)
-set(WGET_MAJOR 1)
-set(WGET_MINOR 12)
-set(WGET_PATCH)
-set(WGET_URL ${LLNL_URL})
-set(WGET_GZ wget-${WGET_MAJOR}.${WGET_MINOR}.tar.gz)
-set(WGET_MD5 141461b9c04e454dc8933c9d1f2abf83)
-set(WGET_SOURCE ${WGET_URL}/${WGET_GZ})
-
-add_cdat_package(Wget "" "Build Wget" SYSTEM)
-
-set (nm WGET)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR})
-if(CDAT_BUILD_WGET)
-  if(WIN32)
-    set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget.exe)
-  else()
-    set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget)
-  endif()
-endif()
-if (${WGET_EXECUTABLE} STREQUAL "WGET_EXECUTABLE-NOTFOUND")
-    set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget)
-endif()
-message("[INFO] WGET_EXECUTABLE is set to ${WGET_EXECUTABLE}")
-
-set(HASWGET ${WGET_EXECUTABLE})
-
diff --git a/CMake/cdat_modules/windfield_deps.cmake b/CMake/cdat_modules/windfield_deps.cmake
deleted file mode 100644
index bef69919b..000000000
--- a/CMake/cdat_modules/windfield_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(windfield_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/windfield_external.cmake b/CMake/cdat_modules/windfield_external.cmake
deleted file mode 100644
index 0be2b03c2..000000000
--- a/CMake/cdat_modules/windfield_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# Windfield`
-#
-set(windfield_source "${CMAKE_CURRENT_BINARY_DIR}/build/windfield")
-
-ExternalProject_Add(windfield
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${windfield_source}
-  URL ${windfield_URL}/${windfield_GZ}
-  URL_MD5 ${windfield_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX}
-  DEPENDS ${windfield_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/windfield_pkg.cmake b/CMake/cdat_modules/windfield_pkg.cmake
deleted file mode 100644
index 1296543c6..000000000
--- a/CMake/cdat_modules/windfield_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(windfield_MAJOR )
-set(windfield_MINOR )
-set(windfield_VERSION 547534c636efc)
-set(windfield_URL ${LLNL_URL} )
-set(windfield_GZ windfield-${windfield_VERSION}.tar.bz2)
-set(windfield_MD5 48989935760da881424b6adb2cb96f44 )
-set(windfield_SOURCE ${windfield_URL}/${windfield_GZ})
-
-add_cdat_package_dependent(windfield "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/windspharm_deps.cmake b/CMake/cdat_modules/windspharm_deps.cmake
deleted file mode 100644
index a6a45a3a9..000000000
--- a/CMake/cdat_modules/windspharm_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(windspharm_deps ${cdat_pkg})
diff --git a/CMake/cdat_modules/windspharm_external.cmake b/CMake/cdat_modules/windspharm_external.cmake
deleted file mode 100644
index a1c93750e..000000000
--- a/CMake/cdat_modules/windspharm_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-# windspharm
-#
-set(windspharm_source "${CMAKE_CURRENT_BINARY_DIR}/build/windspharm")
-
-ExternalProject_Add(windspharm
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${windspharm_source}
-  URL ${windspharm_URL}/${windspharm_GZ}
-  URL_MD5 ${windspharm_MD5}
-  BUILD_IN_SOURCE 1
-  CONFIGURE_COMMAND ""
-  BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build
-  INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}"
-  DEPENDS ${windspharm_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/windspharm_pkg.cmake b/CMake/cdat_modules/windspharm_pkg.cmake
deleted file mode 100644
index 4293b1a1c..000000000
--- a/CMake/cdat_modules/windspharm_pkg.cmake
+++ /dev/null
@@ -1,9 +0,0 @@
-set(windspharm_MAJOR )
-set(windspharm_MINOR )
-set(windspharm_VERSION 76a47fca1a)
-set(windspharm_URL ${LLNL_URL} )
-set(windspharm_GZ windspharm-${windspharm_VERSION}.zip)
-set(windspharm_MD5 8456da340724d332955f2ec946204cad)
-set(windspharm_SOURCE ${windspharm_URL}/${windspharm_GZ})
-
-add_cdat_package_dependent(windspharm "" "" OFF "CDAT_BUILD_LEAN" ON)
diff --git a/CMake/cdat_modules/x264_deps.cmake b/CMake/cdat_modules/x264_deps.cmake
deleted file mode 100644
index c4169909e..000000000
--- a/CMake/cdat_modules/x264_deps.cmake
+++ /dev/null
@@ -1,2 +0,0 @@
-# Not necessary in theory, but fixes race condition that was being experienced on Ubuntu
-set(X264_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/x264_external.cmake b/CMake/cdat_modules/x264_external.cmake
deleted file mode 100644
index ad75bd4b9..000000000
--- a/CMake/cdat_modules/x264_external.cmake
+++ /dev/null
@@ -1,28 +0,0 @@
-# The X264 external project for ParaView
-set(x264_source "${CMAKE_CURRENT_BINARY_DIR}/build/X264")
-set(x264_install "${cdat_EXTERNALS}")
-set(ENV{PATH} $ENV{PATH}:${cdat_EXTERNALS}/bin)
-
-find_program(YASM_BIN "yasm")
-
-if (NOT YASM_BIN)
-  set(x264_conf_args --disable-asm^^--enable-shared)
-else()
-  set(x264_conf_args --enable-shared)
-endif()
-
-ExternalProject_Add(X264
-  LIST_SEPARATOR ^^
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${x264_source}
-  INSTALL_DIR ${x264_install}
-  URL ${X264_URL}/${X264_GZ}
-  URL_MD5 ${X264_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -DCONFIGURE_ARGS=${x264_conf_args} -DBASH_CONFIGURE=ON -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${X264_deps}
-  ${ep_log_options}
-  )
-
-set(X264_INCLUDE_DIR ${x264_install}/include)
diff --git a/CMake/cdat_modules/x264_pkg.cmake b/CMake/cdat_modules/x264_pkg.cmake
deleted file mode 100644
index ba832b026..000000000
--- a/CMake/cdat_modules/x264_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(X264_DATE 20151006)
-set(X264_TIME 2245)
-set(X264_ADDENDUM "")
-set(X264_URL ${LLNL_URL})
-set(X264_GZ x264-snapshot-${X264_DATE}-${X264_TIME}${X264_ADDENDUM}.tar.gz)
-set(X264_MD5 e8f5a0fc8db878bcdd256715472fe379)
-
-set (nm X264)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_DATE}.${${nm}_TIME})
-set(X264_SOURCE ${X264_URL}/${X264_GZ})
-
-add_cdat_package_dependent(X264 "" "" ON "CDAT_BUILD_GRAPHICS" OFF)
diff --git a/CMake/cdat_modules/xgks_external.cmake b/CMake/cdat_modules/xgks_external.cmake
deleted file mode 100644
index 1dcf22297..000000000
--- a/CMake/cdat_modules/xgks_external.cmake
+++ /dev/null
@@ -1,21 +0,0 @@
-
-set(xgks_source "${CMAKE_CURRENT_BINARY_DIR}/build/xgks")
-set(xgks_install "${cdat_EXTERNALS}")
-
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/xgks_configure_step.cmake.in
-  ${cdat_CMAKE_BINARY_DIR}/xgks_configure_step.cmake
-  @ONLY)
-
-#cp -f build/xgks*/port/misc/udposix.h /home/partyd/Projects/uv-cdat/make-install/Externals/include
-
-ExternalProject_Add(xgks
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${xgks_source}
-  INSTALL_DIR ${xgks_install}
-  URL ${XGKS_URL}/${XGKS_GZ}
-  URL_MD5 ${XGKS_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/xgks_configure_step.cmake
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/yasm_deps.cmake b/CMake/cdat_modules/yasm_deps.cmake
deleted file mode 100644
index 86ac65b48..000000000
--- a/CMake/cdat_modules/yasm_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(YASM_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/yasm_external.cmake b/CMake/cdat_modules/yasm_external.cmake
deleted file mode 100644
index 9c1744b2d..000000000
--- a/CMake/cdat_modules/yasm_external.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-set(YASM_source "${CMAKE_CURRENT_BINARY_DIR}/build/YASM")
-set(YASM_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(YASM
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${YASM_source}
-  INSTALL_DIR ${YASM_install}
-  URL ${YASM_URL}/${YASM_GZ}
-  URL_MD5 ${YASM_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND}  -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${YASM_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/yasm_pkg.cmake b/CMake/cdat_modules/yasm_pkg.cmake
deleted file mode 100644
index d4669fe88..000000000
--- a/CMake/cdat_modules/yasm_pkg.cmake
+++ /dev/null
@@ -1,13 +0,0 @@
-set(YASM_MAJOR_SRC 1)
-set(YASM_MINOR_SRC 2)
-set(YASM_PATCH_SRC 0)
-set(YASM_URL ${LLNL_URL})
-set(YASM_GZ yasm-${YASM_MAJOR_SRC}.${YASM_MINOR_SRC}.${YASM_PATCH_SRC}.tar.gz)
-set(YASM_MD5 4cfc0686cf5350dd1305c4d905eb55a6)
-set(YASM_SOURCE ${YASM_URL}/${YASM_GZ})
-
-set (nm YASM)
-string(TOUPPER ${nm} uc_nm)
-set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC})
-add_cdat_package(YASM "" "" OFF)
-
diff --git a/CMake/cdat_modules/zlib_deps.cmake b/CMake/cdat_modules/zlib_deps.cmake
deleted file mode 100644
index 3f2626fb6..000000000
--- a/CMake/cdat_modules/zlib_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(zlib_deps ${pkgconfig_pkg})
diff --git a/CMake/cdat_modules/zlib_external.cmake b/CMake/cdat_modules/zlib_external.cmake
deleted file mode 100644
index 09b6fd533..000000000
--- a/CMake/cdat_modules/zlib_external.cmake
+++ /dev/null
@@ -1,55 +0,0 @@
-
-# If Windows we use CMake otherwise ./configure
-if(WIN32)
-
-  set(zlib_source "${CMAKE_CURRENT_BINARY_DIR}/zlib")
-  set(zlib_binary "${CMAKE_CURRENT_BINARY_DIR}/zlib-build")
-  set(zlib_install "${cdat_EXTERNALS}")
-
-  ExternalProject_Add(zlib
-    DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-    SOURCE_DIR ${zlib_source}
-    BINARY_DIR ${zlib_build}
-    INSTALL_DIR ${zlib_install}
-    URL ${ZLIB_URL}/${ZLIB_GZ}
-    URL_MD5 ${ZLIB_MD5}
-    PATCH_COMMAND ${CMAKE_COMMAND} -E remove <SOURCE_DIR>/zconf.h
-    CMAKE_CACHE_ARGS
-      -DCMAKE_CXX_FLAGS:STRING=${pv_tpl_cxx_flags}
-      -DCMAKE_C_FLAGS:STRING=${pv_tpl_c_flags}
-      -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR}
-      ${pv_tpl_compiler_args}
-      ${zlib_EXTRA_ARGS}
-    CMAKE_ARGS
-      -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-    ${ep_log_options}
-  )
-
-else()
-
-  set(zlib_source "${CMAKE_CURRENT_BINARY_DIR}/build/zlib")
-  set(zlib_install "${cdat_EXTERNALS}")
-  set(CONFIGURE_ARGS --shared)
-
-  ExternalProject_Add(zlib
-    DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-    SOURCE_DIR ${zlib_source}
-    INSTALL_DIR ${zlib_install}
-    URL ${ZLIB_URL}/${ZLIB_GZ}
-    URL_MD5 ${ZLIB_MD5}
-    PATCH_COMMAND ${CMAKE_COMMAND} -E remove <SOURCE_DIR>/zconf.h
-    BUILD_IN_SOURCE 1
-    CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${CONFIGURE_ARGS} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cleanenv_configure_step.cmake
-    DEPENDS ${zlib_deps}
-    ${ep_log_options}
-  )
-
-endif()
-
-set(ZLIB_INCLUDE_DIR ${zlib_install}/include)
-
-if(WIN32)
-  set(ZLIB_LIBRARY optimized ${zlib_install}/lib/zlib${_LINK_LIBRARY_SUFFIX} debug ${zlib_install}/lib/zlibd${_LINK_LIBRARY_SUFFIX})
-else()
-  set(ZLIB_LIBRARY ${ZLIB_LIBRARY_PATH}/libz${_LINK_LIBRARY_SUFFIX})
-endif()
diff --git a/CMake/cdat_modules/zlib_pkg.cmake b/CMake/cdat_modules/zlib_pkg.cmake
deleted file mode 100644
index a34c30885..000000000
--- a/CMake/cdat_modules/zlib_pkg.cmake
+++ /dev/null
@@ -1,24 +0,0 @@
-set(ZLIB_PATCH_SRC 5 CACHE INTEGER "Version of ZLIB to use")
-MARK_AS_ADVANCED(ZLIB_PATCH_SRC)
-if (ZLIB_PATCH_SRC EQUAL 3)
-  set(ZLIB_MD5 debc62758716a169df9f62e6ab2bc634)
-elseif (ZLIB_PATCH_SRC EQUAL 5)
-  set(ZLIB_MD5 c735eab2d659a96e5a594c9e8541ad63)
-elseif (ZLIB_PATCH_SRC EQUAL 7)
-  set(ZLIB_MD5 60df6a37c56e7c1366cca812414f7b85)
-elseif (ZLIB_PATCH_SRC EQUAL 8)
-  set(ZLIB_MD5 44d667c142d7cda120332623eab69f40)
-else ()
-  message(FATAL_ERROR "error: invalid zlib patch number: '${ZLIB_PATCH_SRC}' valid: 3, 5, 7 or 8")
-endif()
-
-set(ZLIB_MAJOR_SRC 1)
-set(ZLIB_MINOR_SRC 2)
-#ZLIB_PATH_SRC and md5 is configured in CMakeLists.txt because on some RedHat system we need to change it ; # I don't believe this is true anymore durack1 23 Nov 2014
-#set(ZLIB_PATCH_SRC 8)
-set(ZLIB_VERSION ${ZLIB_MAJOR_SRC}.${ZLIB_MINOR_SRC}.${ZLIB_PATCH_SRC})
-set(ZLIB_URL ${LLNL_URL})
-set(ZLIB_GZ zlib-${ZLIB_VERSION}.tar.gz)
-set(ZLIB_SOURCE ${ZLIB_URL}/${ZLIB_GZ})
-
-add_cdat_package(zlib "" "" OFF)
diff --git a/CMake/cdat_modules/zmq_deps.cmake b/CMake/cdat_modules/zmq_deps.cmake
deleted file mode 100644
index 1ef560a28..000000000
--- a/CMake/cdat_modules/zmq_deps.cmake
+++ /dev/null
@@ -1 +0,0 @@
-set(ZMQ_deps ${sphinx_pkg} ${pip_pkg})
diff --git a/CMake/cdat_modules/zmq_external.cmake b/CMake/cdat_modules/zmq_external.cmake
deleted file mode 100644
index c4637b4b4..000000000
--- a/CMake/cdat_modules/zmq_external.cmake
+++ /dev/null
@@ -1,16 +0,0 @@
-
-set(zmq_source "${CMAKE_CURRENT_BINARY_DIR}/build/ZMQ")
-set(zmq_install "${cdat_EXTERNALS}")
-
-ExternalProject_Add(ZMQ
-  DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR}
-  SOURCE_DIR ${zmq_source}
-  INSTALL_DIR ${zmq_install}
-  URL ${ZMQ_URL}/${ZMQ_GZ}
-  URL_MD5 ${ZMQ_MD5}
-  BUILD_IN_SOURCE 1
-  PATCH_COMMAND ""
-  CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR=<INSTALL_DIR> -DWORKING_DIR=<SOURCE_DIR> -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake
-  DEPENDS ${ZMQ_deps}
-  ${ep_log_options}
-)
diff --git a/CMake/cdat_modules/zmq_pkg.cmake b/CMake/cdat_modules/zmq_pkg.cmake
deleted file mode 100644
index 26776a9f9..000000000
--- a/CMake/cdat_modules/zmq_pkg.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(ZMQ_MAJOR 4)
-set(ZMQ_MINOR 0)
-set(ZMQ_PATCH 4)
-set(ZMQ_VERSION ${ZMQ_MAJOR}.${ZMQ_MINOR}.${ZMQ_PATCH})
-set(ZMQ_URL ${LLNL_URL})
-set(ZMQ_GZ zeromq-${ZMQ_VERSION}.tar.gz)
-set(ZMQ_MD5 f3c3defbb5ef6cc000ca65e529fdab3b)
-set(ZMQ_SOURCE ${ZMQ_URL}/${ZMQ_GZ})
-
-add_cdat_package(ZMQ "" "" OFF)
diff --git a/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in b/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in
deleted file mode 100644
index cd9a7e539..000000000
--- a/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in
+++ /dev/null
@@ -1,38 +0,0 @@
-
-if(WIN32)
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/SRC/Release/lapack@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/BLAS/SRC/Release/blas@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/F2CLIBS/libf2c/Release/libf2c@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_directory "@clapack_source@/include" "@clapack_install@/include"
-    )
-
-else()
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/SRC/liblapack@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/BLAS/SRC/libblas@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/F2CLIBS/libf2c/libf2c@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/"
-    )
-
-  EXECUTE_PROCESS(
-    COMMAND ${CMAKE_COMMAND} -E copy_directory "@clapack_source@/include" "@clapack_install@/include"
-    )
-
-endif()
diff --git a/CMake/cdat_modules_extra/ESMF_install_step.cmake.in b/CMake/cdat_modules_extra/ESMF_install_step.cmake.in
deleted file mode 100644
index 8e754914f..000000000
--- a/CMake/cdat_modules_extra/ESMF_install_step.cmake.in
+++ /dev/null
@@ -1,35 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(ENV{ESMF_DIR} @ESMF_source@/esmf)
-set(ENV{ESMF_INSTALL_PREFIX} @ESMF_install@)
-set(ENV{ESMF_PTHREADS} @ESMF_pthreads@)
-set(ENV{ESMF_OS} @ESMF_os@)
-set(ENV{ESMF_COMPILER} @ESMF_compiler@)
-set(ENV{ESMF_COMM} @ESMF_comm@)
-set(ENV{ESMF_ABI} @ESMF_abi@)
-set(ENV{ESMF_OPENMP} @ESMF_openmp@)
-set(ENV{ESMF_MOAB} OFF)
-set(ENV{ESMF_ARRAYLITE} TRUE)
-set(ENV{CFLAGS} ${cdat_osx_flags_fortran})
-set(ENV{CXXFLAGS} ${cdat_osx_cxxflags_fortran})
-set(ENV{CPPFLAGS} ${cdat_osx_flags_fortran})
-set(ENV{FFLAGS} ${cdat_osx_flags_fortran})
-
-execute_process(
-  COMMAND make install
-  WORKING_DIRECTORY @ESMF_source@/esmf
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-set(ESMF_source @ESMF_source@)
-set(ESMF_install @ESMF_install@)
-set(ESMF_COMM @mpiuni@)
-set(ESMF_pthreads @ESMF_pthreads@)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Install")
-endif()
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/ESMF_make_step.cmake.in b/CMake/cdat_modules_extra/ESMF_make_step.cmake.in
deleted file mode 100644
index 224067164..000000000
--- a/CMake/cdat_modules_extra/ESMF_make_step.cmake.in
+++ /dev/null
@@ -1,45 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(ENV{ESMF_DIR} @ESMF_source@/esmf)
-set(ENV{ESMF_INSTALL_PREFIX} @ESMF_install@)
-set(ENV{ESMF_PTHREADS} @ESMF_pthreads@)
-set(ENV{ESMF_OS} @ESMF_os@)
-set(ENV{ESMF_COMPILER} @ESMF_compiler@)
-set(ENV{ESMF_COMM} @ESMF_comm@)
-set(ENV{ESMF_ABI} @ESMF_abi@)
-set(ENV{ESMF_OPENMP} @ESMF_openmp@)
-set(ENV{ESMF_MOAB} OFF)
-set(ENV{ESMF_ARRAYLITE} TRUE)
-set(ENV{CFLAGS} ${cdat_osx_flags_fortran})
-set(ENV{CXXFLAGS} ${cdat_osx_cxxflags_fortran})
-set(ENV{CPPFLAGS} ${cdat_osx_flags_fortran})
-set(ENV{FFLAGS} ${cdat_osx_flags_fortran})
-
-
-
-## Store the configuration used to build ESMF
-set(outfile @ESMF_source@/set_esmf_env_ser.sh)
-file(WRITE ${outfile} "# ESMF compiled with these environment variables\n\n")
-file(APPEND ${outfile} "export ESMF_DIR=@ESMF_source@/esmf\n")
-file(APPEND ${outfile} "export ESMF_INSTALL_PREFIX=@ESMF_install@\n")
-file(APPEND ${outfile} "export ESMF_THREADS=@ESMF_pthreads@\n")
-file(APPEND ${outfile} "export ESMF_COMM=@ESMF_COMM@\n")
-file(APPEND ${outfile} "\n")
-file(APPEND ${outfile} "# Full information regarding the install is found in:\n")
-file(GLOB_RECURSE ESMF_mkfile "@ESMF_install@/lib/libO/e*.mk")
-file(APPEND ${outfile} "# "${ESMF_mkfile}"\n")
-
-# make should be detected by CMAKE at some point
-execute_process(
-  COMMAND make 
-  WORKING_DIRECTORY @ESMF_source@/esmf
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make:\n" ${res})
-endif()
-message("Make succeeded.")
diff --git a/CMake/cdat_modules_extra/ESMP_install_step.cmake.in b/CMake/cdat_modules_extra/ESMP_install_step.cmake.in
deleted file mode 100644
index 3d5d01f42..000000000
--- a/CMake/cdat_modules_extra/ESMP_install_step.cmake.in
+++ /dev/null
@@ -1,34 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-file(GLOB_RECURSE ESMP_esmfmkfile "@cdat_EXTERNALS@/lib/libO/*.mk")
-
-set(ENV{ESMFMKFILE} ${ESMP_esmfmkfile})
-foreach( item ${ESMP_esmfmkfile})
-  message("item " ${item})
-endforeach( item ${ESMP_esmfmkfile})
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" generateESMP_Config.py
-  WORKING_DIRECTORY @ESMP_source@
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Install")
-endif()
-
-if(NOT EXISTS @ESMP_source@/src/ESMP_Config.py)
-  message(FATAL_ERROR "ESMP_Config.py not created")
-endif()
-
-file(INSTALL @ESMP_source@ DESTINATION @PYTHON_SITE_PACKAGES@)
-
-if(NOT EXISTS @PYTHON_SITE_PACKAGES@/ESMP/src/ESMP_Config.py)
-  message(FATAL_ERROR "Install process failed")
-endif()
-
-
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in b/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in
deleted file mode 100644
index 823fcdb8a..000000000
--- a/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in
+++ /dev/null
@@ -1,9 +0,0 @@
-# Patch ESMP_LoadESMF.py to allow relative loading of config file
-if(NOT WIN32)
-  execute_process(
-    WORKING_DIRECTORY @ESMP_source@/src
-    COMMAND patch -p1 
-    INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/ESMP.patch
-  )
-endif()
-
diff --git a/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in
deleted file mode 100644
index cee8497ed..000000000
--- a/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in
+++ /dev/null
@@ -1,42 +0,0 @@
-message("Configuring NUMPY:\n@NUMPY_binary@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-
-# As explained in site.cfg.example - See http://projects.scipy.org/numpy/browser/trunk/site.cfg.example
-# Directories listed in variable such as library_dirs or source_dirs should be separated using os.pathsep
-# On windows, the separator is ";" and ":" on unix-like platform
-set(path_sep ":")
-if(WIN32)
-  set(path_sep ";")
-endif()
-
-# As explained in site.cfg.example, the library name without the prefix "lib" should be used.
-# Nevertheless, on windows, only "libf2c" leads to a successful configuration and
-# installation of NUMPY
-set(f2c_libname "f2c")
-if(WIN32)
-  set(f2c_libname "libf2c")
-endif()
-
-# setup the site.cfg file
-file(WRITE "@NUMPY_binary@/site.cfg"
-"
-[blas]
-library_dirs = @cdat_EXTERNALS@/lib${path_sep}@cdat_EXTERNALS@/lib
-libraries = blas,${f2c_libname}
-
-[lapack]
-library_dirs = @cdat_EXTERNALS@/lib
-lapack_libs = lapack
-")
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py config
-  WORKING_DIRECTORY "@NUMPY_binary@"
-  RESULT_VARIABLE res
-  )
-
-if(NOT ${res} EQUAL 0)
-  message(FATAL_ERROR "Error in config of NUMPY")
-endif()
-message("Numpy config worked.")
diff --git a/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in
deleted file mode 100644
index feac845d5..000000000
--- a/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in
+++ /dev/null
@@ -1,19 +0,0 @@
-message("Installing NUMPY:\n@NUMPY_PREFIX_ARGS@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@NUMPY_binary@"
-  RESULT_VARIABLE res
-)
-
-if(NOT ${res} EQUAL 0)
-  message("NUMPY Errors detected: \n${NUMPY_OUT}\n${NUMPY_ERR}")
-  message(FATAL_ERROR "Error in config of NUMPY")
-endif()
-message("Numpy install succeeded.")
diff --git a/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in
deleted file mode 100644
index 94b92ebd2..000000000
--- a/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-    set(ENV{LDFLAGS} "$ENV{LDFLAGS}")
-else()
-    set(ENV{LDFLAGS} "$ENV{LDFLAGS} -shared")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@NUMPY_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE NUMPY_OUT
-  OUTPUT_VARIABLE NUMPY_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("[ERROR] NUMPY Errors detected: \n${NUMPY_OUT}\n${NUMPY_ERR}")
-  message(FATAL_ERROR "[ERROR] Error in config of NUMPY")
-endif()
diff --git a/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in b/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in
deleted file mode 100644
index 4a6e82762..000000000
--- a/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-message("Installing PYLIBXML2:\n@PYLIBXML2_PREFIX_ARGS@")
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@PYLIBXML2_binary@/python"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE PYLIBXML2_OUT
-  OUTPUT_VARIABLE PYLIBXML2_ERR
-)
-
-if(NOT ${res} EQUAL 0)
-  message("libxml2-python bindings Errors detected: \n${PYLIBXML2_OUT}\n${PYLIBXML2_ERR}")
-  message(FATAL_ERROR "Error in config of PYLIBXML2")
-endif()
-message("libxml2-python bindings install succeeded.")
diff --git a/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in b/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in
deleted file mode 100644
index 562cb24ff..000000000
--- a/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in
+++ /dev/null
@@ -1,24 +0,0 @@
-message("Building libxml2 python bindings:\n@PYLIBXML2_binary@")
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-set(cdat_EXTERNALS @cdat_EXTERNALS@)
-configure_file(@cdat_CMAKE_SOURCE_DIR@/cdat_modules_extra/PYLIBXML2_setup.py.in
-  @cdat_BINARY_DIR@/build/PYLIBXML2/python/setup.py)
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@PYLIBXML2_binary@/python"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE PYLIBXML2_OUT
-  OUTPUT_VARIABLE PYLIBXML2_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("libxml2-python bindings Errors detected: \n${PYLIBXML2_OUT}\n${PYLIBXML2_ERR}")
-  message(FATAL_ERROR "Error in config of PYLIBXML2")
-endif()
-message("libxml2_python bindings build worked.")
diff --git a/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in b/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in
deleted file mode 100755
index 22fdbfb9b..000000000
--- a/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/python -u
-#
-# Setup script for libxml2 and libxslt if found
-#
-import sys, os
-from distutils.core import setup, Extension
-
-# Below ROOT, we expect to find include, include/libxml2, lib and bin.
-# On *nix, it is not needed (but should not harm), 
-# on Windows, it is set by configure.js.
-ROOT = r'/usr' 
-
-# Thread-enabled libxml2
-with_threads = 1
-
-# If this flag is set (windows only), 
-# a private copy of the dlls are included in the package.
-# If this flag is not set, the libxml2 and libxslt
-# dlls must be found somewhere in the PATH at runtime.
-WITHDLLS = 1 and sys.platform.startswith('win')
-
-def missing(file):
-    if os.access(file, os.R_OK) == 0:
-        return 1
-    return 0
-
-try:
-    HOME = os.environ['HOME']
-except:
-    HOME="C:"
-
-if WITHDLLS:
-    # libxml dlls (expected in ROOT/bin)
-    dlls = [ 'iconv.dll','libxml2.dll','libxslt.dll','libexslt.dll' ]
-    dlls = map(lambda dll: os.path.join(ROOT,'bin',dll),dlls)
-
-    # create __init__.py for the libxmlmods package
-    if not os.path.exists("libxmlmods"):
-        os.mkdir("libxmlmods")
-        open("libxmlmods/__init__.py","w").close()
-
-    def altImport(s):
-        s = s.replace("import libxml2mod","from libxmlmods import libxml2mod")
-        s = s.replace("import libxsltmod","from libxmlmods import libxsltmod")
-        return s
-
-if sys.platform.startswith('win'):
-    libraryPrefix = 'lib'
-    platformLibs = []
-else:
-    libraryPrefix = ''
-    platformLibs = ["m","z"]
-
-# those are examined to find 
-# - libxml2/libxml/tree.h
-# - iconv.h 
-# - libxslt/xsltconfig.h
-includes_dir = [
-"/usr/include",
-"/usr/local/include",
-"/opt/include",
-os.path.join(ROOT,'include'),
-HOME,
-"@cdat_EXTERNALS@/include"
-];
-
-xml_includes=""
-for dir in includes_dir:
-    if not missing(dir + "/libxml2/libxml/tree.h"):
-        xml_includes=dir + "/libxml2"
-	break;
-
-if xml_includes == "":
-    print "failed to find headers for libxml2: update includes_dir"
-    sys.exit(1)
-
-iconv_includes=""
-for dir in includes_dir:
-    if not missing(dir + "/iconv.h"):
-        iconv_includes=dir
-	break;
-
-if iconv_includes == "":
-    print "failed to find headers for libiconv: update includes_dir"
-    sys.exit(1)
-
-# those are added in the linker search path for libraries
-libdirs = [
-os.path.join(ROOT,'lib'),
-]
-
-xml_files = ["libxml2-api.xml", "libxml2-python-api.xml",
-             "libxml.c", "libxml.py", "libxml_wrap.h", "types.c",
-	     "xmlgenerator.py", "README", "TODO", "drv_libxml2.py"]
-
-xslt_files = ["libxslt-api.xml", "libxslt-python-api.xml",
-             "libxslt.c", "libxsl.py", "libxslt_wrap.h",
-	     "xsltgenerator.py"]
-
-if missing("libxml2-py.c") or missing("libxml2.py"):
-    try:
-	try:
-	    import xmlgenerator
-	except:
-	    import generator
-    except:
-	print "failed to find and generate stubs for libxml2, aborting ..."
-	print sys.exc_type, sys.exc_value
-	sys.exit(1)
-
-    head = open("libxml.py", "r")
-    generated = open("libxml2class.py", "r")
-    result = open("libxml2.py", "w")
-    for line in head.readlines():
-        if WITHDLLS:
-            result.write(altImport(line))
-        else:
-            result.write(line)
-    for line in generated.readlines():
-	result.write(line)
-    head.close()
-    generated.close()
-    result.close()
-
-with_xslt=0
-if missing("libxslt-py.c") or missing("libxslt.py"):
-    if missing("xsltgenerator.py") or missing("libxslt-api.xml"):
-        print "libxslt stub generator not found, libxslt not built"
-    else:
-	try:
-	    import xsltgenerator
-	except:
-	    print "failed to generate stubs for libxslt, aborting ..."
-	    print sys.exc_type, sys.exc_value
-	else:
-	    head = open("libxsl.py", "r")
-	    generated = open("libxsltclass.py", "r")
-	    result = open("libxslt.py", "w")
-	    for line in head.readlines():
-                if WITHDLLS:
-                    result.write(altImport(line))
-                else:
-                    result.write(line)
-	    for line in generated.readlines():
-		result.write(line)
-	    head.close()
-	    generated.close()
-	    result.close()
-	    with_xslt=1
-else:
-    with_xslt=1
-
-if with_xslt == 1:
-    xslt_includes=""
-    for dir in includes_dir:
-	if not missing(dir + "/libxslt/xsltconfig.h"):
-	    xslt_includes=dir + "/libxslt"
-	    break;
-
-    if xslt_includes == "":
-	print "failed to find headers for libxslt: update includes_dir"
-	with_xslt = 0
-
-
-descr = "libxml2 package"
-modules = [ 'libxml2', 'drv_libxml2' ]
-if WITHDLLS:
-    modules.append('libxmlmods.__init__')
-c_files = ['libxml2-py.c', 'libxml.c', 'types.c' ]
-includes= [xml_includes, iconv_includes]
-libs    = [libraryPrefix + "xml2"] + platformLibs
-macros  = []
-if with_threads:
-    macros.append(('_REENTRANT','1'))
-if with_xslt == 1:
-    descr = "libxml2 and libxslt package"
-    if not sys.platform.startswith('win'):
-        #
-        # We are gonna build 2 identical shared libs with merge initializing
-        # both libxml2mod and libxsltmod
-        #
-        c_files = c_files + ['libxslt-py.c', 'libxslt.c']
-        xslt_c_files = c_files
-        macros.append(('MERGED_MODULES', '1'))
-    else:
-        #
-        # On windows the MERGED_MODULE option is not needed
-        # (and does not work)
-        #
-        xslt_c_files = ['libxslt-py.c', 'libxslt.c', 'types.c']
-    libs.insert(0, libraryPrefix + 'exslt')
-    libs.insert(0, libraryPrefix + 'xslt')
-    includes.append(xslt_includes)
-    modules.append('libxslt')
-
-
-extens=[Extension('libxml2mod', c_files, include_dirs=includes,
-                  library_dirs=libdirs, 
-                  libraries=libs, define_macros=macros)] 
-if with_xslt == 1:
-    extens.append(Extension('libxsltmod', xslt_c_files, include_dirs=includes,
-			    library_dirs=libdirs, 
-                            libraries=libs, define_macros=macros))
-
-if missing("MANIFEST"):
-
-    manifest = open("MANIFEST", "w")
-    manifest.write("setup.py\n")
-    for file in xml_files:
-        manifest.write(file + "\n")
-    if with_xslt == 1:
-	for file in xslt_files:
-	    manifest.write(file + "\n")
-    manifest.close()
-
-if WITHDLLS:
-    ext_package = "libxmlmods"
-    if sys.version >= "2.2":
-        base = "lib/site-packages/"
-    else:
-        base = ""
-    data_files = [(base+"libxmlmods",dlls)]
-else:
-    ext_package = None
-    data_files = []
-
-setup (name = "libxml2-python",
-       # On *nix, the version number is created from setup.py.in
-       # On windows, it is set by configure.js
-       version = "2.7.8",
-       description = descr,
-       author = "Daniel Veillard",
-       author_email = "veillard@redhat.com",
-       url = "http://xmlsoft.org/python.html",
-       licence="MIT Licence",
-       py_modules=modules,
-       ext_modules=extens,
-       ext_package=ext_package,
-       data_files=data_files,
-       )
-
-sys.exit(0)
-
diff --git a/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in
deleted file mode 100644
index 00d381497..000000000
--- a/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in
+++ /dev/null
@@ -1,62 +0,0 @@
-message("Configuring SCIPY:\n@SCIPY_binary@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-set(ENV{PYTHONPATH} "@PYTHONPATH@")
-set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}")
-
-# As explained in site.cfg.example - See http://projects.scipy.org/scipy/browser/trunk/site.cfg.example
-# Directories listed in variable such as library_dirs or source_dirs should be separated using os.pathsep
-# On windows, the separator is ";" and ":" on unix-like platform
-set(path_sep ":")
-if(WIN32)
-  set(path_sep ";")
-endif()
-
-# As explained in site.cfg.example, the library name without the prefix "lib" should be used.
-# Nevertheless, on windows, only "libf2c" leads to a successful configuration and
-# installation of SCIPY
-set(f2c_libname "f2c")
-if(WIN32)
-  set(f2c_libname "libf2c")
-endif()
-
-set(_blas_dirs)
-set(_lapack_dirs)
-
-if (CDAT_USE_SYSTEM_LAPACK)
-  foreach(_path ${BLAS_LIBRARIES})
-    get_filename_component(_dir ${_path} PATH)
-    list(APPEND _blas_dirs ${_dir})
-  endforeach()
-
-  foreach(_path ${LAPACK_LIBRARIES})
-    get_filename_component(_dir ${_path} PATH)
-    list(APPEND _lapack_dirs ${_dir})
-  endforeach()
-else()
-  set(_blas_dirs @cdat_EXTERNALS@/lib)
-  set(_lapack_dirs @cdat_EXTERNALS@/lib)
-endif()
-
-# setup the site.cfg file
-file(WRITE "@SCIPY_binary@/site.cfg"
-"
-[blas]
-library_dirs = ${_blas_dirs}
-libraries = blas,${f2c_libname}
-
-[lapack]
-library_dirs = ${_lapack_dirs}
-lapack_libs = lapack
-")
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py config
-  WORKING_DIRECTORY "@SCIPY_binary@"
-  RESULT_VARIABLE res
-  )
-
-if(NOT ${res} EQUAL 0)
-  message(FATAL_ERROR "Error in config of SCIPY")
-endif()
-message("Scipy config worked.")
diff --git a/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in
deleted file mode 100644
index 34a3e9eda..000000000
--- a/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-message("Installing SCIPY:\n@SCIPY_PREFIX_ARGS@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-set(ENV{PYTHONPATH} "@PYTHONPATH@")
-set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@SCIPY_binary@"
-  RESULT_VARIABLE res
-)
-
-if(NOT ${res} EQUAL 0)
-  message("SCIPY Errors detected: \n${SCIPY_OUT}\n${SCIPY_ERR}")
-  message(FATAL_ERROR "Error in config of SCIPY")
-endif()
-message("Scipy install succeeded.")
diff --git a/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in
deleted file mode 100644
index c8d533cb1..000000000
--- a/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in
+++ /dev/null
@@ -1,22 +0,0 @@
-message("Building SCIPY:\n@SCIPY_binary@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-set(ENV{PYTHONPATH} "@PYTHONPATH@")
-set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@SCIPY_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE SCIPY_OUT
-  OUTPUT_VARIABLE SCIPY_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("SCIPY Errors detected: \n${SCIPY_OUT}\n${SCIPY_ERR}")
-  message(FATAL_ERROR "Error in config of SCIPY")
-endif()
-message("Scipy build worked.")
diff --git a/CMake/cdat_modules_extra/basemap_install_step.cmake.in b/CMake/cdat_modules_extra/basemap_install_step.cmake.in
deleted file mode 100644
index 95cb49de7..000000000
--- a/CMake/cdat_modules_extra/basemap_install_step.cmake.in
+++ /dev/null
@@ -1,20 +0,0 @@
-message("Installing basemap:\n@basemap_PREFIX_ARGS@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-set(ENV{GEOS_DIR} "@cdat_EXTERNALS@")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@basemap_binary@"
-  RESULT_VARIABLE res
-)
-
-if(NOT ${res} EQUAL 0)
-  message("basemap Errors detected: \n${basemap_OUT}\n${basemap_ERR}")
-  message(FATAL_ERROR "Error in config of basemap")
-endif()
-message("Numpy install succeeded.")
diff --git a/CMake/cdat_modules_extra/basemap_make_step.cmake.in b/CMake/cdat_modules_extra/basemap_make_step.cmake.in
deleted file mode 100644
index 0789e4dda..000000000
--- a/CMake/cdat_modules_extra/basemap_make_step.cmake.in
+++ /dev/null
@@ -1,20 +0,0 @@
-message("Building basemap:\n@basemap_binary@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-set(ENV{GEOS_DIR} "@cdat_EXTERNALS@")
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@basemap_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE basemap_OUT
-  OUTPUT_VARIABLE basemap_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("basemap Errors detected: \n${basemap_OUT}\n${basemap_ERR}")
-  message(FATAL_ERROR "Error in config of basemap")
-endif()
-message("basemap build worked.")
diff --git a/CMake/cdat_modules_extra/cdat.in b/CMake/cdat_modules_extra/cdat.in
deleted file mode 100755
index 7bfcf620b..000000000
--- a/CMake/cdat_modules_extra/cdat.in
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-# source is not portable whereas . is
-. "@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh"
-python@PYVER@ "$@"
diff --git a/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in b/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in
deleted file mode 100644
index 5f5674fc1..000000000
--- a/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in
+++ /dev/null
@@ -1,18 +0,0 @@
-
-if(NOT APPLE)
-  include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-endif()
-
-execute_process(
-  COMMAND make "${BUILD_ARGS}"
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE  CDAT_ERR
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make")
-endif()
-message("Make succeeded.")
-
diff --git a/CMake/cdat_modules_extra/cdat_common_environment.cmake.in b/CMake/cdat_modules_extra/cdat_common_environment.cmake.in
deleted file mode 100644
index 7a29f8005..000000000
--- a/CMake/cdat_modules_extra/cdat_common_environment.cmake.in
+++ /dev/null
@@ -1,39 +0,0 @@
-message("[INFO] ADDITIONAL CFLAGS ${ADDITIONAL_CFLAGS}")
-set(ENV{PATH} "@SB_BIN_DIR@:@cdat_EXTERNALS@/bin:$ENV{PATH}")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:@cdat_EXTERNALS@/lib64:@cdat_EXTERNALS@/lib/paraview-@PARAVIEW_MAJOR@.@PARAVIEW_MINOR@:$ENV{@LIBRARY_PATH@}")
-if (NOT DEFINED SKIP_LDFLAGS)
- set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib -L@cdat_EXTERNALS@/lib64 @cdat_external_link_directories@ -Wl,-rpath,@cdat_EXTERNALS@/lib64 @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib")
-endif()
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_flags@ @cdat_external_include_directories@ ${ADDITIONAL_CFLAGS}")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS}")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_cxxflags@ ${ADDITIONAL_CXXFLAGS}")
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-set(ENV{FC} "")
-set(ENV{FCFLAGS} "")
-set(ENV{FCLIBS} "")
-set(ENV{F77} "")
-set(ENV{FFLAGS} "")
-set(ENV{FLIBS} "")
-set(ENV{LD_X11} "") # for xgks
-set(ENV{PYTHONPATH} @PYTHONPATH@)
-set(ENV{CC} @CMAKE_C_COMPILER@)
-
-if(APPLE)
-  set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-endif()
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-set(PYTHONUSERBASE @PYTHON_SITE_PACKAGES_PREFIX@)
-#if ("@EGG_INSTALLER@" STREQUAL "PIP")
-#    # Set python userbase so that pip install packages locally
-#    set(PYTHONUSERBASE @CMAKE_INSTALL_PREFIX@)
-#    set(EGG_CMD env @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PYTHONUSERBASE=${PYTHONUSERBASE} @PIP_BINARY@ install --user -v --download-cache @CDAT_PACKAGE_CACHE_DIR@ )
-#    if (NOT "${PIP_CERTIFICATE}" STREQUAL "")
-#        set(EGG_CMD ${EGG_CMD} --cert=${PIP_CERTIFICATE})
-#    endif()
-#else()
-#    set(EGG_CMD env @LD_LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} @EASY_INSTALL_BINARY@ )
-#endif()
-
diff --git a/CMake/cdat_modules_extra/cdat_configure_step.cmake.in b/CMake/cdat_modules_extra/cdat_configure_step.cmake.in
deleted file mode 100644
index 32ecb43f0..000000000
--- a/CMake/cdat_modules_extra/cdat_configure_step.cmake.in
+++ /dev/null
@@ -1,30 +0,0 @@
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-
-if (BASH_CONFIGURE)
-	set(CONFIGURE_SHELL "bash")
-else()
-	set(CONFIGURE_SHELL "sh")
-endif()
-
-if (CONF_PATH_XTRA)
-  message("[INFO] configure is in subdirectory: ${CONF_PATH_XTRA}")
-else()
-  set(CONF_PATH_XTRA ".")
-endif()
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-message("CONFIGURE_ARGS IS ${CONFIGURE_ARGS}")
-message("LD_ARGS IS $ENV{@LIBRARY_PATH@}")
-message("CFLAGS : $ENV{CFLAGS}")
-
-execute_process(
-  COMMAND env CC=$ENV{CC} CFLAGS=$ENV{CFLAGS} LD_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} DYLD_FALLBACK_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PKG_CONFIG=$ENV{PKG_CONFIG} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} ${CONFIGURE_SHELL} ${CONF_PATH_XTRA}/configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in b/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in
deleted file mode 100644
index f8cf8e0fa..000000000
--- a/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in
+++ /dev/null
@@ -1,10 +0,0 @@
-file(READ @cdat_SOURCE_DIR@/Packages/dat/files.txt SAMPLE_FILES)
-string(REPLACE "\n" ";" SAMPLE_LIST ${SAMPLE_FILES})
-foreach(SAMPLE_FILE ${SAMPLE_LIST})
-  STRING(REPLACE "  " ";" DOWNLOAD_LIST ${SAMPLE_FILE})
-  LIST(GET DOWNLOAD_LIST 0 MD5)
-  LIST(GET DOWNLOAD_LIST 1 FILE_NM)
-  message("[INFO] Attempting to download http://uv-cdat.llnl.gov/cdat/sample_data/${FILE_NM} WITH MD5 ${MD5} to @CMAKE_INSTALL_PREFIX@/share/uvcdat/sample_data/${FILE_NM}")
-  file(DOWNLOAD http://uv-cdat.llnl.gov/cdat/sample_data/${FILE_NM} @CMAKE_INSTALL_PREFIX@/share/uvcdat/sample_data/${FILE_NM} EXPECTED_MD5=${MD5})
-endforeach()
-set(res 0)
diff --git a/CMake/cdat_modules_extra/cdat_install_step.cmake.in b/CMake/cdat_modules_extra/cdat_install_step.cmake.in
deleted file mode 100644
index 62fe3fa78..000000000
--- a/CMake/cdat_modules_extra/cdat_install_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND make install ${INSTALL_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in install")
-endif()
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/cdat_make_step.cmake.in b/CMake/cdat_modules_extra/cdat_make_step.cmake.in
deleted file mode 100644
index 59a4f113e..000000000
--- a/CMake/cdat_modules_extra/cdat_make_step.cmake.in
+++ /dev/null
@@ -1,18 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-if(NOT ${make})
-  set(make make)
-endif()
-execute_process(
-  COMMAND env CFLAGS=$ENV{CFLAGS} LDFLAGS=$ENV{LDFLAGS} CPPFLAGS=$ENV{CPPFLAGS} CXXFLAGS=$ENV{CXXFLAG} ${make} -j ${BUILD_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("[ERROR] Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make")
-endif()
-
diff --git a/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in b/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in
deleted file mode 100644
index 87855421a..000000000
--- a/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in
+++ /dev/null
@@ -1,30 +0,0 @@
-
-if(NOT APPLE)
-  set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-endif()
-
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CPPFLAGS@")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CXXFLAGS@")
-set(ENV{CFLAGS} "-w -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CFLAGS@")
-set(ENV{LOCNCCONFIG} "@cdat_EXTERNALS@/bin/nc-config")
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-
-message("Running cdat install with path: " $ENV{PATH})
-
-if(APPLE)
-  set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-endif()
-
-execute_process(
-    COMMAND  "@PYTHON_EXECUTABLE@" @cdat_CMAKE_SOURCE_DIR@/install.py @cdat_xtra_flags@ --enable-pp @SAMPLE_DATA@ @PYTHON_EXTRA_PREFIX@ @CDMS_ONLY@
-  WORKING_DIRECTORY "@WORKING_DIR@"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Python Install. ${res}")
-endif()
-message("Make succeeded.")
-
diff --git a/CMake/cdat_modules_extra/cdat_python_step.cmake.in b/CMake/cdat_modules_extra/cdat_python_step.cmake.in
deleted file mode 100644
index cf32905a1..000000000
--- a/CMake/cdat_modules_extra/cdat_python_step.cmake.in
+++ /dev/null
@@ -1,18 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-if(NOT ${make})
-  set(make make)
-endif()
-
-execute_process(
-  COMMAND ${make} ${BUILD_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make")
-endif()
-message("Make succeeded.")
-
diff --git a/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in b/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in
deleted file mode 100644
index b8dd0172d..000000000
--- a/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in
+++ /dev/null
@@ -1,22 +0,0 @@
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(ENV{CC} mpicc)
-set(ENV{CXX} mpicxx)
-
-message("CONFIGURE_ARGS IS ${CONFIGURE_ARGS}")
-message("LD_ARGS IS $ENV{@LIBRARY_PATH@}")
-message("CFLAGS : $ENV{CFLAGS}")
-
-execute_process(
-  COMMAND env CC=$ENV{CC} CFLAGS=$ENV{CFLAGS} LD_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} DYLD_FALLBACK_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PKG_CONFIG=$ENV{PKG_CONFIG} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/cdat_modules_extra/checked_get.sh.in b/CMake/cdat_modules_extra/checked_get.sh.in
deleted file mode 100755
index 7a38feab4..000000000
--- a/CMake/cdat_modules_extra/checked_get.sh.in
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/usr/bin/env bash
-
-# Does an md5 check between local and remote resource
-# returns 0 (success) iff there is no match and thus indicating that
-# an update is available.
-# USAGE: checked_for_update [file] http://www.foo.com/file
-#
-check_for_update() {
-    local local_file
-    local remote_file
-    if (( $# == 1 )); then
-        remote_file=${1}
-        local_file=$(readlink -f ${1##*/})
-    elif (( $# == 2 )); then
-        local_file="../sources/"${1}
-        remote_file=${2}
-    else
-        echo "function \"checked_for_update\":  Called with incorrect number of args! (fatal)"
-        exit 1
-    fi                      
-    echo "Local file is:",${local_file}
-    [ ! -e ${local_file} ] && echo " WARNING: Could not find local file ${local_file}" && return 0
-    diff <(@MD5PRG@ ${local_file} | tr -s " " | cut -d " " -f @MD5CNT@ ) <(more ../md5s.txt | grep ${1} | tr -s " " | cut -d " " -f 1) >& /dev/null
-    [ $? != 0 ] && echo " Update Available @ ${remote_file}" && return 0
-    echo " ==> ${local_file} is up to date"
-    return 1
-}
-
-# If an update is available then pull it down... then check the md5 sums again!
-#
-#  Yes, this results in 3 network calls to pull down a file, but it
-#  saves total bandwidth and it also allows the updating from the
-#  network process to be cronttab-able while parsimonious with
-#  resources.  It is also very good practice to make sure that code
-#  being executed is the RIGHT code!
-#
-# NOTE: Has multiple return values test for (( $? > 1 )) when looking or errors
-#       A return value fo 1 only means that the file is up-to-date and there
-#       Is no reason to fetch it.
-#
-# USAGE: checked_get [file] http://www.foo.com/file
-#
-checked_get() {
-    check_for_update $@
-    [ $? != 0 ] && return 1
-
-    local local_file
-    local remote_file
-    if (( $# == 1 )); then
-        remote_file=${1}
-        local_file=${1##*/}
-    elif (( $# == 2 )); then
-        local_file="../sources/"${1}
-        remote_file=${2}
-    else
-        echo "function \"checked_get\":  Called with incorrect number of args! (fatal)"
-        exit 1
-    fi                      
-    echo "Local file 2 :",${local_file}
-    if [ -e ${local_file} ]; then
-	cp -v ${local_file} ${local_file}.bak 
-	chmod 600 ${local_file}.bak
-#	return 0
-    fi
-    @HASWGET@ -O ${local_file} ${remote_file}
-    [ $? != 0 ] && echo " ERROR: Problem pulling down [${remote_file}]" && return 2
-    diff <(@MD5PRG@ ${local_file} | tr -s " " | cut -d " " -f @MD5CNT@ ) <(more ../md5s.txt | grep ${1} | tr -s " " | cut -d " " -f 1) >& /dev/null
-#    diff <(md5sum ${local_file} | tr -s " " | cut -d " " -f 1) <(curl ${remote_file}.md5 | tr -s " " | cut -d " " -f 1) >& /dev/null
-    [ $? != 0 ] && echo " WARNING: Could not verify this file!" && return 3
-    echo "[VERIFIED]"
-    return 0
-}
-
-checked_get $@
-echo ${1}" is where i get the tared stuff"
-tar xzf "../sources/"${1}
diff --git a/CMake/cdat_modules_extra/checkout_testdata.cmake b/CMake/cdat_modules_extra/checkout_testdata.cmake
deleted file mode 100644
index d914fa3ca..000000000
--- a/CMake/cdat_modules_extra/checkout_testdata.cmake
+++ /dev/null
@@ -1,256 +0,0 @@
-# Usage:
-# cmake -DGIT_EXECUTABLE=[git executable]
-#       -DTESTDATA_URL=[uvcdat-testdata url]
-#       -DTESTDATA_DIR=[local testdata directory]
-#       -DSOURCE_DIR=[uvcdat source root]
-#       -P checkout_testdata.cmake
-#
-# This script creates and syncs a clone of the uvcdat-testdata directory.
-#
-# In detail:
-#
-# 1) Check if the TESTDATA_DIR exists.
-#    If not, clone the repo and exit.
-# 2) Check if the TESTDATA_DIR is a git repo with TESTDATA_URL as its origin.
-#    If not, abort with a warning message.
-# 3) Check if the TESTDATA_DIR repo is clean.
-#    If not, abort with an warning message.
-# 4) Fetch the current git branch name for the SOURCE_DIR repo.
-#    If the current HEAD is not a named branch, use master.
-# 5) Update the remote branches in the TESTDATA_DIR repo.
-# 6) Check if the desired branch exists in TESTDATA_DIR's origin remote.
-# 7) Check if the desired branch exists in TESTDATA_DIR as a local branch.
-# 8) If the neither the local or remote branch exist, use master.
-# 9) Check out the local <branch> in TESTDATA_DIR repo.
-# 10) If the remote branch exists, or we are using master, run
-#     'git pull origin <branch>:<branch>' to fetch/update the local branch from
-#     the remote.
-#
-# Any failures are handled via non-fatal warnings. This is to allow the project
-# to build when access to the repo is not available.
-
-# 1) Clone and exit if the target directory doesn't exist.
-if(NOT EXISTS "${TESTDATA_DIR}")
-  message("Cloning \"${TESTDATA_URL}\" into \"${TESTDATA_DIR}\"...")
-
-  # Use depth=1 to avoid fetching the full history. Use "git pull --unshallow"
-  # to backfill the history if needed.
-  # --no-single-branch fetches the tip of all remote branches -- this is needed
-  # for auto-updating the testdata when the source branch changes.
-  execute_process(COMMAND
-    "${GIT_EXECUTABLE}"
-      clone --depth=1 --no-single-branch "${TESTDATA_URL}" "${TESTDATA_DIR}"
-    RESULT_VARIABLE RESULT
-    ERROR_VARIABLE OUTPUT
-    OUTPUT_VARIABLE OUTPUT)
-
-  string(STRIP "${OUTPUT}" OUTPUT)
-
-  message("${OUTPUT}")
-
-  if(NOT RESULT EQUAL 0)
-    message("Could not clone test data repo! "
-            "Baseline images will not be available.")
-    return()
-  endif()
-endif()
-
-# bots merge master in and mess the following, always rechecking master
-# bots check out the correct branches
-# following keyword skips the branch checking
-if (CDAT_CHECKOUT_BASELINE_MATCHING_BRANCH)
-    message("[INFO] Trying to find matching branch on baseline repo")
-    # 2) Is TESTDATA_DIR a clone of TESTDATA_URL?
-    execute_process(COMMAND
-      "${GIT_EXECUTABLE}" config --get remote.origin.url
-      WORKING_DIRECTORY "${TESTDATA_DIR}"
-      RESULT_VARIABLE RESULT
-      ERROR_VARIABLE OUTPUT
-      OUTPUT_VARIABLE OUTPUT)
-
-    if(NOT RESULT EQUAL 0)
-      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
-              "Directory exists and is not a git repository. "
-              "Baseline images may be out of date.")
-      return()
-    endif()
-
-    string(STRIP "${OUTPUT}" OUTPUT)
-
-    if(NOT "${TESTDATA_URL}" STREQUAL "${OUTPUT}")
-      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
-              "Directory is a git clone of \"${OUTPUT}\", not \"${TESTDATA_URL}\". "
-              "Baseline images may be out of date.")
-      return()
-    endif()
-
-    # 3) Is the current testdata repo clean? Don't want to clobber any local mods.
-    # Update the index first:
-    execute_process(COMMAND
-      "${GIT_EXECUTABLE}" update-index -q --refresh
-      WORKING_DIRECTORY "${TESTDATA_DIR}"
-      RESULT_VARIABLE RESULT
-      ERROR_VARIABLE OUTPUT
-      OUTPUT_VARIABLE OUTPUT)
-
-    if(NOT RESULT EQUAL 0)
-      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
-              "Error updating current index with 'git update-index -q --refresh':\n."
-              "${OUTPUT}\n"
-              "Baseline images may be out of date.")
-      return()
-    endif()
-
-    # Now check if the index is dirty:
-    execute_process(COMMAND
-      "${GIT_EXECUTABLE}" diff-index --name-only HEAD "--"
-      WORKING_DIRECTORY "${TESTDATA_DIR}"
-      RESULT_VARIABLE RESULT
-      ERROR_VARIABLE OUTPUT
-      OUTPUT_VARIABLE OUTPUT)
-
-    if(NOT RESULT EQUAL 0)
-      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
-              "Error checking current index with 'git diff-index --name-only HEAD --':\n."
-              "${OUTPUT}\n"
-              "Baseline images may be out of date.")
-      return()
-    endif()
-
-    string(STRIP "${OUTPUT}" OUTPUT)
-
-    if(NOT "${OUTPUT}" STREQUAL "")
-      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
-              "Current checkout is not clean. The following files have modifications:\n"
-              "${OUTPUT}\n"
-              "Baseline images may be out of date.")
-      return()
-    endif()
-
-    # 4) Get the current branch name of the source repo.
-    execute_process(COMMAND
-      "${GIT_EXECUTABLE}" rev-parse --abbrev-ref HEAD
-      WORKING_DIRECTORY "${SOURCE_DIR}"
-      RESULT_VARIABLE RESULT
-      ERROR_VARIABLE OUTPUT
-      OUTPUT_VARIABLE OUTPUT)
-
-    if(NOT RESULT EQUAL 0)
-      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
-              "Cannot determine current branch name of source directory. "
-              "Baseline images may be out of date.")
-      return()
-    endif()
-
-    string(STRIP "${OUTPUT}" BRANCH)
-
-    # If BRANCH is "HEAD", we're not on a named branch. Just use master in that
-    # case.
-    if("${BRANCH}" STREQUAL "HEAD")
-      message("The current source directory at '${SOURCE_DIR}' is not on a named "
-              "branch. Using the 'master' branch of the testdata repo.")
-      set(BRANCH "master")
-    endif()
-
-    # 5) Update the remote branches available on the testdata repo.
-    execute_process(COMMAND
-      "${GIT_EXECUTABLE}" fetch --depth=1
-      WORKING_DIRECTORY "${TESTDATA_DIR}"
-      RESULT_VARIABLE RESULT
-      ERROR_VARIABLE OUTPUT
-      OUTPUT_VARIABLE OUTPUT)
-
-    if(NOT RESULT EQUAL 0)
-      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
-              "Error updating remote branches with "
-              "'git fetch --depth=1':\n."
-              "${OUTPUT}\n"
-              "Baseline images may be out of date.")
-      return()
-    endif()
-
-    # 6) Check if the desired branch exists in TESTDATA_DIR's origin remote.
-    execute_process(COMMAND
-      "${GIT_EXECUTABLE}" branch -a --list "*${BRANCH}"
-      WORKING_DIRECTORY "${TESTDATA_DIR}"
-      RESULT_VARIABLE RESULT
-      ERROR_VARIABLE OUTPUT
-      OUTPUT_VARIABLE OUTPUT)
-
-    if(NOT RESULT EQUAL 0)
-      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
-              "Error obtaining full branch list:\n${OUTPUT}"
-              "Baseline images may be out of date.")
-      return()
-    endif()
-
-    message("Testing if remote branch 'origin/${BRANCH}' exists...")
-    string(FIND "${OUTPUT}" " remotes/origin/${BRANCH}\n" POS)
-    if(NOT POS EQUAL -1)
-      message("Remote branch exists.")
-      set(REMOTE_EXISTS "YES")
-    else()
-      message("Remote branch does not exist.")
-      set(REMOTE_EXISTS "NO")
-    endif()
-
-    # 7) Check if the desired branch exists locally:
-    message("Testing if local branch '${BRANCH}' exists...")
-    string(FIND "${OUTPUT}" " ${BRANCH}\n" POS) # Leading space in regex intended
-    if(NOT POS EQUAL -1)
-      message("Local branch exists.")
-      set(LOCAL_EXISTS "YES")
-    else()
-      message("Local branch does not exist.")
-      set(LOCAL_EXISTS "NO")
-    endif()
-
-    # 8) If the neither the local or remote branch exist, use master.
-    if(NOT REMOTE_EXISTS AND NOT LOCAL_EXISTS)
-      set(BRANCH "master")
-      set(REMOTE_EXISTS "YES")
-      set(LOCAL_EXISTS "YES")
-    endif()
-
-    # 9) Check out the desired branch in TESTDATA_DIR repo.
-    message("Checking out branch '${BRANCH}' in repo '${TESTDATA_DIR}'.")
-    execute_process(COMMAND
-      "${GIT_EXECUTABLE}" checkout "${BRANCH}"
-      WORKING_DIRECTORY "${TESTDATA_DIR}"
-      RESULT_VARIABLE RESULT
-      ERROR_VARIABLE OUTPUT
-      OUTPUT_VARIABLE OUTPUT)
-
-    if(NOT RESULT EQUAL 0)
-      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
-              "Error executing 'git checkout ${BRANCH}':\n."
-              "${OUTPUT}\n"
-              "Baseline images may be out of date.")
-      return()
-    endif()
-
-    # 10) If the remote branch exists, or we are using master, run
-    #     'git pull origin <branch>:<branch>' to fetch/update the local branch from
-    #     the remote.
-    if(REMOTE_EXISTS)
-      message("Updating \"${TESTDATA_DIR}:${BRANCH}\" from "
-              "\"${TESTDATA_URL}:${BRANCH}\"...")
-      execute_process(COMMAND
-        "${GIT_EXECUTABLE}" pull origin "${BRANCH}:${BRANCH}"
-        WORKING_DIRECTORY "${TESTDATA_DIR}"
-        RESULT_VARIABLE RESULT
-        ERROR_VARIABLE OUTPUT
-        OUTPUT_VARIABLE OUTPUT)
-
-      string(STRIP "${OUTPUT}" OUTPUT)
-
-      message("${OUTPUT}")
-
-      if(NOT RESULT EQUAL 0)
-        message("Error updating testdata repo! "
-                "Baseline images may be out of date.")
-      endif()
-    endif()
-else()
-    message("[INFO] NOT trying to switch branch on baseline (only bots should turn this on)")
-endif()
diff --git a/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in b/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in
deleted file mode 100644
index a3872f373..000000000
--- a/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in
+++ /dev/null
@@ -1,17 +0,0 @@
-#set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}")
-#set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-
-#include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-#message(CONGIFURE_ARGS IS ${CONFIGURE_ARGS})
-
-execute_process(
-  COMMAND sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/cdat_modules_extra/configobj_build_step.cmake.in b/CMake/cdat_modules_extra/configobj_build_step.cmake.in
deleted file mode 100644
index 5edd0af43..000000000
--- a/CMake/cdat_modules_extra/configobj_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@configobj_source_dir@"
-)
diff --git a/CMake/cdat_modules_extra/curses_patch_step.cmake.in b/CMake/cdat_modules_extra/curses_patch_step.cmake.in
deleted file mode 100644
index 04c28afdc..000000000
--- a/CMake/cdat_modules_extra/curses_patch_step.cmake.in
+++ /dev/null
@@ -1,5 +0,0 @@
-execute_process(
-  WORKING_DIRECTORY @curses_source@
-  COMMAND patch -Np1
-  INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/curses_gcc5.patch
-)
diff --git a/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in b/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in
deleted file mode 100644
index 7e0987908..000000000
--- a/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in
+++ /dev/null
@@ -1,78 +0,0 @@
-# EZGet Makefile
-#
-# Usage:
-#
-# Change DEBUG as needed.
-# Check the paths specified here for whether they are right for you.
-# Provide a path to fcddrs.h, or copy it (it's in cdat/libcdms/include)
-# make
-#
-#--------------------------------------------------------------------
-LIBNAME = ezget
-#jfp was CDMSLIB = /usr/local/lib
-#jfp was CDMSINC = /usr/local/include
-CDMSLIB = @cdat_EXTERNALS@/lib
-CDMSINC = @cdat_EXTERNALS@/include
-DEBUG = -O 
-# DEBUG = -g -save-temps -O0
-# Requires Absoft FORTRAN
-FC = gfortran
-CC = gcc
-#ARCHOPT = -arch x86_64
-#ARCHOPT = -arch i386
-ARCHOPT = -m64 -mtune=native
-# FOPTS = -fcray-pointer $(ARCHOPT) -W
-FOPTS = -fcray-pointer $(ARCHOPT) -W -Dgfortran -Dsun -D__linux -D__linux_gfortran -fpic -fPIC -I ../../libdrs/lib -I ../include -I @cdat_EXTERNALS@/include
-FFLAGS = $(DEBUG) $(FOPTS)
-INSTALL_LIB = @cdat_EXTERNALS@/lib
-INSTALL_INC = @cdat_EXTERNALS@/include
-CPPFLAGS = $(ARCHOPT)
-CPP = cpp
-
-FOBJECTS = Src/$(LIBNAME).o
-FINCLUDES = drsdef.h drscom.h cycle.h
-# FINCLUDES = 
-FSOURCES = $(FOBJECTS:.o=.F)
-
-COBJECTS = 
-CINCLUDES = drscdf.h
-CSOURCES = $(COBJECTS:.o=.c)
-
-OBJECTS = $(FOBJECTS) $(COBJECTS)
-SOURCES = $(FSOURCES) $(CSOURCES)
-INCLUDES = $(FINCLUDES) $(CINCLUDES)
-#--------------------------------------------------------------------
-
-all: lib$(LIBNAME).a 
-#lib$(LIBNAME).so
-
-shared: drsdef.h lib$(LIBNAME).so
-
-lib$(LIBNAME).a: $(OBJECTS)
-	ar rv lib$(LIBNAME).a $?
-
-lib$(LIBNAME).so: $(OBJECTS)
-	$(CC) $(ARCHOPT) -lgfortran -L@cdat_EXTERNALS@/lib -L$(CDMSLIB) -I$(CDMSINC) -lcdms  -shared -o lib$(LIBNAME).so $(OBJECTS)
-
-#--------------------------------------------------------------------
-
-install: lib$(LIBNAME).a
-	cp lib$(LIBNAME).a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/lib$(LIBNAME).a
-#	cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h
-
-#--------------------------------------------------------------------------
-
-# Miscellaneous junk
-
-tags:
-	etags $(SOURCES) $(INCLUDES)
-
-clean:
-	-rm -f Src/*.o
-	-rm -f *~
-	-rm -f core
-	
-.SUFFIXES: .F .o
-
-.F.o:
-	cd Src ; $(FC) $(FFLAGS) -c ../$<
diff --git a/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake b/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake
deleted file mode 100644
index 996ae0a28..000000000
--- a/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake
+++ /dev/null
@@ -1,25 +0,0 @@
-# used variables:
-#   FILE_URL    The url where the file is available
-#   FILE_PATH   The destination for the file
-#   FILE_MD5    The expected md5
-
-# check if the file already exists
-if(EXISTS "${FILE_PATH}")
-  # check md5sum
-  file(MD5 "${FILE_PATH}" output_md5)
-
-  if(${output_md5} STREQUAL ${FILE_MD5})
-    return() # nothing to do
-  endif()
-endif()
-
-# add a build target to download the file
-file(DOWNLOAD "${FILE_URL}" "${FILE_PATH}" STATUS stat)
-list(GET stat 0 exit_code)
-list(GET stat 1 msg)
-
-# fail on error
-if(NOT exit_code EQUAL 0)
-  file(REMOVE "${FILE_PATH}")
-  message(FATAL_ERROR "Error downloading: ${msg}")
-endif()
diff --git a/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in b/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in
deleted file mode 100644
index d0ef31f29..000000000
--- a/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in
+++ /dev/null
@@ -1,14 +0,0 @@
-
-if(APPLE)
-  set(ENV{MACOSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-  set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-set(ENV{PATH} $ENV{PATH}:@cdat_EXTERNALS@/bin)
-
-EXECUTE_PROCESS(
-  #    COMMAND sh configure --disable-static --disable-network --disable-zlib --disable-ffserver --disable-ffplay --disable-decoders --enable-shared --enable-swscale --prefix=@ffmpeg_install@
-  COMMAND make
-  WORKING_DIRECTORY "@ffmpeg_source@"
-  RESULT_VARIABLE rv
-  )
diff --git a/CMake/cdat_modules_extra/git_clone.sh.in b/CMake/cdat_modules_extra/git_clone.sh.in
deleted file mode 100755
index 05bb4d3fd..000000000
--- a/CMake/cdat_modules_extra/git_clone.sh.in
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
-
-cd "@CMAKE_INSTALL_PREFIX@"
-"@GIT_EXECUTABLE@" clone --no-checkout --depth 1 -b @BRANCH@ @GIT_URL@ "@GIT_TARGET@"
-cd "@GIT_TARGET@"
-if "@GIT_EXECUTABLE@" rev-parse --symbolic-full-name @BRANCH@ | grep -q '^refs/tags/'; then
-    "@GIT_EXECUTABLE@" checkout @BRANCH@
-else
-    "@GIT_EXECUTABLE@" checkout origin/@BRANCH@
-fi
diff --git a/CMake/cdat_modules_extra/git_update.sh.in b/CMake/cdat_modules_extra/git_update.sh.in
deleted file mode 100755
index a8b3b7954..000000000
--- a/CMake/cdat_modules_extra/git_update.sh.in
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/bin/sh
-cd "@SOURCE_DIR@"
-"@GIT_EXECUTABLE@" fetch origin --prune
-if "@GIT_EXECUTABLE@" rev-parse --symbolic-full-name @BRANCH@ | grep -q '^refs/tags/'; then
-    "@GIT_EXECUTABLE@" checkout -f @BRANCH@
-else
-    "@GIT_EXECUTABLE@" checkout -f origin/@BRANCH@
-fi
diff --git a/CMake/cdat_modules_extra/gsw_build_step.cmake.in b/CMake/cdat_modules_extra/gsw_build_step.cmake.in
deleted file mode 100644
index 1a344eb81..000000000
--- a/CMake/cdat_modules_extra/gsw_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@gsw_source_dir@"
-)
diff --git a/CMake/cdat_modules_extra/h5py_build_step.cmake.in b/CMake/cdat_modules_extra/h5py_build_step.cmake.in
deleted file mode 100644
index 47e740028..000000000
--- a/CMake/cdat_modules_extra/h5py_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@h5py_source_dir@"
-)
diff --git a/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in b/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in
deleted file mode 100644
index 588e26535..000000000
--- a/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in
+++ /dev/null
@@ -1,10 +0,0 @@
-execute_process(
-  COMMAND
-  "@CMAKE_COMMAND@" -E copy_if_different @cdat_SOURCE_DIR@/exsrc/src/h5diff_correct_ansi.c ${WORKING_DIR}/tools/lib/h5diff.c
-  RESULT_VARIABLE errcode
-)
-if("${errcode}" STREQUAL "0")
-    message(STATUS "h5diff.c replaced")
-else()
-    message(FATAL_ERROR "Replacing h5diff.c failed: ${errcode}")
-endif()
diff --git a/CMake/cdat_modules_extra/install.py.in b/CMake/cdat_modules_extra/install.py.in
deleted file mode 100644
index 58398539d..000000000
--- a/CMake/cdat_modules_extra/install.py.in
+++ /dev/null
@@ -1,945 +0,0 @@
-import sys, getopt, os, shutil, string, glob, tempfile, hashlib
-from distutils.core import setup
-
-build_dir = os.getcwd()
-logdir = os.path.join(build_dir, 'logs').replace(" ","\ ")
-
-# Create logs directory if it does not exits
-if not os.path.exists(logdir):
-  os.makedirs(logdir)
-
-base_build_dir = os.path.join(build_dir, '..')
-os.environ['BUILD_DIR'] = build_dir
-
-current_dir = os.path.dirname(__file__)
-src_dir = os.path.join(current_dir, '..')
-installation_script_dir = os.path.join(src_dir, 'installation')
-here = installation_script_dir
-
-sys.path.append(src_dir)
-sys.path.append(build_dir)
-sys.path.append(installation_script_dir)
-
-control_script_path = os.path.join(installation_script_dir, 'control.py')
-execfile(control_script_path, globals(), globals())
-
-global target_prefix
-target_prefix = sys.prefix
-for i in range(len(sys.argv)):
-    a = sys.argv[i]
-    if a=='--prefix':
-        target_prefix=sys.argv[i+1]
-    sp = a.split("--prefix=")
-    if len(sp)==2:
-        target_prefix=sp[1]
-
-try:
-    os.makedirs(os.path.join(target_prefix,'bin'))
-except Exception,err:
-    pass
-try:
-    os.makedirs(os.path.join(target_prefix,'include'))
-except Exception,err:
-    pass
-try:
-    os.makedirs(os.path.join(target_prefix,'lib'))
-except Exception,err:
-    pass
-
-cdms_include_directory = os.path.join(target_prefix, 'include', 'cdms')
-cdms_library_directory = os.path.join(target_prefix, 'lib')
-
-version_file_path = os.path.join(base_build_dir, 'version')
-Version = open(version_file_path).read().strip()
-version = Version.split(".")
-for i in range(len(version)):
-    try:
-        version[i]=int(version[i])
-    except:
-        version[i]=version[i].strip()
-
-def norm(path):
-    "normalize a path"
-    return os.path.normpath(os.path.abspath(os.path.expanduser(path)))
-
-def testlib (dir, name):
-    "Test if there is a library in a certain directory with basic name."
-    if os.path.isfile(os.path.join(dir, 'lib' + name + '.a')):
-        return 1
-    if os.path.isfile(os.path.join(dir, 'lib' + name + '.so')):
-        return 1
-    if os.path.isfile(os.path.join(dir, 'lib' + name + '.sl')):
-        return 1
-    return 0
-
-def configure (configuration_files):
-    global action, target_prefix
-    options={}
-    execfile(os.path.join(installation_script_dir, 'standard.py'), globals(), options)
-    for file in configuration_files:
-        print >>sys.stderr, 'Reading configuration:', file
-        execfile(os.path.join(src_dir, file), globals(), options)
-
-    # Retrieve action
-    action = options['action']
-    # Establish libraries and directories for CDUNIF/CDMS
-    netcdf_directory = norm(options.get('netcdf_directory',os.environ['EXTERNALS']))
-    netcdf_include_directory = norm(options.get('netcdf_include_directory',
-                                           os.path.join(os.environ['EXTERNALS'],'include')))
-
-    #hdf5_library_directory = norm(os.path.join(os.environ.get('HDF5LOC',os.path.join(os.environ["EXTERNALS"])), 'lib'))
-    if (sys.platform in ['mac',]):
-        cdunif_library_directories = [cdms_library_directory,"/usr/X11R6/lib"]
-    else:
-        cdunif_library_directories = [cdms_library_directory]
-
-    options['CDMS_INCLUDE_DAP']="yes"
-##     if options.get('CDMS_INCLUDE_DAP','no')=='yes':
-##         netcdf_include_directory=norm(os.path.join(options['CDMS_DAP_DIR'],'include','libnc-dap'))
-##         netcdf_library_directory=norm(os.path.join(options['CDMS_DAP_DIR'],'lib'))
-##         dap_include=[norm(os.path.join(options['CDMS_DAP_DIR'],'include','libdap'))]
-##         dap_lib_dir=[norm(os.path.join(options['CDMS_DAP_DIR'],'lib'))]
-## ##         dap_lib=['dap','stdc++','nc-dap','dap','curl','z','ssl','crypto','dl','z','xml2','rx','z']
-## ##         if (sys.platform in ['linux2',]):
-## ##            dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','xml2']
-## ##         elif (sys.platform in ['darwin',]):
-## ##            dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','pthread','xml2','z']
-##         dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','pthread','xml2']
-##         dap_lib = ['stdc++']
-##         dap_lib_dir=[]
-##         Libs=os.popen(norm(os.path.join(options['CDMS_DAP_DIR'],'bin','ncdap-config'))+' --libs').readlines()
-##         Libs+=os.popen(norm(os.path.join(options['CDMS_DAP_DIR'],'bin','dap-config'))+' --client-libs').readlines()
-##         for libs in Libs:
-##             libs=libs.split()
-##             for l in libs:
-##                 if l[:2]=='-l':
-##                     dap_lib.append(l[2:])
-##                 elif l[:2]=='-L'and l[2:] not in dap_lib_dir:
-##                     dap_lib_dir.append(l[2:])
-##         dap_lib.append("dap")
-##         dap_lib.append("xml2")
-##         netcdfname='nc-dap'
-## ##         print 'daplib:',dap_lib
-##     else:
-    if 1:
-        ## dap_include = [os.path.join(hdf5path,"include"),os.path.join(os.environ['EXTERNALS'],'include')]
-        dap_include = []
-        Dirs=os.popen('%s --cflags' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0]
-        for d in Dirs.split():
-            if d[:2]=="-I":
-                dnm = d[2:]
-                if not dnm in dap_include:
-                    dap_include.append(dnm)
-        dap_lib = ['stdc++']
-        dap_lib = []
-        dap_lib_dir=[]
-        ## Libs=os.popen(norm(os.path.join(os.environ['EXTERNALS'],'bin','nc-config'))+' --libs').readlines()
-        Libs=os.popen('%s --libs' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()
-        for libs in Libs:
-            libs=libs.split()
-            for l in libs:
-                if l[:2]=='-l':
-                    dap_lib.append(l[2:])
-                elif l[:2]=='-L'and l[2:] not in dap_lib_dir:
-                    if l[-3:]!='lib':
-                        l+='/lib'
-                    dap_lib_dir.append(l[2:])
-
-##         if enable_netcdf3==True:
-##             dap_include=[]
-##             dap_lib_dir=[]
-##         else:
-##             dap_include = [os.path.join(hdf5path,"include"),os.path.join(os.environ['EXTERNALS'],'include')]
-##             dap_lib_dir = [os.path.join(hdf5path,"lib"),os.path.join(os.environ['EXTERNALS'],'lib')]
-##         if enable_netcdf3 is True:
-##             daplib=[]
-##         else:
-##             dap_lib=['hdf5_hl','hdf5','m','z','dap','nc-dap','dapclient','curl','stdc++','xml2']
-##             # for now turn off the dap crap
-##             dap_lib=['hdf5_hl','hdf5','m','z']
-        netcdfname='netcdf'
-
-    if options.get('CDMS_INCLUDE_HDF','no')=='yes':
-        hdf_libraries = ['mfhdf','df','jpeg','z']
-        hdf_include=[norm(os.path.join(options['CDMS_HDF_DIR'],'include'))]
-        hdf_lib_dir=[norm(os.path.join(options['CDMS_HDF_DIR'],'lib'))]
-    else:
-        hdf_libraries = []
-        hdf_include=[]
-        hdf_lib_dir=[]
-
-    PNG_VERSION=""
-    if (sys.platform in ['darwin']):
-        PNG_VERSION="15"
-
-    grib2_libraries = ["grib2c","png"+PNG_VERSION,"jasper"]
-    ## if netcdf_library_directory not in cdunif_library_directories:
-    ##     cdunif_library_directories.append(netcdf_library_directory)
-    cdunif_include_directories = [cdms_include_directory]
-    ## if netcdf_include_directory not in cdunif_include_directories:
-    ##     cdunif_include_directories.append(netcdf_include_directory)
-
-
-    if sys.platform == "sunos5":
-        cdunif_include_directories.append('/usr/include')
-
-    drs_file = "@cdatEXTERNALS@/lib/libdrs.a"
-
-    # Establish location of X11 include and library directories
-    if options['x11include'] or options['x11libdir']:
-        if options['x11include']:
-            options['x11include'] = norm(options['x11include'])
-        if options['x11libdir']:
-            options['x11libdir'] = norm(options['x11libdir'])
-    else:
-        for x in x11search:
-            if os.path.isdir(x):
-                if options['x11include']:
-                    options['x11include'].append(os.path.join(x, 'include'))
-                    options['x11libdir'].append(os.path.join(x, 'lib'))
-                else:
-                    options['x11include']=[norm(os.path.join(x, 'include'))]
-                    options['x11libdir']=[norm(os.path.join(x, 'lib'))]
-        else:
-            for w in x11OSF1lib:
-                if testlib(w, 'X11'):
-                    if not options['x11libdir']:
-                        options['x11libdir'] = [norm(w),]
-                    else:
-                        options['x11libdir'].append(norm(w))
-            for w in x11OSF1include:
-                if os.path.isdir(w):
-                    if not options['x11include']:
-                        options['x11include'] = [norm(w),]
-                    else:
-                        options['x11include'].append(norm(w))
-    # Check that we have both set correctly.
-    if not (options['x11include'] and \
-            options['x11libdir']
-            ):
-        print >>sys.stderr, """
-Failed to find X11 directories. Please see README.txt for instructions.
-"""
-        print options
-        raise SystemExit, 1
-
-    # Write cdat_info.py
-    os.chdir(installation_script_dir)
-    print 'Version is: ',Version
-    f = open(os.path.join(build_dir, 'cdat_info.py'), 'w')
-    sys.path.append(build_dir)
-    print >> f,"""
-Version = '%s'
-ping_checked = False
-check_in_progress = False
-def version():
-    return %s
-""" % (Version,str(version))
-    if options.get('CDMS_INCLUDE_DRS','no') == 'yes':
-        print >>f, """
-def get_drs_dirs ():
-    #import Pyfort, os
-    import os
-    #c = Pyfort.get_compiler('default')
-    drs_dir, junk = os.path.split(drs_file)
-    #return c.dirlist + [drs_dir]
-    return [drs_dir,"/usr/local/gfortran/lib","/usr/local/lib"]
-
-def get_drs_libs ():
-    #import Pyfort
-    #c = Pyfort.get_compiler('default')
-    return ['drs','gfortran'] + %s
-""" % repr(options.get("COMPILER_EXTRA_LIBS",[]))
-    else:
-        print >>f, """
-def get_drs_dirs ():
-    return []
-def get_drs_libs():
-    return []
-"""
-
-    print >>f, """\
-
-sleep=60 #minutes  (int required)
-
-actions_sent = {}
-
-SOURCE = 'CDAT'
-
-def get_version():
-  return Version
-
-def get_prefix():
-  import os,sys
-  try:
-    uv_setup_pth = os.environ["UVCDAT_SETUP_PATH"]
-    if os.uname()[0] == "Darwin":
-      uv_setup_pth = os.path.join(uv_setup_pth,
-          "Library","Frameworks","Python.framework","Versions",
-          "%%i.%%i" %% (sys.version_info.major,sys.version_info.minor)
-          )
-    return uv_setup_pth
-  except KeyError:
-    raise RuntimeError("UVCDAT environment not configured. Please source the setup_runtime script.")
-
-def get_sampledata_path():
-  import os
-  try:
-    return os.path.join(os.environ["UVCDAT_SETUP_PATH"],
-                        "share", "uvcdat", "sample_data")
-  except KeyError:
-    raise RuntimeError("UVCDAT environment not configured. Please source the setup_runtime script.")
-
-def runCheck():
-    import cdat_info,os
-    if cdat_info.ping_checked is False:
-        check_in_progress = True
-        val = None
-        envanom = os.environ.get("UVCDAT_ANONYMOUS_LOG",None)
-        if envanom is not None:
-            if envanom.lower() in ['true','yes','y','ok']:
-                val = True
-            elif envanom.lower() in ['false','no','n','not']:
-                val = False
-            else:
-                import warnings
-                warnings.warn("UVCDAT logging environment variable UVCDAT_ANONYMOUS_LOG should be set to 'True' or 'False', you have it set to '%%s', will be ignored" %% envanom)
-        if val is None: # No env variable looking in .uvcdat
-            fanom = os.path.join(os.environ["HOME"],".uvcdat",".anonymouslog")
-            if os.path.exists(fanom):
-                f=open(fanom)
-                for l in f.readlines():
-                    sp = l.strip().split("UVCDAT_ANONYMOUS_LOG:")
-                    if len(sp)>1:
-                        try:
-                            val = eval(sp[1])
-                        except:
-                            pass
-                f.close()
-
-        reload(cdat_info)
-        return val
-
-def askAnonymous(val):
-        import cdat_info,os
-        while cdat_info.ping_checked is False and not val in [True, False]: # couldn't get a valid value from env or file
-            val2 = raw_input("Allow anonymous logging usage to help improve UV-CDAT? (you can also set the environment variable UVCDAT_ANONYMOUS_LOG to yes or     no) [yes/no]")
-            if val2.lower() in ['y','yes','ok']:
-                val = True
-            elif val2.lower() in ['n','no','not']:
-                val = False
-            if val in [True,False]: # store result for next time
-                try:
-                    fanom = os.path.join(os.environ["HOME"],".uvcdat",".anonymouslog")
-                    if not os.path.exists(os.path.join(os.environ["HOME"],".uvcdat")):
-                        os.makedirs(os.path.join(os.environ["HOME"],".uvcdat"))
-                    f=open(fanom,"w")
-                    print >>f, "#Store information about allowing UVCDAT anonymous logging"
-                    print >>f, "# Need sto be True or False"
-                    print >>f, "UVCDAT_ANONYMOUS_LOG: %%s" %% val
-                    f.close()
-                except Exception,err:
-                    pass
-        else:
-            if cdat_info.ping_checked:
-                val = cdat_info.ping
-        cdat_info.ping = val
-        cdat_info.ping_checked = True
-        check_in_progress = False
-
-def pingPCMDIdb(*args,**kargs):
-    import cdat_info,os
-    while cdat_info.check_in_progress:
-       reload(cdat_info)
-    val = cdat_info.runCheck()
-    if val is False:
-      cdat_info.ping_checked = True
-      cdat_info.ping = False
-      return
-    try:
-      if not cdat_info.ping:
-        return
-    except:
-      pass
-    cdat_info.askAnonymous(val)
-    import threading
-    kargs['target']=pingPCMDIdbThread
-    kargs['args']=args
-    t = threading.Thread(**kargs)
-    t.start()
-
-def pingPCMDIdbThread(*args,**kargs):
-    import threading
-    kargs['target']=submitPing
-    kargs['args']=args
-    t = threading.Thread(**kargs)
-    t.start()
-    import time
-    time.sleep(5) # Lets wait 5 seconds top for this ping to work
-    if t.isAlive():
-        try:
-            t._Thread__stop()
-        except:
-            pass
-def submitPing(source,action,source_version=None):
-  try:
-    import urllib2,sys,os,cdat_info,hashlib,urllib
-    if source in ['cdat','auto',None]:
-      source = cdat_info.SOURCE
-    if cdat_info.ping:
-      if not source in actions_sent.keys():
-        actions_sent[source]=[]
-      elif action in actions_sent[source]:
-        return
-      else:
-        actions_sent[source].append(action)
-      data={}
-      uname = os.uname()
-      data['platform']=uname[0]
-      data['platform_version']=uname[2]
-      data['hashed_hostname']=hashlib.sha1(uname[1]).hexdigest()
-      data['source']=source
-      if source_version is None:
-        data['source_version']=cdat_info.get_version()
-      else:
-        data['source_version']=source_version
-      data['action']=action
-      data['sleep']=cdat_info.sleep
-      data['hashed_username']=hashlib.sha1(os.getlogin()).hexdigest()
-      urllib2.urlopen('http://uv-cdat.llnl.gov/UVCDATUsage/log/add/',urllib.urlencode(data))
-  except Exception,err:
-    pass
-
-CDMS_INCLUDE_DAP = %s
-CDMS_DAP_DIR = %s
-CDMS_HDF_DIR = %s
-CDMS_GRIB2LIB_DIR = %s
-CDMS_INCLUDE_GRIB2LIB = %s
-CDMS_INCLUDE_DRS = %s
-CDMS_INCLUDE_HDF = %s
-CDMS_INCLUDE_PP = %s
-CDMS_INCLUDE_QL = %s
-drs_file = %s
-netcdf_directory = %s
-netcdf_include_directory = %s
-cdunif_include_directories = %s + %s + %s
-cdunif_library_directories = %s + %s + %s + get_drs_dirs()
-cdunif_libraries = %s + %s + get_drs_libs() + %s + %s
-x11include = %s
-x11libdir = %s
-mathlibs = %s
-action = %s
-externals = %s
-""" % (
-        repr(options.get('CDMS_INCLUDE_DAP','no')),
-        repr(options.get('CDMS_DAP_DIR','.')),
-        repr(options.get('CDMS_HDF_DIR','.')),
-        repr(options.get('CDMS_GRIB2LIB_DIR',os.environ['EXTERNALS'])),
-        repr(options.get('CDMS_INCLUDE_GRIB2LIB',"yes")),
-        repr(options['CDMS_INCLUDE_DRS']),
-        repr(options['CDMS_INCLUDE_HDF']),
-        repr(options['CDMS_INCLUDE_PP']),
-        repr(options['CDMS_INCLUDE_QL']),
-        repr(drs_file),
-        repr(netcdf_directory),
-        repr(netcdf_include_directory),
-        repr(cdunif_include_directories),repr(dap_include),repr(hdf_include),
-        repr(cdunif_library_directories),repr(dap_lib_dir),repr(hdf_lib_dir),
-        repr(['cdms', netcdfname]),repr(dap_lib),repr(hdf_libraries),repr(grib2_libraries),
-        repr(options['x11include']),
-        repr(options['x11libdir']),
-        repr(options['mathlibs']),
-        repr(options['action']),
-        repr(os.environ['EXTERNALS']),
-        )
-    if enable_aqua:
-	print >> f,'enable_aqua = True'
-    else:
-        print >>f, 'enable_aqua = False'
-    f.close()
-    cdat_info_path = os.path.join(os.environ['BUILD_DIR'], 'cdat_info')
-    if not norun:
-      # Install the configuration
-      #would be best to add 'clean' but it gives stupid warning error
-      sys.argv[1:]=['-q', 'install', '--prefix=%s' % target_prefix]
-      setup (name="cdat_info",
-       version="0.0",
-       package_dir = { 'cdat_info' : os.path.dirname(cdat_info_path)},
-      )
-      os.system('/bin/rm -fr build')
-
-    py_prefix = os.path.join(target_prefix,'lib','python%i.%i' % sys.version_info[:2],'site-packages')
-    cdat_info_src_path = os.path.join(build_dir, 'cdat_info.py')
-    cdat_info_dst_path = os.path.join(py_prefix, 'cdat_info.py')
-    if os.path.isfile(cdat_info_src_path):
-        shutil.copyfile(cdat_info_src_path, cdat_info_dst_path)
-    else:
-       print>>sys.stderr, 'Failed to copy %s to %s' % (cdat_info_src_path, cdat_info_dst_path)
-
-    os.chdir(here)
-    print >>sys.stderr, 'Configuration installed.'
-
-def usage():
-    f = open('HELP.txt')
-    lines = f.readlines()
-    f.close()
-    for line in lines[10:-9]:
-        sys.stdout.write(line)
-    print '\tDefault Packages'
-    print '\t----------------'
-    packages.append('\n\tContributed Packages\n\t--------------------')
-    #execfile('installation/contrib.py',globals(),globals())
-    for p in packages:
-        print '\t\t',p
-
-def main(arglist):
-    global norun, echo, force, do_configure, silent, action, logdir, enable_aqua,target_prefix, enable_netcdf3, hdf5path,zpath
-    enable_aqua = False
-    enable_cdms1 = False
-    enable_netcdf3=False
-    optlist, control_names = getopt.getopt(arglist,
-                       "c:defhnPl",
-                       ["enable-cdms-only",
-                        "configuration=",
-                        "debug",
-                        "prefix=",
-                        "echo",
-                        "force",
-                        "help",
-                        "with-externals=",
-                        "norun",
-                        "PCMDI",
-                        "pcmdi",
-                        "psql","enable-psql",
-                        "enable-hdf4","enable-HDF4",
-                        "with-HDF4=","with-hdf4=",
-                        "disable-hdf4","disable-HDF4",
-                        "disable-contrib",
-                        "enable-pp",
-                        "enable-drs","enable-DRS",
-                        "disable-externals-build",
-                        "disable-pp",
-                        ## Bellow are the arguments that could be passed to exsrc, nothing done with them
-                        "disable-R","disable-r",
-                        #"disable-VTK","disable-vtk",
-                        "disable-XGKS","disable-xgks",
-                        "disable-Pyfort","disable-pyfort",
-                        "disable-NetCDF","disable-netcdf","disable-NETCDF",
-                        "disable-Numeric","disable-numeric",
-                        "disable-gplot","disable-GPLOT","disable-Gplot",
-                        "disable-gifsicle","disable-GIFSICLE",
-                        "disable-gifmerge","disable-GIFMERGE",
-                        "disable-pbmplus","disable-PBMPLUS",
-                        "disable-netpbm","disable-NETPBM",
-                        "disable-Pmw","disable-pmw",
-                        "disable-ioapi",
-                        "disable-cairo",
-                        "disable-ffmpeg",
-                        "disable-freetype",
-                        "disable-sampledata",
-                        "enable-ioapi",
-                        "enable-R","enable-r",
-                        "enable-numpy","disable-numpy",
-                        "enable-scipy","disable-scipy",
-                        "enable-ipython","disable-ipython",
-                        #"enable-VTK","enable-vtk",
-                        "enable-XGKS","enable-xgks",
-                        "enable-Pyfort","enable-pyfort",
-                        "enable-NetCDF","enable-netcdf","enable-NETCDF","enable-netcdf-fortran","enable-NETCDF-Fortran",
-                        "enable-Numeric","enable-numeric",
-                        "enable-gplot","enable-GPlot","enable-GPLOT",
-                        "enable-gifsicle","enable-GIFSICLE",
-                        "enable-gifmerge","enable-GIFMERGE",
-                        "enable-pbmplus","enable-PBMPLUS",
-                        "enable-netpbm","enable-NETPBM",
-                        "enable-Pmw","enable-pmw",
-                        "enable-aqua","enable-Aqua","enable-AQUA",
-                        "enable-cairo",
-                        "enable-ffmpeg",
-                        "enable-freetype",
-                        "enable-cdms1",
-                        "enable-netcdf3",
-                        "enable-spanlib",
-                        "disable-spanlib"
-                        "disable-tkbuild",
-                        "enable-qt",
-                        "enable-vcs-legacy",
-                        "enable-qt-framework",
-                        "with-qt=",
-                        "with-qt-lib=",
-                        "with-qt-inc=",
-                        "with-qt-bin=",
-                        "qt-debug",
-                        "list",
-                       ]
-                    )
-    configuration_files = []
-    nodap=0
-    nopp=0
-    nohdf=0
-    selfhdf=0
-    selfdap=0
-    selfpp=0
-    showlist=0
-    qtfw=False
-    qtinc=None
-    qtlib=None
-    qtbin=None
-    qt=False
-    control_names = ['contrib']
-    sampleData = True
-##     prefix_target = sys.exec_prefix
-    externals = os.environ.get("EXTERNALS",os.path.join(sys.prefix,"Externals"))
-    hdf5path = None
-    zpath = None
-
-    for i in range(len(optlist)):
-        letter=optlist[i][0]
-        if letter == "--enable-vcs-legacy":
-            qt=True
-        if letter == "--enable-qt":
-            qt=True
-        if letter == "--enable-qt-framework":
-            qtfw=True
-        if letter == "--with-qt":
-            qtinc=os.path.join(optlist[i][1],"include")
-            qtlib=os.path.join(optlist[i][1],"lib")
-            qtbin=os.path.join(optlist[i][1],"bin")
-        if letter == "--with-qt-inc":
-            qtinc=optlist[i][1]
-        if letter == "--with-qt-bin":
-            qtbin=optlist[i][1]
-        if letter == "--with-qt-lib":
-            qtlib=optlist[i][1]
-        if letter == "--enable-cdms-only":
-            control_names = ['cdmsonly']+control_names
-            if 'contrib' in control_names:
-                control_names.pop(control_names.index('contrib'))
-        elif letter == "--with-externals":
-            externals = optlist[i][1]
-        elif letter in ["-c",  "--configuration"]:
-            m = False
-            n = optlist[i][1]
-            if os.path.isfile(n):
-                m = n
-            elif os.path.isfile(n + '.py'):
-                m = n + '.py'
-            elif os.path.isfile(os.path.join('installation', n)):
-                m = os.path.join('installation', n)
-            elif os.path.isfile(os.path.join('installation', n + '.py')):
-                m = os.path.join('installation', n + '.py')
-            if m:
-                configuration_files.append(m)
-            else:
-                print >>sys.stderr, "Cannot find configuration file", optlist[i][1]
-            force = 1
-            do_configure = 1
-        elif letter in ["-d", "--debug"]:
-            debug_file = os.path.join('installation','debug.py')
-            configuration_files.append(debug_file)
-            force = 1
-            do_configure = 1
-        elif letter in ["-e", "--echo"]:
-            echo = 1
-        elif letter in ["--enable-cdms1"]:
-            enable_cdms1 = True
-        elif letter in ["--enable-netcdf3"]:
-            enable_netcdf3 = True
-	elif letter in ["--enable-aqua","--enable-Aqua","--enable-AQUA"]:
-	    enable_aqua = True
-        elif letter in ["-f", "--force"]:
-            force = 1
-            do_configure = 1
-        elif letter in ["-h", "--help"]:
-            usage()
-            raise SystemExit, 1
-        elif letter in ["-P", "--PCMDI", "--pcmdi"]:
-            configuration_files.append(os.path.join('installation', 'pcmdi.py'))
-            force=1
-            do_configure=1  # Need libcdms built a certain way too.
-        elif letter in ["--psql", "--enable-psql"]:
-            configuration_files.append(os.path.join('installation', 'psql.py'))
-            do_configure=1  # Need libcdms built a certain way too.
-##         elif letter in ["--with-OpenDAP", "--with-opendap", "--with-OPENDAP","--enable-opendap","--enable-OpenDAP","--enable-OPENDAP"]:
-##             configuration_files.append(os.path.join('installation', 'DAP.py'))
-##             do_configure=1  # Need libcdms built a certain way too.
-##             selfdap=1
-##         elif letter in ["--with-HDF4", "--with-hdf4",'--enable-hdf4','--enable-HDF4']:
-##             configuration_files.append(os.path.join('installation', 'HDF.py'))
-##             do_configure=1  # Need libcdms built a certain way too.
-##             selfhdf=1
-        elif letter in ["--with-hdf5",]:
-            hdf5path = optlist[i][1]
-        elif letter in ["--with-z",]:
-            zpath = optlist[i][1]
-        elif letter in ["--prefix"]:
-            target_prefix = optlist[i][1]
-        elif letter in ['--enable-drs','--enable-DRS']:
-            configuration_files.append(os.path.join('installation', 'pcmdi.py'))
-            do_configure=1  # Need libcdms built a certain way too.
-        elif letter in ['--enable-pp','--enable-PP']:
-            configuration_files.append(os.path.join('installation', 'pp.py'))
-            do_configure=1  # Need libcdms built a certain way too.
-            selfpp=1
-##         elif letter in ["--enable-NetCDF","--enable-NETCDF","--enable-netcdf",
-##                         "--enable-netcdf-fortran",
-##                         "--disable-opendap","--disable-OpenDAP","--disable-OPENDAP"]:
-##             nodap=1
-##         elif letter in ["--disable-hdf4","--disable-HDF4"]:
-##             nohdf=1
-        elif letter in ["--disable-pp","--disable-PP"]:
-            nohdf=1
-        elif letter in ["--disable-sampledata",]:
-            sampleData = False
-        elif letter in ["-n", "--norun"]:
-            norun = 1
-        elif letter in ['--list','-l']:
-            showlist=1
-        elif letter in ['--disable-contrib']:
-            for i in range(len(control_names)):
-                if control_names[i]=='contrib':
-                    control_names.pop(i)
-                    i=i-1
-    CDMS_INCLUDE_DAP='yes'
-    if nopp==1 and selfpp==1:
-        raise "Error you chose to both enable and disable PP support !"
-    if nohdf==1 and selfhdf==1:
-        raise "Error you chose to both enable and disable HDF !"
-##     if (nodap==0 and selfdap==0) and (sys.platform in ['linux2','darwin']):
-##         configuration_files.append(os.path.join('installation', 'DAP.py'))
-##         do_configure=1  # Need libcdms built a certain way too.
-##     if (nohdf==0 and selfhdf==0) and (sys.platform in ['linux2','darwin']):
-##         configuration_files.append(os.path.join('installation', 'HDF.py'))
-##         do_configure=1  # Need libcdms built a certain way too.
-    if (nopp==0 and selfpp==0) and (sys.platform in ['linux2','darwin']):
-        configuration_files.append(os.path.join('installation', 'pp.py'))
-        do_configure=1  # Need libcdms built a certain way too.
-
-    if hdf5path is None: hdf5path= os.path.join(externals)
-    if zpath is None: zpath= externals
-    os.environ['EXTERNALS']=externals
-
-    control_files = []
-    for n in control_names:
-        m = ''
-        if os.path.isfile(n):
-            m = n
-        elif os.path.isfile(n + '.py'):
-            m = n + '.py'
-        elif os.path.isfile(os.path.join('installation', n)):
-            m = os.path.join('installation', n)
-        elif os.path.isfile(os.path.join('installation', n + '.py')):
-            m = os.path.join('installation', n + '.py')
-        elif os.path.isfile(os.path.join(src_dir, 'installation', n + '.py')):
-            m = os.path.join(src_dir, 'installation', n + '.py')
-
-        if m:
-            control_files.append(m)
-        else:
-            print >>sys.stderr, 'Cannot find control file', n
-            raise SystemExit, 1
-
-    for control_file in control_files:
-        print 'Running:',control_file
-        execfile(control_file, globals(), globals())
-
-    if showlist:
-        print 'List of Packages that would be installed:'
-        for p in packages:
-            print p
-        sys.exit()
-    if force:
-        os.system('./scripts/clean_script')
-
-    sys.path.insert(0,os.path.join(target_prefix,'lib','python%i.%i' % sys.version_info[:2],'site-packages'))
-    if do_configure:
-        force = 1
-        if os.path.isfile(os.path.join(build_dir, 'cdat_info.py')):
-            os.unlink(os.path.join(build_dir, 'cdat_info.py'))
-        print >>sys.stderr, 'Configuring & installing scripts.'
-        configure(configuration_files)
-        images_path = os.path.join(src_dir, 'images')
-        os.chdir(images_path)
-        scripts = glob.glob('*')
-        for script in scripts:
-            if script[-1] == '~': continue
-            if script == "README.txt": continue
-            target = os.path.join(target_prefix, 'bin', script)
-            if os.path.isfile(target): os.unlink(target)
-            shutil.copy(script, target)
-        os.chdir(here)
-    else:
-        import cdat_info
-        action = cdat_info.action
-
-    # Install CDMS
-    cdms_library_file = os.path.join(cdms_library_directory, 'libcdms.a')
-    #if force or not os.path.isfile(cdms_library_file):
-    #    install('libcdms', action)
-    #    if (sys.platform in ['darwin',]):
-    #       os.system('ranlib '+os.path.join(target_prefix,'lib','libcdms.a'))
-
-    # Install Packages
-    package_errors=0
-    package_failed=[]
-    if enable_cdms1:
-        packages.append("Packages/regrid")
-        packages.append("Packages/cdms")
-    for p in packages:
-        h = os.getcwd()
-        oldcmd=action["setup.py"]+""
-        action['setup.py'] = action['setup.py'].strip()[:-1]+" build -b "+ os.environ['BUILD_DIR']+"/"+p
-        try:
-            if p == "Packages/vcs":
-                action["setup.py"]=oldcmd.strip()[:-1]+" --old-and-unmanageable; "
-                if qtfw:
-                    action["setup.py"]=oldcmd.strip()[:-1]+" --enable-qt-framework ; "
-                if qt:
-                    action["setup.py"]=oldcmd.strip()[:-1]+" --enable-qt ; "
-                if qtinc is not None:
-                    action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-inc=%s ; "%qtinc
-                if qtlib is not None:
-                    action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-lib=%s ; "%qtlib
-                if qtbin is not None:
-                    action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-bin=%s ; "%qtbin
-            install(p, action)
-        except:
-            package_errors+=1
-            package_failed.append(p)
-            os.chdir(h)
-            print >>sys.stderr, 'Error: Installation of Package:',p,'FAILED'
-        action["setup.py"]=oldcmd
-
-    # Celebrate
-    if echo:
-        print "Simulated build complete."
-    elif not silent:
-        print >>sys.stderr, finish
-        if package_errors!=0:
-            print >>sys.stderr, '\n              --- WARNING ---\n'
-            print >>sys.stderr,package_errors,'Packages reported as FAILED, see logs\n'
-            for p in package_failed:
-                print >>sys.stderr,'\t\t',p
-            print >>sys.stderr
-        print >>sys.stderr, '******************************************************\n'
-        """
-        ******************************************************
-        CDAT has been installed in %s .
-        Please make sure all modules built successfully
-        (see above build messages)
-        ******************************************************
-        """ %(target_prefix,)
-
-def _install(file, action):
-    h = os.getcwd()
-    absfile = os.path.abspath(file)
-    print 'absfile ', absfile
-    dirname, basename = os.path.split(absfile)
-    dirfinal = os.path.split(dirname)[-1]
-    os.chdir(dirname)
-    name, ext = os.path.splitext(basename)
-    if ext.lower() == ".pfp":
-        p1 = action['*.pfp']
-    elif action.has_key(absfile):
-        p1 = action[absfile]
-    elif action.has_key(file):
-        p1 = action[file]
-    elif action.has_key(basename):
-        p1 = action[basename]
-    else:
-        print "Do not know what to do with", file, "in", dirname
-        print >>sys.stderr, "Do not know what to do with", file, "in", dirname
-        raise SystemExit, 1
-
-    if log:
-        logfile = os.path.join(logdir, dirfinal+".LOG")
-        if not silent:
-            print >>sys.stderr, "Processing", dirfinal + ', log =', logfile
-    else:
-        logfile = tempfile.mktemp()
-        if not silent:
-            print >>sys.stderr, "Processing", dirfinal
-    p1 = p1 % { 'filename': file }
-    sep = " > %s 2>&1 ; " % logfile
-    p = sep.join(p1.split(";"))
-##     os.environ["CFLAGS"]="%s -L%s/lib" % (os.environ.get("CFLAGS",""), os.environ["EXTERNALS"])
-    add_lib = "-L%s/lib" % (os.environ["EXTERNALS"],)
-    cflags_current = os.environ.get("CFLAGS","")
-    if cflags_current.find(add_lib) == -1:
-        os.environ["CFLAGS"]="%s %s" % (cflags_current, add_lib)
-    p = 'env CFLAGS="%s" %s' % (os.environ["CFLAGS"],p)
-    if echo:
-        print >> sys.stderr, p
-    print norun
-    if norun:
-        r = 0
-    else:
-        #print '====>executing: ', p
-        r = os.system(p)
-    if r:
-        print >>sys.stderr, "Install failed in directory", dirname
-        print >>sys.stderr, "Log=", logfile
-        raise SystemExit, 1
-    elif not log and not norun:
-        os.unlink(logfile)
-
-    f = open(os.path.join(build_dir, 'rebuild.py'), 'w')
-    print >>f, """
-import os
-j = os.system(%s)
-if j:
-    print 'Compilation failed'
-    raise SystemExit, 1
-""" % (repr(p1+ " 1>LOG.rebuild"),)
-    f.close()
-    os.chdir(h)
-
-def install (arg, action):
-    arg = os.path.normpath(arg)
-    installer = ''
-    arg = os.path.join(src_dir, arg)
-    if os.path.isdir(arg):
-        for x in (glob.glob(os.path.join(arg, '*.pfp')) + \
-                 ['autogen.sh',
-                  'install.py',
-                  'setup.py',
-                  'install_script',
-                  'Makefile',
-                  'makefile'] ):
-            name = os.path.join(arg,x)
-            if os.path.isfile(name):
-                installer = name
-                break
-        else:
-            print >>sys.stderr, "Cannot find installation instructions in", arg
-            raise SystemExit, 1
-    elif os.path.isfile(arg):
-        installer = arg
-        designator, junk = os.path.split(arg)
-    else:
-        print >>sys.stderr, "Cannot find", arg
-        raise SystemExit
-
-    _install(installer, action)
-
-
-if __name__ == "__main__":
-    arglist = sys.argv[1:]
-    main(arglist)
-    ## This parts creates links from Externals...
-    try:
-        import cdat_info
-        externals = cdat_info.externals
-    except:
-        externals = os.path.join(sys.prefix,"Externals")
-    externals = os.environ.get("EXTERNALS",externals)
-    externals_path = os.path.join(externals,'bin')
-    files = os.listdir(externals_path)
-    for file in files:
-        fnm = os.path.join(sys.prefix,'bin',file)
-        if not os.path.exists(fnm) and not os.path.islink(fnm):
-            try:
-                os.symlink(os.path.join(externals_path,file),fnm)
-            except:
-                pass
-
diff --git a/CMake/cdat_modules_extra/jasper_configure_step.cmake.in b/CMake/cdat_modules_extra/jasper_configure_step.cmake.in
deleted file mode 100644
index ff0cccad7..000000000
--- a/CMake/cdat_modules_extra/jasper_configure_step.cmake.in
+++ /dev/null
@@ -1,11 +0,0 @@
-# On linux 64, gdal picks the static jasper library, make sure only shared libraries
-# are built (Alex Pletzer)
-
-# Make sure to pick up image and other libraries built by the superbuild
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-EXECUTE_PROCESS(
-  COMMAND sh configure --enable-shared --disable-static --prefix=@jasper_install@
-  WORKING_DIRECTORY "@jasper_source@"
-  RESULT_VARIABLE rv
-  )
diff --git a/CMake/cdat_modules_extra/jpeg_install_step.cmake.in b/CMake/cdat_modules_extra/jpeg_install_step.cmake.in
deleted file mode 100644
index ab724cc8a..000000000
--- a/CMake/cdat_modules_extra/jpeg_install_step.cmake.in
+++ /dev/null
@@ -1,28 +0,0 @@
-
-execute_process(
-  COMMAND make install ${INSTALL_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-#cp build/jpeg*/lib* /Users/partyd/Kitware/uv-cdat/make-file-install/Externals/lib
-#cp build/jpeg*/*.h /Users/partyd/Kitware/uv-cdat/make-file-install/Externals/include
-
-file(GLOB jpeglibs "@jpeg_source@/lib*")
-file(GLOB jpegheaders "@jpeg_source@/*.h")
-
-
-foreach(lib ${jpeglibs})
-  execute_process(
-    COMMAND "@CMAKE_COMMAND@" -E copy_if_different ${lib} @jpeg_install@/lib
-    RESULT_VARIABLE res
-    OUTPUT_VARIABLE CDAT_OUT
-    OUTPUT_VARIABLE CDAT_ERR)
-endforeach()
-
-foreach(header ${jpegheaders})
-    execute_process(
-      COMMAND "@CMAKE_COMMAND@" -E copy_if_different ${header} @jpeg_install@/include
-      RESULT_VARIABLE res
-      OUTPUT_VARIABLE CDAT_OUT
-      OUTPUT_VARIABLE CDAT_ERR)
-endforeach()
\ No newline at end of file
diff --git a/CMake/cdat_modules_extra/lats_Makefile.gfortran.in b/CMake/cdat_modules_extra/lats_Makefile.gfortran.in
deleted file mode 100644
index 566a6b5bc..000000000
--- a/CMake/cdat_modules_extra/lats_Makefile.gfortran.in
+++ /dev/null
@@ -1,208 +0,0 @@
-# Generated automatically from Makefile.in by configure.
-# -*-Mode: indented-text;-*-
-# Makefile:    LATS makefile
-#
-# Copyright:   1996, Regents of the University of California
-#	       This software may not be distributed to others without
-#	       permission of the author.
-#
-# Author:      Bob Drach, Lawrence Livermore National Laboratory
-#              drach@llnl.gov
-#
-# Version:     $Id: Makefile.in,v 1.12 1996/10/29 00:20:44 drach Exp $
-#
-# Revision History:
-#
-# $Log: Makefile.in,v $
-# Revision 1.12  1996/10/29  00:20:44  drach
-# - Removed name conflicts with CDMS
-#
-# Revision 1.11  1996/10/22 19:04:57  fiorino
-# latsgrib bug in .ctl creator
-#
-# Revision 1.10  1996/10/16 22:09:51  drach
-# - Added automatic gribmap generation
-# - Restricted LATS_GRADS_GRIB convention to one grid per file
-#
-# Revision 1.9  1996/09/30 18:54:46  drach
-# - permit installation without the sources being present
-# - separate FORTRAN debug flag, since -O doesn't work on the Cray
-#
-# Revision 1.8  1996/09/17 16:52:31  drach
-# - Misc. cleanup
-#
-# Revision 1.7  1996/08/29 19:27:17  drach
-# - Cleaned up configuration macros, Makefile.in for portability
-#
-# Revision 1.6  1996/08/27 19:39:03  drach
-# - Added FORTRAN test
-# - Ported to other UNIX platforms
-#
-# Revision 1.5  1996/07/12 00:36:21  drach
-# - (GRIB) use undefined flag only when set via lats_miss_XX
-# - (GRIB) use delta when checking for missing data
-# - (GRIB) define maximum and default precision
-# - fixed lats_vartab to work correctly.
-# - Added report of routine names, vertical dimension types
-#
-# Revision 1.4  1996/06/27 19:19:34  drach
-# - Misc. cleanup
-#
-# Revision 1.3  1996/06/27 01:32:49  drach
-# - Fixed up file permissions on install
-#
-# Revision 1.2  1996/06/27 01:02:38  drach
-# - Added installation directives
-#
-# Revision 1.1  1996/06/12 18:09:23  drach
-# - Initial versions
-#
-#
-#
-# Note: to generate Makefile from Makefile.in:
-#    ./configure --cache-file=/dev/null \
-#        [--with-ncinc=<netCDF include directory>] \
-#        [--with-nclib=<netCDF library directory>] \
-#	 [--prefix=<installation directory]
-
-# Install prefix for architecture-independent files
-prefix=@cdat_EXTERNALS@
-
-# Install prefix for architecture-dependent files
-exec_prefix=    $(prefix)
-
-# Expanded directories
-BINDIR=$(exec_prefix)/bin
-INCLUDEDIR=$(prefix)/include
-LIBDIR=$(exec_prefix)/lib
-MANDIR=$(prefix)/man
-PARMDIR=$(prefix)/lib/lats
-
-CC = cc
-CFLAGS = -I.  -I@cdat_EXTERNALS@/include $(DEBUG) -I/usr/include/malloc
-CPPFLAGS =  -DHAVE_NETCDF -DLATS_CHANGE_GRADS_NAMES -DSTNDALN=1 -DBYTEORDER=1 -DGRADS_CRAY=0
-DEBUG = -O
-DEFS =  -DOS_NAME=Linux -DOS_MAJOR=2 
-FC = gfortran
-FDEBUG =
-FFLAGS = -I.  $(FDEBUG)A -Wno-all
-INSTALL = /usr/bin/install -c
-INSTALL_PROGRAM = /usr/bin/install -c
-LDFLAGS = -L. -llats -L@cdat_EXTERNALS@/lib -lnetcdf  -lm
-OS = linux
-RANLIB = :
-
-# Shell to run make subcommands
-SHELL = /bin/sh
-
-#
-#	mf configuration
-#
-LATSLSVER="1.0"
-
-# Do not change values below this line
-#
-LIBRARY		= liblats.a 
-
-OBJS		= lats.o latsint.o latsnc.o latsgrib.o latstime.o latsstat.o latsfort.o fgbds.o \
-		  fgutil.o latsgribmap.o gaddes.o gagmap.o gamach.o gautil.o
-
-all:		$(LIBRARY) latsls
-
-latsls:	        latsls.o latsint.o
-		$(CC) latsls.o latsint.o $(CPPFLAGS) $(CFLAGS)  -o latsls
-
-$(LIBRARY):	$(OBJS)
-		ar rv $(LIBRARY) $?
-		$(RANLIB) $(LIBRARY)
-
-install:	libinstall
-
-libinstall:
-		-if test ! -d $(PARMDIR); \
-		then mkdir $(PARMDIR); \
-		fi
-		echo "LIBDIR"$(LIBDIR)
-		$(INSTALL_PROGRAM) -m 644 $(LIBRARY) $(LIBDIR)/$(LIBRARY)
-		(cd $(LIBDIR); $(RANLIB) $(LIBRARY))
-		$(INSTALL_PROGRAM) -m 644 lats.h $(INCLUDEDIR)/lats.h
-		$(INSTALL_PROGRAM) -m 644 lats.inc $(INCLUDEDIR)/lats.inc
-		$(INSTALL_PROGRAM) -m 644 amip2.parms $(PARMDIR)/amip2.parms
-
-test:		$(LIBRARY) testnc wgrib testgrib testf
-		@echo "test netCDF"
-		@if test -f testnc.nc; \
-		then rm -f testnc.nc; \
-		fi
-		@testnc
-		@rm -f testnc.nc
-		@echo "test GRIB"
-		@if test -f testgrib.grb; \
-		then rm -f testgrib.grb; \
-		fi
-		@if test -f testgrib.ctl; \
-		then rm -f testgrib.ctl; \
-		fi
-		@if test -f testgrib.bin; \
-		then rm -f testgrib.bin; \
-		fi
-		@testgrib
-		@rm -f testgrib.grb
-		@rm -f testgrib.ctl
-		@rm -f testgrib.bin
-		@echo "test FORTRAN"
-		@if test -f testf.nc; \
-		then rm -f testf.ne; \
-		fi
-		@testf
-		@rm -f testf.nc
-
-lats.o:		lats.c latsint.h lats.h
-latsint.o:	latsint.c latsint.h lats.h latsparm.h
-latsnc.o:	latsnc.c latsint.h lats.h
-latsgrib.o:	latsgrib.c fgrib.h fgrib_init.h latsint.h lats.h
-latstime.o:	latstime.c latsint.h lats.h latstime.h
-latsstat.o:	latsstat.c latsint.h lats.h
-latsfort.o:	latsfort.c lats.h cfortran.h
-fgbds.o:	grads.h fgbds.c fgrib.h
-fgutil.o:	fgutil.c fgrib.h
-latsgribmap.o:	grads.h gagmap.h
-gaddes.o:	grads.h
-gagmap.o:	grads.h gagmap.h
-gamach.o:	grads.h
-gautil.o:	grads.h
-latsls.o:	latsint.h latsls.c latsint.o
-		$(CC) -c latsls.c $(CPPFLAGS) $(DEFS) $(CFLAGS) -DLATSLS_VERSION=\"$(LATSLSVER)\"
-
-.SUFFIXES: .F
-
-.c.o:
-	$(CC) $(CPPFLAGS) $(DEFS) $(CFLAGS) -c $<
-
-.c:
-	$(CC) $(CPPFLAGS) $(DEFS) $(CFLAGS) -o $@ $< $(LDFLAGS)
-
-.F:
-	case $(OS) in \
-	     'aix') \
-		    /usr/ccs/lib/cpp -P $(CPPFLAGS) $(CFLAGS) $(INCLUDES) $(DEFS) $< $@.f; \
-		    $(FC) $(FFLAGS) $(LDFLAGS) $(LIBS) $@.f -o $@; \
-		    rm $@.f; \
-		    ;; \
-	     *) \
-		    $(FC) $(CPPFLAGS) $(INCLUDES) $(DEFS) $(FFLAGS) -o $@ $< $(LDFLAGS) $(LIBS); \
-		    ;; \
-	esac
-	
-clean:
-	-rm -f *.o core *.~*~ testnc testgrib testf testf.f
-
-distclean: clean
-	-rm -f $(LIBRARY) testnc testnc.nc testgrib testgrib.grb \
-	testgrib.ctl testgrib.bin testgrib.gmp testf.nc latsls wgrib
-
-settest:
-	cp liblats.a lib/
-	cp wgrib bin/
-	cp latsls bin/
-
diff --git a/CMake/cdat_modules_extra/libcf_install_step.cmake.in b/CMake/cdat_modules_extra/libcf_install_step.cmake.in
deleted file mode 100644
index f5c293642..000000000
--- a/CMake/cdat_modules_extra/libcf_install_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @libcf_source@
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Install")
-endif()
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/libcf_make_step.cmake.in b/CMake/cdat_modules_extra/libcf_make_step.cmake.in
deleted file mode 100644
index 790efaa63..000000000
--- a/CMake/cdat_modules_extra/libcf_make_step.cmake.in
+++ /dev/null
@@ -1,15 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY @libcf_source@
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make")
-endif()
-message("Make succeeded.")
diff --git a/CMake/cdat_modules_extra/libdrs_Makefile.Linux.gfortran.in b/CMake/cdat_modules_extra/libdrs_Makefile.Linux.gfortran.in
deleted file mode 100644
index 77a49c002..000000000
--- a/CMake/cdat_modules_extra/libdrs_Makefile.Linux.gfortran.in
+++ /dev/null
@@ -1,78 +0,0 @@
-# DRS library Makefile
-#
-# Usage:
-#
-# To make DRS library (libdrs.a) for Linux, with Absoft FORTRAN:
-#	% make
-#
-#--------------------------------------------------------------------
-
-#jfp was DEBUG = -O
-DEBUG = -g -O
-FC = gfortran
-CC = gcc
-ARCHOPT = -mtune=native
-#ARCHOPT = -arch i386
-#ARCHOPT =
-FOPTS = -fcray-pointer $(ARCHOPT) -W
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -byteswapio
-FFLAGS = $(DEBUG) $(FOPTS) -Dgfortran -Dsun -D__linux -D__linux_gfortran -fpic
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -Dgfortran -Dmac
-CFLAGS = $(DEBUG) $(ARCHOPT) -Dsun -D__linux -D__linux_gfortran -fpic
-INSTALL_LIB = @cdat_EXTERNALS@/lib
-INSTALL_INC = @cdat_EXTERNALS@/include
-#CPPFLAGS = -Dmac -Dsun -D__linux -D__linux_pgi $(ARCHOPT)
-CPPFLAGS = -D__linux $(ARCHOPT) -fpic
-#CPPFLAGS = -Dmac $(ARCHOPT) -Dsun -byteswapio
-CPP = cpp
-
-FOBJECTS = getdat.o idenc.o putdat.o clavdb.o getdim.o iflun.o setdim.o getnam.o mvnbc.o cluvdb.o getnd.o bnsrch.o drserr.o seterr.o getind.o compnm.o dictlk.o putvdm.o setnam.o setdat.o setvdm.o getrge.o savdb.o putdat1.o getdat1.o getvdim.o inqlun.o inqdict.o prdict.o rdtrans.o wrtrans.o setrep.o gettbuf.o getrge2.o getelemd.o setcdim.o getcdim.o getedim.o confnm.o putdic.o getpelem.o mimem.o redat.o wrdat.o cllun.o readhd.o writehd.o wrdic.o redic.o aslun.o drssync.o drsreadb.o drsautosync.o midate.o d_floor.o cddrsfwrap.o
-FINCLUDES = drsdef.h drscom.h cycle.h
-FSOURCES = $(FOBJECTS:.o=.F)
-
-COBJECTS = ctoi.o getslab.o drsc.o drstrunc.o cddrs_fc.o
-CINCLUDES = drscdf.h
-CSOURCES = $(COBJECTS:.o=.c)
-
-OBJECTS = $(FOBJECTS) $(COBJECTS)
-SOURCES = $(FSOURCES) $(CSOURCES)
-INCLUDES = $(FINCLUDES) $(CINCLUDES)
-#--------------------------------------------------------------------
-
-all: drsdef.h libdrs.a libdrs.so
-
-shared: drsdef.h libdrs.so
-
-libdrs.a: $(OBJECTS)
-	ar rv libdrs.a $?
-
-libdrs.so: $(OBJECTS)
-	$(CC) $(ARCHOPT) -lgfortran  -shared -o libdrs.so $(OBJECTS)
-
-drsdef.h: drsdef.HH
-	$(CPP) -P $(CPPFLAGS) drsdef.HH drsdef.h
-#--------------------------------------------------------------------
-
-install: libdrs.a
-	cp libdrs.a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/libdrs.a
-	cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h
-	cp drscdf.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drscdf.h
-#	install -f $(INSTALL_LIB) -m 644 libdrs.a
-#	install -f $(INSTALL_INC) -m 644 drsdef.h
-#	install -f $(INSTALL_INC) -m 644 drscdf.h
-
-#--------------------------------------------------------------------------
-
-# Miscellaneous junk
-
-tags:
-	etags $(SOURCES) $(INCLUDES)
-
-clean:
-	-rm -f *.o
-	-rm -f *~
-	-rm -f core
-.SUFFIXES: .F .o
-
-.F.o:
-	$(FC) $(FFLAGS) -c $<
diff --git a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.fwrap.gfortran.in b/CMake/cdat_modules_extra/libdrs_Makefile.Mac.fwrap.gfortran.in
deleted file mode 100644
index 9cdb8773f..000000000
--- a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.fwrap.gfortran.in
+++ /dev/null
@@ -1,85 +0,0 @@
-# DRS library Makefile
-#
-# Usage:
-#
-# To make DRS library (libdrs.a) for Linux, with Absoft FORTRAN:
-#	% make
-# This makefile is set up for a 64-bit Macintosh and gfortran/gcc 4.6.0
-# but see comments for how to use older Macs and older gfortran/gcc.
-#
-#--------------------------------------------------------------------
-
-# DEBUG = -O
-DEBUG = -g -O -save-temps
-FC = /usr/local/bin/gfortran
-CC = gcc
-#ARCHOPT = -arch x86_64
-#ARCHOPT = -arch i386
-ARCHOPT = -m64
-
-FOPTS = -fcray-pointer $(ARCHOPT) -W
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -byteswapio
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -Dgfortran -Dmac
-FFLAGS = $(DEBUG) $(FOPTS) -Dsun -Dgfortran -D__linux -D__linux_gfortran -Dmac -Dmac64
-CFLAGS = $(DEBUG) $(ARCHOPT)
-INSTALL_LIB = @cdat_EXTERNALS@/lib
-INSTALL_INC = @cdat_EXTERNALS@/include
-# Somehow CPPFLAGS ends out on the gcc lines...
-#CPPFLAGS = -Dmac -Dsun -D__linux -D__linux_pgi $(ARCHOPT)
-#CPPFLAGS = -Dmac $(ARCHOPT) -Dsun -byteswapio   note that byteswapio is never referenced
-#CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac $(ARCHOPT)
-CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac -Dmac64 $(ARCHOPT)
-CPP = cpp
-
-FOBJECTS = getdat.o idenc.o putdat.o clavdb.o getdim.o iflun.o setdim.o getnam.o mvnbc.o cluvdb.o getnd.o bnsrch.o drserr.o seterr.o getind.o compnm.o dictlk.o putvdm.o setnam.o setdat.o setvdm.o getrge.o savdb.o putdat1.o getdat1.o getvdim.o inqlun.o inqdict.o prdict.o rdtrans.o wrtrans.o setrep.o gettbuf.o getrge2.o getelemd.o setcdim.o getcdim.o getedim.o confnm.o putdic.o getpelem.o mimem.o redat.o wrdat.o cllun.o readhd.o writehd.o wrdic.o redic.o aslun.o drssync.o drsreadb.o drsautosync.o midate.o d_floor.o mac.o cddrsfwrap.o
-FINCLUDES = drsdef.h drscom.h cycle.h
-FSOURCES = $(FOBJECTS:.o=.F)
-
-COBJECTS = ctoi.o getslab.o drsc.o drstrunc.o macintosh.o cddrs_fc.o
-CINCLUDES = drscdf.h
-CSOURCES = $(COBJECTS:.o=.c)
-
-OBJECTS = $(FOBJECTS) $(COBJECTS)
-SOURCES = $(FSOURCES) $(CSOURCES)
-INCLUDES = $(FINCLUDES) $(CINCLUDES)
-#--------------------------------------------------------------------
-
-all: drsdef.h libdrs.a libdrs.so
-
-shared: drsdef.h libdrs.so
-
-libdrs.a: $(OBJECTS)
-	ar rv libdrs.a $?
-
-# formerly in libdrs.so, needed for Fortran->cdms link: -lcdms
-libdrs.so: $(OBJECTS)
-	$(CC) $(ARCHOPT) -headerpad_max_install_names -L/usr/X11R6/lib -L/usr/local/gfortran/lib -lgfortran  -L/usr/local/lib/ -L@cdat_EXTERNALS@/lib -lnetcdf -lgrib2c  -lquadmath -lcdms -shared -lpng15 -ljasper -o libdrs.so $(OBJECTS)
-
-drsdef.h: drsdef.HH
-	$(CPP) -P $(CPPFLAGS) drsdef.HH drsdef.h
-#--------------------------------------------------------------------
-
-install: libdrs.a
-	cp libdrs.a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/libdrs.a
-	cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h
-	cp drscdf.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drscdf.h
-#	install -f $(INSTALL_LIB) -m 644 libdrs.a
-#	install -f $(INSTALL_INC) -m 644 drsdef.h
-#	install -f $(INSTALL_INC) -m 644 drscdf.h
-
-#--------------------------------------------------------------------------
-
-# Miscellaneous junk
-
-tags:
-	etags $(SOURCES) $(INCLUDES)
-
-clean:
-	-rm -f *.o
-	-rm -f *~
-	-rm -f core
-
-.SUFFIXES: .F .o
-
-.F.o:
-	$(FC) $(FFLAGS) -c $<
diff --git a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in b/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in
deleted file mode 100644
index d139f0b29..000000000
--- a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in
+++ /dev/null
@@ -1,89 +0,0 @@
-# DRS library Makefile
-#
-# Usage:
-#
-# To make DRS library (libdrs.a) for Linux, with Absoft FORTRAN:
-#	% make
-# This makefile is set up for a 64-bit Macintosh and gfortran/gcc 4.6.0
-# but see comments for how to use older Macs and older gfortran/gcc.
-#
-#--------------------------------------------------------------------
-
-# DEBUG = -O
-DEBUG = -g -O -save-temps
-FC = /usr/local/bin/gfortran
-CC = gcc
-#ARCHOPT = -arch x86_64
-#ARCHOPT = -arch i386
-ARCHOPT = -m64
-
-FOPTS = -fcray-pointer $(ARCHOPT) -W
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -byteswapio
-# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -Dgfortran -Dmac
-FFLAGS = $(DEBUG) $(FOPTS) -Dsun -Dgfortran -D__linux -D__linux_gfortran -Dmac -Dmac64
-CFLAGS = $(DEBUG) $(ARCHOPT)
-INSTALL_LIB = @cdat_EXTERNALS@/lib
-INSTALL_INC = @cdat_EXTERNALS@/include
-# Somehow CPPFLAGS ends out on the gcc lines...
-#CPPFLAGS = -Dmac -Dsun -D__linux -D__linux_pgi $(ARCHOPT)
-#CPPFLAGS = -Dmac $(ARCHOPT) -Dsun -byteswapio   note that byteswapio is never referenced
-#CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac $(ARCHOPT)
-CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac -Dmac64 $(ARCHOPT)
-CPP = cpp
-
-FOBJECTS = getdat.o idenc.o putdat.o clavdb.o getdim.o iflun.o setdim.o getnam.o mvnbc.o cluvdb.o getnd.o bnsrch.o drserr.o seterr.o getind.o compnm.o dictlk.o putvdm.o setnam.o setdat.o setvdm.o getrge.o savdb.o putdat1.o getdat1.o getvdim.o inqlun.o inqdict.o prdict.o rdtrans.o wrtrans.o setrep.o gettbuf.o getrge2.o getelemd.o setcdim.o getcdim.o getedim.o confnm.o putdic.o getpelem.o mimem.o redat.o wrdat.o cllun.o readhd.o writehd.o wrdic.o redic.o aslun.o drssync.o drsreadb.o drsautosync.o midate.o d_floor.o mac.o
-# formerly in FOBJECTS, needed for Fortran->cdms link:  cddrsfwrap.o
-# .. cddrsfwrap.o is a Fortran wrapper for libcdms; not really part of libdrs.
-FINCLUDES = drsdef.h drscom.h cycle.h
-FSOURCES = $(FOBJECTS:.o=.F)
-
-COBJECTS = ctoi.o getslab.o drsc.o drstrunc.o macintosh.o
-# formerly in COBJECTS, needed for Fortran->cdms link: cddrs_fc.o
-# ... cddrs_fc.o is C code to support the Fortran wrapper for libcdms; not really part of libdrs.
-CINCLUDES = drscdf.h
-CSOURCES = $(COBJECTS:.o=.c)
-
-OBJECTS = $(FOBJECTS) $(COBJECTS)
-SOURCES = $(FSOURCES) $(CSOURCES)
-INCLUDES = $(FINCLUDES) $(CINCLUDES)
-#--------------------------------------------------------------------
-
-all: drsdef.h libdrs.a libdrs.so
-
-shared: drsdef.h libdrs.so
-
-libdrs.a: $(OBJECTS)
-	ar rv libdrs.a $?
-
-# formerly in libdrs.so, needed for Fortran->cdms link: -lcdms
-libdrs.so: $(OBJECTS)
-	$(CC) $(ARCHOPT) -headerpad_max_install_names -L/usr/local/gfortran/lib -lgfortran  -L/usr/local/lib/ -L@cdat_EXTERNALS@/lib -lnetcdf -lgrib2c  -lquadmath -shared -o libdrs.so $(OBJECTS)
-
-drsdef.h: drsdef.HH
-	$(CPP) -P $(CPPFLAGS) drsdef.HH drsdef.h
-#--------------------------------------------------------------------
-
-install: libdrs.a
-	cp libdrs.a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/libdrs.a
-	cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h
-	cp drscdf.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drscdf.h
-#	install -f $(INSTALL_LIB) -m 644 libdrs.a
-#	install -f $(INSTALL_INC) -m 644 drsdef.h
-#	install -f $(INSTALL_INC) -m 644 drscdf.h
-
-#--------------------------------------------------------------------------
-
-# Miscellaneous junk
-
-tags:
-	etags $(SOURCES) $(INCLUDES)
-
-clean:
-	-rm -f *.o
-	-rm -f *~
-	-rm -f core
-
-.SUFFIXES: .F .o
-
-.F.o:
-	$(FC) $(FFLAGS) -c $<
diff --git a/CMake/cdat_modules_extra/lxml_build_step.cmake.in b/CMake/cdat_modules_extra/lxml_build_step.cmake.in
deleted file mode 100644
index dca0940b9..000000000
--- a/CMake/cdat_modules_extra/lxml_build_step.cmake.in
+++ /dev/null
@@ -1,19 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CFLAGS}")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CXXFLAGS}")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CPPFLAGS}")
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@LXML_SOURCE_DIR@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE LXML_OUT
-  OUTPUT_VARIABLE LXML_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("LXML Errors detected: \n${LXML_OUT}\n${LXML_ERR}")
-  message(FATAL_ERROR "Error in config of LXML")
-endif()
-message("lxml build worked.")
-
diff --git a/CMake/cdat_modules_extra/lxml_install_step.cmake.in b/CMake/cdat_modules_extra/lxml_install_step.cmake.in
deleted file mode 100644
index 21651e44e..000000000
--- a/CMake/cdat_modules_extra/lxml_install_step.cmake.in
+++ /dev/null
@@ -1,14 +0,0 @@
-message("Installing LXML:\n@LXML_PREFIX_ARGS@")
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@LXML_BINARY_DIR@"
-  RESULT_VARIABLE res
-)
-
-if(NOT ${res} EQUAL 0)
-  message("LXML Errors detected: \n${LXML_OUT}\n${LXML_ERR}")
-  message(FATAL_ERROR "Error in config of LXML")
-endif()
-message("lxml install succeeded.")
-
diff --git a/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in b/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in
deleted file mode 100644
index bb0102cf5..000000000
--- a/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@matplotlib_source_dir@"
-)
diff --git a/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in b/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in
deleted file mode 100644
index 6c28091a3..000000000
--- a/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in
+++ /dev/null
@@ -1,9 +0,0 @@
-set(INSTALL_DIR "@cdat_EXTERNALS@")
-
-configure_file(
-  "@cdat_CMAKE_SOURCE_DIR@/cdat_modules_extra/matplotlib_setup_cfg.in"
-  "@matplotlib_source_dir@/setup.cfg"
-  @ONLY
-)
-
-set(ENV{LD_LIBRARY_PATH} "${INSTALL_DIR}/lib;$ENV{LD_LIBRARY_PATH}")
diff --git a/CMake/cdat_modules_extra/matplotlib_setup_cfg.in b/CMake/cdat_modules_extra/matplotlib_setup_cfg.in
deleted file mode 100644
index 5dc914fe2..000000000
--- a/CMake/cdat_modules_extra/matplotlib_setup_cfg.in
+++ /dev/null
@@ -1,76 +0,0 @@
-# Rename this file to setup.cfg to modify matplotlib's
-# build options.
-
-[egg_info]
-
-[directories]
-# Uncomment to override the default basedir in setupext.py.
-# This can be a single directory or a space-delimited list of directories.
-# basedirlist = @INSTALL_DIR@
-
-[status]
-# To suppress display of the dependencies and their versions
-# at the top of the build log, uncomment the following line:
-#suppress = True
-#
-# Uncomment to insert lots of diagnostic prints in extension code
-#verbose = True
-
-[provide_packages]
-# By default, matplotlib checks for a few dependencies and
-# installs them if missing. This feature can be turned off
-# by uncommenting the following lines. Acceptible values are:
-#     True: install, overwrite an existing installation
-#     False: do not install
-#     auto: install only if the package is unavailable. This
-#           is the default behavior
-#
-## Date/timezone support:
-#pytz = False
-#dateutil = False
-
-[gui_support]
-# Matplotlib supports multiple GUI toolkits, including Cocoa,
-# GTK, Fltk, MacOSX, Qt, Qt4, Tk, and WX. Support for many of
-# these toolkits requires AGG, the Anti-Grain Geometry library,
-# which is provided by matplotlib and built by default.
-#
-# Some backends are written in pure Python, and others require
-# extension code to be compiled. By default, matplotlib checks
-# for these GUI toolkits during installation and, if present,
-# compiles the required extensions to support the toolkit. GTK
-# support requires the GTK runtime environment and PyGTK. Wx
-# support requires wxWidgets and wxPython. Tk support requires
-# Tk and Tkinter. The other GUI toolkits do not require any
-# extension code, and can be used as long as the libraries are
-# installed on your system.
-#
-# You can uncomment any the following lines if you know you do
-# not want to use the GUI toolkit. Acceptible values are:
-#     True: build the extension. Exits with a warning if the
-#           required dependencies are not available
-#     False: do not build the extension
-#     auto: build if the required dependencies are available,
-#           otherwise skip silently. This is the default
-#           behavior
-#
-gtk = False
-gtkagg = False
-tkagg = False
-macosx = False
-qt5agg = False
-
-[rc_options]
-# User-configurable options
-#
-# Default backend, one of: Agg, Cairo, CocoaAgg, GTK, GTKAgg, GTKCairo,
-# FltkAgg, MacOSX, Pdf, Ps, QtAgg, Qt4Agg, SVG, TkAgg, WX, WXAgg.
-#
-# The Agg, Ps, Pdf and SVG backends do not require external
-# dependencies. Do not choose GTK, GTKAgg, GTKCairo, MacOSX, TkAgg or WXAgg
-# if you have disabled the relevent extension modules.  Agg will be used
-# by default.
-#
-backend = @MATPLOTLIB_BACKEND@
-backend.qt4 = PyQt4
-#
diff --git a/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in b/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in
deleted file mode 100644
index 582bbbf9e..000000000
--- a/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-message("Installing mpi4py:\n@mpi4py_PREFIX_ARGS@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@mpi4py_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE mpi4py_OUT
-  OUTPUT_VARIABLE mpi4py_ERR
-)
-
-if(NOT ${res} EQUAL 0)
-  message("mpi4py Errors detected: \n${mpi4py_OUT}\n${mpi4py_ERR}")
-  message(FATAL_ERROR "Error in config of mpi4py")
-endif()
-message("Mpi4py install succeeded.")
diff --git a/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in b/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in
deleted file mode 100644
index 96f160201..000000000
--- a/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in
+++ /dev/null
@@ -1,20 +0,0 @@
-message("Building Mpi4py:\n@mpi4py_binary@")
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY "@mpi4py_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE mpi4py_OUT
-  OUTPUT_VARIABLE mpi4py_ERR)
-
-if(NOT ${res} EQUAL 0)
-  message("Mpi4py Errors detected: \n${mpi4py_OUT}\n${mpi4py_ERR}")
-  message(FATAL_ERROR "Error in config of mpi4py")
-endif()
-message("mpi4py build worked.")
diff --git a/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in b/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in
deleted file mode 100644
index e16a54148..000000000
--- a/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-  execute_process(
-    WORKING_DIRECTORY @netcdf_source@
-    COMMAND patch -p1
-    INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/netcdf_clang.patch
-  )
-
diff --git a/CMake/cdat_modules_extra/paraview_download.sh.in b/CMake/cdat_modules_extra/paraview_download.sh.in
deleted file mode 100755
index dee9d7f79..000000000
--- a/CMake/cdat_modules_extra/paraview_download.sh.in
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-cd @CMAKE_CURRENT_BINARY_DIR@/build/
-
-"@GIT_EXECUTABLE@" clone @PARAVIEW_SOURCE@
-cd ParaView
-"@GIT_EXECUTABLE@" checkout @paraview_branch@
-"@GIT_EXECUTABLE@" submodule init
-
-SUBMODULES=`git submodule status | sed 's/.* //' | sed ':a;N;$!ba;s/\n/ /g'`
-
-for SUBMODULE in $SUBMODULES
-do 
-  tmp=`git config  --get submodule.$SUBMODULE.url`
-  tmp=`echo $tmp | sed 's/@REPLACE_GIT_PROTOCOL_PREFIX@/@GIT_PROTOCOL_PREFIX@/g'`
-  git config "submodule.$SUBMODULE.url" $tmp
-done
-
-"@GIT_EXECUTABLE@" submodule update --recursive
diff --git a/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in b/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in
deleted file mode 100644
index aafa3a971..000000000
--- a/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in
+++ /dev/null
@@ -1,25 +0,0 @@
-
-set(ENV${CC} "@CMAKE_C_COMPILER@")
-set(ENV${CXX} "@CMAKE_CXX_COMPILER@")
-set(ENV${CPP} "@CMAKE_CXX_COMPILER@")
-
-set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@ @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib")
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_flags@ @cdat_external_include_directories@")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_cppflags@ @cdat_external_include_directories@")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_cxxflags@")
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-execute_process(
-  COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @ParaView_binary@/Utilities/VTKPythonWrapping
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in VTK Python Install")
-endif()
-
-message("Install succeeded.")
-
diff --git a/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in b/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in
deleted file mode 100644
index 09cbc2ad8..000000000
--- a/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in
+++ /dev/null
@@ -1,9 +0,0 @@
-
-set(EXTERNALS @pbmplus_install@)
-configure_file(@cdat_external_patch_dir@/src/pbmplus/Makefile.in
-  @pbmplus_source@/Makefile
-  @ONLY)
-
-configure_file(@cdat_external_patch_dir@/src/pbmplus/pnm/Makefile.in
-  ${pbmplus_source}/pnm/Makefile
-  @ONLY)
diff --git a/CMake/cdat_modules_extra/pmw_install_step.cmake.in b/CMake/cdat_modules_extra/pmw_install_step.cmake.in
deleted file mode 100644
index 769aa7454..000000000
--- a/CMake/cdat_modules_extra/pmw_install_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @Pmw_source@/src
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Install")
-endif()
-message("Install succeeded.")
diff --git a/CMake/cdat_modules_extra/pmw_make_step.cmake.in b/CMake/cdat_modules_extra/pmw_make_step.cmake.in
deleted file mode 100644
index a1d3f9759..000000000
--- a/CMake/cdat_modules_extra/pmw_make_step.cmake.in
+++ /dev/null
@@ -1,15 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY @Pmw_source@/src
-  OUTPUT_VARIABLE CDAT_OUT
-  ERROR_VARIABLE CDAT_ERR
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in Make")
-endif()
-message("Make succeeded.")
diff --git a/CMake/cdat_modules_extra/predownload.py.in b/CMake/cdat_modules_extra/predownload.py.in
deleted file mode 100755
index 3a3af9104..000000000
--- a/CMake/cdat_modules_extra/predownload.py.in
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/python
-
-import shlex
-import subprocess
-import urllib2
-import os
-
-fileName = "@PARTS_BUILT_INFO@"
-fetched_data = "@cdat_BINARY_DIR@/fetched_for_offline"
-try:
-    os.makedirs(fetched_data)
-except:
-    pass
-try:
-    os.makedirs(os.path.join(fetched_data,"contrib"))
-except:
-    pass
-
-
-def fetch(url,md5=None):
-    try:
-        import hashlib
-        HAS_HASHLIB=True
-    except:
-        HAS_HASHLIB=False
-    if md5 is None:
-        HAS_HASHLIB=False
-
-    print "Fetching: ",url
-    if HAS_HASHLIB:
-        print "Will control md5"
-    u = urllib2.urlopen(url)
-    nm = os.path.join(fetched_data,url.split("/")[-1])
-    f=open(nm,'w')
-    sz = 65536
-    if HAS_HASHLIB:
-        hsh =hashlib.md5()
-    buf = u.read(sz)
-    while len(buf)>0:
-        f.write(buf)
-        if HAS_HASHLIB:
-            hsh.update(buf)
-        buf=u.read(sz)
-    f.close()
-    if HAS_HASHLIB and hsh.hexdigest()!=md5:
-        raise Exception,"Error downloading file: %s, md5 does not match" % nm
-
-def processFile(name):
-    f=open(name)
-    for ln in f.xreadlines():
-        sp = ln.split()
-        nm = sp[0]
-        ver = sp[1]
-        try:
-            url = sp[2]
-        except:
-            url = None
-        try:
-            md5 = sp[3]
-        except:
-            md5 = None
-        try:
-            url2 = sp[4]
-        except:
-            url2 = None
-        try:
-            md5b = sp[5]
-        except:
-            md5b = None
-        if url=="N/A":
-            continue
-        elif url.find("git://")>-1 or url.strip()[-4:]==".git":
-            if md5 is None:
-                md5 = "master"
-            nm  = url.split("/")[-1][:-4]
-            cmd = "git clone --depth 1 -b %s %s %s/%s " % (md5,url,fetched_data,nm)
-            subprocess.Popen(shlex.split(cmd))
-        elif url is not None:
-            fetch(url,md5)
-        if url2 is not None:
-            fetch(url2,md5b)
-    ## Ok now does the git submodules
-    for c in ["eof2","windfield","sciMake","windspharm","eofs"]:
-        cmd = "cp -rf @cdat_SOURCE_DIR@/contrib/%s %s/contrib" % (c,fetched_data)
-        subprocess.Popen(shlex.split(cmd))
-if __name__ == "__main__":
-    processFile(fileName)
-
diff --git a/CMake/cdat_modules_extra/preofflinebuild.sh.in b/CMake/cdat_modules_extra/preofflinebuild.sh.in
deleted file mode 100755
index b42dacfde..000000000
--- a/CMake/cdat_modules_extra/preofflinebuild.sh.in
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-fetched_data="fetched_for_offline"
-cp ${fetched_data}/*gz ${fetched_data}/*bz2 ${fetched_data}/*zip @cdat_BINARY_DIR@
-cp -r ${fetched_data}/contrib @cdat_SOURCE_DIR@
-cp -rf ${fetched_data}/vistrails @CMAKE_INSTALL_PREFIX@
-cp -rf ${fetched_data}/paraview-*/* @cdat_BINARY_DIR@/build/ParaView
-tar -xf @cdat_BINARY_DIR@/visit*.gz -C @cdat_BINARY_DIR@
-rm -rf @cdat_BINARY_DIR@/build/VisIt
-mv @cdat_BINARY_DIR@/src @cdat_BINARY_DIR@/build/VisIt
-
diff --git a/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in b/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in
deleted file mode 100644
index 910bef7e2..000000000
--- a/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @PyOpenGL_source@
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in b/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in
deleted file mode 100644
index 41fe74e84..000000000
--- a/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY @PyOpenGL_source@
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in b/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in
deleted file mode 100644
index 54bf52d5f..000000000
--- a/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-  execute_process(
-    WORKING_DIRECTORY @pyspharm_source@
-    COMMAND patch 
-    INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/pyspharm_setup.patch
-  )
-
diff --git a/CMake/cdat_modules_extra/python_configure_step.cmake.in b/CMake/cdat_modules_extra/python_configure_step.cmake.in
deleted file mode 100644
index 27f653275..000000000
--- a/CMake/cdat_modules_extra/python_configure_step.cmake.in
+++ /dev/null
@@ -1,42 +0,0 @@
-CMAKE_POLICY(SET CMP0012 NEW)
-
-set(ENV${CC} "@CMAKE_C_COMPILER@")
-set(ENV${CXX} "@CMAKE_CXX_COMPILER@")
-set(ENV${CPP} "@CMAKE_CXX_COMPILER@")
-
-set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}")
-set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@  @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib @cdat_osx_ld_flags@ $ENV{LDFLAGS}")
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@  ${ADDITIONAL_CFLAGS} $ENV{CFLAGS}")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS} $ENV{CPPFLAGS}")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS} $ENV{CXXFLAGS}")
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-set(ENV{FC} "")
-set(ENV{FCFLAGS} "")
-set(ENV{FCLIBS} "")
-set(ENV{F77} "")
-set(ENV{FFLAGS} "")
-set(ENV{FLIBS} "")
-set(ENV{LD_X11} "") # for xgks
-set(ENV{OPT} "")
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-if(APPLE)
-  set(ENV{AQUA_CDAT} "no")
-  set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-  unset(ENV{MAKEFLAGS})
-  if(@_CURRENT_OSX_SDK_VERSION@ VERSION_LESS "10.11")
-    set(library_param --prefix=@CMAKE_INSTALL_PREFIX@ --with-system-expat --enable-framework=@CMAKE_INSTALL_PREFIX@/Library/Frameworks)
-  else()
-    set(library_param --prefix=@CMAKE_INSTALL_PREFIX@  --enable-framework=@CMAKE_INSTALL_PREFIX@/Library/Frameworks)
-  endif()
-elseif(UNIX)
-  set(library_param --prefix=@CMAKE_INSTALL_PREFIX@ --enable-shared --enable-unicode=ucs4)
-endif()
-
-EXECUTE_PROCESS(
-  COMMAND sh configure ${library_param}
-  WORKING_DIRECTORY "@python_SOURCE_DIR@"
-  )
diff --git a/CMake/cdat_modules_extra/python_install_step.cmake.in b/CMake/cdat_modules_extra/python_install_step.cmake.in
deleted file mode 100644
index 74a63d181..000000000
--- a/CMake/cdat_modules_extra/python_install_step.cmake.in
+++ /dev/null
@@ -1,51 +0,0 @@
-
-set(ENV${CC} "@CMAKE_C_COMPILER@")
-set(ENV${CXX} "@CMAKE_CXX_COMPILER@")
-set(ENV${CPP} "@CMAKE_CXX_COMPILER@")
-
-# During install for what ever reason python will fail if these are set.
-
-set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}")
-if(NOT APPLE)
-  set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-endif()
-set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@  @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib  @cdat_osx_ld_flags@")
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@  ${ADDITIONAL_CFLAGS}")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS}")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS}")
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-set(ENV{FC} "")
-set(ENV{FCFLAGS} "")
-set(ENV{FCLIBS} "")
-set(ENV{F77} "")
-set(ENV{FFLAGS} "")
-set(ENV{FLIBS} "")
-set(ENV{LD_X11} "") # for xgks
-set(ENV{OPT} "")
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-if(APPLE)
-  set(ENV{AQUA_CDAT} "no")
-  set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-  unset(ENV{MAKEFLAGS})
- 
-  EXECUTE_PROCESS(
-    COMMAND make frameworkinstallunixtools
-    WORKING_DIRECTORY "@python_SOURCE_DIR@"
-  )
-
-  EXECUTE_PROCESS(
-    COMMAND make frameworkinstall
-    WORKING_DIRECTORY "@python_SOURCE_DIR@"
-  )
-
-else()
-
-  EXECUTE_PROCESS(
-    COMMAND make install
-    WORKING_DIRECTORY "@python_SOURCE_DIR@"
-  )
-
-endif()
diff --git a/CMake/cdat_modules_extra/python_make_step.cmake.in b/CMake/cdat_modules_extra/python_make_step.cmake.in
deleted file mode 100644
index 674463f89..000000000
--- a/CMake/cdat_modules_extra/python_make_step.cmake.in
+++ /dev/null
@@ -1,34 +0,0 @@
-
-set(ENV${CC} "@CMAKE_C_COMPILER@")
-set(ENV${CXX} "@CMAKE_CXX_COMPILER@")
-set(ENV${CPP} "@CMAKE_CXX_COMPILER@")
-
-set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}")
-set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@  @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib  @cdat_osx_ld_flags@ $ENV{LDFLAGS}")
-set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@  ${ADDITIONAL_CFLAGS} $ENV{CFLAGS}")
-set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS} $ENV{CPPFLAGS}")
-set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS} $ENV{CXXFLAGS}")
-set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig")
-set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@")
-set(ENV{FC} "")
-set(ENV{FCFLAGS} "")
-set(ENV{FCLIBS} "")
-set(ENV{F77} "")
-set(ENV{FFLAGS} "")
-set(ENV{FLIBS} "")
-set(ENV{LD_X11} "") # for xgks
-set(ENV{OPT} "")
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-if(APPLE)
-  set(ENV{AQUA_CDAT} "no")
-  set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@")
-  unset(ENV{MAKEFLAGS})
-endif()
-
-EXECUTE_PROCESS( 
-  COMMAND make
-  WORKING_DIRECTORY "@python_SOURCE_DIR@"
-  )
diff --git a/CMake/cdat_modules_extra/python_patch_step.cmake.in b/CMake/cdat_modules_extra/python_patch_step.cmake.in
deleted file mode 100644
index ff2843efb..000000000
--- a/CMake/cdat_modules_extra/python_patch_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-execute_process(
-  COMMAND
-  "@CMAKE_COMMAND@" -E copy_if_different "@cdat_SOURCE_DIR@/pysrc/src/setup-@PYTHON_VERSION@.py" "@python_SOURCE_DIR@/setup.py"
-  RESULT_VARIABLE errcode
-)
-if("${errcode}" STREQUAL "0")
-    message(STATUS "setup.py replaced")
-else()
-    message(FATAL_ERROR "Replacing setup.py failed: ${errcode}")
-endif()
-
-execute_process(
-  COMMAND
-  "@CMAKE_COMMAND@" -E copy_if_different "@cdat_SOURCE_DIR@/pysrc/src/site-@PYTHON_VERSION@.py" "@python_SOURCE_DIR@/Lib/site.py"
-  RESULT_VARIABLE errcode
-)
-if("${errcode}" STREQUAL "0")
-    message(STATUS "site.py replaced")
-else()
-    message(FATAL_ERROR "Replacing site.py failed: ${errcode}")
-endif()
diff --git a/CMake/cdat_modules_extra/python_setup.py.in b/CMake/cdat_modules_extra/python_setup.py.in
deleted file mode 100644
index 106853088..000000000
--- a/CMake/cdat_modules_extra/python_setup.py.in
+++ /dev/null
@@ -1,1918 +0,0 @@
-# Autodetecting setup.py script for building the Python extensions
-#
-
-__version__ = "$Revision: 78785 $"
-
-import sys, os, imp, re, optparse
-from glob import glob
-from platform import machine as platform_machine
-
-from distutils import log
-from distutils import sysconfig
-from distutils import text_file
-from distutils.errors import *
-from distutils.core import Extension, setup
-from distutils.command.build_ext import build_ext
-from distutils.command.install import install
-from distutils.command.install_lib import install_lib
-
-# This global variable is used to hold the list of modules to be disabled.
-disabled_module_list = []
-
-def add_dir_to_list(dirlist, dir):
-    """Add the directory 'dir' to the list 'dirlist' (at the front) if
-    1) 'dir' is not already in 'dirlist'
-    2) 'dir' actually exists, and is a directory."""
-    if dir is not None and os.path.isdir(dir) and dir not in dirlist:
-        dirlist.insert(0, dir)
-
-def find_file(filename, std_dirs, paths):
-    """Searches for the directory where a given file is located,
-    and returns a possibly-empty list of additional directories, or None
-    if the file couldn't be found at all.
-
-    'filename' is the name of a file, such as readline.h or libcrypto.a.
-    'std_dirs' is the list of standard system directories; if the
-        file is found in one of them, no additional directives are needed.
-    'paths' is a list of additional locations to check; if the file is
-        found in one of them, the resulting list will contain the directory.
-    """
-
-    # Check the standard locations
-    for dir in std_dirs:
-        f = os.path.join(dir, filename)
-        if os.path.exists(f): return []
-
-    # Check the additional directories
-    for dir in paths:
-        f = os.path.join(dir, filename)
-        if os.path.exists(f):
-            return [dir]
-
-    # Not found anywhere
-    return None
-
-def find_library_file(compiler, libname, std_dirs, paths):
-    result = compiler.find_library_file(std_dirs + paths, libname)
-    if result is None:
-        return None
-
-    # Check whether the found file is in one of the standard directories
-    dirname = os.path.dirname(result)
-    for p in std_dirs:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-        if p == dirname:
-            return [ ]
-
-    # Otherwise, it must have been in one of the additional directories,
-    # so we have to figure out which one.
-    for p in paths:
-        # Ensure path doesn't end with path separator
-        p = p.rstrip(os.sep)
-        if p == dirname:
-            return [p]
-    else:
-        assert False, "Internal error: Path not found in std_dirs or paths"
-
-def module_enabled(extlist, modname):
-    """Returns whether the module 'modname' is present in the list
-    of extensions 'extlist'."""
-    extlist = [ext for ext in extlist if ext.name == modname]
-    return len(extlist)
-
-def find_module_file(module, dirlist):
-    """Find a module in a set of possible folders. If it is not found
-    return the unadorned filename"""
-    list = find_file(module, [], dirlist)
-    if not list:
-        return module
-    if len(list) > 1:
-        log.info("WARNING: multiple copies of %s found"%module)
-    return os.path.join(list[0], module)
-
-class PyBuildExt(build_ext):
-
-    def __init__(self, dist):
-        build_ext.__init__(self, dist)
-        self.failed = []
-
-    def build_extensions(self):
-
-        # Detect which modules should be compiled
-        missing = self.detect_modules()
-
-        # Remove modules that are present on the disabled list
-        extensions = [ext for ext in self.extensions
-                      if ext.name not in disabled_module_list]
-        # move ctypes to the end, it depends on other modules
-        ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
-        if "_ctypes" in ext_map:
-            ctypes = extensions.pop(ext_map["_ctypes"])
-            extensions.append(ctypes)
-        self.extensions = extensions
-
-        # Fix up the autodetected modules, prefixing all the source files
-        # with Modules/ and adding Python's include directory to the path.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        if not srcdir:
-            # Maybe running on Windows but not using CYGWIN?
-            raise ValueError("No source directory; cannot proceed.")
-
-        # Figure out the location of the source code for extension modules
-        # (This logic is copied in distutils.test.test_sysconfig,
-        # so building in a separate directory does not break test_distutils.)
-        moddir = os.path.join(os.getcwd(), srcdir, 'Modules')
-        moddir = os.path.normpath(moddir)
-        srcdir, tail = os.path.split(moddir)
-        srcdir = os.path.normpath(srcdir)
-        moddir = os.path.normpath(moddir)
-
-        moddirlist = [moddir]
-        incdirlist = ['./Include']
-
-        # Platform-dependent module source and include directories
-        platform = self.get_platform()
-        if platform in ('darwin', 'mac') and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X also includes some mac-specific modules
-            macmoddir = os.path.join(os.getcwd(), srcdir, 'Mac/Modules')
-            moddirlist.append(macmoddir)
-            incdirlist.append('./Mac/Include')
-
-        alldirlist = moddirlist + incdirlist
-
-        # Fix up the paths for scripts, too
-        self.distribution.scripts = [os.path.join(srcdir, filename)
-                                     for filename in self.distribution.scripts]
-
-        # Python header files
-        headers = glob("Include/*.h") + ["pyconfig.h"]
-
-        for ext in self.extensions[:]:
-            ext.sources = [ find_module_file(filename, moddirlist)
-                            for filename in ext.sources ]
-            if ext.depends is not None:
-                ext.depends = [find_module_file(filename, alldirlist)
-                               for filename in ext.depends]
-            else:
-                ext.depends = []
-            # re-compile extensions if a header file has been changed
-            ext.depends.extend(headers)
-
-            ext.include_dirs.append( '.' ) # to get config.h
-            for incdir in incdirlist:
-                ext.include_dirs.append( os.path.join(srcdir, incdir) )
-
-            # If a module has already been built statically,
-            # don't build it here
-            if ext.name in sys.builtin_module_names:
-                self.extensions.remove(ext)
-
-        if platform != 'mac':
-            # Parse Modules/Setup and Modules/Setup.local to figure out which
-            # modules are turned on in the file.
-            remove_modules = []
-            for filename in ('Modules/Setup', 'Modules/Setup.local'):
-                input = text_file.TextFile(filename, join_lines=1)
-                while 1:
-                    line = input.readline()
-                    if not line: break
-                    line = line.split()
-                    remove_modules.append(line[0])
-                input.close()
-
-            for ext in self.extensions[:]:
-                if ext.name in remove_modules:
-                    self.extensions.remove(ext)
-
-        # When you run "make CC=altcc" or something similar, you really want
-        # those environment variables passed into the setup.py phase.  Here's
-        # a small set of useful ones.
-        compiler = os.environ.get('CC')
-        args = {}
-        # unfortunately, distutils doesn't let us provide separate C and C++
-        # compilers
-        if compiler is not None:
-            (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
-            args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
-        self.compiler.set_executables(**args)
-
-        build_ext.build_extensions(self)
-
-        longest = max([len(e.name) for e in self.extensions])
-        if self.failed:
-            longest = max(longest, max([len(name) for name in self.failed]))
-
-        def print_three_column(lst):
-            lst.sort(key=str.lower)
-            # guarantee zip() doesn't drop anything
-            while len(lst) % 3:
-                lst.append("")
-            for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
-                print "%-*s   %-*s   %-*s" % (longest, e, longest, f,
-                                              longest, g)
-
-        if missing:
-            print
-            print "Failed to find the necessary bits to build these modules:"
-            print_three_column(missing)
-            print ("To find the necessary bits, look in setup.py in"
-                   " detect_modules() for the module's name.")
-            print
-
-        if self.failed:
-            failed = self.failed[:]
-            print
-            print "Failed to build these modules:"
-            print_three_column(failed)
-            print
-
-    def build_extension(self, ext):
-
-        if ext.name == '_ctypes':
-            if not self.configure_ctypes(ext):
-                return
-
-        try:
-            build_ext.build_extension(self, ext)
-        except (CCompilerError, DistutilsError), why:
-            self.announce('WARNING: building of extension "%s" failed: %s' %
-                          (ext.name, sys.exc_info()[1]))
-            self.failed.append(ext.name)
-            return
-        # Workaround for Mac OS X: The Carbon-based modules cannot be
-        # reliably imported into a command-line Python
-        if 'Carbon' in ext.extra_link_args:
-            self.announce(
-                'WARNING: skipping import check for Carbon-based "%s"' %
-                ext.name)
-            return
-
-        if self.get_platform() == 'darwin' and (
-                sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
-            # Don't bother doing an import check when an extension was
-            # build with an explicit '-arch' flag on OSX. That's currently
-            # only used to build 32-bit only extensions in a 4-way
-            # universal build and loading 32-bit code into a 64-bit
-            # process will fail.
-            self.announce(
-                'WARNING: skipping import check for "%s"' %
-                ext.name)
-            return
-
-        # Workaround for Cygwin: Cygwin currently has fork issues when many
-        # modules have been imported
-        if self.get_platform() == 'cygwin':
-            self.announce('WARNING: skipping import check for Cygwin-based "%s"'
-                % ext.name)
-            return
-        ext_filename = os.path.join(
-            self.build_lib,
-            self.get_ext_filename(self.get_ext_fullname(ext.name)))
-        try:
-            imp.load_dynamic(ext.name, ext_filename)
-        except ImportError, why:
-            self.failed.append(ext.name)
-            self.announce('*** WARNING: renaming "%s" since importing it'
-                          ' failed: %s' % (ext.name, why), level=3)
-            assert not self.inplace
-            basename, tail = os.path.splitext(ext_filename)
-            newname = basename + "_failed" + tail
-            if os.path.exists(newname):
-                os.remove(newname)
-            os.rename(ext_filename, newname)
-
-            # XXX -- This relies on a Vile HACK in
-            # distutils.command.build_ext.build_extension().  The
-            # _built_objects attribute is stored there strictly for
-            # use here.
-            # If there is a failure, _built_objects may not be there,
-            # so catch the AttributeError and move on.
-            try:
-                for filename in self._built_objects:
-                    os.remove(filename)
-            except AttributeError:
-                self.announce('unable to remove files (ignored)')
-        except:
-            exc_type, why, tb = sys.exc_info()
-            self.announce('*** WARNING: importing extension "%s" '
-                          'failed with %s: %s' % (ext.name, exc_type, why),
-                          level=3)
-            self.failed.append(ext.name)
-
-    def get_platform(self):
-        # Get value of sys.platform
-        for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']:
-            if sys.platform.startswith(platform):
-                return platform
-        return sys.platform
-
-    def detect_modules(self):
-        # Ensure that /usr/local is always used
-        add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
-        add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
-
-        # Add paths specified in the environment variables LDFLAGS and
-        # CPPFLAGS for header and library files.
-        # We must get the values from the Makefile and not the environment
-        # directly since an inconsistently reproducible issue comes up where
-        # the environment variable is not set even though the value were passed
-        # into configure and stored in the Makefile (issue found on OS X 10.3).
-        for env_var, arg_name, dir_list in (
-                ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
-                ('LDFLAGS', '-L', self.compiler.library_dirs),
-                ('CPPFLAGS', '-I', self.compiler.include_dirs)):
-            env_val = sysconfig.get_config_var(env_var)
-            if env_val:
-                # To prevent optparse from raising an exception about any
-                # options in env_val that it doesn't know about we strip out
-                # all double dashes and any dashes followed by a character
-                # that is not for the option we are dealing with.
-                #
-                # Please note that order of the regex is important!  We must
-                # strip out double-dashes first so that we don't end up with
-                # substituting "--Long" to "-Long" and thus lead to "ong" being
-                # used for a library directory.
-                env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
-                                 ' ', env_val)
-                parser = optparse.OptionParser()
-                # Make sure that allowing args interspersed with options is
-                # allowed
-                parser.allow_interspersed_args = True
-                parser.error = lambda msg: None
-                parser.add_option(arg_name, dest="dirs", action="append")
-                options = parser.parse_args(env_val.split())[0]
-                if options.dirs:
-                    for directory in reversed(options.dirs):
-                        add_dir_to_list(dir_list, directory)
-
-        if os.path.normpath(sys.prefix) != '/usr':
-            add_dir_to_list(self.compiler.library_dirs,
-                            sysconfig.get_config_var("LIBDIR"))
-            add_dir_to_list(self.compiler.include_dirs,
-                            sysconfig.get_config_var("INCLUDEDIR"))
-
-        try:
-            have_unicode = unicode
-        except NameError:
-            have_unicode = 0
-
-        # lib_dirs and inc_dirs are used to search for files;
-        # if a file is found in one of those directories, it can
-        # be assumed that no additional -I,-L directives are needed.
-        lib_dirs = self.compiler.library_dirs + [
-            '/lib64', '/usr/lib64',
-            '/lib', '/usr/lib',
-            ]
-        inc_dirs = self.compiler.include_dirs + ['/usr/include']
-        exts = []
-        missing = []
-
-        config_h = sysconfig.get_config_h_filename()
-        config_h_vars = sysconfig.parse_config_h(open(config_h))
-
-        platform = self.get_platform()
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-
-        # Check for AtheOS which has libraries in non-standard locations
-        if platform == 'atheos':
-            lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
-            lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
-            inc_dirs += ['/system/include', '/atheos/autolnk/include']
-            inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
-
-        # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
-        if platform in ['osf1', 'unixware7', 'openunix8']:
-            lib_dirs += ['/usr/ccs/lib']
-
-        if platform == 'darwin':
-            # This should work on any unixy platform ;-)
-            # If the user has bothered specifying additional -I and -L flags
-            # in OPT and LDFLAGS we might as well use them here.
-            #   NOTE: using shlex.split would technically be more correct, but
-            # also gives a bootstrap problem. Let's hope nobody uses directories
-            # with whitespace in the name to store libraries.
-            cflags, ldflags = sysconfig.get_config_vars(
-                    'CFLAGS', 'LDFLAGS')
-            for item in cflags.split():
-                if item.startswith('-I'):
-                    inc_dirs.append(item[2:])
-
-            for item in ldflags.split():
-                if item.startswith('-L'):
-                    lib_dirs.append(item[2:])
-
-        # Check for MacOS X, which doesn't need libm.a at all
-        math_libs = ['m']
-        if platform in ['darwin', 'beos', 'mac']:
-            math_libs = []
-
-        # XXX Omitted modules: gl, pure, dl, SGI-specific modules
-
-        #
-        # The following modules are all pretty straightforward, and compile
-        # on pretty much any POSIXish platform.
-        #
-
-        # Some modules that are normally always on:
-        exts.append( Extension('_weakref', ['_weakref.c']) )
-
-        # array objects
-        exts.append( Extension('array', ['arraymodule.c']) )
-        # complex math library functions
-        exts.append( Extension('cmath', ['cmathmodule.c'],
-                               libraries=math_libs) )
-
-        # math library functions, e.g. sin()
-        exts.append( Extension('math',  ['mathmodule.c'],
-                               libraries=math_libs) )
-        # fast string operations implemented in C
-        exts.append( Extension('strop', ['stropmodule.c']) )
-        # time operations and variables
-        exts.append( Extension('time', ['timemodule.c'],
-                               libraries=math_libs) )
-        exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
-                               libraries=math_libs) )
-        # fast iterator tools implemented in C
-        exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
-        # code that will be builtins in the future, but conflict with the
-        #  current builtins
-        exts.append( Extension('future_builtins', ['future_builtins.c']) )
-        # random number generator implemented in C
-        exts.append( Extension("_random", ["_randommodule.c"]) )
-        # high-performance collections
-        exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
-        # bisect
-        exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
-        # heapq
-        exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
-        # operator.add() and similar goodies
-        exts.append( Extension('operator', ['operator.c']) )
-        # Python 3.0 _fileio module
-        exts.append( Extension("_fileio", ["_fileio.c"]) )
-        # Python 3.0 _bytesio module
-        exts.append( Extension("_bytesio", ["_bytesio.c"]) )
-        # _functools
-        exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
-        # _json speedups
-        exts.append( Extension("_json", ["_json.c"]) )
-        # Python C API test module
-        exts.append( Extension('_testcapi', ['_testcapimodule.c'],
-                               depends=['testcapi_long.h']) )
-        # profilers (_lsprof is for cProfile.py)
-        exts.append( Extension('_hotshot', ['_hotshot.c']) )
-        exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
-        # static Unicode character database
-        if have_unicode:
-            exts.append( Extension('unicodedata', ['unicodedata.c']) )
-        else:
-            missing.append('unicodedata')
-        # access to ISO C locale support
-        data = open('pyconfig.h').read()
-        m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
-        if m is not None:
-            locale_libs = ['intl']
-        else:
-            locale_libs = []
-        if platform == 'darwin':
-            locale_extra_link_args = ['-framework', 'CoreFoundation']
-        else:
-            locale_extra_link_args = []
-
-
-        exts.append( Extension('_locale', ['_localemodule.c'],
-                               libraries=locale_libs,
-                               extra_link_args=locale_extra_link_args) )
-
-        # Modules with some UNIX dependencies -- on by default:
-        # (If you have a really backward UNIX, select and socket may not be
-        # supported...)
-
-        # fcntl(2) and ioctl(2)
-        exts.append( Extension('fcntl', ['fcntlmodule.c']) )
-        if platform not in ['mac']:
-            # pwd(3)
-            exts.append( Extension('pwd', ['pwdmodule.c']) )
-            # grp(3)
-            exts.append( Extension('grp', ['grpmodule.c']) )
-            # spwd, shadow passwords
-            if (config_h_vars.get('HAVE_GETSPNAM', False) or
-                    config_h_vars.get('HAVE_GETSPENT', False)):
-                exts.append( Extension('spwd', ['spwdmodule.c']) )
-            else:
-                missing.append('spwd')
-        else:
-            missing.extend(['pwd', 'grp', 'spwd'])
-
-        # select(2); not on ancient System V
-        exts.append( Extension('select', ['selectmodule.c']) )
-
-        # Fred Drake's interface to the Python parser
-        exts.append( Extension('parser', ['parsermodule.c']) )
-
-        # cStringIO and cPickle
-        exts.append( Extension('cStringIO', ['cStringIO.c']) )
-        exts.append( Extension('cPickle', ['cPickle.c']) )
-
-        # Memory-mapped files (also works on Win32).
-        if platform not in ['atheos', 'mac']:
-            exts.append( Extension('mmap', ['mmapmodule.c']) )
-        else:
-            missing.append('mmap')
-
-        # Lance Ellinghaus's syslog module
-        if platform not in ['mac']:
-            # syslog daemon interface
-            exts.append( Extension('syslog', ['syslogmodule.c']) )
-        else:
-            missing.append('syslog')
-
-        # George Neville-Neil's timing module:
-        # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
-        # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
-        #exts.append( Extension('timing', ['timingmodule.c']) )
-
-        #
-        # Here ends the simple stuff.  From here on, modules need certain
-        # libraries, are platform-specific, or present other surprises.
-        #
-
-        # Multimedia modules
-        # These don't work for 64-bit platforms!!!
-        # These represent audio samples or images as strings:
-
-        # Operations on audio samples
-        # According to #993173, this one should actually work fine on
-        # 64-bit platforms.
-        exts.append( Extension('audioop', ['audioop.c']) )
-
-        # Disabled on 64-bit platforms
-        if sys.maxint != 9223372036854775807L:
-            # Operations on images
-            exts.append( Extension('imageop', ['imageop.c']) )
-        else:
-            missing.extend(['imageop'])
-
-        # readline
-        do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
-        if platform == 'darwin':
-            os_release = int(os.uname()[2].split('.')[0])
-            dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
-            if dep_target and dep_target.split('.') < ['10', '5']:
-                os_release = 8
-            if os_release < 9:
-                # MacOSX 10.4 has a broken readline. Don't try to build
-                # the readline module unless the user has installed a fixed
-                # readline package
-                if find_file('readline/rlconf.h', inc_dirs, []) is None:
-                    do_readline = False
-        if do_readline:
-            if platform == 'darwin' and os_release < 9:
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entiry path.
-                # This way a staticly linked custom readline gets picked up
-                # before the (broken) dynamic library in /usr/lib.
-                readline_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                readline_extra_link_args = ()
-
-            readline_libs = ['readline']
-            if self.compiler.find_library_file(lib_dirs,
-                                                 'ncursesw'):
-                readline_libs.append('ncursesw')
-            elif self.compiler.find_library_file(lib_dirs,
-                                                 'ncurses'):
-                readline_libs.append('ncurses')
-            elif self.compiler.find_library_file(lib_dirs, 'curses'):
-                readline_libs.append('curses')
-            elif self.compiler.find_library_file(lib_dirs +
-                                               ['/usr/lib/termcap'],
-                                               'termcap'):
-                readline_libs.append('termcap')
-            exts.append( Extension('readline', ['readline.c'],
-                                   library_dirs=['/usr/lib/termcap'],
-                                   extra_link_args=readline_extra_link_args,
-                                   libraries=readline_libs) )
-        else:
-            missing.append('readline')
-
-        if platform not in ['mac']:
-            # crypt module.
-
-            if self.compiler.find_library_file(lib_dirs, 'crypt'):
-                libs = ['crypt']
-            else:
-                libs = []
-            exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
-        else:
-            missing.append('crypt')
-
-        # CSV files
-        exts.append( Extension('_csv', ['_csv.c']) )
-
-        # socket(2)
-        exts.append( Extension('_socket', ['socketmodule.c'],
-                               depends = ['socketmodule.h']) )
-        # Detect SSL support for the socket module (via _ssl)
-        search_for_ssl_incs_in = [
-                              '/usr/local/ssl/include',
-                              '/usr/contrib/ssl/include/'
-                             ]
-        ssl_incs = find_file('openssl/ssl.h', inc_dirs,
-                             search_for_ssl_incs_in
-                             )
-        if ssl_incs is not None:
-            krb5_h = find_file('krb5.h', inc_dirs,
-                               ['/usr/kerberos/include'])
-            if krb5_h:
-                ssl_incs += krb5_h
-        ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
-                                     ['/usr/local/ssl/lib',
-                                      '/usr/contrib/ssl/lib/'
-                                     ] )
-
-        if (ssl_incs is not None and
-            ssl_libs is not None):
-            exts.append( Extension('_ssl', ['_ssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto'],
-                                   depends = ['socketmodule.h']), )
-        else:
-            missing.append('_ssl')
-
-        # find out which version of OpenSSL we have
-        openssl_ver = 0
-        openssl_ver_re = re.compile(
-            '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
-        for ssl_inc_dir in inc_dirs + search_for_ssl_incs_in:
-            name = os.path.join(ssl_inc_dir, 'openssl', 'opensslv.h')
-            if os.path.isfile(name):
-                try:
-                    incfile = open(name, 'r')
-                    for line in incfile:
-                        m = openssl_ver_re.match(line)
-                        if m:
-                            openssl_ver = eval(m.group(1))
-                            break
-                except IOError:
-                    pass
-
-            # first version found is what we'll use (as the compiler should)
-            if openssl_ver:
-                break
-
-        #print 'openssl_ver = 0x%08x' % openssl_ver
-
-        if (ssl_incs is not None and
-            ssl_libs is not None and
-            openssl_ver >= 0x00907000):
-            # The _hashlib module wraps optimized implementations
-            # of hash functions from the OpenSSL library.
-            exts.append( Extension('_hashlib', ['_hashopenssl.c'],
-                                   include_dirs = ssl_incs,
-                                   library_dirs = ssl_libs,
-                                   libraries = ['ssl', 'crypto']) )
-            # these aren't strictly missing since they are unneeded.
-            #missing.extend(['_sha', '_md5'])
-        else:
-            # The _sha module implements the SHA1 hash algorithm.
-            exts.append( Extension('_sha', ['shamodule.c']) )
-            # The _md5 module implements the RSA Data Security, Inc. MD5
-            # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5.c and md5.h are included here.
-            exts.append( Extension('_md5',
-                            sources = ['md5module.c', 'md5.c'],
-                            depends = ['md5.h']) )
-            missing.append('_hashlib')
-
-        if (openssl_ver < 0x00908000):
-            # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
-            exts.append( Extension('_sha256', ['sha256module.c']) )
-            exts.append( Extension('_sha512', ['sha512module.c']) )
-
-        # Modules that provide persistent dictionary-like semantics.  You will
-        # probably want to arrange for at least one of them to be available on
-        # your machine, though none are defined by default because of library
-        # dependencies.  The Python module anydbm.py provides an
-        # implementation independent wrapper for these; dumbdbm.py provides
-        # similar functionality (but slower of course) implemented in Python.
-
-        # Sleepycat^WOracle Berkeley DB interface.
-        #  http://www.oracle.com/database/berkeley-db/db/index.html
-        #
-        # This requires the Sleepycat^WOracle DB code. The supported versions
-        # are set below.  Visit the URL above to download
-        # a release.  Most open source OSes come with one or more
-        # versions of BerkeleyDB already installed.
-
-        max_db_ver = (4, 7)
-        min_db_ver = (3, 3)
-        db_setup_debug = False   # verbose debug prints from this script?
-
-        def allow_db_ver(db_ver):
-            """Returns a boolean if the given BerkeleyDB version is acceptable.
-
-            Args:
-              db_ver: A tuple of the version to verify.
-            """
-            if not (min_db_ver <= db_ver <= max_db_ver):
-                return False
-            # Use this function to filter out known bad configurations.
-            if (4, 6) == db_ver[:2]:
-                # BerkeleyDB 4.6.x is not stable on many architectures.
-                arch = platform_machine()
-                if arch not in ('i386', 'i486', 'i586', 'i686',
-                                'x86_64', 'ia64'):
-                    return False
-            return True
-
-        def gen_db_minor_ver_nums(major):
-            if major == 4:
-                for x in range(max_db_ver[1]+1):
-                    if allow_db_ver((4, x)):
-                        yield x
-            elif major == 3:
-                for x in (3,):
-                    if allow_db_ver((3, x)):
-                        yield x
-            else:
-                raise ValueError("unknown major BerkeleyDB version", major)
-
-        # construct a list of paths to look for the header file in on
-        # top of the normal inc_dirs.
-        db_inc_paths = [
-            '/usr/include/db4',
-            '/usr/local/include/db4',
-            '/opt/sfw/include/db4',
-            '/usr/include/db3',
-            '/usr/local/include/db3',
-            '/opt/sfw/include/db3',
-            # Fink defaults (http://fink.sourceforge.net/)
-            '/sw/include/db4',
-            '/sw/include/db3',
-        ]
-        # 4.x minor number specific paths
-        for x in gen_db_minor_ver_nums(4):
-            db_inc_paths.append('/usr/include/db4%d' % x)
-            db_inc_paths.append('/usr/include/db4.%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db4%d' % x)
-            db_inc_paths.append('/pkg/db-4.%d/include' % x)
-            db_inc_paths.append('/opt/db-4.%d/include' % x)
-            # MacPorts default (http://www.macports.org/)
-            db_inc_paths.append('/opt/local/include/db4%d' % x)
-        # 3.x minor number specific paths
-        for x in gen_db_minor_ver_nums(3):
-            db_inc_paths.append('/usr/include/db3%d' % x)
-            db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
-            db_inc_paths.append('/usr/local/include/db3%d' % x)
-            db_inc_paths.append('/pkg/db-3.%d/include' % x)
-            db_inc_paths.append('/opt/db-3.%d/include' % x)
-
-        # Add some common subdirectories for Sleepycat DB to the list,
-        # based on the standard include directories. This way DB3/4 gets
-        # picked up when it is installed in a non-standard prefix and
-        # the user has added that prefix into inc_dirs.
-        std_variants = []
-        for dn in inc_dirs:
-            std_variants.append(os.path.join(dn, 'db3'))
-            std_variants.append(os.path.join(dn, 'db4'))
-            for x in gen_db_minor_ver_nums(4):
-                std_variants.append(os.path.join(dn, "db4%d"%x))
-                std_variants.append(os.path.join(dn, "db4.%d"%x))
-            for x in gen_db_minor_ver_nums(3):
-                std_variants.append(os.path.join(dn, "db3%d"%x))
-                std_variants.append(os.path.join(dn, "db3.%d"%x))
-
-        db_inc_paths = std_variants + db_inc_paths
-        db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
-
-        db_ver_inc_map = {}
-
-        class db_found(Exception): pass
-        try:
-            # See whether there is a Sleepycat header in the standard
-            # search path.
-            for d in inc_dirs + db_inc_paths:
-                f = os.path.join(d, "db.h")
-                if db_setup_debug: print "db: looking for db.h in", f
-                if os.path.exists(f):
-                    f = open(f).read()
-                    m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
-                    if m:
-                        db_major = int(m.group(1))
-                        m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
-                        db_minor = int(m.group(1))
-                        db_ver = (db_major, db_minor)
-
-                        # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
-                        if db_ver == (4, 6):
-                            m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
-                            db_patch = int(m.group(1))
-                            if db_patch < 21:
-                                print "db.h:", db_ver, "patch", db_patch,
-                                print "being ignored (4.6.x must be >= 4.6.21)"
-                                continue
-
-                        if ( (db_ver not in db_ver_inc_map) and
-                            allow_db_ver(db_ver) ):
-                            # save the include directory with the db.h version
-                            # (first occurrence only)
-                            db_ver_inc_map[db_ver] = d
-                            if db_setup_debug:
-                                print "db.h: found", db_ver, "in", d
-                        else:
-                            # we already found a header for this library version
-                            if db_setup_debug: print "db.h: ignoring", d
-                    else:
-                        # ignore this header, it didn't contain a version number
-                        if db_setup_debug:
-                            print "db.h: no version number version in", d
-
-            db_found_vers = db_ver_inc_map.keys()
-            db_found_vers.sort()
-
-            while db_found_vers:
-                db_ver = db_found_vers.pop()
-                db_incdir = db_ver_inc_map[db_ver]
-
-                # check lib directories parallel to the location of the header
-                db_dirs_to_check = [
-                    db_incdir.replace("include", 'lib64'),
-                    db_incdir.replace("include", 'lib'),
-                ]
-                db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
-
-                # Look for a version specific db-X.Y before an ambiguoius dbX
-                # XXX should we -ever- look for a dbX name?  Do any
-                # systems really not name their library by version and
-                # symlink to more general names?
-                for dblib in (('db-%d.%d' % db_ver),
-                              ('db%d%d' % db_ver),
-                              ('db%d' % db_ver[0])):
-                    dblib_file = self.compiler.find_library_file(
-                                    db_dirs_to_check + lib_dirs, dblib )
-                    if dblib_file:
-                        dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
-                        raise db_found
-                    else:
-                        if db_setup_debug: print "db lib: ", dblib, "not found"
-
-        except db_found:
-            if db_setup_debug:
-                print "bsddb using BerkeleyDB lib:", db_ver, dblib
-                print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
-            db_incs = [db_incdir]
-            dblibs = [dblib]
-            # We add the runtime_library_dirs argument because the
-            # BerkeleyDB lib we're linking against often isn't in the
-            # system dynamic library search path.  This is usually
-            # correct and most trouble free, but may cause problems in
-            # some unusual system configurations (e.g. the directory
-            # is on an NFS server that goes away).
-            exts.append(Extension('_bsddb', ['_bsddb.c'],
-                                  depends = ['bsddb.h'],
-                                  library_dirs=dblib_dir,
-                                  runtime_library_dirs=dblib_dir,
-                                  include_dirs=db_incs,
-                                  libraries=dblibs))
-        else:
-            if db_setup_debug: print "db: no appropriate library found"
-            db_incs = None
-            dblibs = []
-            dblib_dir = None
-            missing.append('_bsddb')
-
-        # The sqlite interface
-        sqlite_setup_debug = False   # verbose debug prints from this script?
-
-        # We hunt for #define SQLITE_VERSION "n.n.n"
-        # We need to find >= sqlite version 3.0.8
-        sqlite_incdir = sqlite_libdir = None
-        sqlite_inc_paths = [ '/usr/include',
-                             '/usr/include/sqlite',
-                             '/usr/include/sqlite3',
-                             '/usr/local/include',
-                             '/usr/local/include/sqlite',
-                             '/usr/local/include/sqlite3',
-                           ]
-        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
-        MIN_SQLITE_VERSION = ".".join([str(x)
-                                    for x in MIN_SQLITE_VERSION_NUMBER])
-
-        # Scan the default include directories before the SQLite specific
-        # ones. This allows one to override the copy of sqlite on OSX,
-        # where /usr/include contains an old version of sqlite.
-        for d in inc_dirs + sqlite_inc_paths:
-            f = os.path.join(d, "sqlite3.h")
-            if os.path.exists(f):
-                if sqlite_setup_debug: print "sqlite: found %s"%f
-                incf = open(f).read()
-                m = re.search(
-                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf)
-                if m:
-                    sqlite_version = m.group(1)
-                    sqlite_version_tuple = tuple([int(x)
-                                        for x in sqlite_version.split(".")])
-                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
-                        # we win!
-                        if sqlite_setup_debug:
-                            print "%s/sqlite3.h: version %s"%(d, sqlite_version)
-                        sqlite_incdir = d
-                        break
-                    else:
-                        if sqlite_setup_debug:
-                            print "%s: version %d is too old, need >= %s"%(d,
-                                        sqlite_version, MIN_SQLITE_VERSION)
-                elif sqlite_setup_debug:
-                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
-
-        if sqlite_incdir:
-            sqlite_dirs_to_check = [
-                os.path.join(sqlite_incdir, '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', 'lib'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
-                os.path.join(sqlite_incdir, '..', '..', 'lib'),
-            ]
-            sqlite_libfile = self.compiler.find_library_file(
-                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
-            if sqlite_libfile:
-                sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
-
-        if sqlite_incdir and sqlite_libdir:
-            sqlite_srcs = ['_sqlite/cache.c',
-                '_sqlite/connection.c',
-                '_sqlite/cursor.c',
-                '_sqlite/microprotocols.c',
-                '_sqlite/module.c',
-                '_sqlite/prepare_protocol.c',
-                '_sqlite/row.c',
-                '_sqlite/statement.c',
-                '_sqlite/util.c', ]
-
-            sqlite_defines = []
-            if sys.platform != "win32":
-                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
-            else:
-                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
-
-
-            if sys.platform == 'darwin':
-                # In every directory on the search path search for a dynamic
-                # library and then a static library, instead of first looking
-                # for dynamic libraries on the entiry path.
-                # This way a staticly linked custom sqlite gets picked up
-                # before the dynamic library in /usr/lib.
-                sqlite_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                sqlite_extra_link_args = ()
-
-            exts.append(Extension('_sqlite3', sqlite_srcs,
-                                  define_macros=sqlite_defines,
-                                  include_dirs=["Modules/_sqlite",
-                                                sqlite_incdir],
-                                  library_dirs=sqlite_libdir,
-                                  runtime_library_dirs=sqlite_libdir,
-                                  extra_link_args=sqlite_extra_link_args,
-                                  libraries=["sqlite3",]))
-        else:
-            missing.append('_sqlite3')
-
-        # Look for Berkeley db 1.85.   Note that it is built as a different
-        # module name so it can be included even when later versions are
-        # available.  A very restrictive search is performed to avoid
-        # accidentally building this module with a later version of the
-        # underlying db library.  May BSD-ish Unixes incorporate db 1.85
-        # symbols into libc and place the include file in /usr/include.
-        #
-        # If the better bsddb library can be built (db_incs is defined)
-        # we do not build this one.  Otherwise this build will pick up
-        # the more recent berkeleydb's db.h file first in the include path
-        # when attempting to compile and it will fail.
-        f = "/usr/include/db.h"
-        if os.path.exists(f) and not db_incs:
-            data = open(f).read()
-            m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
-            if m is not None:
-                # bingo - old version used hash file format version 2
-                ### XXX this should be fixed to not be platform-dependent
-                ### but I don't have direct access to an osf1 platform and
-                ### seemed to be muffing the search somehow
-                libraries = platform == "osf1" and ['db'] or None
-                if libraries is not None:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c'],
-                                          libraries=libraries))
-                else:
-                    exts.append(Extension('bsddb185', ['bsddbmodule.c']))
-            else:
-                missing.append('bsddb185')
-        else:
-            missing.append('bsddb185')
-
-        # The standard Unix dbm module:
-        if platform not in ['cygwin']:
-            if find_file("ndbm.h", inc_dirs, []) is not None:
-                # Some systems have -lndbm, others don't
-                if self.compiler.find_library_file(lib_dirs, 'ndbm'):
-                    ndbm_libs = ['ndbm']
-                else:
-                    ndbm_libs = []
-                exts.append( Extension('dbm', ['dbmmodule.c'],
-                                       define_macros=[('HAVE_NDBM_H',None)],
-                                       libraries = ndbm_libs ) )
-            elif self.compiler.find_library_file(lib_dirs, 'gdbm'):
-                gdbm_libs = ['gdbm']
-                if self.compiler.find_library_file(lib_dirs, 'gdbm_compat'):
-                    gdbm_libs.append('gdbm_compat')
-                if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
-                    exts.append( Extension(
-                        'dbm', ['dbmmodule.c'],
-                        define_macros=[('HAVE_GDBM_NDBM_H',None)],
-                        libraries = gdbm_libs ) )
-                elif find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
-                    exts.append( Extension(
-                        'dbm', ['dbmmodule.c'],
-                        define_macros=[('HAVE_GDBM_DASH_NDBM_H',None)],
-                        libraries = gdbm_libs ) )
-                else:
-                    missing.append('dbm')
-            elif db_incs is not None:
-                exts.append( Extension('dbm', ['dbmmodule.c'],
-                                       library_dirs=dblib_dir,
-                                       runtime_library_dirs=dblib_dir,
-                                       include_dirs=db_incs,
-                                       define_macros=[('HAVE_BERKDB_H',None),
-                                                      ('DB_DBM_HSEARCH',None)],
-                                       libraries=dblibs))
-            else:
-                missing.append('dbm')
-
-        # Anthony Baxter's gdbm module.  GNU dbm(3) will require -lgdbm:
-        if (self.compiler.find_library_file(lib_dirs, 'gdbm')):
-            exts.append( Extension('gdbm', ['gdbmmodule.c'],
-                                   libraries = ['gdbm'] ) )
-        else:
-            missing.append('gdbm')
-
-        # Unix-only modules
-        if platform not in ['mac', 'win32']:
-            # Steen Lumholt's termios module
-            exts.append( Extension('termios', ['termios.c']) )
-            # Jeremy Hylton's rlimit interface
-            if platform not in ['atheos']:
-                exts.append( Extension('resource', ['resource.c']) )
-            else:
-                missing.append('resource')
-
-            # Sun yellow pages. Some systems have the functions in libc.
-            if (platform not in ['cygwin', 'atheos', 'qnx6'] and
-                find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None):
-                if (self.compiler.find_library_file(lib_dirs, 'nsl')):
-                    libs = ['nsl']
-                else:
-                    libs = []
-                exts.append( Extension('nis', ['nismodule.c'],
-                                       libraries = libs) )
-            else:
-                missing.append('nis')
-        else:
-            missing.extend(['nis', 'resource', 'termios'])
-
-        # Curses support, requiring the System V version of curses, often
-        # provided by the ncurses library.
-        panel_library = 'panel'
-        if (self.compiler.find_library_file(lib_dirs, 'ncursesw')):
-            curses_libs = ['ncursesw']
-            # Bug 1464056: If _curses.so links with ncursesw,
-            # _curses_panel.so must link with panelw.
-            panel_library = 'panelw'
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif (self.compiler.find_library_file(lib_dirs, 'ncurses')):
-            curses_libs = ['ncurses']
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        elif (self.compiler.find_library_file(lib_dirs, 'curses')
-              and platform != 'darwin'):
-                # OSX has an old Berkeley curses, not good enough for
-                # the _curses module.
-            if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
-                curses_libs = ['curses', 'terminfo']
-            elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
-                curses_libs = ['curses', 'termcap']
-            else:
-                curses_libs = ['curses']
-
-            exts.append( Extension('_curses', ['_cursesmodule.c'],
-                                   libraries = curses_libs) )
-        else:
-            missing.append('_curses')
-
-        # If the curses module is enabled, check for the panel module
-        if (module_enabled(exts, '_curses') and
-            self.compiler.find_library_file(lib_dirs, panel_library)):
-            exts.append( Extension('_curses_panel', ['_curses_panel.c'],
-                                   libraries = [panel_library] + curses_libs) )
-        else:
-            missing.append('_curses_panel')
-
-        # Andrew Kuchling's zlib module.  Note that some versions of zlib
-        # 1.1.3 have security problems.  See CERT Advisory CA-2002-07:
-        # http://www.cert.org/advisories/CA-2002-07.html
-        #
-        # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
-        # patch its zlib 1.1.3 package instead of upgrading to 1.1.4.  For
-        # now, we still accept 1.1.3, because we think it's difficult to
-        # exploit this in Python, and we'd rather make it RedHat's problem
-        # than our problem <wink>.
-        #
-        # You can upgrade zlib to version 1.1.4 yourself by going to
-        # http://www.gzip.org/zlib/
-        zlib_inc = find_file('zlib.h', [], inc_dirs)
-        have_zlib = False
-        if zlib_inc is not None:
-            zlib_h = zlib_inc[0] + '/zlib.h'
-            version = '"0.0.0"'
-            version_req = '"1.1.3"'
-            fp = open(zlib_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    break
-                if line.startswith('#define ZLIB_VERSION'):
-                    version = line.split()[2]
-                    break
-            if version >= version_req:
-                if (self.compiler.find_library_file(lib_dirs, 'z')):
-                    if sys.platform == "darwin":
-                        zlib_extra_link_args = ('-Wl,-search_paths_first',)
-                    else:
-                        zlib_extra_link_args = ()
-                    exts.append( Extension('zlib', ['zlibmodule.c'],
-                                           libraries = ['z'],
-                                           extra_link_args = zlib_extra_link_args))
-                    have_zlib = True
-                else:
-                    missing.append('zlib')
-            else:
-                missing.append('zlib')
-        else:
-            missing.append('zlib')
-
-        # Helper module for various ascii-encoders.  Uses zlib for an optimized
-        # crc32 if we have it.  Otherwise binascii uses its own.
-        if have_zlib:
-            extra_compile_args = ['-DUSE_ZLIB_CRC32']
-            libraries = ['z']
-            extra_link_args = zlib_extra_link_args
-        else:
-            extra_compile_args = []
-            libraries = []
-            extra_link_args = []
-        exts.append( Extension('binascii', ['binascii.c'],
-                               extra_compile_args = extra_compile_args,
-                               libraries = libraries,
-                               extra_link_args = extra_link_args) )
-
-        # Gustavo Niemeyer's bz2 module.
-        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
-            if sys.platform == "darwin":
-                bz2_extra_link_args = ('-Wl,-search_paths_first',)
-            else:
-                bz2_extra_link_args = ()
-            exts.append( Extension('bz2', ['bz2module.c'],
-                                   libraries = ['bz2'],
-                                   extra_link_args = bz2_extra_link_args) )
-        else:
-            missing.append('bz2')
-
-        # Interface to the Expat XML parser
-        #
-        # Expat was written by James Clark and is now maintained by a
-        # group of developers on SourceForge; see www.libexpat.org for
-        # more information.  The pyexpat module was written by Paul
-        # Prescod after a prototype by Jack Jansen.  The Expat source
-        # is included in Modules/expat/.  Usage of a system
-        # shared libexpat.so/expat.dll is not advised.
-        #
-        # More information on Expat can be found at www.libexpat.org.
-        #
-        expatinc = os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')
-        define_macros = [
-            ('HAVE_EXPAT_CONFIG_H', '1'),
-        ]
-
-        exts.append(Extension('pyexpat',
-                              define_macros = define_macros,
-                              include_dirs = [expatinc],
-                              sources = ['pyexpat.c',
-                                         'expat/xmlparse.c',
-                                         'expat/xmlrole.c',
-                                         'expat/xmltok.c',
-                                         ],
-                              ))
-
-        # Fredrik Lundh's cElementTree module.  Note that this also
-        # uses expat (via the CAPI hook in pyexpat).
-
-        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
-            define_macros.append(('USE_PYEXPAT_CAPI', None))
-            exts.append(Extension('_elementtree',
-                                  define_macros = define_macros,
-                                  include_dirs = [expatinc],
-                                  sources = ['_elementtree.c'],
-                                  ))
-        else:
-            missing.append('_elementtree')
-
-        # Hye-Shik Chang's CJKCodecs modules.
-        if have_unicode:
-            exts.append(Extension('_multibytecodec',
-                                  ['cjkcodecs/multibytecodec.c']))
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                exts.append(Extension('_codecs_%s' % loc,
-                                      ['cjkcodecs/_codecs_%s.c' % loc]))
-        else:
-            missing.append('_multibytecodec')
-            for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
-                missing.append('_codecs_%s' % loc)
-
-        # Dynamic loading module
-        if sys.maxint == 0x7fffffff:
-            # This requires sizeof(int) == sizeof(long) == sizeof(char*)
-            dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (platform not in ['atheos']):
-                exts.append( Extension('dl', ['dlmodule.c']) )
-            else:
-                missing.append('dl')
-        else:
-            missing.append('dl')
-
-        # Thomas Heller's _ctypes module
-        self.detect_ctypes(inc_dirs, lib_dirs)
-
-        # Richard Oudkerk's multiprocessing module
-        if platform == 'win32':             # Windows
-            macros = dict()
-            libraries = ['ws2_32']
-
-        elif platform == 'darwin':          # Mac OSX
-            macros = dict(
-                HAVE_SEM_OPEN=1,
-                HAVE_SEM_TIMEDWAIT=0,
-                HAVE_FD_TRANSFER=1,
-                HAVE_BROKEN_SEM_GETVALUE=1
-                )
-            libraries = []
-
-        elif platform == 'cygwin':          # Cygwin
-            macros = dict(
-                HAVE_SEM_OPEN=1,
-                HAVE_SEM_TIMEDWAIT=1,
-                HAVE_FD_TRANSFER=0,
-                HAVE_BROKEN_SEM_UNLINK=1
-                )
-            libraries = []
-
-        elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
-            # FreeBSD's P1003.1b semaphore support is very experimental
-            # and has many known problems. (as of June 2008)
-            macros = dict(                  # FreeBSD
-                HAVE_SEM_OPEN=0,
-                HAVE_SEM_TIMEDWAIT=0,
-                HAVE_FD_TRANSFER=1,
-                )
-            libraries = []
-
-        elif platform.startswith('openbsd'):
-            macros = dict(                  # OpenBSD
-                HAVE_SEM_OPEN=0,            # Not implemented
-                HAVE_SEM_TIMEDWAIT=0,
-                HAVE_FD_TRANSFER=1,
-                )
-            libraries = []
-
-        elif platform.startswith('netbsd'):
-            macros = dict(                  # at least NetBSD 5
-                HAVE_SEM_OPEN=1,
-                HAVE_SEM_TIMEDWAIT=0,
-                HAVE_FD_TRANSFER=1,
-                HAVE_BROKEN_SEM_GETVALUE=1
-                )
-            libraries = []
-
-        else:                                   # Linux and other unices
-            macros = dict(
-                HAVE_SEM_OPEN=1,
-                HAVE_SEM_TIMEDWAIT=1,
-                HAVE_FD_TRANSFER=1
-                )
-            libraries = ['rt']
-
-        if platform == 'win32':
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/semaphore.c',
-                                     '_multiprocessing/pipe_connection.c',
-                                     '_multiprocessing/socket_connection.c',
-                                     '_multiprocessing/win32_functions.c'
-                                   ]
-
-        else:
-            multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
-                                     '_multiprocessing/socket_connection.c'
-                                   ]
-
-            if macros.get('HAVE_SEM_OPEN', False):
-                multiprocessing_srcs.append('_multiprocessing/semaphore.c')
-
-        if sysconfig.get_config_var('WITH_THREAD'):
-            exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
-                                    define_macros=macros.items(),
-                                    include_dirs=["Modules/_multiprocessing"]))
-        else:
-            missing.append('_multiprocessing')
-
-        # End multiprocessing
-
-
-        # Platform-specific libraries
-        if platform == 'linux2':
-            # Linux-specific modules
-            exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
-        else:
-            missing.append('linuxaudiodev')
-
-        if platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
-                        'freebsd7', 'freebsd8'):
-            exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
-        else:
-            missing.append('ossaudiodev')
-
-        if platform == 'sunos5':
-            # SunOS specific modules
-            exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
-        else:
-            missing.append('sunaudiodev')
-
-        if platform == 'darwin':
-            # _scproxy
-            exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")],
-                extra_link_args= [
-                    '-framework', 'SystemConfiguration',
-                    '-framework', 'CoreFoundation'
-                ]))
-
-
-        if platform == 'darwin' and ("--disable-toolbox-glue" not in
-                sysconfig.get_config_var("CONFIG_ARGS")):
-
-            if int(os.uname()[2].split('.')[0]) >= 8:
-                # We're on Mac OS X 10.4 or later, the compiler should
-                # support '-Wno-deprecated-declarations'. This will
-                # surpress deprecation warnings for the Carbon extensions,
-                # these extensions wrap the Carbon APIs and even those
-                # parts that are deprecated.
-                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
-            else:
-                carbon_extra_compile_args = []
-
-            # Mac OS X specific modules.
-            def macSrcExists(name1, name2=''):
-                if not name1:
-                    return None
-                names = (name1,)
-                if name2:
-                    names = (name1, name2)
-                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
-                return os.path.exists(path)
-
-            def addMacExtension(name, kwds, extra_srcs=[]):
-                dirname = ''
-                if name[0] == '_':
-                    dirname = name[1:].lower()
-                cname = name + '.c'
-                cmodulename = name + 'module.c'
-                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
-                if macSrcExists(cname):
-                    srcs = [cname]
-                elif macSrcExists(cmodulename):
-                    srcs = [cmodulename]
-                elif macSrcExists(dirname, cname):
-                    # XXX(nnorwitz): If all the names ended with module, we
-                    # wouldn't need this condition.  ibcarbon is the only one.
-                    srcs = [os.path.join(dirname, cname)]
-                elif macSrcExists(dirname, cmodulename):
-                    srcs = [os.path.join(dirname, cmodulename)]
-                else:
-                    raise RuntimeError("%s not found" % name)
-
-                # Here's the whole point:  add the extension with sources
-                exts.append(Extension(name, srcs + extra_srcs, **kwds))
-
-            # Core Foundation
-            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                         'extra_link_args': ['-framework', 'CoreFoundation'],
-                        }
-            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
-            addMacExtension('autoGIL', core_kwds)
-
-
-
-            # Carbon
-            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                           'extra_link_args': ['-framework', 'Carbon'],
-                          }
-            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
-                           'OSATerminology', 'icglue',
-                           # All these are in subdirs
-                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
-                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
-                           '_Help', '_Icn', '_IBCarbon', '_List',
-                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
-                           '_Scrap', '_Snd', '_TE',
-                          ]
-            for name in CARBON_EXTS:
-                addMacExtension(name, carbon_kwds)
-
-            # Workaround for a bug in the version of gcc shipped with Xcode 3.
-            # The _Win extension should build just like the other Carbon extensions, but
-            # this actually results in a hard crash of the linker.
-            #
-            if '-arch ppc64' in cflags and '-arch ppc' in cflags:
-                win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
-                               'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
-                           }
-                addMacExtension('_Win', win_kwds)
-            else:
-                addMacExtension('_Win', carbon_kwds)
-
-
-            # Application Services & QuickTime
-            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
-                        'extra_link_args': ['-framework','ApplicationServices'],
-                       }
-            addMacExtension('_Launch', app_kwds)
-            addMacExtension('_CG', app_kwds)
-
-            exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                        extra_compile_args=carbon_extra_compile_args,
-                        extra_link_args=['-framework', 'QuickTime',
-                                     '-framework', 'Carbon']) )
-
-
-        self.extensions.extend(exts)
-
-        # Call the method for detecting whether _tkinter can be compiled
-        self.detect_tkinter(inc_dirs, lib_dirs)
-
-        if '_tkinter' not in [e.name for e in self.extensions]:
-            missing.append('_tkinter')
-
-        return missing
-
-    def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
-        # The _tkinter module, using frameworks. Since frameworks are quite
-        # different the UNIX search logic is not sharable.
-        from os.path import join, exists
-        framework_dirs = [
-            '/Library/Frameworks',
-            '/System/Library/Frameworks/',
-            join(os.getenv('HOME'), '/Library/Frameworks')
-        ]
-
-        # Find the directory that contains the Tcl.framework and Tk.framework
-        # bundles.
-        # XXX distutils should support -F!
-        for F in framework_dirs:
-            # both Tcl.framework and Tk.framework should be present
-            for fw in 'Tcl', 'Tk':
-                if not exists(join(F, fw + '.framework')):
-                    break
-            else:
-                # ok, F is now directory with both frameworks. Continure
-                # building
-                break
-        else:
-            # Tk and Tcl frameworks not found. Normal "unix" tkinter search
-            # will now resume.
-            return 0
-
-        # For 8.4a2, we must add -I options that point inside the Tcl and Tk
-        # frameworks. In later release we should hopefully be able to pass
-        # the -F option to gcc, which specifies a framework lookup path.
-        #
-        include_dirs = [
-            join(F, fw + '.framework', H)
-            for fw in 'Tcl', 'Tk'
-            for H in 'Headers', 'Versions/Current/PrivateHeaders'
-        ]
-
-        # For 8.4a2, the X11 headers are not included. Rather than include a
-        # complicated search, this is a hard-coded path. It could bail out
-        # if X11 libs are not found...
-        include_dirs.append('/usr/X11R6/include')
-        frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
-
-        # All existing framework builds of Tcl/Tk don't support 64-bit
-        # architectures.
-        cflags = sysconfig.get_config_vars('CFLAGS')[0]
-        archs = re.findall('-arch\s+(\w+)', cflags)
-        fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,))
-        detected_archs = []
-        for ln in fp:
-            a = ln.split()[-1]
-            if a in archs:
-                detected_archs.append(ln.split()[-1])
-        fp.close()
-
-        for a in detected_archs:
-            frameworks.append('-arch')
-            frameworks.append(a)
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)],
-                        include_dirs = include_dirs,
-                        libraries = [],
-                        extra_compile_args = frameworks[2:],
-                        extra_link_args = frameworks,
-                        )
-        self.extensions.append(ext)
-        return 1
-
-
-    def detect_tkinter(self, inc_dirs, lib_dirs):
-        # The _tkinter module.
-
-        # Rather than complicate the code below, detecting and building
-        # AquaTk is a separate method. Only one Tkinter will be built on
-        # Darwin - either AquaTk, if it is found, or X11 based Tk.
-        platform = self.get_platform()
-        if (platform == 'darwin' and
-            self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
-            return
-
-        # Assume we haven't found any of the libraries or include files
-        # The versions with dots are used on Unix, and the versions without
-        # dots on Windows, for detection by cygwin.
-        tcllib = tklib = tcl_includes = tk_includes = None
-        for version in ['8.4', '84']:
-            tklib = self.compiler.find_library_file(lib_dirs, 'tk' + version)
-            tcllib = self.compiler.find_library_file(lib_dirs, 'tcl' + version)
-            if tklib and tcllib:
-                # Exit the loop when we've found the Tcl/Tk libraries
-                break
-
-        # Now check for the header files
-        if tklib and tcllib:
-            # Check for the include files on Debian and {Free,Open}BSD, where
-            # they're put in /usr/include/{tcl,tk}X.Y
-            dotversion = version
-            if '.' not in dotversion and "bsd" in sys.platform.lower():
-                # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
-                # but the include subdirs are named like .../include/tcl8.3.
-                dotversion = dotversion[:-1] + '.' + dotversion[-1]
-            tcl_include_sub = []
-            tk_include_sub = []
-            for dir in inc_dirs:
-                tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
-                tk_include_sub += [dir + os.sep + "tk" + dotversion]
-            tk_include_sub += tcl_include_sub
-            tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
-            tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
-
-        if (tcllib is None or tklib is None or
-            tcl_includes is None or tk_includes is None):
-            self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
-            return
-
-        # OK... everything seems to be present for Tcl/Tk.
-
-        include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
-        for dir in tcl_includes + tk_includes:
-            if dir not in include_dirs:
-                include_dirs.append(dir)
-
-        # Check for various platform-specific directories
-        if platform == 'sunos5':
-            include_dirs.append('/usr/openwin/include')
-            added_lib_dirs.append('/usr/openwin/lib')
-        elif os.path.exists('/usr/X11R6/include'):
-            include_dirs.append('/usr/X11R6/include')
-            added_lib_dirs.append('/usr/X11R6/lib64')
-            added_lib_dirs.append('/usr/X11R6/lib')
-        elif os.path.exists('/usr/X11R5/include'):
-            include_dirs.append('/usr/X11R5/include')
-            added_lib_dirs.append('/usr/X11R5/lib')
-        else:
-            # Assume default location for X11
-            include_dirs.append('/usr/X11/include')
-            added_lib_dirs.append('/usr/X11/lib')
-
-        # If Cygwin, then verify that X is installed before proceeding
-        if platform == 'cygwin':
-            x11_inc = find_file('X11/Xlib.h', [], include_dirs)
-            if x11_inc is None:
-                return
-
-        # Check for BLT extension
-        if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                           'BLT8.0'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT8.0')
-        elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
-                                           'BLT'):
-            defs.append( ('WITH_BLT', 1) )
-            libs.append('BLT')
-
-        # Add the Tcl/Tk libraries
-        libs.append('tk'+ version)
-        libs.append('tcl'+ version)
-
-        if platform in ['aix3', 'aix4']:
-            libs.append('ld')
-
-        # Finally, link with the X11 libraries (not appropriate on cygwin)
-        if platform != "cygwin":
-            libs.append('X11')
-
-        ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
-                        define_macros=[('WITH_APPINIT', 1)] + defs,
-                        include_dirs = include_dirs,
-                        libraries = libs,
-                        library_dirs = added_lib_dirs,
-                        )
-        self.extensions.append(ext)
-
-##         # Uncomment these lines if you want to play with xxmodule.c
-##         ext = Extension('xx', ['xxmodule.c'])
-##         self.extensions.append(ext)
-
-        # XXX handle these, but how to detect?
-        # *** Uncomment and edit for PIL (TkImaging) extension only:
-        #       -DWITH_PIL -I../Extensions/Imaging/libImaging  tkImaging.c \
-        # *** Uncomment and edit for TOGL extension only:
-        #       -DWITH_TOGL togl.c \
-        # *** Uncomment these for TOGL extension only:
-        #       -lGL -lGLU -lXext -lXmu \
-
-    def configure_ctypes_darwin(self, ext):
-        # Darwin (OS X) uses preconfigured files, in
-        # the Modules/_ctypes/libffi_osx directory.
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                                  '_ctypes', 'libffi_osx'))
-        sources = [os.path.join(ffi_srcdir, p)
-                   for p in ['ffi.c',
-                             'x86/darwin64.S',
-                             'x86/x86-darwin.S',
-                             'x86/x86-ffi_darwin.c',
-                             'x86/x86-ffi64.c',
-                             'powerpc/ppc-darwin.S',
-                             'powerpc/ppc-darwin_closure.S',
-                             'powerpc/ppc-ffi_darwin.c',
-                             'powerpc/ppc64-darwin_closure.S',
-                             ]]
-
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
-
-        include_dirs = [os.path.join(ffi_srcdir, 'include'),
-                        os.path.join(ffi_srcdir, 'powerpc')]
-        ext.include_dirs.extend(include_dirs)
-        ext.sources.extend(sources)
-        return True
-
-    def configure_ctypes(self, ext):
-        if not self.use_system_libffi:
-            if sys.platform == 'darwin':
-                return self.configure_ctypes_darwin(ext)
-
-            (srcdir,) = sysconfig.get_config_vars('srcdir')
-            ffi_builddir = os.path.join(self.build_temp, 'libffi')
-            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                         '_ctypes', 'libffi'))
-            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
-
-            from distutils.dep_util import newer_group
-
-            config_sources = [os.path.join(ffi_srcdir, fname)
-                              for fname in os.listdir(ffi_srcdir)
-                              if os.path.isfile(os.path.join(ffi_srcdir, fname))]
-            if self.force or newer_group(config_sources,
-                                         ffi_configfile):
-                from distutils.dir_util import mkpath
-                mkpath(ffi_builddir)
-                config_args = []
-
-                # Pass empty CFLAGS because we'll just append the resulting
-                # CFLAGS to Python's; -g or -O2 is to be avoided.
-                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
-
-                res = os.system(cmd)
-                if res or not os.path.exists(ffi_configfile):
-                    print "Failed to configure _ctypes module"
-                    return False
-
-            fficonfig = {}
-            exec open(ffi_configfile) in fficonfig
-
-            # Add .S (preprocessed assembly) to C compiler source extensions.
-            self.compiler.src_extensions.append('.S')
-
-            include_dirs = [os.path.join(ffi_builddir, 'include'),
-                            ffi_builddir,
-                            os.path.join(ffi_srcdir, 'src')]
-            extra_compile_args = fficonfig['ffi_cflags'].split()
-
-            ext.sources.extend(os.path.join(ffi_srcdir, f) for f in
-                               fficonfig['ffi_sources'])
-            ext.include_dirs.extend(include_dirs)
-            ext.extra_compile_args.extend(extra_compile_args)
-        return True
-
-    def detect_ctypes(self, inc_dirs, lib_dirs):
-        self.use_system_libffi = False
-        include_dirs = []
-        extra_compile_args = []
-        extra_link_args = []
-        sources = ['_ctypes/_ctypes.c',
-                   '_ctypes/callbacks.c',
-                   '_ctypes/callproc.c',
-                   '_ctypes/stgdict.c',
-                   '_ctypes/cfield.c',
-                   '_ctypes/malloc_closure.c']
-        depends = ['_ctypes/ctypes.h']
-
-        if sys.platform == 'darwin':
-            sources.append('_ctypes/darwin/dlfcn_simple.c')
-            extra_compile_args.append('-DMACOSX')
-            include_dirs.append('_ctypes/darwin')
-# XXX Is this still needed?
-##            extra_link_args.extend(['-read_only_relocs', 'warning'])
-
-        elif sys.platform == 'sunos5':
-            # XXX This shouldn't be necessary; it appears that some
-            # of the assembler code is non-PIC (i.e. it has relocations
-            # when it shouldn't. The proper fix would be to rewrite
-            # the assembler code to be PIC.
-            # This only works with GCC; the Sun compiler likely refuses
-            # this option. If you want to compile ctypes with the Sun
-            # compiler, please research a proper solution, instead of
-            # finding some -z option for the Sun compiler.
-            extra_link_args.append('-mimpure-text')
-
-        elif sys.platform.startswith('hp-ux'):
-            extra_link_args.append('-fPIC')
-
-        ext = Extension('_ctypes',
-                        include_dirs=include_dirs,
-                        extra_compile_args=extra_compile_args,
-                        extra_link_args=extra_link_args,
-                        libraries=[],
-                        sources=sources,
-                        depends=depends)
-        ext_test = Extension('_ctypes_test',
-                             sources=['_ctypes/_ctypes_test.c'])
-        self.extensions.extend([ext, ext_test])
-
-        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
-            return
-
-        if sys.platform == 'darwin':
-            # OS X 10.5 comes with libffi.dylib; the include files are
-            # in /usr/include/ffi
-            inc_dirs.append('/usr/include/ffi')
-
-        ffi_inc = find_file('ffi.h', [], inc_dirs)
-        if ffi_inc is not None:
-            ffi_h = ffi_inc[0] + '/ffi.h'
-            fp = open(ffi_h)
-            while 1:
-                line = fp.readline()
-                if not line:
-                    ffi_inc = None
-                    break
-                if line.startswith('#define LIBFFI_H'):
-                    break
-        ffi_lib = None
-        if ffi_inc is not None:
-            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
-                if (self.compiler.find_library_file(lib_dirs, lib_name)):
-                    ffi_lib = lib_name
-                    break
-
-        if ffi_inc and ffi_lib:
-            ext.include_dirs.extend(ffi_inc)
-            ext.libraries.append(ffi_lib)
-            self.use_system_libffi = True
-
-
-class PyBuildInstall(install):
-    # Suppress the warning about installation into the lib_dynload
-    # directory, which is not in sys.path when running Python during
-    # installation:
-    def initialize_options (self):
-        install.initialize_options(self)
-        self.warn_dir=0
-
-class PyBuildInstallLib(install_lib):
-    # Do exactly what install_lib does but make sure correct access modes get
-    # set on installed directories and files. All installed files with get
-    # mode 644 unless they are a shared library in which case they will get
-    # mode 755. All installed directories will get mode 755.
-
-    so_ext = sysconfig.get_config_var("SO")
-
-    def install(self):
-        outfiles = install_lib.install(self)
-        self.set_file_modes(outfiles, 0644, 0755)
-        self.set_dir_modes(self.install_dir, 0755)
-        return outfiles
-
-    def set_file_modes(self, files, defaultMode, sharedLibMode):
-        if not self.is_chmod_supported(): return
-        if not files: return
-
-        for filename in files:
-            if os.path.islink(filename): continue
-            mode = defaultMode
-            if filename.endswith(self.so_ext): mode = sharedLibMode
-            log.info("changing mode of %s to %o", filename, mode)
-            if not self.dry_run: os.chmod(filename, mode)
-
-    def set_dir_modes(self, dirname, mode):
-        if not self.is_chmod_supported(): return
-        os.path.walk(dirname, self.set_dir_modes_visitor, mode)
-
-    def set_dir_modes_visitor(self, mode, dirname, names):
-        if os.path.islink(dirname): return
-        log.info("changing mode of %s to %o", dirname, mode)
-        if not self.dry_run: os.chmod(dirname, mode)
-
-    def is_chmod_supported(self):
-        return hasattr(os, 'chmod')
-
-SUMMARY = """
-Python is an interpreted, interactive, object-oriented programming
-language. It is often compared to Tcl, Perl, Scheme or Java.
-
-Python combines remarkable power with very clear syntax. It has
-modules, classes, exceptions, very high level dynamic data types, and
-dynamic typing. There are interfaces to many system calls and
-libraries, as well as to various windowing systems (X11, Motif, Tk,
-Mac, MFC). New built-in modules are easily written in C or C++. Python
-is also usable as an extension language for applications that need a
-programmable interface.
-
-The Python implementation is portable: it runs on many brands of UNIX,
-on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
-listed here, it may still be supported, if there's a C compiler for
-it. Ask around on comp.lang.python -- or just try compiling Python
-yourself.
-"""
-
-CLASSIFIERS = """
-Development Status :: 6 - Mature
-License :: OSI Approved :: Python Software Foundation License
-Natural Language :: English
-Programming Language :: C
-Programming Language :: Python
-Topic :: Software Development
-"""
-
-def main():
-    # turn off warnings when deprecated modules are imported
-    import warnings
-    warnings.filterwarnings("ignore",category=DeprecationWarning)
-    setup(# PyPI Metadata (PEP 301)
-          name = "Python",
-          version = sys.version.split()[0],
-          url = "http://www.python.org/%s" % sys.version[:3],
-          maintainer = "Guido van Rossum and the Python community",
-          maintainer_email = "python-dev@python.org",
-          description = "A high-level object-oriented programming language",
-          long_description = SUMMARY.strip(),
-          license = "PSF license",
-          classifiers = filter(None, CLASSIFIERS.split("\n")),
-          platforms = ["Many"],
-
-          # Build info
-          cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
-                      'install_lib':PyBuildInstallLib},
-          # The struct module is defined here, because build_ext won't be
-          # called unless there's at least one extension module defined.
-          ext_modules=[Extension('_struct', ['_struct.c'])],
-
-          # Scripts to install
-          scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
-                     'Tools/scripts/2to3',
-                     'Lib/smtpd.py']
-        )
-
-# --install-platlib
-if __name__ == '__main__':
-    main()
diff --git a/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in b/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in
deleted file mode 100644
index 96a849a05..000000000
--- a/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in
+++ /dev/null
@@ -1,20 +0,0 @@
-message("Building PyZMQ:\n@pyzmq_binary@")
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py configure --zmq=@cdat_EXTERNALS@
-  WORKING_DIRECTORY "@pyzmq_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE pyzmq_OUT
-  OUTPUT_VARIABLE pyzmq_ERR)
-
-if(NOT ${res} EQUAL 0)
-    message("PyZMQ errors detected: \n${pyzmq_OUT}\n${pyzmq_ERR}")
-  message(FATAL_ERROR "Error in config of pyzmq")
-endif()
-message("pyzmq build worked.")
diff --git a/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in b/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in
deleted file mode 100644
index da21d89c1..000000000
--- a/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in
+++ /dev/null
@@ -1,21 +0,0 @@
-message("Installing pyzmq:\n@pyzmq_PREFIX_ARGS@")
-set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{VS_UNICODE_OUTPUT} "")
-
-if(APPLE)
-    set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@")
-endif()
-
-execute_process(
-  COMMAND env  @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install --zmq=@cdat_EXTERNALS@ --prefix=@PYTHON_SITE_PACKAGES_PREFIX@
-  WORKING_DIRECTORY "@pyzmq_binary@"
-  RESULT_VARIABLE res
-  OUTPUT_VARIABLE pyzmq_OUT
-  OUTPUT_VARIABLE pyzmq_ERR
-)
-
-if(NOT ${res} EQUAL 0)
-  message("pyzmq Errors detected: \n${pyzmq_OUT}\n${pyzmq_ERR}")
-  message(FATAL_ERROR "Error in config of pyzmq")
-endif()
-message("pyzmq install succeeded.")
diff --git a/CMake/cdat_modules_extra/reset_runtime.csh.in b/CMake/cdat_modules_extra/reset_runtime.csh.in
deleted file mode 100644
index c4ed68faa..000000000
--- a/CMake/cdat_modules_extra/reset_runtime.csh.in
+++ /dev/null
@@ -1,24 +0,0 @@
-# First of all reset variables
-foreach v ( PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH prompt )
-    set tmp="UVCDAT_ORIGINAL_"${v}
-    if ( `eval echo \$\?$tmp` ) then
-        echo ${v}" env variable reset"
-        set vv=`eval echo \$$tmp`
-        setenv ${v} ${vv}
-        unsetenv ${tmp}
-    endif
-end
-
-# Now variables for which we may have changed value or created
-foreach v ( OPAL_PREFIX LIBOVERLAY_SCROLLBAR )
-    set tmp="UVCDAT_ORIGINAL_"${v}
-    if ( `eval echo \$\?$tmp` ) then
-        echo ${v}" env variable reset"
-        set vv=`eval echo \$$tmp`
-        setenv ${v} ${vv}
-    else
-        unsetenv ${tmp}
-    endif
-end
-unsetenv UVCDAT_PROMPT_STRING
-unsetenv UVCDAT_SETUP_PATH
diff --git a/CMake/cdat_modules_extra/reset_runtime.sh.in b/CMake/cdat_modules_extra/reset_runtime.sh.in
deleted file mode 100644
index 37f957727..000000000
--- a/CMake/cdat_modules_extra/reset_runtime.sh.in
+++ /dev/null
@@ -1,16 +0,0 @@
-# First of all reset variables
-for v in PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH PS1 OPAL_PREFIX LIBOVERLAY_SCROLLBAR ; do
-   tmp="UVCDAT_ORIGINAL_"${v}
-   if [ -n "${!tmp}" ] ; then
-        echo ${v}" env variable reset"
-        if [ "${!tmp}" != " " ] ; then
-            export ${v}=${!tmp}
-        else
-            unset ${v}
-        fi
-        unset ${tmp}
-    fi
-done
-unset UVCDAT_PROMPT_STRING
-unset UVCDAT_SETUP_PATH
-return 0
diff --git a/CMake/cdat_modules_extra/runpytest.in b/CMake/cdat_modules_extra/runpytest.in
deleted file mode 100755
index 42fe55e68..000000000
--- a/CMake/cdat_modules_extra/runpytest.in
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-# glue script to playback a recorded uvcdat vistrail and compare the result
-# with a known good baseline image.
-# takes three arguments: the name of the vistrail.vt:tagname to playback
-# a set of aliases for that trail (to replace filenames for example)
-# the filename of the image to compare against
-
-# setup uvcdat run time environment
-. @CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh
-
-# play back the requested vistrail and make an image
-"@PYTHON_EXECUTABLE@" \
-  @CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py \
- -R \
- -S $1\
- -b $2\
- -a $3\
- -e @CMAKE_BINARY_DIR@/Testing/Temporary
-
-# compare that image with the baseline(s) for it
-"@PYTHON_EXECUTABLE@" \
- @cdat_SOURCE_DIR@/testing/checkimage.py \
- @CMAKE_BINARY_DIR@/Testing/Temporary/$4 \
- $5/$4 \
- $6
diff --git a/CMake/cdat_modules_extra/seawater_build_step.cmake.in b/CMake/cdat_modules_extra/seawater_build_step.cmake.in
deleted file mode 100644
index 7118a8eb2..000000000
--- a/CMake/cdat_modules_extra/seawater_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@seawater_source_dir@"
-)
diff --git a/CMake/cdat_modules_extra/setup_runtime.csh.in b/CMake/cdat_modules_extra/setup_runtime.csh.in
deleted file mode 100755
index 8a7f1c83b..000000000
--- a/CMake/cdat_modules_extra/setup_runtime.csh.in
+++ /dev/null
@@ -1,117 +0,0 @@
-# Main install prefix set by user or post install script:
-# UVCDAT_INSTALL_PREFIX
-
-# First reset any existing UVCDAT env
-. @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.csh
-
-# Now store existing env var that we will be tweaking
-foreach v ( PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH OPAL_PREFIX LIBOVERLAY_SCROLLBAR )
-    if ( `eval echo \$\?$v` ) then
-        set vv=`eval echo \$$v`
-        setenv UVCDAT_ORIGINAL_${v} ${vv}
-    else
-        setenv UVCDAT_ORIGINAL_${v} " "
-    endif
-end
-
-setenv UVCDAT_PROMPT_STRING @UVCDAT_PROMPT_STRING@
-if ( $?UVCDAT_PROMPT_BEGINNING ) then
-  setenv UVCDAT_ORIGINAL_prompt ${prompt}
-  set prompt = "[@UVCDAT_PROMPT_STRING@]${prompt}"
-else if ( $?UVCDAT_PROMPT_END ) then
-  setenv UVCDAT_ORIGINAL_prompt ${prompt}
-  set prompt = "${prompt}[@UVCDAT_PROMPT_STRING@]"
-endif
-
-# If unset, use the value configured by cmake by default.
-
-# Everything beyond this point will be determined relatively
-# from this path.
-if ( $?UVCDAT_INSTALL_PREFIX ) then
-  set install_prefix=${UVCDAT_INSTALL_PREFIX}
-else
-  set install_prefix=@CMAKE_INSTALL_PREFIX@
-endif
-
-# Try to prevent the user from sourcing twice,
-# which can lead to errors.
-if ( $?UVCDAT_SETUP_PATH ) then
-  if ( ${UVCDAT_SETUP_PATH} == ${install_prefix} ) then
-    echo 'Nothing to do since UVCDAT is already setup at '${UVCDAT_SETUP_PATH}
-    exit 0
-  else
-    echo 'ERROR: UVCDAT setup was previously sourced at '${UVCDAT_SETUP_PATH}
-    echo 'ERROR: There is no need to run setup_runtime manually anymore.'
-    echo 'ERROR: Open a new shell in order to use a different install location.'
-    echo 'ERROR: Or execute source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh.'
-    exit 1
-  endif
-endif
-
-# Check that the install prefix exists, otherwise stop.
-if ( ! -d ${install_prefix} ) then
-  echo 'ERROR: '${install_prefix}' is not a directory.'
-  exit 1
-endif
-
-if ( ! $?LD_LIBRARY_PATH ) then
-  setenv LD_LIBRARY_PATH ''
-endif
-
-if ( ! $?PYTHONPATH ) then
-  setenv PYTHONPATH ''
-endif
-
-if ( ! $?PATH ) then
-  setenv PATH ''
-endif
-
-if ( '@QT_LIB_DIR@' != '' ) then
-  if ( -d @QT_LIB_DIR@ ) then
-    setenv LD_LIBRARY_PATH @QT_LIB_DIR@:${LD_LIBRARY_PATH}
-  endif
-endif
-
-foreach d ( @SETUP_LIBRARY_PATHS@ )
-  set f=${install_prefix}/${d}
-  if ( -d ${f} ) then
-    setenv LD_LIBRARY_PATH ${f}:${LD_LIBRARY_PATH}
-  endif
-end
-
-if ( `uname` == 'Darwin' ) then
-  setenv LD_LIBRARY_PATH /usr/X11R6/lib:/usr/lib:${LD_LIBRARY_PATH}
-  setenv DYLD_FALLBACK_LIBRARY_PATH ${LD_LIBRARY_PATH}
-endif
-
-foreach d ( @SETUP_PYTHON_PATHS@ )
-  set f=${install_prefix}/${d}
-  if ( -d ${f} ) then
-    setenv PYTHONPATH ${f}:${PYTHONPATH}
-  endif
-end
-
-foreach d ( @SETUP_EXECUTABLE_PATHS@ )
-  set f=${install_prefix}/${d}
-  if ( -d ${f} ) then
-    setenv PATH ${f}:${PATH}
-  endif
-end
-
-if ( -d ${install_prefix}/Externals/lib/R ) then
-  setenv R_HOME ${install_prefix}/Externals/lib/R
-endif
-
-setenv GDAL_DATA ${install_prefix}/Externals/share/gdal
-setenv OPAL_PREFIX ${install_prefix}/Externals
-setenv LIBOVERLAY_SCROLLBAR 0
-
-setenv UVCDAT_SETUP_PATH ${install_prefix}
-
-unset install_prefix
-
-echo 'Successfully updated your environment to use UVCDAT'
-echo '(changes are valid for this session/terminal only)'
-echo 'Version: '${UVCDAT_PROMPT_STRING}
-echo 'Location: '${UVCDAT_SETUP_PATH}
-echo 'Reset these changes by running: source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.csh'
diff --git a/CMake/cdat_modules_extra/setup_runtime.sh.in b/CMake/cdat_modules_extra/setup_runtime.sh.in
deleted file mode 100755
index 0476b092b..000000000
--- a/CMake/cdat_modules_extra/setup_runtime.sh.in
+++ /dev/null
@@ -1,111 +0,0 @@
-# Everything beyond this point will be determined relatively
-# from this path.
-install_prefix="@CMAKE_INSTALL_PREFIX@"
-# Reset previous uvcdat env messing up
-. @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh
-
-# Saves what we will mess with
-for v in PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH OPAL_PREFIX LIBOVERLAY_SCROLLBAR ; do
-   tmp="${v}"
-    if [ -n "${!tmp}" ] ; then
-        export UVCDAT_ORIGINAL_${v}=${!v}
-    else
-        export UVCDAT_ORIGINAL_${v}=" "
-    fi
-done
-
-function cleanup {
-  unset cleanup install_prefix library_paths python_paths executable_paths
-}
-
-# Try to prevent the user from sourcing twice,
-# which can lead to errors.
-if [ -n "${UVCDAT_SETUP_PATH}" ] ; then
-  if [ "${UVCDAT_SETUP_PATH}" = "${install_prefix}" ] ; then
-    echo "Nothing to do since UVCDAT is already setup at: ${UVCDAT_SETUP_PATH}" 1>&2
-    cleanup
-    return 0
-  else
-    echo "ERROR: UVCDAT setup was previously sourced at: ${UVCDAT_SETUP_PATH}" 1>&2
-    echo "ERROR: There is no need to run setup_runtime manually anymore." 1>&2
-    echo "ERROR: Open a new shell in order to use a different install location." 1>&2
-    echo "ERROR: Or execute source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh." 1>&2
-    cleanup
-    return 1
-  fi
-fi
-
-# Check that the install prefix exists, otherwise stop.
-if [ ! -d "${install_prefix}" ] ; then
-  echo "ERROR: ${install_prefix} is not a directory." 1>&2
-  cleanup
-  return 1
-fi
-
-# cmake set variables
-library_paths=( @SETUP_LIBRARY_PATHS@ )
-python_paths=( @SETUP_PYTHON_PATHS@ )
-executable_paths=( @SETUP_EXECUTABLE_PATHS@ )
-
-export UVCDAT_PROMPT_STRING=@UVCDAT_PROMPT_STRING@
-if [ "$UVCDAT_ENABLE_PROMPT_BEGINNING" ] ; then
-    export UVCDAT_ORIGINAL_PS1=${PS1}" "
-    export PS1="[@UVCDAT_PROMPT_STRING@]$PS1"
-
-elif [ "$UVCDAT_ENABLE_PROMPT_END" ] ; then
-    export UVCDAT_ORIGINAL_PS1=${PS1}" "
-    export PS1="$PS1[@UVCDAT_PROMPT_STRING@]"
-fi
-
-if [ -d '@QT_LIB_DIR@' ] ; then
-  LD_LIBRARY_PATH='@QT_LIB_DIR@:'"${LD_LIBRARY_PATH}"
-fi
-
-for d in "${library_paths[@]}" ; do
-  f="${install_prefix}/${d}"
-  if [ -d "${f}" ] ; then
-    LD_LIBRARY_PATH="${f}:${LD_LIBRARY_PATH}"
-  fi
-done
-
-if [ `uname` = 'Darwin' ] ; then
-  LD_LIBRARY_PATH="/usr/X11R6/lib:/usr/lib:${LD_LIBRARY_PATH}"
-  export DYLD_FALLBACK_LIBRARY_PATH="${LD_LIBRARY_PATH}"
-fi
-
-for d in "${python_paths[@]}" ; do
-  f="${install_prefix}/${d}"
-  if [ -d "${f}" ] ; then
-    PYTHONPATH="${f}:${PYTHONPATH}"
-  fi
-  unset f
-done
-
-for d in "${executable_paths[@]}" ; do
-  f="${install_prefix}/${d}"
-  if [ -d "${f}" ] ; then
-    PATH="${f}:${PATH}"
-  fi
-  unset f
-done
-
-if [ -d "${install_prefix}/Externals/lib/R" ] ; then
-  export R_HOME="${install_prefix}/Externals/lib/R"
-fi
-
-export GDAL_DATA="${install_prefix}/Externals/share/gdal"
-export OPAL_PREFIX="${install_prefix}/Externals"
-export LIBOVERLAY_SCROLLBAR=0
-
-export PATH
-export LD_LIBRARY_PATH
-export PYTHONPATH
-
-export UVCDAT_SETUP_PATH="${install_prefix}"
-cleanup
-echo "Successfully updated your environment to use UVCDAT" 1>&2
-echo "(changes are valid for this session/terminal only)" 1>&2
-echo "Version: ${UVCDAT_PROMPT_STRING}" 1>&2
-echo "Location: ${UVCDAT_SETUP_PATH}" 1>&2
-echo "Reset these changes by running: source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh" 1>&2
-return 0
diff --git a/CMake/cdat_modules_extra/setuptools_install_step.cmake.in b/CMake/cdat_modules_extra/setuptools_install_step.cmake.in
deleted file mode 100644
index 0e5f477c5..000000000
--- a/CMake/cdat_modules_extra/setuptools_install_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND env PYTHONPATH=@PYTHONPATH@ LD_LIBRARY_PATH=@LD_LIBRARY_PATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @setuptools_source@
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/cdat_modules_extra/setuptools_make_step.cmake.in b/CMake/cdat_modules_extra/setuptools_make_step.cmake.in
deleted file mode 100644
index 7ddaec6a0..000000000
--- a/CMake/cdat_modules_extra/setuptools_make_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND env PYTHONPATH=@PYTHONPATH@ LD_LIBRARY_PATH=@LD_LIBRARY_PATH@ "@PYTHON_EXECUTABLE@" setup.py build
-  WORKING_DIRECTORY @setuptools_source@
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/cdat_modules_extra/site.cfg.in b/CMake/cdat_modules_extra/site.cfg.in
deleted file mode 100644
index 1a250deb7..000000000
--- a/CMake/cdat_modules_extra/site.cfg.in
+++ /dev/null
@@ -1,4 +0,0 @@
-[DEFAULT]
-library_dirs = @EXTERNALS@/lib
-include_dirs = @EXTERNALS@/include
-
diff --git a/CMake/cdat_modules_extra/udunits2_apple_configure.in b/CMake/cdat_modules_extra/udunits2_apple_configure.in
deleted file mode 100755
index 5bb7d2828..000000000
--- a/CMake/cdat_modules_extra/udunits2_apple_configure.in
+++ /dev/null
@@ -1,18006 +0,0 @@
-#! /bin/sh
-# Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.68 for UDUNITS 2.2.17.
-#
-# Report bugs to <support-udunits@unidata.ucar.edu>.
-#
-#
-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001,
-# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software
-# Foundation, Inc.
-#
-#
-# This configure script is free software; the Free Software Foundation
-# gives unlimited permission to copy, distribute and modify it.
-## -------------------- ##
-## M4sh Initialization. ##
-## -------------------- ##
-
-# Be more Bourne compatible
-DUALCASE=1; export DUALCASE # for MKS sh
-if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
-  emulate sh
-  NULLCMD=:
-  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
-  # is contrary to our usage.  Disable this feature.
-  alias -g '${1+"$@"}'='"$@"'
-  setopt NO_GLOB_SUBST
-else
-  case `(set -o) 2>/dev/null` in #(
-  *posix*) :
-    set -o posix ;; #(
-  *) :
-     ;;
-esac
-fi
-
-
-as_nl='
-'
-export as_nl
-# Printing a long string crashes Solaris 7 /usr/bin/printf.
-as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
-# Prefer a ksh shell builtin over an external printf program on Solaris,
-# but without wasting forks for bash or zsh.
-if test -z "$BASH_VERSION$ZSH_VERSION" \
-    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
-  as_echo='print -r --'
-  as_echo_n='print -rn --'
-elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
-  as_echo='printf %s\n'
-  as_echo_n='printf %s'
-else
-  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
-    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
-    as_echo_n='/usr/ucb/echo -n'
-  else
-    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
-    as_echo_n_body='eval
-      arg=$1;
-      case $arg in #(
-      *"$as_nl"*)
-	expr "X$arg" : "X\\(.*\\)$as_nl";
-	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
-      esac;
-      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
-    '
-    export as_echo_n_body
-    as_echo_n='sh -c $as_echo_n_body as_echo'
-  fi
-  export as_echo_body
-  as_echo='sh -c $as_echo_body as_echo'
-fi
-
-# The user is always right.
-if test "${PATH_SEPARATOR+set}" != set; then
-  PATH_SEPARATOR=:
-  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
-    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
-      PATH_SEPARATOR=';'
-  }
-fi
-
-
-# IFS
-# We need space, tab and new line, in precisely that order.  Quoting is
-# there to prevent editors from complaining about space-tab.
-# (If _AS_PATH_WALK were called with IFS unset, it would disable word
-# splitting by setting IFS to empty value.)
-IFS=" ""	$as_nl"
-
-# Find who we are.  Look in the path if we contain no directory separator.
-as_myself=
-case $0 in #((
-  *[\\/]* ) as_myself=$0 ;;
-  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
-  done
-IFS=$as_save_IFS
-
-     ;;
-esac
-# We did not find ourselves, most probably we were run as `sh COMMAND'
-# in which case we are not to be found in the path.
-if test "x$as_myself" = x; then
-  as_myself=$0
-fi
-if test ! -f "$as_myself"; then
-  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
-  exit 1
-fi
-
-# Unset variables that we do not need and which cause bugs (e.g. in
-# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
-# suppresses any "Segmentation fault" message there.  '((' could
-# trigger a bug in pdksh 5.2.14.
-for as_var in BASH_ENV ENV MAIL MAILPATH
-do eval test x\${$as_var+set} = xset \
-  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
-done
-PS1='$ '
-PS2='> '
-PS4='+ '
-
-# NLS nuisances.
-LC_ALL=C
-export LC_ALL
-LANGUAGE=C
-export LANGUAGE
-
-# CDPATH.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-if test "x$CONFIG_SHELL" = x; then
-  as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then :
-  emulate sh
-  NULLCMD=:
-  # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which
-  # is contrary to our usage.  Disable this feature.
-  alias -g '\${1+\"\$@\"}'='\"\$@\"'
-  setopt NO_GLOB_SUBST
-else
-  case \`(set -o) 2>/dev/null\` in #(
-  *posix*) :
-    set -o posix ;; #(
-  *) :
-     ;;
-esac
-fi
-"
-  as_required="as_fn_return () { (exit \$1); }
-as_fn_success () { as_fn_return 0; }
-as_fn_failure () { as_fn_return 1; }
-as_fn_ret_success () { return 0; }
-as_fn_ret_failure () { return 1; }
-
-exitcode=0
-as_fn_success || { exitcode=1; echo as_fn_success failed.; }
-as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; }
-as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; }
-as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; }
-if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then :
-
-else
-  exitcode=1; echo positional parameters were not saved.
-fi
-test x\$exitcode = x0 || exit 1"
-  as_suggested="  as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO
-  as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO
-  eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" &&
-  test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1
-test \$(( 1 + 1 )) = 2 || exit 1
-
-  test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || (
-    ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-    ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO
-    ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO
-    PATH=/empty FPATH=/empty; export PATH FPATH
-    test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\
-      || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1"
-  if (eval "$as_required") 2>/dev/null; then :
-  as_have_required=yes
-else
-  as_have_required=no
-fi
-  if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then :
-
-else
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-as_found=false
-for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-  as_found=:
-  case $as_dir in #(
-	 /*)
-	   for as_base in sh bash ksh sh5; do
-	     # Try only shells that exist, to save several forks.
-	     as_shell=$as_dir/$as_base
-	     if { test -f "$as_shell" || test -f "$as_shell.exe"; } &&
-		    { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then :
-  CONFIG_SHELL=$as_shell as_have_required=yes
-		   if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then :
-  break 2
-fi
-fi
-	   done;;
-       esac
-  as_found=false
-done
-$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } &&
-	      { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then :
-  CONFIG_SHELL=$SHELL as_have_required=yes
-fi; }
-IFS=$as_save_IFS
-
-
-      if test "x$CONFIG_SHELL" != x; then :
-  # We cannot yet assume a decent shell, so we have to provide a
-	# neutralization value for shells without unset; and this also
-	# works around shells that cannot unset nonexistent variables.
-	# Preserve -v and -x to the replacement shell.
-	BASH_ENV=/dev/null
-	ENV=/dev/null
-	(unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV
-	export CONFIG_SHELL
-	case $- in # ((((
-	  *v*x* | *x*v* ) as_opts=-vx ;;
-	  *v* ) as_opts=-v ;;
-	  *x* ) as_opts=-x ;;
-	  * ) as_opts= ;;
-	esac
-	exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"}
-fi
-
-    if test x$as_have_required = xno; then :
-  $as_echo "$0: This script requires a shell more modern than all"
-  $as_echo "$0: the shells that I found on your system."
-  if test x${ZSH_VERSION+set} = xset ; then
-    $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should"
-    $as_echo "$0: be upgraded to zsh 4.3.4 or later."
-  else
-    $as_echo "$0: Please tell bug-autoconf@gnu.org and
-$0: support-udunits@unidata.ucar.edu about your system,
-$0: including any error possibly output before this
-$0: message. Then install a modern shell, or manually run
-$0: the script under such a shell if you do have one."
-  fi
-  exit 1
-fi
-fi
-fi
-SHELL=${CONFIG_SHELL-/bin/sh}
-export SHELL
-# Unset more variables known to interfere with behavior of common tools.
-CLICOLOR_FORCE= GREP_OPTIONS=
-unset CLICOLOR_FORCE GREP_OPTIONS
-
-## --------------------- ##
-## M4sh Shell Functions. ##
-## --------------------- ##
-# as_fn_unset VAR
-# ---------------
-# Portably unset VAR.
-as_fn_unset ()
-{
-  { eval $1=; unset $1;}
-}
-as_unset=as_fn_unset
-
-# as_fn_set_status STATUS
-# -----------------------
-# Set $? to STATUS, without forking.
-as_fn_set_status ()
-{
-  return $1
-} # as_fn_set_status
-
-# as_fn_exit STATUS
-# -----------------
-# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
-as_fn_exit ()
-{
-  set +e
-  as_fn_set_status $1
-  exit $1
-} # as_fn_exit
-
-# as_fn_mkdir_p
-# -------------
-# Create "$as_dir" as a directory, including parents if necessary.
-as_fn_mkdir_p ()
-{
-
-  case $as_dir in #(
-  -*) as_dir=./$as_dir;;
-  esac
-  test -d "$as_dir" || eval $as_mkdir_p || {
-    as_dirs=
-    while :; do
-      case $as_dir in #(
-      *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
-      *) as_qdir=$as_dir;;
-      esac
-      as_dirs="'$as_qdir' $as_dirs"
-      as_dir=`$as_dirname -- "$as_dir" ||
-$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$as_dir" : 'X\(//\)[^/]' \| \
-	 X"$as_dir" : 'X\(//\)$' \| \
-	 X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$as_dir" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-      test -d "$as_dir" && break
-    done
-    test -z "$as_dirs" || eval "mkdir $as_dirs"
-  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
-
-
-} # as_fn_mkdir_p
-# as_fn_append VAR VALUE
-# ----------------------
-# Append the text in VALUE to the end of the definition contained in VAR. Take
-# advantage of any shell optimizations that allow amortized linear growth over
-# repeated appends, instead of the typical quadratic growth present in naive
-# implementations.
-if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
-  eval 'as_fn_append ()
-  {
-    eval $1+=\$2
-  }'
-else
-  as_fn_append ()
-  {
-    eval $1=\$$1\$2
-  }
-fi # as_fn_append
-
-# as_fn_arith ARG...
-# ------------------
-# Perform arithmetic evaluation on the ARGs, and store the result in the
-# global $as_val. Take advantage of shells that can avoid forks. The arguments
-# must be portable across $(()) and expr.
-if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
-  eval 'as_fn_arith ()
-  {
-    as_val=$(( $* ))
-  }'
-else
-  as_fn_arith ()
-  {
-    as_val=`expr "$@" || test $? -eq 1`
-  }
-fi # as_fn_arith
-
-
-# as_fn_error STATUS ERROR [LINENO LOG_FD]
-# ----------------------------------------
-# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
-# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
-# script with STATUS, using 1 if that was 0.
-as_fn_error ()
-{
-  as_status=$1; test $as_status -eq 0 && as_status=1
-  if test "$4"; then
-    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
-  fi
-  $as_echo "$as_me: error: $2" >&2
-  as_fn_exit $as_status
-} # as_fn_error
-
-if expr a : '\(a\)' >/dev/null 2>&1 &&
-   test "X`expr 00001 : '.*\(...\)'`" = X001; then
-  as_expr=expr
-else
-  as_expr=false
-fi
-
-if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
-  as_basename=basename
-else
-  as_basename=false
-fi
-
-if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
-  as_dirname=dirname
-else
-  as_dirname=false
-fi
-
-as_me=`$as_basename -- "$0" ||
-$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
-	 X"$0" : 'X\(//\)$' \| \
-	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X/"$0" |
-    sed '/^.*\/\([^/][^/]*\)\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\/\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\/\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-
-# Avoid depending upon Character Ranges.
-as_cr_letters='abcdefghijklmnopqrstuvwxyz'
-as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
-as_cr_Letters=$as_cr_letters$as_cr_LETTERS
-as_cr_digits='0123456789'
-as_cr_alnum=$as_cr_Letters$as_cr_digits
-
-
-  as_lineno_1=$LINENO as_lineno_1a=$LINENO
-  as_lineno_2=$LINENO as_lineno_2a=$LINENO
-  eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" &&
-  test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || {
-  # Blame Lee E. McMahon (1931-1989) for sed's syntax.  :-)
-  sed -n '
-    p
-    /[$]LINENO/=
-  ' <$as_myself |
-    sed '
-      s/[$]LINENO.*/&-/
-      t lineno
-      b
-      :lineno
-      N
-      :loop
-      s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/
-      t loop
-      s/-\n.*//
-    ' >$as_me.lineno &&
-  chmod +x "$as_me.lineno" ||
-    { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; }
-
-  # Don't try to exec as it changes $[0], causing all sort of problems
-  # (the dirname of $[0] is not the place where we might find the
-  # original and so on.  Autoconf is especially sensitive to this).
-  . "./$as_me.lineno"
-  # Exit status is that of the last command.
-  exit
-}
-
-ECHO_C= ECHO_N= ECHO_T=
-case `echo -n x` in #(((((
--n*)
-  case `echo 'xy\c'` in
-  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
-  xy)  ECHO_C='\c';;
-  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
-       ECHO_T='	';;
-  esac;;
-*)
-  ECHO_N='-n';;
-esac
-
-rm -f conf$$ conf$$.exe conf$$.file
-if test -d conf$$.dir; then
-  rm -f conf$$.dir/conf$$.file
-else
-  rm -f conf$$.dir
-  mkdir conf$$.dir 2>/dev/null
-fi
-if (echo >conf$$.file) 2>/dev/null; then
-  if ln -s conf$$.file conf$$ 2>/dev/null; then
-    as_ln_s='ln -s'
-    # ... but there are two gotchas:
-    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
-    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
-    # In both cases, we have to default to `cp -p'.
-    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
-      as_ln_s='cp -p'
-  elif ln conf$$.file conf$$ 2>/dev/null; then
-    as_ln_s=ln
-  else
-    as_ln_s='cp -p'
-  fi
-else
-  as_ln_s='cp -p'
-fi
-rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
-rmdir conf$$.dir 2>/dev/null
-
-if mkdir -p . 2>/dev/null; then
-  as_mkdir_p='mkdir -p "$as_dir"'
-else
-  test -d ./-p && rmdir ./-p
-  as_mkdir_p=false
-fi
-
-if test -x / >/dev/null 2>&1; then
-  as_test_x='test -x'
-else
-  if ls -dL / >/dev/null 2>&1; then
-    as_ls_L_option=L
-  else
-    as_ls_L_option=
-  fi
-  as_test_x='
-    eval sh -c '\''
-      if test -d "$1"; then
-	test -d "$1/.";
-      else
-	case $1 in #(
-	-*)set "./$1";;
-	esac;
-	case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
-	???[sx]*):;;*)false;;esac;fi
-    '\'' sh
-  '
-fi
-as_executable_p=$as_test_x
-
-# Sed expression to map a string onto a valid CPP name.
-as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
-
-# Sed expression to map a string onto a valid variable name.
-as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
-
-SHELL=${CONFIG_SHELL-/bin/sh}
-
-
-test -n "$DJDIR" || exec 7<&0 </dev/null
-exec 6>&1
-
-# Name of the host.
-# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status,
-# so uname gets run too.
-ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q`
-
-#
-# Initializations.
-#
-ac_default_prefix=/usr/local
-ac_clean_files=
-ac_config_libobj_dir=.
-LIBOBJS=
-cross_compiling=no
-subdirs=
-MFLAGS=
-MAKEFLAGS=
-
-# Identity of this package.
-PACKAGE_NAME='UDUNITS'
-PACKAGE_TARNAME='udunits'
-PACKAGE_VERSION='2.2.17'
-PACKAGE_STRING='UDUNITS 2.2.17'
-PACKAGE_BUGREPORT='support-udunits@unidata.ucar.edu'
-PACKAGE_URL=''
-
-ac_unique_file="lib/converter.c"
-# Factoring default headers for most tests.
-ac_includes_default="\
-#include <stdio.h>
-#ifdef HAVE_SYS_TYPES_H
-# include <sys/types.h>
-#endif
-#ifdef HAVE_SYS_STAT_H
-# include <sys/stat.h>
-#endif
-#ifdef STDC_HEADERS
-# include <stdlib.h>
-# include <stddef.h>
-#else
-# ifdef HAVE_STDLIB_H
-#  include <stdlib.h>
-# endif
-#endif
-#ifdef HAVE_STRING_H
-# if !defined STDC_HEADERS && defined HAVE_MEMORY_H
-#  include <memory.h>
-# endif
-# include <string.h>
-#endif
-#ifdef HAVE_STRINGS_H
-# include <strings.h>
-#endif
-#ifdef HAVE_INTTYPES_H
-# include <inttypes.h>
-#endif
-#ifdef HAVE_STDINT_H
-# include <stdint.h>
-#endif
-#ifdef HAVE_UNISTD_H
-# include <unistd.h>
-#endif"
-
-ac_subst_vars='am__EXEEXT_FALSE
-am__EXEEXT_TRUE
-LTLIBOBJS
-LIBOBJS
-OTOOL64
-OTOOL
-LIPO
-NMEDIT
-DSYMUTIL
-MANIFEST_TOOL
-RANLIB
-ac_ct_AR
-AR
-DLLTOOL
-OBJDUMP
-NM
-ac_ct_DUMPBIN
-DUMPBIN
-LD
-FGREP
-SED
-host_os
-host_vendor
-host_cpu
-host
-build_os
-build_vendor
-build_cpu
-build
-LIBTOOL
-HAVE_CUNIT_FALSE
-HAVE_CUNIT_TRUE
-LD_CUNIT
-EGREP
-GREP
-ac_ct_FC
-FCFLAGS
-FC
-LEXLIB
-LEX_OUTPUT_ROOT
-LEX
-YFLAGS
-YACC
-LN_S
-CPP
-am__fastdepCC_FALSE
-am__fastdepCC_TRUE
-CCDEPMODE
-AMDEPBACKSLASH
-AMDEP_FALSE
-AMDEP_TRUE
-am__quote
-am__include
-DEPDIR
-OBJEXT
-EXEEXT
-ac_ct_CC
-CPPFLAGS
-LDFLAGS
-CFLAGS
-CC
-ENABLE_UDUNITS_1_FALSE
-ENABLE_UDUNITS_1_TRUE
-DEBUG_FALSE
-DEBUG_TRUE
-LIBS_COVERAGE
-CFLAGS_COVERAGE
-am__untar
-am__tar
-AMTAR
-am__leading_dot
-SET_MAKE
-AWK
-mkdir_p
-MKDIR_P
-INSTALL_STRIP_PROGRAM
-STRIP
-install_sh
-MAKEINFO
-AUTOHEADER
-AUTOMAKE
-AUTOCONF
-ACLOCAL
-VERSION
-PACKAGE
-CYGPATH_W
-am__isrc
-INSTALL_DATA
-INSTALL_SCRIPT
-INSTALL_PROGRAM
-target_alias
-host_alias
-build_alias
-LIBS
-ECHO_T
-ECHO_N
-ECHO_C
-DEFS
-mandir
-localedir
-libdir
-psdir
-pdfdir
-dvidir
-htmldir
-infodir
-docdir
-oldincludedir
-includedir
-localstatedir
-sharedstatedir
-sysconfdir
-datadir
-datarootdir
-libexecdir
-sbindir
-bindir
-program_transform_name
-prefix
-exec_prefix
-PACKAGE_URL
-PACKAGE_BUGREPORT
-PACKAGE_STRING
-PACKAGE_VERSION
-PACKAGE_TARNAME
-PACKAGE_NAME
-PATH_SEPARATOR
-SHELL'
-ac_subst_files=''
-ac_user_opts='
-enable_option_checking
-enable_coverage
-enable_debug
-enable_udunits_1
-enable_dependency_tracking
-enable_shared
-enable_static
-with_pic
-enable_fast_install
-with_gnu_ld
-with_sysroot
-enable_libtool_lock
-'
-      ac_precious_vars='build_alias
-host_alias
-target_alias
-CC
-CFLAGS
-LDFLAGS
-LIBS
-CPPFLAGS
-CPP
-YACC
-YFLAGS
-FC
-FCFLAGS'
-
-
-# Initialize some variables set by options.
-ac_init_help=
-ac_init_version=false
-ac_unrecognized_opts=
-ac_unrecognized_sep=
-# The variables have the same names as the options, with
-# dashes changed to underlines.
-cache_file=/dev/null
-exec_prefix=NONE
-no_create=
-no_recursion=
-prefix=NONE
-program_prefix=NONE
-program_suffix=NONE
-program_transform_name=s,x,x,
-silent=
-site=
-srcdir=
-verbose=
-x_includes=NONE
-x_libraries=NONE
-
-# Installation directory options.
-# These are left unexpanded so users can "make install exec_prefix=/foo"
-# and all the variables that are supposed to be based on exec_prefix
-# by default will actually change.
-# Use braces instead of parens because sh, perl, etc. also accept them.
-# (The list follows the same order as the GNU Coding Standards.)
-bindir='${exec_prefix}/bin'
-sbindir='${exec_prefix}/sbin'
-libexecdir='${exec_prefix}/libexec'
-datarootdir='${prefix}/share'
-datadir='${datarootdir}'
-sysconfdir='${prefix}/etc'
-sharedstatedir='${prefix}/com'
-localstatedir='${prefix}/var'
-includedir='${prefix}/include'
-oldincludedir='/usr/include'
-docdir='${datarootdir}/doc/${PACKAGE_TARNAME}'
-infodir='${datarootdir}/info'
-htmldir='${docdir}'
-dvidir='${docdir}'
-pdfdir='${docdir}'
-psdir='${docdir}'
-libdir='${exec_prefix}/lib'
-localedir='${datarootdir}/locale'
-mandir='${datarootdir}/man'
-
-ac_prev=
-ac_dashdash=
-for ac_option
-do
-  # If the previous option needs an argument, assign it.
-  if test -n "$ac_prev"; then
-    eval $ac_prev=\$ac_option
-    ac_prev=
-    continue
-  fi
-
-  case $ac_option in
-  *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;;
-  *=)   ac_optarg= ;;
-  *)    ac_optarg=yes ;;
-  esac
-
-  # Accept the important Cygnus configure options, so we can diagnose typos.
-
-  case $ac_dashdash$ac_option in
-  --)
-    ac_dashdash=yes ;;
-
-  -bindir | --bindir | --bindi | --bind | --bin | --bi)
-    ac_prev=bindir ;;
-  -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*)
-    bindir=$ac_optarg ;;
-
-  -build | --build | --buil | --bui | --bu)
-    ac_prev=build_alias ;;
-  -build=* | --build=* | --buil=* | --bui=* | --bu=*)
-    build_alias=$ac_optarg ;;
-
-  -cache-file | --cache-file | --cache-fil | --cache-fi \
-  | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c)
-    ac_prev=cache_file ;;
-  -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \
-  | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*)
-    cache_file=$ac_optarg ;;
-
-  --config-cache | -C)
-    cache_file=config.cache ;;
-
-  -datadir | --datadir | --datadi | --datad)
-    ac_prev=datadir ;;
-  -datadir=* | --datadir=* | --datadi=* | --datad=*)
-    datadir=$ac_optarg ;;
-
-  -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \
-  | --dataroo | --dataro | --datar)
-    ac_prev=datarootdir ;;
-  -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \
-  | --dataroot=* | --dataroo=* | --dataro=* | --datar=*)
-    datarootdir=$ac_optarg ;;
-
-  -disable-* | --disable-*)
-    ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'`
-    # Reject names that are not valid shell variable names.
-    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid feature name: $ac_useropt"
-    ac_useropt_orig=$ac_useropt
-    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
-    case $ac_user_opts in
-      *"
-"enable_$ac_useropt"
-"*) ;;
-      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig"
-	 ac_unrecognized_sep=', ';;
-    esac
-    eval enable_$ac_useropt=no ;;
-
-  -docdir | --docdir | --docdi | --doc | --do)
-    ac_prev=docdir ;;
-  -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*)
-    docdir=$ac_optarg ;;
-
-  -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv)
-    ac_prev=dvidir ;;
-  -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*)
-    dvidir=$ac_optarg ;;
-
-  -enable-* | --enable-*)
-    ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'`
-    # Reject names that are not valid shell variable names.
-    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid feature name: $ac_useropt"
-    ac_useropt_orig=$ac_useropt
-    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
-    case $ac_user_opts in
-      *"
-"enable_$ac_useropt"
-"*) ;;
-      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig"
-	 ac_unrecognized_sep=', ';;
-    esac
-    eval enable_$ac_useropt=\$ac_optarg ;;
-
-  -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \
-  | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \
-  | --exec | --exe | --ex)
-    ac_prev=exec_prefix ;;
-  -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \
-  | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \
-  | --exec=* | --exe=* | --ex=*)
-    exec_prefix=$ac_optarg ;;
-
-  -gas | --gas | --ga | --g)
-    # Obsolete; use --with-gas.
-    with_gas=yes ;;
-
-  -help | --help | --hel | --he | -h)
-    ac_init_help=long ;;
-  -help=r* | --help=r* | --hel=r* | --he=r* | -hr*)
-    ac_init_help=recursive ;;
-  -help=s* | --help=s* | --hel=s* | --he=s* | -hs*)
-    ac_init_help=short ;;
-
-  -host | --host | --hos | --ho)
-    ac_prev=host_alias ;;
-  -host=* | --host=* | --hos=* | --ho=*)
-    host_alias=$ac_optarg ;;
-
-  -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht)
-    ac_prev=htmldir ;;
-  -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \
-  | --ht=*)
-    htmldir=$ac_optarg ;;
-
-  -includedir | --includedir | --includedi | --included | --include \
-  | --includ | --inclu | --incl | --inc)
-    ac_prev=includedir ;;
-  -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \
-  | --includ=* | --inclu=* | --incl=* | --inc=*)
-    includedir=$ac_optarg ;;
-
-  -infodir | --infodir | --infodi | --infod | --info | --inf)
-    ac_prev=infodir ;;
-  -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*)
-    infodir=$ac_optarg ;;
-
-  -libdir | --libdir | --libdi | --libd)
-    ac_prev=libdir ;;
-  -libdir=* | --libdir=* | --libdi=* | --libd=*)
-    libdir=$ac_optarg ;;
-
-  -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \
-  | --libexe | --libex | --libe)
-    ac_prev=libexecdir ;;
-  -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \
-  | --libexe=* | --libex=* | --libe=*)
-    libexecdir=$ac_optarg ;;
-
-  -localedir | --localedir | --localedi | --localed | --locale)
-    ac_prev=localedir ;;
-  -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*)
-    localedir=$ac_optarg ;;
-
-  -localstatedir | --localstatedir | --localstatedi | --localstated \
-  | --localstate | --localstat | --localsta | --localst | --locals)
-    ac_prev=localstatedir ;;
-  -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \
-  | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*)
-    localstatedir=$ac_optarg ;;
-
-  -mandir | --mandir | --mandi | --mand | --man | --ma | --m)
-    ac_prev=mandir ;;
-  -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*)
-    mandir=$ac_optarg ;;
-
-  -nfp | --nfp | --nf)
-    # Obsolete; use --without-fp.
-    with_fp=no ;;
-
-  -no-create | --no-create | --no-creat | --no-crea | --no-cre \
-  | --no-cr | --no-c | -n)
-    no_create=yes ;;
-
-  -no-recursion | --no-recursion | --no-recursio | --no-recursi \
-  | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r)
-    no_recursion=yes ;;
-
-  -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \
-  | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \
-  | --oldin | --oldi | --old | --ol | --o)
-    ac_prev=oldincludedir ;;
-  -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \
-  | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \
-  | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*)
-    oldincludedir=$ac_optarg ;;
-
-  -prefix | --prefix | --prefi | --pref | --pre | --pr | --p)
-    ac_prev=prefix ;;
-  -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*)
-    prefix=$ac_optarg ;;
-
-  -program-prefix | --program-prefix | --program-prefi | --program-pref \
-  | --program-pre | --program-pr | --program-p)
-    ac_prev=program_prefix ;;
-  -program-prefix=* | --program-prefix=* | --program-prefi=* \
-  | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*)
-    program_prefix=$ac_optarg ;;
-
-  -program-suffix | --program-suffix | --program-suffi | --program-suff \
-  | --program-suf | --program-su | --program-s)
-    ac_prev=program_suffix ;;
-  -program-suffix=* | --program-suffix=* | --program-suffi=* \
-  | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*)
-    program_suffix=$ac_optarg ;;
-
-  -program-transform-name | --program-transform-name \
-  | --program-transform-nam | --program-transform-na \
-  | --program-transform-n | --program-transform- \
-  | --program-transform | --program-transfor \
-  | --program-transfo | --program-transf \
-  | --program-trans | --program-tran \
-  | --progr-tra | --program-tr | --program-t)
-    ac_prev=program_transform_name ;;
-  -program-transform-name=* | --program-transform-name=* \
-  | --program-transform-nam=* | --program-transform-na=* \
-  | --program-transform-n=* | --program-transform-=* \
-  | --program-transform=* | --program-transfor=* \
-  | --program-transfo=* | --program-transf=* \
-  | --program-trans=* | --program-tran=* \
-  | --progr-tra=* | --program-tr=* | --program-t=*)
-    program_transform_name=$ac_optarg ;;
-
-  -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd)
-    ac_prev=pdfdir ;;
-  -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*)
-    pdfdir=$ac_optarg ;;
-
-  -psdir | --psdir | --psdi | --psd | --ps)
-    ac_prev=psdir ;;
-  -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*)
-    psdir=$ac_optarg ;;
-
-  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
-  | -silent | --silent | --silen | --sile | --sil)
-    silent=yes ;;
-
-  -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb)
-    ac_prev=sbindir ;;
-  -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \
-  | --sbi=* | --sb=*)
-    sbindir=$ac_optarg ;;
-
-  -sharedstatedir | --sharedstatedir | --sharedstatedi \
-  | --sharedstated | --sharedstate | --sharedstat | --sharedsta \
-  | --sharedst | --shareds | --shared | --share | --shar \
-  | --sha | --sh)
-    ac_prev=sharedstatedir ;;
-  -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \
-  | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \
-  | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \
-  | --sha=* | --sh=*)
-    sharedstatedir=$ac_optarg ;;
-
-  -site | --site | --sit)
-    ac_prev=site ;;
-  -site=* | --site=* | --sit=*)
-    site=$ac_optarg ;;
-
-  -srcdir | --srcdir | --srcdi | --srcd | --src | --sr)
-    ac_prev=srcdir ;;
-  -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*)
-    srcdir=$ac_optarg ;;
-
-  -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \
-  | --syscon | --sysco | --sysc | --sys | --sy)
-    ac_prev=sysconfdir ;;
-  -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \
-  | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*)
-    sysconfdir=$ac_optarg ;;
-
-  -target | --target | --targe | --targ | --tar | --ta | --t)
-    ac_prev=target_alias ;;
-  -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*)
-    target_alias=$ac_optarg ;;
-
-  -v | -verbose | --verbose | --verbos | --verbo | --verb)
-    verbose=yes ;;
-
-  -version | --version | --versio | --versi | --vers | -V)
-    ac_init_version=: ;;
-
-  -with-* | --with-*)
-    ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'`
-    # Reject names that are not valid shell variable names.
-    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid package name: $ac_useropt"
-    ac_useropt_orig=$ac_useropt
-    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
-    case $ac_user_opts in
-      *"
-"with_$ac_useropt"
-"*) ;;
-      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig"
-	 ac_unrecognized_sep=', ';;
-    esac
-    eval with_$ac_useropt=\$ac_optarg ;;
-
-  -without-* | --without-*)
-    ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'`
-    # Reject names that are not valid shell variable names.
-    expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null &&
-      as_fn_error $? "invalid package name: $ac_useropt"
-    ac_useropt_orig=$ac_useropt
-    ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'`
-    case $ac_user_opts in
-      *"
-"with_$ac_useropt"
-"*) ;;
-      *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig"
-	 ac_unrecognized_sep=', ';;
-    esac
-    eval with_$ac_useropt=no ;;
-
-  --x)
-    # Obsolete; use --with-x.
-    with_x=yes ;;
-
-  -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \
-  | --x-incl | --x-inc | --x-in | --x-i)
-    ac_prev=x_includes ;;
-  -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \
-  | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*)
-    x_includes=$ac_optarg ;;
-
-  -x-libraries | --x-libraries | --x-librarie | --x-librari \
-  | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l)
-    ac_prev=x_libraries ;;
-  -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \
-  | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*)
-    x_libraries=$ac_optarg ;;
-
-  -*) as_fn_error $? "unrecognized option: \`$ac_option'
-Try \`$0 --help' for more information"
-    ;;
-
-  *=*)
-    ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='`
-    # Reject names that are not valid shell variable names.
-    case $ac_envvar in #(
-      '' | [0-9]* | *[!_$as_cr_alnum]* )
-      as_fn_error $? "invalid variable name: \`$ac_envvar'" ;;
-    esac
-    eval $ac_envvar=\$ac_optarg
-    export $ac_envvar ;;
-
-  *)
-    # FIXME: should be removed in autoconf 3.0.
-    $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2
-    expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null &&
-      $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2
-    : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}"
-    ;;
-
-  esac
-done
-
-if test -n "$ac_prev"; then
-  ac_option=--`echo $ac_prev | sed 's/_/-/g'`
-  as_fn_error $? "missing argument to $ac_option"
-fi
-
-if test -n "$ac_unrecognized_opts"; then
-  case $enable_option_checking in
-    no) ;;
-    fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;;
-    *)     $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;;
-  esac
-fi
-
-# Check all directory arguments for consistency.
-for ac_var in	exec_prefix prefix bindir sbindir libexecdir datarootdir \
-		datadir sysconfdir sharedstatedir localstatedir includedir \
-		oldincludedir docdir infodir htmldir dvidir pdfdir psdir \
-		libdir localedir mandir
-do
-  eval ac_val=\$$ac_var
-  # Remove trailing slashes.
-  case $ac_val in
-    */ )
-      ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'`
-      eval $ac_var=\$ac_val;;
-  esac
-  # Be sure to have absolute directory names.
-  case $ac_val in
-    [\\/$]* | ?:[\\/]* )  continue;;
-    NONE | '' ) case $ac_var in *prefix ) continue;; esac;;
-  esac
-  as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val"
-done
-
-# There might be people who depend on the old broken behavior: `$host'
-# used to hold the argument of --host etc.
-# FIXME: To remove some day.
-build=$build_alias
-host=$host_alias
-target=$target_alias
-
-# FIXME: To remove some day.
-if test "x$host_alias" != x; then
-  if test "x$build_alias" = x; then
-    cross_compiling=maybe
-    $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host.
-    If a cross compiler is detected then cross compile mode will be used" >&2
-  elif test "x$build_alias" != "x$host_alias"; then
-    cross_compiling=yes
-  fi
-fi
-
-ac_tool_prefix=
-test -n "$host_alias" && ac_tool_prefix=$host_alias-
-
-test "$silent" = yes && exec 6>/dev/null
-
-
-ac_pwd=`pwd` && test -n "$ac_pwd" &&
-ac_ls_di=`ls -di .` &&
-ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` ||
-  as_fn_error $? "working directory cannot be determined"
-test "X$ac_ls_di" = "X$ac_pwd_ls_di" ||
-  as_fn_error $? "pwd does not report name of working directory"
-
-
-# Find the source files, if location was not specified.
-if test -z "$srcdir"; then
-  ac_srcdir_defaulted=yes
-  # Try the directory containing this script, then the parent directory.
-  ac_confdir=`$as_dirname -- "$as_myself" ||
-$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$as_myself" : 'X\(//\)[^/]' \| \
-	 X"$as_myself" : 'X\(//\)$' \| \
-	 X"$as_myself" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$as_myself" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-  srcdir=$ac_confdir
-  if test ! -r "$srcdir/$ac_unique_file"; then
-    srcdir=..
-  fi
-else
-  ac_srcdir_defaulted=no
-fi
-if test ! -r "$srcdir/$ac_unique_file"; then
-  test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .."
-  as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir"
-fi
-ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work"
-ac_abs_confdir=`(
-	cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg"
-	pwd)`
-# When building in place, set srcdir=.
-if test "$ac_abs_confdir" = "$ac_pwd"; then
-  srcdir=.
-fi
-# Remove unnecessary trailing slashes from srcdir.
-# Double slashes in file names in object file debugging info
-# mess up M-x gdb in Emacs.
-case $srcdir in
-*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;;
-esac
-for ac_var in $ac_precious_vars; do
-  eval ac_env_${ac_var}_set=\${${ac_var}+set}
-  eval ac_env_${ac_var}_value=\$${ac_var}
-  eval ac_cv_env_${ac_var}_set=\${${ac_var}+set}
-  eval ac_cv_env_${ac_var}_value=\$${ac_var}
-done
-
-#
-# Report the --help message.
-#
-if test "$ac_init_help" = "long"; then
-  # Omit some internal or obsolete options to make the list less imposing.
-  # This message is too long to be a string in the A/UX 3.1 sh.
-  cat <<_ACEOF
-\`configure' configures UDUNITS 2.2.17 to adapt to many kinds of systems.
-
-Usage: $0 [OPTION]... [VAR=VALUE]...
-
-To assign environment variables (e.g., CC, CFLAGS...), specify them as
-VAR=VALUE.  See below for descriptions of some of the useful variables.
-
-Defaults for the options are specified in brackets.
-
-Configuration:
-  -h, --help              display this help and exit
-      --help=short        display options specific to this package
-      --help=recursive    display the short help of all the included packages
-  -V, --version           display version information and exit
-  -q, --quiet, --silent   do not print \`checking ...' messages
-      --cache-file=FILE   cache test results in FILE [disabled]
-  -C, --config-cache      alias for \`--cache-file=config.cache'
-  -n, --no-create         do not create output files
-      --srcdir=DIR        find the sources in DIR [configure dir or \`..']
-
-Installation directories:
-  --prefix=PREFIX         install architecture-independent files in PREFIX
-                          [$ac_default_prefix]
-  --exec-prefix=EPREFIX   install architecture-dependent files in EPREFIX
-                          [PREFIX]
-
-By default, \`make install' will install all the files in
-\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc.  You can specify
-an installation prefix other than \`$ac_default_prefix' using \`--prefix',
-for instance \`--prefix=\$HOME'.
-
-For better control, use the options below.
-
-Fine tuning of the installation directories:
-  --bindir=DIR            user executables [EPREFIX/bin]
-  --sbindir=DIR           system admin executables [EPREFIX/sbin]
-  --libexecdir=DIR        program executables [EPREFIX/libexec]
-  --sysconfdir=DIR        read-only single-machine data [PREFIX/etc]
-  --sharedstatedir=DIR    modifiable architecture-independent data [PREFIX/com]
-  --localstatedir=DIR     modifiable single-machine data [PREFIX/var]
-  --libdir=DIR            object code libraries [EPREFIX/lib]
-  --includedir=DIR        C header files [PREFIX/include]
-  --oldincludedir=DIR     C header files for non-gcc [/usr/include]
-  --datarootdir=DIR       read-only arch.-independent data root [PREFIX/share]
-  --datadir=DIR           read-only architecture-independent data [DATAROOTDIR]
-  --infodir=DIR           info documentation [DATAROOTDIR/info]
-  --localedir=DIR         locale-dependent data [DATAROOTDIR/locale]
-  --mandir=DIR            man documentation [DATAROOTDIR/man]
-  --docdir=DIR            documentation root [DATAROOTDIR/doc/udunits]
-  --htmldir=DIR           html documentation [DOCDIR]
-  --dvidir=DIR            dvi documentation [DOCDIR]
-  --pdfdir=DIR            pdf documentation [DOCDIR]
-  --psdir=DIR             ps documentation [DOCDIR]
-_ACEOF
-
-  cat <<\_ACEOF
-
-Program names:
-  --program-prefix=PREFIX            prepend PREFIX to installed program names
-  --program-suffix=SUFFIX            append SUFFIX to installed program names
-  --program-transform-name=PROGRAM   run sed PROGRAM on installed program names
-
-System types:
-  --build=BUILD     configure for building on BUILD [guessed]
-  --host=HOST       cross-compile to build programs to run on HOST [BUILD]
-_ACEOF
-fi
-
-if test -n "$ac_init_help"; then
-  case $ac_init_help in
-     short | recursive ) echo "Configuration of UDUNITS 2.2.17:";;
-   esac
-  cat <<\_ACEOF
-
-Optional Features:
-  --disable-option-checking  ignore unrecognized --enable/--with options
-  --disable-FEATURE       do not include FEATURE (same as --enable-FEATURE=no)
-  --enable-FEATURE[=ARG]  include FEATURE [ARG=yes]
-  --enable-coverage       Turn on code-coverage support
-  --enable-debug          Turn on debugging support
-  --disable-udunits-1     Turn off support for the UDUNITS-1 API
-                          [default=enabled]
-  --disable-dependency-tracking  speeds up one-time build
-  --enable-dependency-tracking   do not reject slow dependency extractors
-  --enable-shared[=PKGS]  build shared libraries [default=yes]
-  --enable-static[=PKGS]  build static libraries [default=yes]
-  --enable-fast-install[=PKGS]
-                          optimize for fast installation [default=yes]
-  --disable-libtool-lock  avoid locking (might break parallel builds)
-
-Optional Packages:
-  --with-PACKAGE[=ARG]    use PACKAGE [ARG=yes]
-  --without-PACKAGE       do not use PACKAGE (same as --with-PACKAGE=no)
-  --with-pic[=PKGS]       try to use only PIC/non-PIC objects [default=use
-                          both]
-  --with-gnu-ld           assume the C compiler uses GNU ld [default=no]
-  --with-sysroot=DIR Search for dependent libraries within DIR
-                        (or the compiler's sysroot if not specified).
-
-Some influential environment variables:
-  CC          C compiler command
-  CFLAGS      C compiler flags
-  LDFLAGS     linker flags, e.g. -L<lib dir> if you have libraries in a
-              nonstandard directory <lib dir>
-  LIBS        libraries to pass to the linker, e.g. -l<library>
-  CPPFLAGS    (Objective) C/C++ preprocessor flags, e.g. -I<include dir> if
-              you have headers in a nonstandard directory <include dir>
-  CPP         C preprocessor
-  YACC        The `Yet Another Compiler Compiler' implementation to use.
-              Defaults to the first program found out of: `bison -y', `byacc',
-              `yacc'.
-  YFLAGS      The list of arguments that will be passed by default to $YACC.
-              This script will default YFLAGS to the empty string to avoid a
-              default value of `-d' given by some make applications.
-  FC          Fortran compiler command
-  FCFLAGS     Fortran compiler flags
-
-Use these variables to override the choices made by `configure' or to help
-it to find libraries and programs with nonstandard names/locations.
-
-Report bugs to <support-udunits@unidata.ucar.edu>.
-_ACEOF
-ac_status=$?
-fi
-
-if test "$ac_init_help" = "recursive"; then
-  # If there are subdirs, report their specific --help.
-  for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue
-    test -d "$ac_dir" ||
-      { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } ||
-      continue
-    ac_builddir=.
-
-case "$ac_dir" in
-.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
-*)
-  ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
-  # A ".." for each directory in $ac_dir_suffix.
-  ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
-  case $ac_top_builddir_sub in
-  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
-  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
-  esac ;;
-esac
-ac_abs_top_builddir=$ac_pwd
-ac_abs_builddir=$ac_pwd$ac_dir_suffix
-# for backward compatibility:
-ac_top_builddir=$ac_top_build_prefix
-
-case $srcdir in
-  .)  # We are building in place.
-    ac_srcdir=.
-    ac_top_srcdir=$ac_top_builddir_sub
-    ac_abs_top_srcdir=$ac_pwd ;;
-  [\\/]* | ?:[\\/]* )  # Absolute name.
-    ac_srcdir=$srcdir$ac_dir_suffix;
-    ac_top_srcdir=$srcdir
-    ac_abs_top_srcdir=$srcdir ;;
-  *) # Relative name.
-    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
-    ac_top_srcdir=$ac_top_build_prefix$srcdir
-    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
-esac
-ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
-
-    cd "$ac_dir" || { ac_status=$?; continue; }
-    # Check for guested configure.
-    if test -f "$ac_srcdir/configure.gnu"; then
-      echo &&
-      $SHELL "$ac_srcdir/configure.gnu" --help=recursive
-    elif test -f "$ac_srcdir/configure"; then
-      echo &&
-      $SHELL "$ac_srcdir/configure" --help=recursive
-    else
-      $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2
-    fi || ac_status=$?
-    cd "$ac_pwd" || { ac_status=$?; break; }
-  done
-fi
-
-test -n "$ac_init_help" && exit $ac_status
-if $ac_init_version; then
-  cat <<\_ACEOF
-UDUNITS configure 2.2.17
-generated by GNU Autoconf 2.68
-
-Copyright (C) 2010 Free Software Foundation, Inc.
-This configure script is free software; the Free Software Foundation
-gives unlimited permission to copy, distribute and modify it.
-_ACEOF
-  exit
-fi
-
-## ------------------------ ##
-## Autoconf initialization. ##
-## ------------------------ ##
-
-# ac_fn_c_try_compile LINENO
-# --------------------------
-# Try to compile conftest.$ac_ext, and return whether this succeeded.
-ac_fn_c_try_compile ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  rm -f conftest.$ac_objext
-  if { { ac_try="$ac_compile"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_compile") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && {
-	 test -z "$ac_c_werror_flag" ||
-	 test ! -s conftest.err
-       } && test -s conftest.$ac_objext; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-	ac_retval=1
-fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_compile
-
-# ac_fn_c_try_cpp LINENO
-# ----------------------
-# Try to preprocess conftest.$ac_ext, and return whether this succeeded.
-ac_fn_c_try_cpp ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  if { { ac_try="$ac_cpp conftest.$ac_ext"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } > conftest.i && {
-	 test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" ||
-	 test ! -s conftest.err
-       }; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-    ac_retval=1
-fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_cpp
-
-# ac_fn_c_try_link LINENO
-# -----------------------
-# Try to link conftest.$ac_ext, and return whether this succeeded.
-ac_fn_c_try_link ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  rm -f conftest.$ac_objext conftest$ac_exeext
-  if { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && {
-	 test -z "$ac_c_werror_flag" ||
-	 test ! -s conftest.err
-       } && test -s conftest$ac_exeext && {
-	 test "$cross_compiling" = yes ||
-	 $as_test_x conftest$ac_exeext
-       }; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-	ac_retval=1
-fi
-  # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
-  # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
-  # interfere with the next link command; also delete a directory that is
-  # left behind by Apple's compiler.  We do this before executing the actions.
-  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_link
-
-# ac_fn_fc_try_compile LINENO
-# ---------------------------
-# Try to compile conftest.$ac_ext, and return whether this succeeded.
-ac_fn_fc_try_compile ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  rm -f conftest.$ac_objext
-  if { { ac_try="$ac_compile"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_compile") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && {
-	 test -z "$ac_fc_werror_flag" ||
-	 test ! -s conftest.err
-       } && test -s conftest.$ac_objext; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-	ac_retval=1
-fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_fc_try_compile
-
-# ac_fn_c_try_run LINENO
-# ----------------------
-# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes
-# that executables *can* be run.
-ac_fn_c_try_run ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  if { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && { ac_try='./conftest$ac_exeext'
-  { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; }; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: program exited with status $ac_status" >&5
-       $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-       ac_retval=$ac_status
-fi
-  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_c_try_run
-
-# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES
-# -------------------------------------------------------
-# Tests whether HEADER exists, giving a warning if it cannot be compiled using
-# the include files in INCLUDES and setting the cache variable VAR
-# accordingly.
-ac_fn_c_check_header_mongrel ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  if eval \${$3+:} false; then :
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
-  $as_echo_n "(cached) " >&6
-fi
-eval ac_res=\$$3
-	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-else
-  # Is the header compilable?
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5
-$as_echo_n "checking $2 usability... " >&6; }
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-$4
-#include <$2>
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_header_compiler=yes
-else
-  ac_header_compiler=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5
-$as_echo "$ac_header_compiler" >&6; }
-
-# Is the header present?
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5
-$as_echo_n "checking $2 presence... " >&6; }
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <$2>
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-  ac_header_preproc=yes
-else
-  ac_header_preproc=no
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5
-$as_echo "$ac_header_preproc" >&6; }
-
-# So?  What about this header?
-case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #((
-  yes:no: )
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5
-$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;}
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
-$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
-    ;;
-  no:yes:* )
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5
-$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;}
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2:     check for missing prerequisite headers?" >&5
-$as_echo "$as_me: WARNING: $2:     check for missing prerequisite headers?" >&2;}
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5
-$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;}
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2:     section \"Present But Cannot Be Compiled\"" >&5
-$as_echo "$as_me: WARNING: $2:     section \"Present But Cannot Be Compiled\"" >&2;}
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5
-$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;}
-( $as_echo "## ----------------------------------------------- ##
-## Report this to support-udunits@unidata.ucar.edu ##
-## ----------------------------------------------- ##"
-     ) | sed "s/^/$as_me: WARNING:     /" >&2
-    ;;
-esac
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  eval "$3=\$ac_header_compiler"
-fi
-eval ac_res=\$$3
-	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-fi
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_c_check_header_mongrel
-
-# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES
-# -------------------------------------------------------
-# Tests whether HEADER exists and can be compiled using the include files in
-# INCLUDES, setting the cache variable VAR accordingly.
-ac_fn_c_check_header_compile ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-$4
-#include <$2>
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  eval "$3=yes"
-else
-  eval "$3=no"
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-eval ac_res=\$$3
-	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_c_check_header_compile
-
-# ac_fn_c_check_type LINENO TYPE VAR INCLUDES
-# -------------------------------------------
-# Tests whether TYPE exists after having included INCLUDES, setting cache
-# variable VAR accordingly.
-ac_fn_c_check_type ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  eval "$3=no"
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-$4
-int
-main ()
-{
-if (sizeof ($2))
-	 return 0;
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-$4
-int
-main ()
-{
-if (sizeof (($2)))
-	    return 0;
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-
-else
-  eval "$3=yes"
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-eval ac_res=\$$3
-	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_c_check_type
-
-# ac_fn_c_check_func LINENO FUNC VAR
-# ----------------------------------
-# Tests whether FUNC exists, setting the cache variable VAR accordingly
-ac_fn_c_check_func ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5
-$as_echo_n "checking for $2... " >&6; }
-if eval \${$3+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-/* Define $2 to an innocuous variant, in case <limits.h> declares $2.
-   For example, HP-UX 11i <limits.h> declares gettimeofday.  */
-#define $2 innocuous_$2
-
-/* System header to define __stub macros and hopefully few prototypes,
-    which can conflict with char $2 (); below.
-    Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
-    <limits.h> exists even on freestanding compilers.  */
-
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
-
-#undef $2
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char $2 ();
-/* The GNU C library defines this for functions which it implements
-    to always fail with ENOSYS.  Some functions are actually named
-    something starting with __ and the normal name is an alias.  */
-#if defined __stub_$2 || defined __stub___$2
-choke me
-#endif
-
-int
-main ()
-{
-return $2 ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  eval "$3=yes"
-else
-  eval "$3=no"
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-fi
-eval ac_res=\$$3
-	       { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
-$as_echo "$ac_res" >&6; }
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-
-} # ac_fn_c_check_func
-
-# ac_fn_fc_try_link LINENO
-# ------------------------
-# Try to link conftest.$ac_ext, and return whether this succeeded.
-ac_fn_fc_try_link ()
-{
-  as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-  rm -f conftest.$ac_objext conftest$ac_exeext
-  if { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    grep -v '^ *+' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-    mv -f conftest.er1 conftest.err
-  fi
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && {
-	 test -z "$ac_fc_werror_flag" ||
-	 test ! -s conftest.err
-       } && test -s conftest$ac_exeext && {
-	 test "$cross_compiling" = yes ||
-	 $as_test_x conftest$ac_exeext
-       }; then :
-  ac_retval=0
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-	ac_retval=1
-fi
-  # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information
-  # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would
-  # interfere with the next link command; also delete a directory that is
-  # left behind by Apple's compiler.  We do this before executing the actions.
-  rm -rf conftest.dSYM conftest_ipa8_conftest.oo
-  eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno
-  as_fn_set_status $ac_retval
-
-} # ac_fn_fc_try_link
-cat >config.log <<_ACEOF
-This file contains any messages produced by compilers while
-running configure, to aid debugging if configure makes a mistake.
-
-It was created by UDUNITS $as_me 2.2.17, which was
-generated by GNU Autoconf 2.68.  Invocation command line was
-
-  $ $0 $@
-
-_ACEOF
-exec 5>>config.log
-{
-cat <<_ASUNAME
-## --------- ##
-## Platform. ##
-## --------- ##
-
-hostname = `(hostname || uname -n) 2>/dev/null | sed 1q`
-uname -m = `(uname -m) 2>/dev/null || echo unknown`
-uname -r = `(uname -r) 2>/dev/null || echo unknown`
-uname -s = `(uname -s) 2>/dev/null || echo unknown`
-uname -v = `(uname -v) 2>/dev/null || echo unknown`
-
-/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown`
-/bin/uname -X     = `(/bin/uname -X) 2>/dev/null     || echo unknown`
-
-/bin/arch              = `(/bin/arch) 2>/dev/null              || echo unknown`
-/usr/bin/arch -k       = `(/usr/bin/arch -k) 2>/dev/null       || echo unknown`
-/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown`
-/usr/bin/hostinfo      = `(/usr/bin/hostinfo) 2>/dev/null      || echo unknown`
-/bin/machine           = `(/bin/machine) 2>/dev/null           || echo unknown`
-/usr/bin/oslevel       = `(/usr/bin/oslevel) 2>/dev/null       || echo unknown`
-/bin/universe          = `(/bin/universe) 2>/dev/null          || echo unknown`
-
-_ASUNAME
-
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    $as_echo "PATH: $as_dir"
-  done
-IFS=$as_save_IFS
-
-} >&5
-
-cat >&5 <<_ACEOF
-
-
-## ----------- ##
-## Core tests. ##
-## ----------- ##
-
-_ACEOF
-
-
-# Keep a trace of the command line.
-# Strip out --no-create and --no-recursion so they do not pile up.
-# Strip out --silent because we don't want to record it for future runs.
-# Also quote any args containing shell meta-characters.
-# Make two passes to allow for proper duplicate-argument suppression.
-ac_configure_args=
-ac_configure_args0=
-ac_configure_args1=
-ac_must_keep_next=false
-for ac_pass in 1 2
-do
-  for ac_arg
-  do
-    case $ac_arg in
-    -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;;
-    -q | -quiet | --quiet | --quie | --qui | --qu | --q \
-    | -silent | --silent | --silen | --sile | --sil)
-      continue ;;
-    *\'*)
-      ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;;
-    esac
-    case $ac_pass in
-    1) as_fn_append ac_configure_args0 " '$ac_arg'" ;;
-    2)
-      as_fn_append ac_configure_args1 " '$ac_arg'"
-      if test $ac_must_keep_next = true; then
-	ac_must_keep_next=false # Got value, back to normal.
-      else
-	case $ac_arg in
-	  *=* | --config-cache | -C | -disable-* | --disable-* \
-	  | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \
-	  | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \
-	  | -with-* | --with-* | -without-* | --without-* | --x)
-	    case "$ac_configure_args0 " in
-	      "$ac_configure_args1"*" '$ac_arg' "* ) continue ;;
-	    esac
-	    ;;
-	  -* ) ac_must_keep_next=true ;;
-	esac
-      fi
-      as_fn_append ac_configure_args " '$ac_arg'"
-      ;;
-    esac
-  done
-done
-{ ac_configure_args0=; unset ac_configure_args0;}
-{ ac_configure_args1=; unset ac_configure_args1;}
-
-# When interrupted or exit'd, cleanup temporary files, and complete
-# config.log.  We remove comments because anyway the quotes in there
-# would cause problems or look ugly.
-# WARNING: Use '\'' to represent an apostrophe within the trap.
-# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug.
-trap 'exit_status=$?
-  # Save into config.log some information that might help in debugging.
-  {
-    echo
-
-    $as_echo "## ---------------- ##
-## Cache variables. ##
-## ---------------- ##"
-    echo
-    # The following way of writing the cache mishandles newlines in values,
-(
-  for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do
-    eval ac_val=\$$ac_var
-    case $ac_val in #(
-    *${as_nl}*)
-      case $ac_var in #(
-      *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
-$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
-      esac
-      case $ac_var in #(
-      _ | IFS | as_nl) ;; #(
-      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
-      *) { eval $ac_var=; unset $ac_var;} ;;
-      esac ;;
-    esac
-  done
-  (set) 2>&1 |
-    case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #(
-    *${as_nl}ac_space=\ *)
-      sed -n \
-	"s/'\''/'\''\\\\'\'''\''/g;
-	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p"
-      ;; #(
-    *)
-      sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
-      ;;
-    esac |
-    sort
-)
-    echo
-
-    $as_echo "## ----------------- ##
-## Output variables. ##
-## ----------------- ##"
-    echo
-    for ac_var in $ac_subst_vars
-    do
-      eval ac_val=\$$ac_var
-      case $ac_val in
-      *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
-      esac
-      $as_echo "$ac_var='\''$ac_val'\''"
-    done | sort
-    echo
-
-    if test -n "$ac_subst_files"; then
-      $as_echo "## ------------------- ##
-## File substitutions. ##
-## ------------------- ##"
-      echo
-      for ac_var in $ac_subst_files
-      do
-	eval ac_val=\$$ac_var
-	case $ac_val in
-	*\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;;
-	esac
-	$as_echo "$ac_var='\''$ac_val'\''"
-      done | sort
-      echo
-    fi
-
-    if test -s confdefs.h; then
-      $as_echo "## ----------- ##
-## confdefs.h. ##
-## ----------- ##"
-      echo
-      cat confdefs.h
-      echo
-    fi
-    test "$ac_signal" != 0 &&
-      $as_echo "$as_me: caught signal $ac_signal"
-    $as_echo "$as_me: exit $exit_status"
-  } >&5
-  rm -f core *.core core.conftest.* &&
-    rm -f -r conftest* confdefs* conf$$* $ac_clean_files &&
-    exit $exit_status
-' 0
-for ac_signal in 1 2 13 15; do
-  trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal
-done
-ac_signal=0
-
-# confdefs.h avoids OS command line length limits that DEFS can exceed.
-rm -f -r conftest* confdefs.h
-
-$as_echo "/* confdefs.h */" > confdefs.h
-
-# Predefined preprocessor variables.
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_NAME "$PACKAGE_NAME"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_TARNAME "$PACKAGE_TARNAME"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_VERSION "$PACKAGE_VERSION"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_STRING "$PACKAGE_STRING"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT"
-_ACEOF
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE_URL "$PACKAGE_URL"
-_ACEOF
-
-
-# Let the site file select an alternate cache file if it wants to.
-# Prefer an explicitly selected file to automatically selected ones.
-ac_site_file1=NONE
-ac_site_file2=NONE
-if test -n "$CONFIG_SITE"; then
-  # We do not want a PATH search for config.site.
-  case $CONFIG_SITE in #((
-    -*)  ac_site_file1=./$CONFIG_SITE;;
-    */*) ac_site_file1=$CONFIG_SITE;;
-    *)   ac_site_file1=./$CONFIG_SITE;;
-  esac
-elif test "x$prefix" != xNONE; then
-  ac_site_file1=$prefix/share/config.site
-  ac_site_file2=$prefix/etc/config.site
-else
-  ac_site_file1=$ac_default_prefix/share/config.site
-  ac_site_file2=$ac_default_prefix/etc/config.site
-fi
-for ac_site_file in "$ac_site_file1" "$ac_site_file2"
-do
-  test "x$ac_site_file" = xNONE && continue
-  if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5
-$as_echo "$as_me: loading site script $ac_site_file" >&6;}
-    sed 's/^/| /' "$ac_site_file" >&5
-    . "$ac_site_file" \
-      || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "failed to load site script $ac_site_file
-See \`config.log' for more details" "$LINENO" 5; }
-  fi
-done
-
-if test -r "$cache_file"; then
-  # Some versions of bash will fail to source /dev/null (special files
-  # actually), so we avoid doing that.  DJGPP emulates it as a regular file.
-  if test /dev/null != "$cache_file" && test -f "$cache_file"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5
-$as_echo "$as_me: loading cache $cache_file" >&6;}
-    case $cache_file in
-      [\\/]* | ?:[\\/]* ) . "$cache_file";;
-      *)                      . "./$cache_file";;
-    esac
-  fi
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5
-$as_echo "$as_me: creating cache $cache_file" >&6;}
-  >$cache_file
-fi
-
-# Check that the precious variables saved in the cache have kept the same
-# value.
-ac_cache_corrupted=false
-for ac_var in $ac_precious_vars; do
-  eval ac_old_set=\$ac_cv_env_${ac_var}_set
-  eval ac_new_set=\$ac_env_${ac_var}_set
-  eval ac_old_val=\$ac_cv_env_${ac_var}_value
-  eval ac_new_val=\$ac_env_${ac_var}_value
-  case $ac_old_set,$ac_new_set in
-    set,)
-      { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5
-$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;}
-      ac_cache_corrupted=: ;;
-    ,set)
-      { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5
-$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;}
-      ac_cache_corrupted=: ;;
-    ,);;
-    *)
-      if test "x$ac_old_val" != "x$ac_new_val"; then
-	# differences in whitespace do not lead to failure.
-	ac_old_val_w=`echo x $ac_old_val`
-	ac_new_val_w=`echo x $ac_new_val`
-	if test "$ac_old_val_w" != "$ac_new_val_w"; then
-	  { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5
-$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;}
-	  ac_cache_corrupted=:
-	else
-	  { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5
-$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;}
-	  eval $ac_var=\$ac_old_val
-	fi
-	{ $as_echo "$as_me:${as_lineno-$LINENO}:   former value:  \`$ac_old_val'" >&5
-$as_echo "$as_me:   former value:  \`$ac_old_val'" >&2;}
-	{ $as_echo "$as_me:${as_lineno-$LINENO}:   current value: \`$ac_new_val'" >&5
-$as_echo "$as_me:   current value: \`$ac_new_val'" >&2;}
-      fi;;
-  esac
-  # Pass precious variables to config.status.
-  if test "$ac_new_set" = set; then
-    case $ac_new_val in
-    *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;;
-    *) ac_arg=$ac_var=$ac_new_val ;;
-    esac
-    case " $ac_configure_args " in
-      *" '$ac_arg' "*) ;; # Avoid dups.  Use of quotes ensures accuracy.
-      *) as_fn_append ac_configure_args " '$ac_arg'" ;;
-    esac
-  fi
-done
-if $ac_cache_corrupted; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-  { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5
-$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;}
-  as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5
-fi
-## -------------------- ##
-## Main body of script. ##
-## -------------------- ##
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-
-ac_aux_dir=
-for ac_dir in build-aux "$srcdir"/build-aux; do
-  if test -f "$ac_dir/install-sh"; then
-    ac_aux_dir=$ac_dir
-    ac_install_sh="$ac_aux_dir/install-sh -c"
-    break
-  elif test -f "$ac_dir/install.sh"; then
-    ac_aux_dir=$ac_dir
-    ac_install_sh="$ac_aux_dir/install.sh -c"
-    break
-  elif test -f "$ac_dir/shtool"; then
-    ac_aux_dir=$ac_dir
-    ac_install_sh="$ac_aux_dir/shtool install -c"
-    break
-  fi
-done
-if test -z "$ac_aux_dir"; then
-  as_fn_error $? "cannot find install-sh, install.sh, or shtool in build-aux \"$srcdir\"/build-aux" "$LINENO" 5
-fi
-
-# These three variables are undocumented and unsupported,
-# and are intended to be withdrawn in a future Autoconf release.
-# They can cause serious problems if a builder's source tree is in a directory
-# whose full name contains unusual characters.
-ac_config_guess="$SHELL $ac_aux_dir/config.guess"  # Please don't use this var.
-ac_config_sub="$SHELL $ac_aux_dir/config.sub"  # Please don't use this var.
-ac_configure="$SHELL $ac_aux_dir/configure"  # Please don't use this var.
-
-
-
-am__api_version='1.11'
-
-# Find a good install program.  We prefer a C program (faster),
-# so one script is as good as another.  But avoid the broken or
-# incompatible versions:
-# SysV /etc/install, /usr/sbin/install
-# SunOS /usr/etc/install
-# IRIX /sbin/install
-# AIX /bin/install
-# AmigaOS /C/install, which installs bootblocks on floppy discs
-# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag
-# AFS /usr/afsws/bin/install, which mishandles nonexistent args
-# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff"
-# OS/2's system install, which has a completely different semantic
-# ./install, which can be erroneously created by make from ./install.sh.
-# Reject install programs that cannot install multiple files.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5
-$as_echo_n "checking for a BSD-compatible install... " >&6; }
-if test -z "$INSTALL"; then
-if ${ac_cv_path_install+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    # Account for people who put trailing slashes in PATH elements.
-case $as_dir/ in #((
-  ./ | .// | /[cC]/* | \
-  /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \
-  ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \
-  /usr/ucb/* ) ;;
-  *)
-    # OSF1 and SCO ODT 3.0 have their own names for install.
-    # Don't use installbsd from OSF since it installs stuff as root
-    # by default.
-    for ac_prog in ginstall scoinst install; do
-      for ac_exec_ext in '' $ac_executable_extensions; do
-	if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then
-	  if test $ac_prog = install &&
-	    grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
-	    # AIX install.  It has an incompatible calling convention.
-	    :
-	  elif test $ac_prog = install &&
-	    grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then
-	    # program-specific install script used by HP pwplus--don't use.
-	    :
-	  else
-	    rm -rf conftest.one conftest.two conftest.dir
-	    echo one > conftest.one
-	    echo two > conftest.two
-	    mkdir conftest.dir
-	    if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" &&
-	      test -s conftest.one && test -s conftest.two &&
-	      test -s conftest.dir/conftest.one &&
-	      test -s conftest.dir/conftest.two
-	    then
-	      ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c"
-	      break 3
-	    fi
-	  fi
-	fi
-      done
-    done
-    ;;
-esac
-
-  done
-IFS=$as_save_IFS
-
-rm -rf conftest.one conftest.two conftest.dir
-
-fi
-  if test "${ac_cv_path_install+set}" = set; then
-    INSTALL=$ac_cv_path_install
-  else
-    # As a last resort, use the slow shell script.  Don't cache a
-    # value for INSTALL within a source directory, because that will
-    # break other packages using the cache if that directory is
-    # removed, or if the value is a relative name.
-    INSTALL=$ac_install_sh
-  fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5
-$as_echo "$INSTALL" >&6; }
-
-# Use test -z because SunOS4 sh mishandles braces in ${var-val}.
-# It thinks the first close brace ends the variable substitution.
-test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}'
-
-test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}'
-
-test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644'
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5
-$as_echo_n "checking whether build environment is sane... " >&6; }
-# Just in case
-sleep 1
-echo timestamp > conftest.file
-# Reject unsafe characters in $srcdir or the absolute working directory
-# name.  Accept space and tab only in the latter.
-am_lf='
-'
-case `pwd` in
-  *[\\\"\#\$\&\'\`$am_lf]*)
-    as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;;
-esac
-case $srcdir in
-  *[\\\"\#\$\&\'\`$am_lf\ \	]*)
-    as_fn_error $? "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;;
-esac
-
-# Do `set' in a subshell so we don't clobber the current shell's
-# arguments.  Must try -L first in case configure is actually a
-# symlink; some systems play weird games with the mod time of symlinks
-# (eg FreeBSD returns the mod time of the symlink's containing
-# directory).
-if (
-   set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null`
-   if test "$*" = "X"; then
-      # -L didn't work.
-      set X `ls -t "$srcdir/configure" conftest.file`
-   fi
-   rm -f conftest.file
-   if test "$*" != "X $srcdir/configure conftest.file" \
-      && test "$*" != "X conftest.file $srcdir/configure"; then
-
-      # If neither matched, then we have a broken ls.  This can happen
-      # if, for instance, CONFIG_SHELL is bash and it inherits a
-      # broken ls alias from the environment.  This has actually
-      # happened.  Such a system could not be considered "sane".
-      as_fn_error $? "ls -t appears to fail.  Make sure there is not a broken
-alias in your environment" "$LINENO" 5
-   fi
-
-   test "$2" = conftest.file
-   )
-then
-   # Ok.
-   :
-else
-   as_fn_error $? "newly created file is older than distributed files!
-Check your system clock" "$LINENO" 5
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-test "$program_prefix" != NONE &&
-  program_transform_name="s&^&$program_prefix&;$program_transform_name"
-# Use a double $ so make ignores it.
-test "$program_suffix" != NONE &&
-  program_transform_name="s&\$&$program_suffix&;$program_transform_name"
-# Double any \ or $.
-# By default was `s,x,x', remove it if useless.
-ac_script='s/[\\$]/&&/g;s/;s,x,x,$//'
-program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"`
-
-# expand $ac_aux_dir to an absolute path
-am_aux_dir=`cd $ac_aux_dir && pwd`
-
-if test x"${MISSING+set}" != xset; then
-  case $am_aux_dir in
-  *\ * | *\	*)
-    MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;;
-  *)
-    MISSING="\${SHELL} $am_aux_dir/missing" ;;
-  esac
-fi
-# Use eval to expand $SHELL
-if eval "$MISSING --run true"; then
-  am_missing_run="$MISSING --run "
-else
-  am_missing_run=
-  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5
-$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;}
-fi
-
-if test x"${install_sh}" != xset; then
-  case $am_aux_dir in
-  *\ * | *\	*)
-    install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;;
-  *)
-    install_sh="\${SHELL} $am_aux_dir/install-sh"
-  esac
-fi
-
-# Installed binaries are usually stripped using `strip' when the user
-# run `make install-strip'.  However `strip' might not be the right
-# tool to use in cross-compilation environments, therefore Automake
-# will honor the `STRIP' environment variable to overrule this program.
-if test "$cross_compiling" != no; then
-  if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
-set dummy ${ac_tool_prefix}strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_STRIP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$STRIP"; then
-  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-STRIP=$ac_cv_prog_STRIP
-if test -n "$STRIP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
-$as_echo "$STRIP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_STRIP"; then
-  ac_ct_STRIP=$STRIP
-  # Extract the first word of "strip", so it can be a program name with args.
-set dummy strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_STRIP"; then
-  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_STRIP="strip"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
-if test -n "$ac_ct_STRIP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
-$as_echo "$ac_ct_STRIP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_STRIP" = x; then
-    STRIP=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    STRIP=$ac_ct_STRIP
-  fi
-else
-  STRIP="$ac_cv_prog_STRIP"
-fi
-
-fi
-INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s"
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5
-$as_echo_n "checking for a thread-safe mkdir -p... " >&6; }
-if test -z "$MKDIR_P"; then
-  if ${ac_cv_path_mkdir+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_prog in mkdir gmkdir; do
-	 for ac_exec_ext in '' $ac_executable_extensions; do
-	   { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue
-	   case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #(
-	     'mkdir (GNU coreutils) '* | \
-	     'mkdir (coreutils) '* | \
-	     'mkdir (fileutils) '4.1*)
-	       ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext
-	       break 3;;
-	   esac
-	 done
-       done
-  done
-IFS=$as_save_IFS
-
-fi
-
-  test -d ./--version && rmdir ./--version
-  if test "${ac_cv_path_mkdir+set}" = set; then
-    MKDIR_P="$ac_cv_path_mkdir -p"
-  else
-    # As a last resort, use the slow shell script.  Don't cache a
-    # value for MKDIR_P within a source directory, because that will
-    # break other packages using the cache if that directory is
-    # removed, or if the value is a relative name.
-    MKDIR_P="$ac_install_sh -d"
-  fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5
-$as_echo "$MKDIR_P" >&6; }
-
-mkdir_p="$MKDIR_P"
-case $mkdir_p in
-  [\\/$]* | ?:[\\/]*) ;;
-  */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;;
-esac
-
-for ac_prog in gawk mawk nawk awk
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_AWK+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$AWK"; then
-  ac_cv_prog_AWK="$AWK" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_AWK="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-AWK=$ac_cv_prog_AWK
-if test -n "$AWK"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5
-$as_echo "$AWK" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$AWK" && break
-done
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
-$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
-set x ${MAKE-make}
-ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
-if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat >conftest.make <<\_ACEOF
-SHELL = /bin/sh
-all:
-	@echo '@@@%%%=$(MAKE)=@@@%%%'
-_ACEOF
-# GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
-case `${MAKE-make} -f conftest.make 2>/dev/null` in
-  *@@@%%%=?*=@@@%%%*)
-    eval ac_cv_prog_make_${ac_make}_set=yes;;
-  *)
-    eval ac_cv_prog_make_${ac_make}_set=no;;
-esac
-rm -f conftest.make
-fi
-if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-  SET_MAKE=
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-  SET_MAKE="MAKE=${MAKE-make}"
-fi
-
-rm -rf .tst 2>/dev/null
-mkdir .tst 2>/dev/null
-if test -d .tst; then
-  am__leading_dot=.
-else
-  am__leading_dot=_
-fi
-rmdir .tst 2>/dev/null
-
-if test "`cd $srcdir && pwd`" != "`pwd`"; then
-  # Use -I$(srcdir) only when $(srcdir) != ., so that make's output
-  # is not polluted with repeated "-I."
-  am__isrc=' -I$(srcdir)'
-  # test to see if srcdir already configured
-  if test -f $srcdir/config.status; then
-    as_fn_error $? "source directory already configured; run \"make distclean\" there first" "$LINENO" 5
-  fi
-fi
-
-# test whether we have cygpath
-if test -z "$CYGPATH_W"; then
-  if (cygpath --version) >/dev/null 2>/dev/null; then
-    CYGPATH_W='cygpath -w'
-  else
-    CYGPATH_W=echo
-  fi
-fi
-
-
-# Define the identity of the package.
- PACKAGE='udunits'
- VERSION='2.2.17'
-
-
-cat >>confdefs.h <<_ACEOF
-#define PACKAGE "$PACKAGE"
-_ACEOF
-
-
-cat >>confdefs.h <<_ACEOF
-#define VERSION "$VERSION"
-_ACEOF
-
-# Some tools Automake needs.
-
-ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"}
-
-
-AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"}
-
-
-AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"}
-
-
-AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"}
-
-
-MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"}
-
-# We need awk for the "check" target.  The system "awk" is bad on
-# some platforms.
-# Always define AMTAR for backward compatibility.
-
-AMTAR=${AMTAR-"${am_missing_run}tar"}
-
-am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'
-
-
-
-
-
-ac_config_headers="$ac_config_headers config.h"
-
-
-CFLAGS_COVERAGE=''
-LIBS_COVERAGE=''
-
-# Check whether --enable-coverage was given.
-if test "${enable_coverage+set}" = set; then :
-  enableval=$enable_coverage; case "${enableval}" in
-  yes) CFLAGS_COVERAGE='--coverage'
-       LIBS_COVERAGE=-lgcov
-       coverage_enabled=true;;
-  no) ;;
-  *) as_fn_error $? "bad value ${enableval} for --enable-coverage" "$LINENO" 5 ;;
-esac
-fi
-
-
-
-
-# Check whether --enable-debug was given.
-if test "${enable_debug+set}" = set; then :
-  enableval=$enable_debug; case "${enableval}" in
-  yes)
-    CFLAGS="-g${CFLAGS:+ $CFLAGS}"
-    debug=true ;;
-  no)
-    CFLAGS="-O${CFLAGS:+ $CFLAGS}"
-    debug=false ;;
-  *) as_fn_error $? "bad value ${enableval} for --enable-debug" "$LINENO" 5 ;;
-esac
-else
-  if test "$coverage_enabled" = true; then
-    CFLAGS="-g${CFLAGS:+ $CFLAGS}"
-    debug=true
-else
-    debug=false
-fi
-
-fi
-
- if test x$debug = xtrue; then
-  DEBUG_TRUE=
-  DEBUG_FALSE='#'
-else
-  DEBUG_TRUE='#'
-  DEBUG_FALSE=
-fi
-
-
- if true; then
-  ENABLE_UDUNITS_1_TRUE=
-  ENABLE_UDUNITS_1_FALSE='#'
-else
-  ENABLE_UDUNITS_1_TRUE='#'
-  ENABLE_UDUNITS_1_FALSE=
-fi
-
-# Check whether --enable-udunits-1 was given.
-if test "${enable_udunits_1+set}" = set; then :
-  enableval=$enable_udunits_1; case "${enableval}" in
-      no)    if false; then
-  ENABLE_UDUNITS_1_TRUE=
-  ENABLE_UDUNITS_1_FALSE='#'
-else
-  ENABLE_UDUNITS_1_TRUE='#'
-  ENABLE_UDUNITS_1_FALSE=
-fi
- ;;
-      yes)  ;;
-      *)    as_fn_error $? "bad value ${enableval} for --enable-udunits-1" "$LINENO" 5 ;;
-    esac
-fi
-
-
-# Ensure that compilation is optimized and with assertions disabled by default.
-CFLAGS=${CFLAGS:--O}
-CPPFLAGS=${CPPFLAGS:--DNDEBUG}
-
-# The default absolute pathname of the installed units database. "pkgdatadir"
-# isn't a configure-variable in the normal sense: it doesn't appear in
-# "config.status" yet appears in "Makefile"; consequently, the following
-# nonsense just to avoid defining the pathname in the makefile so that Eclipse
-# is happy.
-pkgdatadir=$(eval echo $(eval echo `echo ${datadir}`/${PACKAGE}))
-
-cat >>confdefs.h <<_ACEOF
-#define DEFAULT_UDUNITS2_XML_PATH "${pkgdatadir}/udunits2.xml"
-_ACEOF
-
-
-# Checks for programs.
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args.
-set dummy ${ac_tool_prefix}gcc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$CC"; then
-  ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_CC="${ac_tool_prefix}gcc"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_CC"; then
-  ac_ct_CC=$CC
-  # Extract the first word of "gcc", so it can be a program name with args.
-set dummy gcc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_CC"; then
-  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_CC="gcc"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_CC=$ac_cv_prog_ac_ct_CC
-if test -n "$ac_ct_CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
-$as_echo "$ac_ct_CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_CC" = x; then
-    CC=""
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    CC=$ac_ct_CC
-  fi
-else
-  CC="$ac_cv_prog_CC"
-fi
-
-if test -z "$CC"; then
-          if test -n "$ac_tool_prefix"; then
-    # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args.
-set dummy ${ac_tool_prefix}cc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$CC"; then
-  ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_CC="${ac_tool_prefix}cc"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  fi
-fi
-if test -z "$CC"; then
-  # Extract the first word of "cc", so it can be a program name with args.
-set dummy cc; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$CC"; then
-  ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-  ac_prog_rejected=no
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then
-       ac_prog_rejected=yes
-       continue
-     fi
-    ac_cv_prog_CC="cc"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-if test $ac_prog_rejected = yes; then
-  # We found a bogon in the path, so make sure we never use it.
-  set dummy $ac_cv_prog_CC
-  shift
-  if test $# != 0; then
-    # We chose a different compiler from the bogus one.
-    # However, it has the same basename, so the bogon will be chosen
-    # first if we set CC to just the basename; use the full file name.
-    shift
-    ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@"
-  fi
-fi
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$CC"; then
-  if test -n "$ac_tool_prefix"; then
-  for ac_prog in cl.exe
-  do
-    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$CC"; then
-  ac_cv_prog_CC="$CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_CC="$ac_tool_prefix$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-CC=$ac_cv_prog_CC
-if test -n "$CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5
-$as_echo "$CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-    test -n "$CC" && break
-  done
-fi
-if test -z "$CC"; then
-  ac_ct_CC=$CC
-  for ac_prog in cl.exe
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_CC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_CC"; then
-  ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_CC="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_CC=$ac_cv_prog_ac_ct_CC
-if test -n "$ac_ct_CC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5
-$as_echo "$ac_ct_CC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$ac_ct_CC" && break
-done
-
-  if test "x$ac_ct_CC" = x; then
-    CC=""
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    CC=$ac_ct_CC
-  fi
-fi
-
-fi
-
-
-test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "no acceptable C compiler found in \$PATH
-See \`config.log' for more details" "$LINENO" 5; }
-
-# Provide some information about the compiler.
-$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5
-set X $ac_compile
-ac_compiler=$2
-for ac_option in --version -v -V -qversion; do
-  { { ac_try="$ac_compiler $ac_option >&5"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_compiler $ac_option >&5") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    sed '10a\
-... rest of stderr output deleted ...
-         10q' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-  fi
-  rm -f conftest.er1 conftest.err
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-done
-
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-ac_clean_files_save=$ac_clean_files
-ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out"
-# Try to create an executable without -o first, disregard a.out.
-# It will help us diagnose broken compilers, and finding out an intuition
-# of exeext.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5
-$as_echo_n "checking whether the C compiler works... " >&6; }
-ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
-
-# The possible output files:
-ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*"
-
-ac_rmfiles=
-for ac_file in $ac_files
-do
-  case $ac_file in
-    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
-    * ) ac_rmfiles="$ac_rmfiles $ac_file";;
-  esac
-done
-rm -f $ac_rmfiles
-
-if { { ac_try="$ac_link_default"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link_default") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then :
-  # Autoconf-2.13 could set the ac_cv_exeext variable to `no'.
-# So ignore a value of `no', otherwise this would lead to `EXEEXT = no'
-# in a Makefile.  We should not override ac_cv_exeext if it was cached,
-# so that the user can short-circuit this test for compilers unknown to
-# Autoconf.
-for ac_file in $ac_files ''
-do
-  test -f "$ac_file" || continue
-  case $ac_file in
-    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj )
-	;;
-    [ab].out )
-	# We found the default executable, but exeext='' is most
-	# certainly right.
-	break;;
-    *.* )
-	if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no;
-	then :; else
-	   ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
-	fi
-	# We set ac_cv_exeext here because the later test for it is not
-	# safe: cross compilers may not add the suffix if given an `-o'
-	# argument, so we may need to know it at that point already.
-	# Even if this section looks crufty: it has the advantage of
-	# actually working.
-	break;;
-    * )
-	break;;
-  esac
-done
-test "$ac_cv_exeext" = no && ac_cv_exeext=
-
-else
-  ac_file=''
-fi
-if test -z "$ac_file"; then :
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-$as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error 77 "C compiler cannot create executables
-See \`config.log' for more details" "$LINENO" 5; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5
-$as_echo_n "checking for C compiler default output file name... " >&6; }
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5
-$as_echo "$ac_file" >&6; }
-ac_exeext=$ac_cv_exeext
-
-rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out
-ac_clean_files=$ac_clean_files_save
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5
-$as_echo_n "checking for suffix of executables... " >&6; }
-if { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then :
-  # If both `conftest.exe' and `conftest' are `present' (well, observable)
-# catch `conftest.exe'.  For instance with Cygwin, `ls conftest' will
-# work properly (i.e., refer to `conftest.exe'), while it won't with
-# `rm'.
-for ac_file in conftest.exe conftest conftest.*; do
-  test -f "$ac_file" || continue
-  case $ac_file in
-    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;;
-    *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
-	  break;;
-    * ) break;;
-  esac
-done
-else
-  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot compute suffix of executables: cannot compile and link
-See \`config.log' for more details" "$LINENO" 5; }
-fi
-rm -f conftest conftest$ac_cv_exeext
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5
-$as_echo "$ac_cv_exeext" >&6; }
-
-rm -f conftest.$ac_ext
-EXEEXT=$ac_cv_exeext
-ac_exeext=$EXEEXT
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <stdio.h>
-int
-main ()
-{
-FILE *f = fopen ("conftest.out", "w");
- return ferror (f) || fclose (f) != 0;
-
-  ;
-  return 0;
-}
-_ACEOF
-ac_clean_files="$ac_clean_files conftest.out"
-# Check that the compiler produces executables we can run.  If not, either
-# the compiler is broken, or we cross compile.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5
-$as_echo_n "checking whether we are cross compiling... " >&6; }
-if test "$cross_compiling" != yes; then
-  { { ac_try="$ac_link"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_link") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-  if { ac_try='./conftest$ac_cv_exeext'
-  { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; }; then
-    cross_compiling=no
-  else
-    if test "$cross_compiling" = maybe; then
-	cross_compiling=yes
-    else
-	{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot run C compiled programs.
-If you meant to cross compile, use \`--host'.
-See \`config.log' for more details" "$LINENO" 5; }
-    fi
-  fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5
-$as_echo "$cross_compiling" >&6; }
-
-rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out
-ac_clean_files=$ac_clean_files_save
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5
-$as_echo_n "checking for suffix of object files... " >&6; }
-if ${ac_cv_objext+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-rm -f conftest.o conftest.obj
-if { { ac_try="$ac_compile"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_compile") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then :
-  for ac_file in conftest.o conftest.obj conftest.*; do
-  test -f "$ac_file" || continue;
-  case $ac_file in
-    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;;
-    *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'`
-       break;;
-  esac
-done
-else
-  $as_echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "cannot compute suffix of object files: cannot compile
-See \`config.log' for more details" "$LINENO" 5; }
-fi
-rm -f conftest.$ac_cv_objext conftest.$ac_ext
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5
-$as_echo "$ac_cv_objext" >&6; }
-OBJEXT=$ac_cv_objext
-ac_objext=$OBJEXT
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5
-$as_echo_n "checking whether we are using the GNU C compiler... " >&6; }
-if ${ac_cv_c_compiler_gnu+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-#ifndef __GNUC__
-       choke me
-#endif
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_compiler_gnu=yes
-else
-  ac_compiler_gnu=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-ac_cv_c_compiler_gnu=$ac_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5
-$as_echo "$ac_cv_c_compiler_gnu" >&6; }
-if test $ac_compiler_gnu = yes; then
-  GCC=yes
-else
-  GCC=
-fi
-ac_test_CFLAGS=${CFLAGS+set}
-ac_save_CFLAGS=$CFLAGS
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5
-$as_echo_n "checking whether $CC accepts -g... " >&6; }
-if ${ac_cv_prog_cc_g+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_save_c_werror_flag=$ac_c_werror_flag
-   ac_c_werror_flag=yes
-   ac_cv_prog_cc_g=no
-   CFLAGS="-g"
-   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_cv_prog_cc_g=yes
-else
-  CFLAGS=""
-      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-
-else
-  ac_c_werror_flag=$ac_save_c_werror_flag
-	 CFLAGS="-g"
-	 cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_cv_prog_cc_g=yes
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-   ac_c_werror_flag=$ac_save_c_werror_flag
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5
-$as_echo "$ac_cv_prog_cc_g" >&6; }
-if test "$ac_test_CFLAGS" = set; then
-  CFLAGS=$ac_save_CFLAGS
-elif test $ac_cv_prog_cc_g = yes; then
-  if test "$GCC" = yes; then
-    CFLAGS="-g -O2"
-  else
-    CFLAGS="-g"
-  fi
-else
-  if test "$GCC" = yes; then
-    CFLAGS="-O2"
-  else
-    CFLAGS=
-  fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5
-$as_echo_n "checking for $CC option to accept ISO C89... " >&6; }
-if ${ac_cv_prog_cc_c89+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_cv_prog_cc_c89=no
-ac_save_CC=$CC
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <stdarg.h>
-#include <stdio.h>
-#include <sys/types.h>
-#include <sys/stat.h>
-/* Most of the following tests are stolen from RCS 5.7's src/conf.sh.  */
-struct buf { int x; };
-FILE * (*rcsopen) (struct buf *, struct stat *, int);
-static char *e (p, i)
-     char **p;
-     int i;
-{
-  return p[i];
-}
-static char *f (char * (*g) (char **, int), char **p, ...)
-{
-  char *s;
-  va_list v;
-  va_start (v,p);
-  s = g (p, va_arg (v,int));
-  va_end (v);
-  return s;
-}
-
-/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default.  It has
-   function prototypes and stuff, but not '\xHH' hex character constants.
-   These don't provoke an error unfortunately, instead are silently treated
-   as 'x'.  The following induces an error, until -std is added to get
-   proper ANSI mode.  Curiously '\x00'!='x' always comes out true, for an
-   array size at least.  It's necessary to write '\x00'==0 to get something
-   that's true only with -std.  */
-int osf4_cc_array ['\x00' == 0 ? 1 : -1];
-
-/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters
-   inside strings and character constants.  */
-#define FOO(x) 'x'
-int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1];
-
-int test (int i, double x);
-struct s1 {int (*f) (int a);};
-struct s2 {int (*f) (double a);};
-int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int);
-int argc;
-char **argv;
-int
-main ()
-{
-return f (e, argv, 0) != argv[0]  ||  f (e, argv, 1) != argv[1];
-  ;
-  return 0;
-}
-_ACEOF
-for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \
-	-Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__"
-do
-  CC="$ac_save_CC $ac_arg"
-  if ac_fn_c_try_compile "$LINENO"; then :
-  ac_cv_prog_cc_c89=$ac_arg
-fi
-rm -f core conftest.err conftest.$ac_objext
-  test "x$ac_cv_prog_cc_c89" != "xno" && break
-done
-rm -f conftest.$ac_ext
-CC=$ac_save_CC
-
-fi
-# AC_CACHE_VAL
-case "x$ac_cv_prog_cc_c89" in
-  x)
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5
-$as_echo "none needed" >&6; } ;;
-  xno)
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5
-$as_echo "unsupported" >&6; } ;;
-  *)
-    CC="$CC $ac_cv_prog_cc_c89"
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5
-$as_echo "$ac_cv_prog_cc_c89" >&6; } ;;
-esac
-if test "x$ac_cv_prog_cc_c89" != xno; then :
-
-fi
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-DEPDIR="${am__leading_dot}deps"
-
-ac_config_commands="$ac_config_commands depfiles"
-
-
-am_make=${MAKE-make}
-cat > confinc << 'END'
-am__doit:
-	@echo this is the am__doit target
-.PHONY: am__doit
-END
-# If we don't find an include directive, just comment out the code.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5
-$as_echo_n "checking for style of include used by $am_make... " >&6; }
-am__include="#"
-am__quote=
-_am_result=none
-# First try GNU make style include.
-echo "include confinc" > confmf
-# Ignore all kinds of additional output from `make'.
-case `$am_make -s -f confmf 2> /dev/null` in #(
-*the\ am__doit\ target*)
-  am__include=include
-  am__quote=
-  _am_result=GNU
-  ;;
-esac
-# Now try BSD make style include.
-if test "$am__include" = "#"; then
-   echo '.include "confinc"' > confmf
-   case `$am_make -s -f confmf 2> /dev/null` in #(
-   *the\ am__doit\ target*)
-     am__include=.include
-     am__quote="\""
-     _am_result=BSD
-     ;;
-   esac
-fi
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5
-$as_echo "$_am_result" >&6; }
-rm -f confinc confmf
-
-# Check whether --enable-dependency-tracking was given.
-if test "${enable_dependency_tracking+set}" = set; then :
-  enableval=$enable_dependency_tracking;
-fi
-
-if test "x$enable_dependency_tracking" != xno; then
-  am_depcomp="$ac_aux_dir/depcomp"
-  AMDEPBACKSLASH='\'
-fi
- if test "x$enable_dependency_tracking" != xno; then
-  AMDEP_TRUE=
-  AMDEP_FALSE='#'
-else
-  AMDEP_TRUE='#'
-  AMDEP_FALSE=
-fi
-
-
-
-depcc="$CC"   am_compiler_list=
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5
-$as_echo_n "checking dependency style of $depcc... " >&6; }
-if ${am_cv_CC_dependencies_compiler_type+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then
-  # We make a subdir and do the tests there.  Otherwise we can end up
-  # making bogus files that we don't know about and never remove.  For
-  # instance it was reported that on HP-UX the gcc test will end up
-  # making a dummy file named `D' -- because `-MD' means `put the output
-  # in D'.
-  mkdir conftest.dir
-  # Copy depcomp to subdir because otherwise we won't find it if we're
-  # using a relative directory.
-  cp "$am_depcomp" conftest.dir
-  cd conftest.dir
-  # We will build objects and dependencies in a subdirectory because
-  # it helps to detect inapplicable dependency modes.  For instance
-  # both Tru64's cc and ICC support -MD to output dependencies as a
-  # side effect of compilation, but ICC will put the dependencies in
-  # the current directory while Tru64 will put them in the object
-  # directory.
-  mkdir sub
-
-  am_cv_CC_dependencies_compiler_type=none
-  if test "$am_compiler_list" = ""; then
-     am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp`
-  fi
-  am__universal=false
-  case " $depcc " in #(
-     *\ -arch\ *\ -arch\ *) am__universal=true ;;
-     esac
-
-  for depmode in $am_compiler_list; do
-    # Setup a source with many dependencies, because some compilers
-    # like to wrap large dependency lists on column 80 (with \), and
-    # we should not choose a depcomp mode which is confused by this.
-    #
-    # We need to recreate these files for each test, as the compiler may
-    # overwrite some of them when testing with obscure command lines.
-    # This happens at least with the AIX C compiler.
-    : > sub/conftest.c
-    for i in 1 2 3 4 5 6; do
-      echo '#include "conftst'$i'.h"' >> sub/conftest.c
-      # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with
-      # Solaris 8's {/usr,}/bin/sh.
-      touch sub/conftst$i.h
-    done
-    echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf
-
-    # We check with `-c' and `-o' for the sake of the "dashmstdout"
-    # mode.  It turns out that the SunPro C++ compiler does not properly
-    # handle `-M -o', and we need to detect this.  Also, some Intel
-    # versions had trouble with output in subdirs
-    am__obj=sub/conftest.${OBJEXT-o}
-    am__minus_obj="-o $am__obj"
-    case $depmode in
-    gcc)
-      # This depmode causes a compiler race in universal mode.
-      test "$am__universal" = false || continue
-      ;;
-    nosideeffect)
-      # after this tag, mechanisms are not by side-effect, so they'll
-      # only be used when explicitly requested
-      if test "x$enable_dependency_tracking" = xyes; then
-	continue
-      else
-	break
-      fi
-      ;;
-    msvisualcpp | msvcmsys)
-      # This compiler won't grok `-c -o', but also, the minuso test has
-      # not run yet.  These depmodes are late enough in the game, and
-      # so weak that their functioning should not be impacted.
-      am__obj=conftest.${OBJEXT-o}
-      am__minus_obj=
-      ;;
-    none) break ;;
-    esac
-    if depmode=$depmode \
-       source=sub/conftest.c object=$am__obj \
-       depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \
-       $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \
-         >/dev/null 2>conftest.err &&
-       grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 &&
-       grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 &&
-       grep $am__obj sub/conftest.Po > /dev/null 2>&1 &&
-       ${MAKE-make} -s -f confmf > /dev/null 2>&1; then
-      # icc doesn't choke on unknown options, it will just issue warnings
-      # or remarks (even with -Werror).  So we grep stderr for any message
-      # that says an option was ignored or not supported.
-      # When given -MP, icc 7.0 and 7.1 complain thusly:
-      #   icc: Command line warning: ignoring option '-M'; no argument required
-      # The diagnosis changed in icc 8.0:
-      #   icc: Command line remark: option '-MP' not supported
-      if (grep 'ignoring option' conftest.err ||
-          grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else
-        am_cv_CC_dependencies_compiler_type=$depmode
-        break
-      fi
-    fi
-  done
-
-  cd ..
-  rm -rf conftest.dir
-else
-  am_cv_CC_dependencies_compiler_type=none
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5
-$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; }
-CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type
-
- if
-  test "x$enable_dependency_tracking" != xno \
-  && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then
-  am__fastdepCC_TRUE=
-  am__fastdepCC_FALSE='#'
-else
-  am__fastdepCC_TRUE='#'
-  am__fastdepCC_FALSE=
-fi
-
-
-if test "x$CC" != xcc; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC and cc understand -c and -o together" >&5
-$as_echo_n "checking whether $CC and cc understand -c and -o together... " >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether cc understands -c and -o together" >&5
-$as_echo_n "checking whether cc understands -c and -o together... " >&6; }
-fi
-set dummy $CC; ac_cc=`$as_echo "$2" |
-		      sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'`
-if eval \${ac_cv_prog_cc_${ac_cc}_c_o+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-# Make sure it works both with $CC and with simple cc.
-# We do the test twice because some compilers refuse to overwrite an
-# existing .o file with -o, though they will create one.
-ac_try='$CC -c conftest.$ac_ext -o conftest2.$ac_objext >&5'
-rm -f conftest2.*
-if { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } &&
-   test -f conftest2.$ac_objext && { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; };
-then
-  eval ac_cv_prog_cc_${ac_cc}_c_o=yes
-  if test "x$CC" != xcc; then
-    # Test first that cc exists at all.
-    if { ac_try='cc -c conftest.$ac_ext >&5'
-  { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; }; then
-      ac_try='cc -c conftest.$ac_ext -o conftest2.$ac_objext >&5'
-      rm -f conftest2.*
-      if { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } &&
-	 test -f conftest2.$ac_objext && { { case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_try") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; };
-      then
-	# cc works too.
-	:
-      else
-	# cc exists but doesn't like -o.
-	eval ac_cv_prog_cc_${ac_cc}_c_o=no
-      fi
-    fi
-  fi
-else
-  eval ac_cv_prog_cc_${ac_cc}_c_o=no
-fi
-rm -f core conftest*
-
-fi
-if eval test \$ac_cv_prog_cc_${ac_cc}_c_o = yes; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-
-$as_echo "#define NO_MINUS_C_MINUS_O 1" >>confdefs.h
-
-fi
-
-# FIXME: we rely on the cache variable name because
-# there is no other way.
-set dummy $CC
-am_cc=`echo $2 | sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'`
-eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o
-if test "$am_t" != yes; then
-   # Losing compiler, so override with the script.
-   # FIXME: It is wrong to rewrite CC.
-   # But if we don't then we get into trouble of one sort or another.
-   # A longer-term fix would be to have automake use am__CC in this case,
-   # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)"
-   CC="$am_aux_dir/compile $CC"
-fi
-
-
-#if test "$ac_cv_prog_cc_${ac_cc}_c_o" = yes; then
-#    case "$AM_CFLAGS" in
-#	"-g") ;;
-#	*) AM_CFLAGS="${AM_CFLAGS:+$AM_CFLAGS }-g";;
-#    esac
-#fi
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5
-$as_echo_n "checking how to run the C preprocessor... " >&6; }
-# On Suns, sometimes $CPP names a directory.
-if test -n "$CPP" && test -d "$CPP"; then
-  CPP=
-fi
-if test -z "$CPP"; then
-  if ${ac_cv_prog_CPP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-      # Double quotes because CPP needs to be expanded
-    for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp"
-    do
-      ac_preproc_ok=false
-for ac_c_preproc_warn_flag in '' yes
-do
-  # Use a header file that comes with gcc, so configuring glibc
-  # with a fresh cross-compiler works.
-  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
-  # <limits.h> exists even on freestanding compilers.
-  # On the NeXT, cc -E runs the code through the compiler's parser,
-  # not just through cpp. "Syntax error" is here to catch this case.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
-		     Syntax error
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-
-else
-  # Broken: fails on valid input.
-continue
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-  # OK, works on sane cases.  Now check whether nonexistent headers
-  # can be detected and how.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <ac_nonexistent.h>
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-  # Broken: success on invalid input.
-continue
-else
-  # Passes both tests.
-ac_preproc_ok=:
-break
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-done
-# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.i conftest.err conftest.$ac_ext
-if $ac_preproc_ok; then :
-  break
-fi
-
-    done
-    ac_cv_prog_CPP=$CPP
-
-fi
-  CPP=$ac_cv_prog_CPP
-else
-  ac_cv_prog_CPP=$CPP
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5
-$as_echo "$CPP" >&6; }
-ac_preproc_ok=false
-for ac_c_preproc_warn_flag in '' yes
-do
-  # Use a header file that comes with gcc, so configuring glibc
-  # with a fresh cross-compiler works.
-  # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since
-  # <limits.h> exists even on freestanding compilers.
-  # On the NeXT, cc -E runs the code through the compiler's parser,
-  # not just through cpp. "Syntax error" is here to catch this case.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#ifdef __STDC__
-# include <limits.h>
-#else
-# include <assert.h>
-#endif
-		     Syntax error
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-
-else
-  # Broken: fails on valid input.
-continue
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-  # OK, works on sane cases.  Now check whether nonexistent headers
-  # can be detected and how.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <ac_nonexistent.h>
-_ACEOF
-if ac_fn_c_try_cpp "$LINENO"; then :
-  # Broken: success on invalid input.
-continue
-else
-  # Passes both tests.
-ac_preproc_ok=:
-break
-fi
-rm -f conftest.err conftest.i conftest.$ac_ext
-
-done
-# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
-rm -f conftest.i conftest.err conftest.$ac_ext
-if $ac_preproc_ok; then :
-
-else
-  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
-$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
-as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
-See \`config.log' for more details" "$LINENO" 5; }
-fi
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5
-$as_echo_n "checking whether ln -s works... " >&6; }
-LN_S=$as_ln_s
-if test "$LN_S" = "ln -s"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5
-$as_echo "no, using $LN_S" >&6; }
-fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5
-$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; }
-set x ${MAKE-make}
-ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'`
-if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat >conftest.make <<\_ACEOF
-SHELL = /bin/sh
-all:
-	@echo '@@@%%%=$(MAKE)=@@@%%%'
-_ACEOF
-# GNU make sometimes prints "make[1]: Entering ...", which would confuse us.
-case `${MAKE-make} -f conftest.make 2>/dev/null` in
-  *@@@%%%=?*=@@@%%%*)
-    eval ac_cv_prog_make_${ac_make}_set=yes;;
-  *)
-    eval ac_cv_prog_make_${ac_make}_set=no;;
-esac
-rm -f conftest.make
-fi
-if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-  SET_MAKE=
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-  SET_MAKE="MAKE=${MAKE-make}"
-fi
-
-for ac_prog in 'bison -y' byacc
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_YACC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$YACC"; then
-  ac_cv_prog_YACC="$YACC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_YACC="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-YACC=$ac_cv_prog_YACC
-if test -n "$YACC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $YACC" >&5
-$as_echo "$YACC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$YACC" && break
-done
-test -n "$YACC" || YACC="yacc"
-
-
-for ac_prog in flex lex
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_LEX+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$LEX"; then
-  ac_cv_prog_LEX="$LEX" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_LEX="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-LEX=$ac_cv_prog_LEX
-if test -n "$LEX"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LEX" >&5
-$as_echo "$LEX" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$LEX" && break
-done
-test -n "$LEX" || LEX=":"
-
-if test "x$LEX" != "x:"; then
-  cat >conftest.l <<_ACEOF
-%%
-a { ECHO; }
-b { REJECT; }
-c { yymore (); }
-d { yyless (1); }
-e { yyless (input () != 0); }
-f { unput (yytext[0]); }
-. { BEGIN INITIAL; }
-%%
-#ifdef YYTEXT_POINTER
-extern char *yytext;
-#endif
-int
-main (void)
-{
-  return ! yylex () + ! yywrap ();
-}
-_ACEOF
-{ { ac_try="$LEX conftest.l"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$LEX conftest.l") 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking lex output file root" >&5
-$as_echo_n "checking lex output file root... " >&6; }
-if ${ac_cv_prog_lex_root+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-
-# UVCDAT patch (DL) This fails on some apple Travis CI builds, and isn't used by this package.
-# Just disable the file test.
-# This:
-  $as_echo_n "(skipped) " >&6
-# Replaces this:
-#if test -f lex.yy.c; then
-#  ac_cv_prog_lex_root=lex.yy
-#elif test -f lexyy.c; then
-#  ac_cv_prog_lex_root=lexyy
-#else
-#  as_fn_error $? "cannot find output from $LEX; giving up" "$LINENO" 5
-#fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_lex_root" >&5
-$as_echo "$ac_cv_prog_lex_root" >&6; }
-LEX_OUTPUT_ROOT=$ac_cv_prog_lex_root
-
-if test -z "${LEXLIB+set}"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking lex library" >&5
-$as_echo_n "checking lex library... " >&6; }
-if ${ac_cv_lib_lex+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-
-    ac_save_LIBS=$LIBS
-    ac_cv_lib_lex='none needed'
-    for ac_lib in '' -lfl -ll; do
-      LIBS="$ac_lib $ac_save_LIBS"
-      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-`cat $LEX_OUTPUT_ROOT.c`
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_lex=$ac_lib
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-      test "$ac_cv_lib_lex" != 'none needed' && break
-    done
-    LIBS=$ac_save_LIBS
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_lex" >&5
-$as_echo "$ac_cv_lib_lex" >&6; }
-  test "$ac_cv_lib_lex" != 'none needed' && LEXLIB=$ac_cv_lib_lex
-fi
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether yytext is a pointer" >&5
-$as_echo_n "checking whether yytext is a pointer... " >&6; }
-if ${ac_cv_prog_lex_yytext_pointer+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  # POSIX says lex can declare yytext either as a pointer or an array; the
-# default is implementation-dependent.  Figure out which it is, since
-# not all implementations provide the %pointer and %array declarations.
-ac_cv_prog_lex_yytext_pointer=no
-ac_save_LIBS=$LIBS
-LIBS="$LEXLIB $ac_save_LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-  #define YYTEXT_POINTER 1
-`cat $LEX_OUTPUT_ROOT.c`
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_prog_lex_yytext_pointer=yes
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_save_LIBS
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_lex_yytext_pointer" >&5
-$as_echo "$ac_cv_prog_lex_yytext_pointer" >&6; }
-if test $ac_cv_prog_lex_yytext_pointer = yes; then
-
-$as_echo "#define YYTEXT_POINTER 1" >>confdefs.h
-
-fi
-rm -f conftest.l $LEX_OUTPUT_ROOT.c
-
-fi
-if test "$LEX" = :; then
-  LEX=${am_missing_run}flex
-fi
-ac_ext=${ac_fc_srcext-f}
-ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5'
-ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_fc_compiler_gnu
-if test -n "$ac_tool_prefix"; then
-  for ac_prog in gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn xlf90 f90 pgf90 pghpf epcf90 g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77
-  do
-    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$FC"; then
-  ac_cv_prog_FC="$FC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_FC="$ac_tool_prefix$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-FC=$ac_cv_prog_FC
-if test -n "$FC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $FC" >&5
-$as_echo "$FC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-    test -n "$FC" && break
-  done
-fi
-if test -z "$FC"; then
-  ac_ct_FC=$FC
-  for ac_prog in gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn xlf90 f90 pgf90 pghpf epcf90 g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_FC"; then
-  ac_cv_prog_ac_ct_FC="$ac_ct_FC" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_FC="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_FC=$ac_cv_prog_ac_ct_FC
-if test -n "$ac_ct_FC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_FC" >&5
-$as_echo "$ac_ct_FC" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$ac_ct_FC" && break
-done
-
-  if test "x$ac_ct_FC" = x; then
-    FC=""
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    FC=$ac_ct_FC
-  fi
-fi
-
-
-# Provide some information about the compiler.
-$as_echo "$as_me:${as_lineno-$LINENO}: checking for Fortran compiler version" >&5
-set X $ac_compile
-ac_compiler=$2
-for ac_option in --version -v -V -qversion; do
-  { { ac_try="$ac_compiler $ac_option >&5"
-case "(($ac_try" in
-  *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;;
-  *) ac_try_echo=$ac_try;;
-esac
-eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\""
-$as_echo "$ac_try_echo"; } >&5
-  (eval "$ac_compiler $ac_option >&5") 2>conftest.err
-  ac_status=$?
-  if test -s conftest.err; then
-    sed '10a\
-... rest of stderr output deleted ...
-         10q' conftest.err >conftest.er1
-    cat conftest.er1 >&5
-  fi
-  rm -f conftest.er1 conftest.err
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-done
-rm -f a.out
-
-# If we don't use `.F' as extension, the preprocessor is not run on the
-# input file.  (Note that this only needs to work for GNU compilers.)
-ac_save_ext=$ac_ext
-ac_ext=F
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU Fortran compiler" >&5
-$as_echo_n "checking whether we are using the GNU Fortran compiler... " >&6; }
-if ${ac_cv_fc_compiler_gnu+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat > conftest.$ac_ext <<_ACEOF
-      program main
-#ifndef __GNUC__
-       choke me
-#endif
-
-      end
-_ACEOF
-if ac_fn_fc_try_compile "$LINENO"; then :
-  ac_compiler_gnu=yes
-else
-  ac_compiler_gnu=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-ac_cv_fc_compiler_gnu=$ac_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_fc_compiler_gnu" >&5
-$as_echo "$ac_cv_fc_compiler_gnu" >&6; }
-ac_ext=$ac_save_ext
-ac_test_FCFLAGS=${FCFLAGS+set}
-ac_save_FCFLAGS=$FCFLAGS
-FCFLAGS=
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $FC accepts -g" >&5
-$as_echo_n "checking whether $FC accepts -g... " >&6; }
-if ${ac_cv_prog_fc_g+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  FCFLAGS=-g
-cat > conftest.$ac_ext <<_ACEOF
-      program main
-
-      end
-_ACEOF
-if ac_fn_fc_try_compile "$LINENO"; then :
-  ac_cv_prog_fc_g=yes
-else
-  ac_cv_prog_fc_g=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_fc_g" >&5
-$as_echo "$ac_cv_prog_fc_g" >&6; }
-if test "$ac_test_FCFLAGS" = set; then
-  FCFLAGS=$ac_save_FCFLAGS
-elif test $ac_cv_prog_fc_g = yes; then
-  if test "x$ac_cv_fc_compiler_gnu" = xyes; then
-    FCFLAGS="-g -O2"
-  else
-    FCFLAGS="-g"
-  fi
-else
-  if test "x$ac_cv_fc_compiler_gnu" = xyes; then
-    FCFLAGS="-O2"
-  else
-    FCFLAGS=
-  fi
-fi
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-# Checks for libraries.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing dirname" >&5
-$as_echo_n "checking for library containing dirname... " >&6; }
-if ${ac_cv_search_dirname+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_func_search_save_LIBS=$LIBS
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dirname ();
-int
-main ()
-{
-return dirname ();
-  ;
-  return 0;
-}
-_ACEOF
-for ac_lib in '' gen; do
-  if test -z "$ac_lib"; then
-    ac_res="none required"
-  else
-    ac_res=-l$ac_lib
-    LIBS="-l$ac_lib  $ac_func_search_save_LIBS"
-  fi
-  if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_search_dirname=$ac_res
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext
-  if ${ac_cv_search_dirname+:} false; then :
-  break
-fi
-done
-if ${ac_cv_search_dirname+:} false; then :
-
-else
-  ac_cv_search_dirname=no
-fi
-rm conftest.$ac_ext
-LIBS=$ac_func_search_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_dirname" >&5
-$as_echo "$ac_cv_search_dirname" >&6; }
-ac_res=$ac_cv_search_dirname
-if test "$ac_res" != no; then :
-  test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
-
-else
-  as_fn_error $? "cannot find function dirname" "$LINENO" 5
-fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing log10" >&5
-$as_echo_n "checking for library containing log10... " >&6; }
-if ${ac_cv_search_log10+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_func_search_save_LIBS=$LIBS
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char log10 ();
-int
-main ()
-{
-return log10 ();
-  ;
-  return 0;
-}
-_ACEOF
-for ac_lib in '' m; do
-  if test -z "$ac_lib"; then
-    ac_res="none required"
-  else
-    ac_res=-l$ac_lib
-    LIBS="-l$ac_lib  $ac_func_search_save_LIBS"
-  fi
-  if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_search_log10=$ac_res
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext
-  if ${ac_cv_search_log10+:} false; then :
-  break
-fi
-done
-if ${ac_cv_search_log10+:} false; then :
-
-else
-  ac_cv_search_log10=no
-fi
-rm conftest.$ac_ext
-LIBS=$ac_func_search_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_log10" >&5
-$as_echo "$ac_cv_search_log10" >&6; }
-ac_res=$ac_cv_search_log10
-if test "$ac_res" != no; then :
-  test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
-
-else
-  as_fn_error $? "cannot find function log10" "$LINENO" 5
-fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing XML_StopParser" >&5
-$as_echo_n "checking for library containing XML_StopParser... " >&6; }
-if ${ac_cv_search_XML_StopParser+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_func_search_save_LIBS=$LIBS
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char XML_StopParser ();
-int
-main ()
-{
-return XML_StopParser ();
-  ;
-  return 0;
-}
-_ACEOF
-for ac_lib in '' expat; do
-  if test -z "$ac_lib"; then
-    ac_res="none required"
-  else
-    ac_res=-l$ac_lib
-    LIBS="-l$ac_lib  $ac_func_search_save_LIBS"
-  fi
-  if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_search_XML_StopParser=$ac_res
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext
-  if ${ac_cv_search_XML_StopParser+:} false; then :
-  break
-fi
-done
-if ${ac_cv_search_XML_StopParser+:} false; then :
-
-else
-  ac_cv_search_XML_StopParser=no
-fi
-rm conftest.$ac_ext
-LIBS=$ac_func_search_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_XML_StopParser" >&5
-$as_echo "$ac_cv_search_XML_StopParser" >&6; }
-ac_res=$ac_cv_search_XML_StopParser
-if test "$ac_res" != no; then :
-  test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
-
-else
-  as_fn_error $? "cannot find EXPAT function XML_StopParser" "$LINENO" 5
-fi
-
-
-# Checks for header files.
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5
-$as_echo_n "checking for grep that handles long lines and -e... " >&6; }
-if ${ac_cv_path_GREP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -z "$GREP"; then
-  ac_path_GREP_found=false
-  # Loop through the user's path and test for each of PROGNAME-LIST
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_prog in grep ggrep; do
-    for ac_exec_ext in '' $ac_executable_extensions; do
-      ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext"
-      { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue
-# Check for GNU ac_path_GREP and select it if it is found.
-  # Check for GNU $ac_path_GREP
-case `"$ac_path_GREP" --version 2>&1` in
-*GNU*)
-  ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;;
-*)
-  ac_count=0
-  $as_echo_n 0123456789 >"conftest.in"
-  while :
-  do
-    cat "conftest.in" "conftest.in" >"conftest.tmp"
-    mv "conftest.tmp" "conftest.in"
-    cp "conftest.in" "conftest.nl"
-    $as_echo 'GREP' >> "conftest.nl"
-    "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break
-    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
-    as_fn_arith $ac_count + 1 && ac_count=$as_val
-    if test $ac_count -gt ${ac_path_GREP_max-0}; then
-      # Best one so far, save it but keep looking for a better one
-      ac_cv_path_GREP="$ac_path_GREP"
-      ac_path_GREP_max=$ac_count
-    fi
-    # 10*(2^10) chars as input seems more than enough
-    test $ac_count -gt 10 && break
-  done
-  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
-      $ac_path_GREP_found && break 3
-    done
-  done
-  done
-IFS=$as_save_IFS
-  if test -z "$ac_cv_path_GREP"; then
-    as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
-  fi
-else
-  ac_cv_path_GREP=$GREP
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5
-$as_echo "$ac_cv_path_GREP" >&6; }
- GREP="$ac_cv_path_GREP"
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5
-$as_echo_n "checking for egrep... " >&6; }
-if ${ac_cv_path_EGREP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if echo a | $GREP -E '(a|b)' >/dev/null 2>&1
-   then ac_cv_path_EGREP="$GREP -E"
-   else
-     if test -z "$EGREP"; then
-  ac_path_EGREP_found=false
-  # Loop through the user's path and test for each of PROGNAME-LIST
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_prog in egrep; do
-    for ac_exec_ext in '' $ac_executable_extensions; do
-      ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext"
-      { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue
-# Check for GNU ac_path_EGREP and select it if it is found.
-  # Check for GNU $ac_path_EGREP
-case `"$ac_path_EGREP" --version 2>&1` in
-*GNU*)
-  ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;;
-*)
-  ac_count=0
-  $as_echo_n 0123456789 >"conftest.in"
-  while :
-  do
-    cat "conftest.in" "conftest.in" >"conftest.tmp"
-    mv "conftest.tmp" "conftest.in"
-    cp "conftest.in" "conftest.nl"
-    $as_echo 'EGREP' >> "conftest.nl"
-    "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break
-    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
-    as_fn_arith $ac_count + 1 && ac_count=$as_val
-    if test $ac_count -gt ${ac_path_EGREP_max-0}; then
-      # Best one so far, save it but keep looking for a better one
-      ac_cv_path_EGREP="$ac_path_EGREP"
-      ac_path_EGREP_max=$ac_count
-    fi
-    # 10*(2^10) chars as input seems more than enough
-    test $ac_count -gt 10 && break
-  done
-  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
-      $ac_path_EGREP_found && break 3
-    done
-  done
-  done
-IFS=$as_save_IFS
-  if test -z "$ac_cv_path_EGREP"; then
-    as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
-  fi
-else
-  ac_cv_path_EGREP=$EGREP
-fi
-
-   fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5
-$as_echo "$ac_cv_path_EGREP" >&6; }
- EGREP="$ac_cv_path_EGREP"
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
-$as_echo_n "checking for ANSI C header files... " >&6; }
-if ${ac_cv_header_stdc+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <stdlib.h>
-#include <stdarg.h>
-#include <string.h>
-#include <float.h>
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_cv_header_stdc=yes
-else
-  ac_cv_header_stdc=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-
-if test $ac_cv_header_stdc = yes; then
-  # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <string.h>
-
-_ACEOF
-if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-  $EGREP "memchr" >/dev/null 2>&1; then :
-
-else
-  ac_cv_header_stdc=no
-fi
-rm -f conftest*
-
-fi
-
-if test $ac_cv_header_stdc = yes; then
-  # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <stdlib.h>
-
-_ACEOF
-if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
-  $EGREP "free" >/dev/null 2>&1; then :
-
-else
-  ac_cv_header_stdc=no
-fi
-rm -f conftest*
-
-fi
-
-if test $ac_cv_header_stdc = yes; then
-  # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
-  if test "$cross_compiling" = yes; then :
-  :
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-#include <ctype.h>
-#include <stdlib.h>
-#if ((' ' & 0x0FF) == 0x020)
-# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
-# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
-#else
-# define ISLOWER(c) \
-		   (('a' <= (c) && (c) <= 'i') \
-		     || ('j' <= (c) && (c) <= 'r') \
-		     || ('s' <= (c) && (c) <= 'z'))
-# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c))
-#endif
-
-#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
-int
-main ()
-{
-  int i;
-  for (i = 0; i < 256; i++)
-    if (XOR (islower (i), ISLOWER (i))
-	|| toupper (i) != TOUPPER (i))
-      return 2;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_run "$LINENO"; then :
-
-else
-  ac_cv_header_stdc=no
-fi
-rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
-  conftest.$ac_objext conftest.beam conftest.$ac_ext
-fi
-
-fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5
-$as_echo "$ac_cv_header_stdc" >&6; }
-if test $ac_cv_header_stdc = yes; then
-
-$as_echo "#define STDC_HEADERS 1" >>confdefs.h
-
-fi
-
-# On IRIX 5.3, sys/types and inttypes.h are conflicting.
-for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \
-		  inttypes.h stdint.h unistd.h
-do :
-  as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
-ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default
-"
-if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
-  cat >>confdefs.h <<_ACEOF
-#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
-_ACEOF
-
-fi
-
-done
-
-
-for ac_header in float.h inttypes.h stddef.h stdlib.h string.h strings.h
-do :
-  as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh`
-ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default"
-if eval test \"x\$"$as_ac_Header"\" = x"yes"; then :
-  cat >>confdefs.h <<_ACEOF
-#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1
-_ACEOF
-
-fi
-
-done
-
-
-# Checks for the CUNIT unit-testing package
-LD_CUNIT=
-{ $as_echo "$as_me:${as_lineno-$LINENO}: Checking for the CUNIT unit-testing package." >&5
-$as_echo "$as_me: Checking for the CUNIT unit-testing package." >&6;}
-ac_fn_c_check_header_mongrel "$LINENO" "CUnit/CUnit.h" "ac_cv_header_CUnit_CUnit_h" "$ac_includes_default"
-if test "x$ac_cv_header_CUnit_CUnit_h" = xyes; then :
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CU_initialize_registry in -lcunit" >&5
-$as_echo_n "checking for CU_initialize_registry in -lcunit... " >&6; }
-if ${ac_cv_lib_cunit_CU_initialize_registry+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-lcunit  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char CU_initialize_registry ();
-int
-main ()
-{
-return CU_initialize_registry ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_cunit_CU_initialize_registry=yes
-else
-  ac_cv_lib_cunit_CU_initialize_registry=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_cunit_CU_initialize_registry" >&5
-$as_echo "$ac_cv_lib_cunit_CU_initialize_registry" >&6; }
-if test "x$ac_cv_lib_cunit_CU_initialize_registry" = xyes; then :
-  LD_CUNIT=-lcunit
-fi
-
-fi
-
-
-
-if test "$LD_CUNIT"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: CUNIT found.  Enabling unit-tests." >&5
-$as_echo "$as_me: CUNIT found.  Enabling unit-tests." >&6;}
-else
-    { $as_echo "$as_me:${as_lineno-$LINENO}: CUNIT not found.  Disabling unit-tests." >&5
-$as_echo "$as_me: CUNIT not found.  Disabling unit-tests." >&6;}
-fi
- if test "$LD_CUNIT"; then
-  HAVE_CUNIT_TRUE=
-  HAVE_CUNIT_FALSE='#'
-else
-  HAVE_CUNIT_TRUE='#'
-  HAVE_CUNIT_FALSE=
-fi
-
-
-# Checks for typedefs, structures, and compiler characteristics.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5
-$as_echo_n "checking for an ANSI C-conforming const... " >&6; }
-if ${ac_cv_c_const+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-/* FIXME: Include the comments suggested by Paul. */
-#ifndef __cplusplus
-  /* Ultrix mips cc rejects this.  */
-  typedef int charset[2];
-  const charset cs;
-  /* SunOS 4.1.1 cc rejects this.  */
-  char const *const *pcpcc;
-  char **ppc;
-  /* NEC SVR4.0.2 mips cc rejects this.  */
-  struct point {int x, y;};
-  static struct point const zero = {0,0};
-  /* AIX XL C 1.02.0.0 rejects this.
-     It does not let you subtract one const X* pointer from another in
-     an arm of an if-expression whose if-part is not a constant
-     expression */
-  const char *g = "string";
-  pcpcc = &g + (g ? g-g : 0);
-  /* HPUX 7.0 cc rejects these. */
-  ++pcpcc;
-  ppc = (char**) pcpcc;
-  pcpcc = (char const *const *) ppc;
-  { /* SCO 3.2v4 cc rejects this.  */
-    char *t;
-    char const *s = 0 ? (char *) 0 : (char const *) 0;
-
-    *t++ = 0;
-    if (s) return 0;
-  }
-  { /* Someone thinks the Sun supposedly-ANSI compiler will reject this.  */
-    int x[] = {25, 17};
-    const int *foo = &x[0];
-    ++foo;
-  }
-  { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */
-    typedef const int *iptr;
-    iptr p = 0;
-    ++p;
-  }
-  { /* AIX XL C 1.02.0.0 rejects this saying
-       "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */
-    struct s { int j; const int *ap[3]; };
-    struct s *b; b->j = 5;
-  }
-  { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */
-    const int foo = 10;
-    if (!foo) return 0;
-  }
-  return !cs[0] && !zero.x;
-#endif
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  ac_cv_c_const=yes
-else
-  ac_cv_c_const=no
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5
-$as_echo "$ac_cv_c_const" >&6; }
-if test $ac_cv_c_const = no; then
-
-$as_echo "#define const /**/" >>confdefs.h
-
-fi
-
-ac_fn_c_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default"
-if test "x$ac_cv_type_size_t" = xyes; then :
-
-else
-
-cat >>confdefs.h <<_ACEOF
-#define size_t unsigned int
-_ACEOF
-
-fi
-
-
-# Checks for library functions.
-for ac_func in floor memmove memset modf pow strcasecmp strdup strpbrk
-do :
-  as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
-ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
-if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
-  cat >>confdefs.h <<_ACEOF
-#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
-_ACEOF
-
-fi
-done
-
-
-case `pwd` in
-  *\ * | *\	*)
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5
-$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;;
-esac
-
-
-
-macro_version='2.4.2'
-macro_revision='1.3337'
-
-
-
-
-
-
-
-
-
-
-
-
-
-ltmain="$ac_aux_dir/ltmain.sh"
-
-# Make sure we can run config.sub.
-$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 ||
-  as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5
-$as_echo_n "checking build system type... " >&6; }
-if ${ac_cv_build+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_build_alias=$build_alias
-test "x$ac_build_alias" = x &&
-  ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"`
-test "x$ac_build_alias" = x &&
-  as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5
-ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` ||
-  as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5
-$as_echo "$ac_cv_build" >&6; }
-case $ac_cv_build in
-*-*-*) ;;
-*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;;
-esac
-build=$ac_cv_build
-ac_save_IFS=$IFS; IFS='-'
-set x $ac_cv_build
-shift
-build_cpu=$1
-build_vendor=$2
-shift; shift
-# Remember, the first character of IFS is used to create $*,
-# except with old shells:
-build_os=$*
-IFS=$ac_save_IFS
-case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5
-$as_echo_n "checking host system type... " >&6; }
-if ${ac_cv_host+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test "x$host_alias" = x; then
-  ac_cv_host=$ac_cv_build
-else
-  ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` ||
-    as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5
-$as_echo "$ac_cv_host" >&6; }
-case $ac_cv_host in
-*-*-*) ;;
-*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;;
-esac
-host=$ac_cv_host
-ac_save_IFS=$IFS; IFS='-'
-set x $ac_cv_host
-shift
-host_cpu=$1
-host_vendor=$2
-shift; shift
-# Remember, the first character of IFS is used to create $*,
-# except with old shells:
-host_os=$*
-IFS=$ac_save_IFS
-case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac
-
-
-# Backslashify metacharacters that are still active within
-# double-quoted strings.
-sed_quote_subst='s/\(["`$\\]\)/\\\1/g'
-
-# Same as above, but do not quote variable references.
-double_quote_subst='s/\(["`\\]\)/\\\1/g'
-
-# Sed substitution to delay expansion of an escaped shell variable in a
-# double_quote_subst'ed string.
-delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g'
-
-# Sed substitution to delay expansion of an escaped single quote.
-delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g'
-
-# Sed substitution to avoid accidental globbing in evaled expressions
-no_glob_subst='s/\*/\\\*/g'
-
-ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO
-ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5
-$as_echo_n "checking how to print strings... " >&6; }
-# Test print first, because it will be a builtin if present.
-if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \
-   test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then
-  ECHO='print -r --'
-elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then
-  ECHO='printf %s\n'
-else
-  # Use this function as a fallback that always works.
-  func_fallback_echo ()
-  {
-    eval 'cat <<_LTECHO_EOF
-$1
-_LTECHO_EOF'
-  }
-  ECHO='func_fallback_echo'
-fi
-
-# func_echo_all arg...
-# Invoke $ECHO with all args, space-separated.
-func_echo_all ()
-{
-    $ECHO ""
-}
-
-case "$ECHO" in
-  printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5
-$as_echo "printf" >&6; } ;;
-  print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5
-$as_echo "print -r" >&6; } ;;
-  *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5
-$as_echo "cat" >&6; } ;;
-esac
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5
-$as_echo_n "checking for a sed that does not truncate output... " >&6; }
-if ${ac_cv_path_SED+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-            ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/
-     for ac_i in 1 2 3 4 5 6 7; do
-       ac_script="$ac_script$as_nl$ac_script"
-     done
-     echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed
-     { ac_script=; unset ac_script;}
-     if test -z "$SED"; then
-  ac_path_SED_found=false
-  # Loop through the user's path and test for each of PROGNAME-LIST
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_prog in sed gsed; do
-    for ac_exec_ext in '' $ac_executable_extensions; do
-      ac_path_SED="$as_dir/$ac_prog$ac_exec_ext"
-      { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue
-# Check for GNU ac_path_SED and select it if it is found.
-  # Check for GNU $ac_path_SED
-case `"$ac_path_SED" --version 2>&1` in
-*GNU*)
-  ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;;
-*)
-  ac_count=0
-  $as_echo_n 0123456789 >"conftest.in"
-  while :
-  do
-    cat "conftest.in" "conftest.in" >"conftest.tmp"
-    mv "conftest.tmp" "conftest.in"
-    cp "conftest.in" "conftest.nl"
-    $as_echo '' >> "conftest.nl"
-    "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break
-    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
-    as_fn_arith $ac_count + 1 && ac_count=$as_val
-    if test $ac_count -gt ${ac_path_SED_max-0}; then
-      # Best one so far, save it but keep looking for a better one
-      ac_cv_path_SED="$ac_path_SED"
-      ac_path_SED_max=$ac_count
-    fi
-    # 10*(2^10) chars as input seems more than enough
-    test $ac_count -gt 10 && break
-  done
-  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
-      $ac_path_SED_found && break 3
-    done
-  done
-  done
-IFS=$as_save_IFS
-  if test -z "$ac_cv_path_SED"; then
-    as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5
-  fi
-else
-  ac_cv_path_SED=$SED
-fi
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5
-$as_echo "$ac_cv_path_SED" >&6; }
- SED="$ac_cv_path_SED"
-  rm -f conftest.sed
-
-test -z "$SED" && SED=sed
-Xsed="$SED -e 1s/^X//"
-
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5
-$as_echo_n "checking for fgrep... " >&6; }
-if ${ac_cv_path_FGREP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1
-   then ac_cv_path_FGREP="$GREP -F"
-   else
-     if test -z "$FGREP"; then
-  ac_path_FGREP_found=false
-  # Loop through the user's path and test for each of PROGNAME-LIST
-  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_prog in fgrep; do
-    for ac_exec_ext in '' $ac_executable_extensions; do
-      ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext"
-      { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue
-# Check for GNU ac_path_FGREP and select it if it is found.
-  # Check for GNU $ac_path_FGREP
-case `"$ac_path_FGREP" --version 2>&1` in
-*GNU*)
-  ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;;
-*)
-  ac_count=0
-  $as_echo_n 0123456789 >"conftest.in"
-  while :
-  do
-    cat "conftest.in" "conftest.in" >"conftest.tmp"
-    mv "conftest.tmp" "conftest.in"
-    cp "conftest.in" "conftest.nl"
-    $as_echo 'FGREP' >> "conftest.nl"
-    "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break
-    diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break
-    as_fn_arith $ac_count + 1 && ac_count=$as_val
-    if test $ac_count -gt ${ac_path_FGREP_max-0}; then
-      # Best one so far, save it but keep looking for a better one
-      ac_cv_path_FGREP="$ac_path_FGREP"
-      ac_path_FGREP_max=$ac_count
-    fi
-    # 10*(2^10) chars as input seems more than enough
-    test $ac_count -gt 10 && break
-  done
-  rm -f conftest.in conftest.tmp conftest.nl conftest.out;;
-esac
-
-      $ac_path_FGREP_found && break 3
-    done
-  done
-  done
-IFS=$as_save_IFS
-  if test -z "$ac_cv_path_FGREP"; then
-    as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5
-  fi
-else
-  ac_cv_path_FGREP=$FGREP
-fi
-
-   fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5
-$as_echo "$ac_cv_path_FGREP" >&6; }
- FGREP="$ac_cv_path_FGREP"
-
-
-test -z "$GREP" && GREP=grep
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-# Check whether --with-gnu-ld was given.
-if test "${with_gnu_ld+set}" = set; then :
-  withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes
-else
-  with_gnu_ld=no
-fi
-
-ac_prog=ld
-if test "$GCC" = yes; then
-  # Check if gcc -print-prog-name=ld gives a path.
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5
-$as_echo_n "checking for ld used by $CC... " >&6; }
-  case $host in
-  *-*-mingw*)
-    # gcc leaves a trailing carriage return which upsets mingw
-    ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;;
-  *)
-    ac_prog=`($CC -print-prog-name=ld) 2>&5` ;;
-  esac
-  case $ac_prog in
-    # Accept absolute paths.
-    [\\/]* | ?:[\\/]*)
-      re_direlt='/[^/][^/]*/\.\./'
-      # Canonicalize the pathname of ld
-      ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'`
-      while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do
-	ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"`
-      done
-      test -z "$LD" && LD="$ac_prog"
-      ;;
-  "")
-    # If it fails, then pretend we aren't using GCC.
-    ac_prog=ld
-    ;;
-  *)
-    # If it is relative, then search for the first ld in PATH.
-    with_gnu_ld=unknown
-    ;;
-  esac
-elif test "$with_gnu_ld" = yes; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5
-$as_echo_n "checking for GNU ld... " >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5
-$as_echo_n "checking for non-GNU ld... " >&6; }
-fi
-if ${lt_cv_path_LD+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -z "$LD"; then
-  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-  for ac_dir in $PATH; do
-    IFS="$lt_save_ifs"
-    test -z "$ac_dir" && ac_dir=.
-    if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then
-      lt_cv_path_LD="$ac_dir/$ac_prog"
-      # Check to see if the program is GNU ld.  I'd rather use --version,
-      # but apparently some variants of GNU ld only accept -v.
-      # Break only if it was the GNU/non-GNU ld that we prefer.
-      case `"$lt_cv_path_LD" -v 2>&1 </dev/null` in
-      *GNU* | *'with BFD'*)
-	test "$with_gnu_ld" != no && break
-	;;
-      *)
-	test "$with_gnu_ld" != yes && break
-	;;
-      esac
-    fi
-  done
-  IFS="$lt_save_ifs"
-else
-  lt_cv_path_LD="$LD" # Let the user override the test with a path.
-fi
-fi
-
-LD="$lt_cv_path_LD"
-if test -n "$LD"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5
-$as_echo "$LD" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5
-$as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; }
-if ${lt_cv_prog_gnu_ld+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  # I'd rather use --version here, but apparently some GNU lds only accept -v.
-case `$LD -v 2>&1 </dev/null` in
-*GNU* | *'with BFD'*)
-  lt_cv_prog_gnu_ld=yes
-  ;;
-*)
-  lt_cv_prog_gnu_ld=no
-  ;;
-esac
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_gnu_ld" >&5
-$as_echo "$lt_cv_prog_gnu_ld" >&6; }
-with_gnu_ld=$lt_cv_prog_gnu_ld
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5
-$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; }
-if ${lt_cv_path_NM+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$NM"; then
-  # Let the user override the test.
-  lt_cv_path_NM="$NM"
-else
-  lt_nm_to_check="${ac_tool_prefix}nm"
-  if test -n "$ac_tool_prefix" && test "$build" = "$host"; then
-    lt_nm_to_check="$lt_nm_to_check nm"
-  fi
-  for lt_tmp_nm in $lt_nm_to_check; do
-    lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-    for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do
-      IFS="$lt_save_ifs"
-      test -z "$ac_dir" && ac_dir=.
-      tmp_nm="$ac_dir/$lt_tmp_nm"
-      if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then
-	# Check to see if the nm accepts a BSD-compat flag.
-	# Adding the `sed 1q' prevents false positives on HP-UX, which says:
-	#   nm: unknown option "B" ignored
-	# Tru64's nm complains that /dev/null is an invalid object file
-	case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in
-	*/dev/null* | *'Invalid file or object type'*)
-	  lt_cv_path_NM="$tmp_nm -B"
-	  break
-	  ;;
-	*)
-	  case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in
-	  */dev/null*)
-	    lt_cv_path_NM="$tmp_nm -p"
-	    break
-	    ;;
-	  *)
-	    lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but
-	    continue # so that we can try to find one that supports BSD flags
-	    ;;
-	  esac
-	  ;;
-	esac
-      fi
-    done
-    IFS="$lt_save_ifs"
-  done
-  : ${lt_cv_path_NM=no}
-fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5
-$as_echo "$lt_cv_path_NM" >&6; }
-if test "$lt_cv_path_NM" != "no"; then
-  NM="$lt_cv_path_NM"
-else
-  # Didn't find any BSD compatible name lister, look for dumpbin.
-  if test -n "$DUMPBIN"; then :
-    # Let the user override the test.
-  else
-    if test -n "$ac_tool_prefix"; then
-  for ac_prog in dumpbin "link -dump"
-  do
-    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_DUMPBIN+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$DUMPBIN"; then
-  ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-DUMPBIN=$ac_cv_prog_DUMPBIN
-if test -n "$DUMPBIN"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5
-$as_echo "$DUMPBIN" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-    test -n "$DUMPBIN" && break
-  done
-fi
-if test -z "$DUMPBIN"; then
-  ac_ct_DUMPBIN=$DUMPBIN
-  for ac_prog in dumpbin "link -dump"
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_DUMPBIN"; then
-  ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_DUMPBIN="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN
-if test -n "$ac_ct_DUMPBIN"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5
-$as_echo "$ac_ct_DUMPBIN" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$ac_ct_DUMPBIN" && break
-done
-
-  if test "x$ac_ct_DUMPBIN" = x; then
-    DUMPBIN=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    DUMPBIN=$ac_ct_DUMPBIN
-  fi
-fi
-
-    case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in
-    *COFF*)
-      DUMPBIN="$DUMPBIN -symbols"
-      ;;
-    *)
-      DUMPBIN=:
-      ;;
-    esac
-  fi
-
-  if test "$DUMPBIN" != ":"; then
-    NM="$DUMPBIN"
-  fi
-fi
-test -z "$NM" && NM=nm
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5
-$as_echo_n "checking the name lister ($NM) interface... " >&6; }
-if ${lt_cv_nm_interface+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_nm_interface="BSD nm"
-  echo "int some_variable = 0;" > conftest.$ac_ext
-  (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5)
-  (eval "$ac_compile" 2>conftest.err)
-  cat conftest.err >&5
-  (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5)
-  (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out)
-  cat conftest.err >&5
-  (eval echo "\"\$as_me:$LINENO: output\"" >&5)
-  cat conftest.out >&5
-  if $GREP 'External.*some_variable' conftest.out > /dev/null; then
-    lt_cv_nm_interface="MS dumpbin"
-  fi
-  rm -f conftest*
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5
-$as_echo "$lt_cv_nm_interface" >&6; }
-
-# find the maximum length of command line arguments
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5
-$as_echo_n "checking the maximum length of command line arguments... " >&6; }
-if ${lt_cv_sys_max_cmd_len+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-    i=0
-  teststring="ABCD"
-
-  case $build_os in
-  msdosdjgpp*)
-    # On DJGPP, this test can blow up pretty badly due to problems in libc
-    # (any single argument exceeding 2000 bytes causes a buffer overrun
-    # during glob expansion).  Even if it were fixed, the result of this
-    # check would be larger than it should be.
-    lt_cv_sys_max_cmd_len=12288;    # 12K is about right
-    ;;
-
-  gnu*)
-    # Under GNU Hurd, this test is not required because there is
-    # no limit to the length of command line arguments.
-    # Libtool will interpret -1 as no limit whatsoever
-    lt_cv_sys_max_cmd_len=-1;
-    ;;
-
-  cygwin* | mingw* | cegcc*)
-    # On Win9x/ME, this test blows up -- it succeeds, but takes
-    # about 5 minutes as the teststring grows exponentially.
-    # Worse, since 9x/ME are not pre-emptively multitasking,
-    # you end up with a "frozen" computer, even though with patience
-    # the test eventually succeeds (with a max line length of 256k).
-    # Instead, let's just punt: use the minimum linelength reported by
-    # all of the supported platforms: 8192 (on NT/2K/XP).
-    lt_cv_sys_max_cmd_len=8192;
-    ;;
-
-  mint*)
-    # On MiNT this can take a long time and run out of memory.
-    lt_cv_sys_max_cmd_len=8192;
-    ;;
-
-  amigaos*)
-    # On AmigaOS with pdksh, this test takes hours, literally.
-    # So we just punt and use a minimum line length of 8192.
-    lt_cv_sys_max_cmd_len=8192;
-    ;;
-
-  netbsd* | freebsd* | openbsd* | darwin* | dragonfly*)
-    # This has been around since 386BSD, at least.  Likely further.
-    if test -x /sbin/sysctl; then
-      lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax`
-    elif test -x /usr/sbin/sysctl; then
-      lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax`
-    else
-      lt_cv_sys_max_cmd_len=65536	# usable default for all BSDs
-    fi
-    # And add a safety zone
-    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
-    lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
-    ;;
-
-  interix*)
-    # We know the value 262144 and hardcode it with a safety zone (like BSD)
-    lt_cv_sys_max_cmd_len=196608
-    ;;
-
-  os2*)
-    # The test takes a long time on OS/2.
-    lt_cv_sys_max_cmd_len=8192
-    ;;
-
-  osf*)
-    # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure
-    # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not
-    # nice to cause kernel panics so lets avoid the loop below.
-    # First set a reasonable default.
-    lt_cv_sys_max_cmd_len=16384
-    #
-    if test -x /sbin/sysconfig; then
-      case `/sbin/sysconfig -q proc exec_disable_arg_limit` in
-        *1*) lt_cv_sys_max_cmd_len=-1 ;;
-      esac
-    fi
-    ;;
-  sco3.2v5*)
-    lt_cv_sys_max_cmd_len=102400
-    ;;
-  sysv5* | sco5v6* | sysv4.2uw2*)
-    kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null`
-    if test -n "$kargmax"; then
-      lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[	 ]//'`
-    else
-      lt_cv_sys_max_cmd_len=32768
-    fi
-    ;;
-  *)
-    lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null`
-    if test -n "$lt_cv_sys_max_cmd_len"; then
-      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4`
-      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3`
-    else
-      # Make teststring a little bigger before we do anything with it.
-      # a 1K string should be a reasonable start.
-      for i in 1 2 3 4 5 6 7 8 ; do
-        teststring=$teststring$teststring
-      done
-      SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}}
-      # If test is not a shell built-in, we'll probably end up computing a
-      # maximum length that is only half of the actual maximum length, but
-      # we can't tell.
-      while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \
-	         = "X$teststring$teststring"; } >/dev/null 2>&1 &&
-	      test $i != 17 # 1/2 MB should be enough
-      do
-        i=`expr $i + 1`
-        teststring=$teststring$teststring
-      done
-      # Only check the string length outside the loop.
-      lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1`
-      teststring=
-      # Add a significant safety factor because C++ compilers can tack on
-      # massive amounts of additional arguments before passing them to the
-      # linker.  It appears as though 1/2 is a usable value.
-      lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2`
-    fi
-    ;;
-  esac
-
-fi
-
-if test -n $lt_cv_sys_max_cmd_len ; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5
-$as_echo "$lt_cv_sys_max_cmd_len" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5
-$as_echo "none" >&6; }
-fi
-max_cmd_len=$lt_cv_sys_max_cmd_len
-
-
-
-
-
-
-: ${CP="cp -f"}
-: ${MV="mv -f"}
-: ${RM="rm -f"}
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5
-$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; }
-# Try some XSI features
-xsi_shell=no
-( _lt_dummy="a/b/c"
-  test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \
-      = c,a/b,b/c, \
-    && eval 'test $(( 1 + 1 )) -eq 2 \
-    && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \
-  && xsi_shell=yes
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5
-$as_echo "$xsi_shell" >&6; }
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5
-$as_echo_n "checking whether the shell understands \"+=\"... " >&6; }
-lt_shell_append=no
-( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \
-    >/dev/null 2>&1 \
-  && lt_shell_append=yes
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5
-$as_echo "$lt_shell_append" >&6; }
-
-
-if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then
-  lt_unset=unset
-else
-  lt_unset=false
-fi
-
-
-
-
-
-# test EBCDIC or ASCII
-case `echo X|tr X '\101'` in
- A) # ASCII based system
-    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
-  lt_SP2NL='tr \040 \012'
-  lt_NL2SP='tr \015\012 \040\040'
-  ;;
- *) # EBCDIC based system
-  lt_SP2NL='tr \100 \n'
-  lt_NL2SP='tr \r\n \100\100'
-  ;;
-esac
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5
-$as_echo_n "checking how to convert $build file names to $host format... " >&6; }
-if ${lt_cv_to_host_file_cmd+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  case $host in
-  *-*-mingw* )
-    case $build in
-      *-*-mingw* ) # actually msys
-        lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32
-        ;;
-      *-*-cygwin* )
-        lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32
-        ;;
-      * ) # otherwise, assume *nix
-        lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32
-        ;;
-    esac
-    ;;
-  *-*-cygwin* )
-    case $build in
-      *-*-mingw* ) # actually msys
-        lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin
-        ;;
-      *-*-cygwin* )
-        lt_cv_to_host_file_cmd=func_convert_file_noop
-        ;;
-      * ) # otherwise, assume *nix
-        lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin
-        ;;
-    esac
-    ;;
-  * ) # unhandled hosts (and "normal" native builds)
-    lt_cv_to_host_file_cmd=func_convert_file_noop
-    ;;
-esac
-
-fi
-
-to_host_file_cmd=$lt_cv_to_host_file_cmd
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5
-$as_echo "$lt_cv_to_host_file_cmd" >&6; }
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5
-$as_echo_n "checking how to convert $build file names to toolchain format... " >&6; }
-if ${lt_cv_to_tool_file_cmd+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  #assume ordinary cross tools, or native build.
-lt_cv_to_tool_file_cmd=func_convert_file_noop
-case $host in
-  *-*-mingw* )
-    case $build in
-      *-*-mingw* ) # actually msys
-        lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32
-        ;;
-    esac
-    ;;
-esac
-
-fi
-
-to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5
-$as_echo "$lt_cv_to_tool_file_cmd" >&6; }
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5
-$as_echo_n "checking for $LD option to reload object files... " >&6; }
-if ${lt_cv_ld_reload_flag+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_ld_reload_flag='-r'
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5
-$as_echo "$lt_cv_ld_reload_flag" >&6; }
-reload_flag=$lt_cv_ld_reload_flag
-case $reload_flag in
-"" | " "*) ;;
-*) reload_flag=" $reload_flag" ;;
-esac
-reload_cmds='$LD$reload_flag -o $output$reload_objs'
-case $host_os in
-  cygwin* | mingw* | pw32* | cegcc*)
-    if test "$GCC" != yes; then
-      reload_cmds=false
-    fi
-    ;;
-  darwin*)
-    if test "$GCC" = yes; then
-      reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs'
-    else
-      reload_cmds='$LD$reload_flag -o $output$reload_objs'
-    fi
-    ;;
-esac
-
-
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args.
-set dummy ${ac_tool_prefix}objdump; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OBJDUMP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$OBJDUMP"; then
-  ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-OBJDUMP=$ac_cv_prog_OBJDUMP
-if test -n "$OBJDUMP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5
-$as_echo "$OBJDUMP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_OBJDUMP"; then
-  ac_ct_OBJDUMP=$OBJDUMP
-  # Extract the first word of "objdump", so it can be a program name with args.
-set dummy objdump; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_OBJDUMP"; then
-  ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_OBJDUMP="objdump"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP
-if test -n "$ac_ct_OBJDUMP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5
-$as_echo "$ac_ct_OBJDUMP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_OBJDUMP" = x; then
-    OBJDUMP="false"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    OBJDUMP=$ac_ct_OBJDUMP
-  fi
-else
-  OBJDUMP="$ac_cv_prog_OBJDUMP"
-fi
-
-test -z "$OBJDUMP" && OBJDUMP=objdump
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5
-$as_echo_n "checking how to recognize dependent libraries... " >&6; }
-if ${lt_cv_deplibs_check_method+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_file_magic_cmd='$MAGIC_CMD'
-lt_cv_file_magic_test_file=
-lt_cv_deplibs_check_method='unknown'
-# Need to set the preceding variable on all platforms that support
-# interlibrary dependencies.
-# 'none' -- dependencies not supported.
-# `unknown' -- same as none, but documents that we really don't know.
-# 'pass_all' -- all dependencies passed with no checks.
-# 'test_compile' -- check by making test program.
-# 'file_magic [[regex]]' -- check by looking for files in library path
-# which responds to the $file_magic_cmd with a given extended regex.
-# If you have `file' or equivalent on your system and you're not sure
-# whether `pass_all' will *always* work, you probably want this one.
-
-case $host_os in
-aix[4-9]*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-beos*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-bsdi[45]*)
-  lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)'
-  lt_cv_file_magic_cmd='/usr/bin/file -L'
-  lt_cv_file_magic_test_file=/shlib/libc.so
-  ;;
-
-cygwin*)
-  # func_win32_libid is a shell function defined in ltmain.sh
-  lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
-  lt_cv_file_magic_cmd='func_win32_libid'
-  ;;
-
-mingw* | pw32*)
-  # Base MSYS/MinGW do not provide the 'file' command needed by
-  # func_win32_libid shell function, so use a weaker test based on 'objdump',
-  # unless we find 'file', for example because we are cross-compiling.
-  # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin.
-  if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then
-    lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL'
-    lt_cv_file_magic_cmd='func_win32_libid'
-  else
-    # Keep this pattern in sync with the one in func_win32_libid.
-    lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)'
-    lt_cv_file_magic_cmd='$OBJDUMP -f'
-  fi
-  ;;
-
-cegcc*)
-  # use the weaker test based on 'objdump'. See mingw*.
-  lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?'
-  lt_cv_file_magic_cmd='$OBJDUMP -f'
-  ;;
-
-darwin* | rhapsody*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-freebsd* | dragonfly*)
-  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
-    case $host_cpu in
-    i*86 )
-      # Not sure whether the presence of OpenBSD here was a mistake.
-      # Let's accept both of them until this is cleared up.
-      lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library'
-      lt_cv_file_magic_cmd=/usr/bin/file
-      lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*`
-      ;;
-    esac
-  else
-    lt_cv_deplibs_check_method=pass_all
-  fi
-  ;;
-
-gnu*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-haiku*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-hpux10.20* | hpux11*)
-  lt_cv_file_magic_cmd=/usr/bin/file
-  case $host_cpu in
-  ia64*)
-    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64'
-    lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so
-    ;;
-  hppa*64*)
-    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]'
-    lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl
-    ;;
-  *)
-    lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library'
-    lt_cv_file_magic_test_file=/usr/lib/libc.sl
-    ;;
-  esac
-  ;;
-
-interix[3-9]*)
-  # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here
-  lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$'
-  ;;
-
-irix5* | irix6* | nonstopux*)
-  case $LD in
-  *-32|*"-32 ") libmagic=32-bit;;
-  *-n32|*"-n32 ") libmagic=N32;;
-  *-64|*"-64 ") libmagic=64-bit;;
-  *) libmagic=never-match;;
-  esac
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-# This must be glibc/ELF.
-linux* | k*bsd*-gnu | kopensolaris*-gnu)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-netbsd*)
-  if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then
-    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
-  else
-    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$'
-  fi
-  ;;
-
-newos6*)
-  lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)'
-  lt_cv_file_magic_cmd=/usr/bin/file
-  lt_cv_file_magic_test_file=/usr/lib/libnls.so
-  ;;
-
-*nto* | *qnx*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-openbsd*)
-  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
-    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$'
-  else
-    lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$'
-  fi
-  ;;
-
-osf3* | osf4* | osf5*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-rdos*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-solaris*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-
-sysv4 | sysv4.3*)
-  case $host_vendor in
-  motorola)
-    lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]'
-    lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*`
-    ;;
-  ncr)
-    lt_cv_deplibs_check_method=pass_all
-    ;;
-  sequent)
-    lt_cv_file_magic_cmd='/bin/file'
-    lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )'
-    ;;
-  sni)
-    lt_cv_file_magic_cmd='/bin/file'
-    lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib"
-    lt_cv_file_magic_test_file=/lib/libc.so
-    ;;
-  siemens)
-    lt_cv_deplibs_check_method=pass_all
-    ;;
-  pc)
-    lt_cv_deplibs_check_method=pass_all
-    ;;
-  esac
-  ;;
-
-tpf*)
-  lt_cv_deplibs_check_method=pass_all
-  ;;
-esac
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5
-$as_echo "$lt_cv_deplibs_check_method" >&6; }
-
-file_magic_glob=
-want_nocaseglob=no
-if test "$build" = "$host"; then
-  case $host_os in
-  mingw* | pw32*)
-    if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then
-      want_nocaseglob=yes
-    else
-      file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"`
-    fi
-    ;;
-  esac
-fi
-
-file_magic_cmd=$lt_cv_file_magic_cmd
-deplibs_check_method=$lt_cv_deplibs_check_method
-test -z "$deplibs_check_method" && deplibs_check_method=unknown
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args.
-set dummy ${ac_tool_prefix}dlltool; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_DLLTOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$DLLTOOL"; then
-  ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-DLLTOOL=$ac_cv_prog_DLLTOOL
-if test -n "$DLLTOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5
-$as_echo "$DLLTOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_DLLTOOL"; then
-  ac_ct_DLLTOOL=$DLLTOOL
-  # Extract the first word of "dlltool", so it can be a program name with args.
-set dummy dlltool; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_DLLTOOL"; then
-  ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_DLLTOOL="dlltool"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL
-if test -n "$ac_ct_DLLTOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5
-$as_echo "$ac_ct_DLLTOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_DLLTOOL" = x; then
-    DLLTOOL="false"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    DLLTOOL=$ac_ct_DLLTOOL
-  fi
-else
-  DLLTOOL="$ac_cv_prog_DLLTOOL"
-fi
-
-test -z "$DLLTOOL" && DLLTOOL=dlltool
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5
-$as_echo_n "checking how to associate runtime and link libraries... " >&6; }
-if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_sharedlib_from_linklib_cmd='unknown'
-
-case $host_os in
-cygwin* | mingw* | pw32* | cegcc*)
-  # two different shell functions defined in ltmain.sh
-  # decide which to use based on capabilities of $DLLTOOL
-  case `$DLLTOOL --help 2>&1` in
-  *--identify-strict*)
-    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib
-    ;;
-  *)
-    lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback
-    ;;
-  esac
-  ;;
-*)
-  # fallback: assume linklib IS sharedlib
-  lt_cv_sharedlib_from_linklib_cmd="$ECHO"
-  ;;
-esac
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5
-$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; }
-sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd
-test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
-  for ac_prog in ar
-  do
-    # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args.
-set dummy $ac_tool_prefix$ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_AR+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$AR"; then
-  ac_cv_prog_AR="$AR" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_AR="$ac_tool_prefix$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-AR=$ac_cv_prog_AR
-if test -n "$AR"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5
-$as_echo "$AR" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-    test -n "$AR" && break
-  done
-fi
-if test -z "$AR"; then
-  ac_ct_AR=$AR
-  for ac_prog in ar
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_AR+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_AR"; then
-  ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_AR="$ac_prog"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_AR=$ac_cv_prog_ac_ct_AR
-if test -n "$ac_ct_AR"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5
-$as_echo "$ac_ct_AR" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  test -n "$ac_ct_AR" && break
-done
-
-  if test "x$ac_ct_AR" = x; then
-    AR="false"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    AR=$ac_ct_AR
-  fi
-fi
-
-: ${AR=ar}
-: ${AR_FLAGS=cru}
-
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5
-$as_echo_n "checking for archiver @FILE support... " >&6; }
-if ${lt_cv_ar_at_file+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_ar_at_file=no
-   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_compile "$LINENO"; then :
-  echo conftest.$ac_objext > conftest.lst
-      lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5'
-      { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-  (eval $lt_ar_try) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-      if test "$ac_status" -eq 0; then
-	# Ensure the archiver fails upon bogus file names.
-	rm -f conftest.$ac_objext libconftest.a
-	{ { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5
-  (eval $lt_ar_try) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-	if test "$ac_status" -ne 0; then
-          lt_cv_ar_at_file=@
-        fi
-      fi
-      rm -f conftest.* libconftest.a
-
-fi
-rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5
-$as_echo "$lt_cv_ar_at_file" >&6; }
-
-if test "x$lt_cv_ar_at_file" = xno; then
-  archiver_list_spec=
-else
-  archiver_list_spec=$lt_cv_ar_at_file
-fi
-
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args.
-set dummy ${ac_tool_prefix}strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_STRIP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$STRIP"; then
-  ac_cv_prog_STRIP="$STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_STRIP="${ac_tool_prefix}strip"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-STRIP=$ac_cv_prog_STRIP
-if test -n "$STRIP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5
-$as_echo "$STRIP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_STRIP"; then
-  ac_ct_STRIP=$STRIP
-  # Extract the first word of "strip", so it can be a program name with args.
-set dummy strip; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_STRIP+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_STRIP"; then
-  ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_STRIP="strip"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP
-if test -n "$ac_ct_STRIP"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5
-$as_echo "$ac_ct_STRIP" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_STRIP" = x; then
-    STRIP=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    STRIP=$ac_ct_STRIP
-  fi
-else
-  STRIP="$ac_cv_prog_STRIP"
-fi
-
-test -z "$STRIP" && STRIP=:
-
-
-
-
-
-
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args.
-set dummy ${ac_tool_prefix}ranlib; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_RANLIB+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$RANLIB"; then
-  ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-RANLIB=$ac_cv_prog_RANLIB
-if test -n "$RANLIB"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5
-$as_echo "$RANLIB" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_RANLIB"; then
-  ac_ct_RANLIB=$RANLIB
-  # Extract the first word of "ranlib", so it can be a program name with args.
-set dummy ranlib; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_RANLIB+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_RANLIB"; then
-  ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_RANLIB="ranlib"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB
-if test -n "$ac_ct_RANLIB"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5
-$as_echo "$ac_ct_RANLIB" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_RANLIB" = x; then
-    RANLIB=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    RANLIB=$ac_ct_RANLIB
-  fi
-else
-  RANLIB="$ac_cv_prog_RANLIB"
-fi
-
-test -z "$RANLIB" && RANLIB=:
-
-
-
-
-
-
-# Determine commands to create old-style static archives.
-old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs'
-old_postinstall_cmds='chmod 644 $oldlib'
-old_postuninstall_cmds=
-
-if test -n "$RANLIB"; then
-  case $host_os in
-  openbsd*)
-    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib"
-    ;;
-  *)
-    old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib"
-    ;;
-  esac
-  old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib"
-fi
-
-case $host_os in
-  darwin*)
-    lock_old_archive_extraction=yes ;;
-  *)
-    lock_old_archive_extraction=no ;;
-esac
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-# If no C compiler was specified, use CC.
-LTCC=${LTCC-"$CC"}
-
-# If no C compiler flags were specified, use CFLAGS.
-LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
-
-# Allow CC to be a program name with arguments.
-compiler=$CC
-
-
-# Check for command to grab the raw symbol name followed by C symbol from nm.
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5
-$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; }
-if ${lt_cv_sys_global_symbol_pipe+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-
-# These are sane defaults that work on at least a few old systems.
-# [They come from Ultrix.  What could be older than Ultrix?!! ;)]
-
-# Character class describing NM global symbol codes.
-symcode='[BCDEGRST]'
-
-# Regexp to match symbols that can be accessed directly from C.
-sympat='\([_A-Za-z][_A-Za-z0-9]*\)'
-
-# Define system-specific variables.
-case $host_os in
-aix*)
-  symcode='[BCDT]'
-  ;;
-cygwin* | mingw* | pw32* | cegcc*)
-  symcode='[ABCDGISTW]'
-  ;;
-hpux*)
-  if test "$host_cpu" = ia64; then
-    symcode='[ABCDEGRST]'
-  fi
-  ;;
-irix* | nonstopux*)
-  symcode='[BCDEGRST]'
-  ;;
-osf*)
-  symcode='[BCDEGQRST]'
-  ;;
-solaris*)
-  symcode='[BDRT]'
-  ;;
-sco3.2v5*)
-  symcode='[DT]'
-  ;;
-sysv4.2uw2*)
-  symcode='[DT]'
-  ;;
-sysv5* | sco5v6* | unixware* | OpenUNIX*)
-  symcode='[ABDT]'
-  ;;
-sysv4)
-  symcode='[DFNSTU]'
-  ;;
-esac
-
-# If we're using GNU nm, then use its standard symbol codes.
-case `$NM -V 2>&1` in
-*GNU* | *'with BFD'*)
-  symcode='[ABCDGIRSTW]' ;;
-esac
-
-# Transform an extracted symbol line into a proper C declaration.
-# Some systems (esp. on ia64) link data and code symbols differently,
-# so use this general approach.
-lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'"
-
-# Transform an extracted symbol line into symbol name and symbol address
-lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/  {\"\2\", (void *) \&\2},/p'"
-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/  {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/  {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/  {\"lib\2\", (void *) \&\2},/p'"
-
-# Handle CRLF in mingw tool chain
-opt_cr=
-case $build_os in
-mingw*)
-  opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp
-  ;;
-esac
-
-# Try without a prefix underscore, then with it.
-for ac_symprfx in "" "_"; do
-
-  # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol.
-  symxfrm="\\1 $ac_symprfx\\2 \\2"
-
-  # Write the raw and C identifiers.
-  if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-    # Fake it for dumpbin and say T for any non-static function
-    # and D for any global variable.
-    # Also find C++ and __fastcall symbols from MSVC++,
-    # which start with @ or ?.
-    lt_cv_sys_global_symbol_pipe="$AWK '"\
-"     {last_section=section; section=\$ 3};"\
-"     /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\
-"     /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\
-"     \$ 0!~/External *\|/{next};"\
-"     / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\
-"     {if(hide[section]) next};"\
-"     {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\
-"     {split(\$ 0, a, /\||\r/); split(a[2], s)};"\
-"     s[1]~/^[@?]/{print s[1], s[1]; next};"\
-"     s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\
-"     ' prfx=^$ac_symprfx"
-  else
-    lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[	 ]\($symcode$symcode*\)[	 ][	 ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'"
-  fi
-  lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'"
-
-  # Check to see that the pipe works correctly.
-  pipe_works=no
-
-  rm -f conftest*
-  cat > conftest.$ac_ext <<_LT_EOF
-#ifdef __cplusplus
-extern "C" {
-#endif
-char nm_test_var;
-void nm_test_func(void);
-void nm_test_func(void){}
-#ifdef __cplusplus
-}
-#endif
-int main(){nm_test_var='a';nm_test_func();return(0);}
-_LT_EOF
-
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-    # Now try to grab the symbols.
-    nlist=conftest.nm
-    if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5
-  (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && test -s "$nlist"; then
-      # Try sorting and uniquifying the output.
-      if sort "$nlist" | uniq > "$nlist"T; then
-	mv -f "$nlist"T "$nlist"
-      else
-	rm -f "$nlist"T
-      fi
-
-      # Make sure that we snagged all the symbols we need.
-      if $GREP ' nm_test_var$' "$nlist" >/dev/null; then
-	if $GREP ' nm_test_func$' "$nlist" >/dev/null; then
-	  cat <<_LT_EOF > conftest.$ac_ext
-/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests.  */
-#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE)
-/* DATA imports from DLLs on WIN32 con't be const, because runtime
-   relocations are performed -- see ld's documentation on pseudo-relocs.  */
-# define LT_DLSYM_CONST
-#elif defined(__osf__)
-/* This system does not cope well with relocations in const data.  */
-# define LT_DLSYM_CONST
-#else
-# define LT_DLSYM_CONST const
-#endif
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-_LT_EOF
-	  # Now generate the symbol file.
-	  eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext'
-
-	  cat <<_LT_EOF >> conftest.$ac_ext
-
-/* The mapping between symbol names and symbols.  */
-LT_DLSYM_CONST struct {
-  const char *name;
-  void       *address;
-}
-lt__PROGRAM__LTX_preloaded_symbols[] =
-{
-  { "@PROGRAM@", (void *) 0 },
-_LT_EOF
-	  $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/  {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext
-	  cat <<\_LT_EOF >> conftest.$ac_ext
-  {0, (void *) 0}
-};
-
-/* This works around a problem in FreeBSD linker */
-#ifdef FREEBSD_WORKAROUND
-static const void *lt_preloaded_setup() {
-  return lt__PROGRAM__LTX_preloaded_symbols;
-}
-#endif
-
-#ifdef __cplusplus
-}
-#endif
-_LT_EOF
-	  # Now try linking the two files.
-	  mv conftest.$ac_objext conftstm.$ac_objext
-	  lt_globsym_save_LIBS=$LIBS
-	  lt_globsym_save_CFLAGS=$CFLAGS
-	  LIBS="conftstm.$ac_objext"
-	  CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag"
-	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-  (eval $ac_link) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && test -s conftest${ac_exeext}; then
-	    pipe_works=yes
-	  fi
-	  LIBS=$lt_globsym_save_LIBS
-	  CFLAGS=$lt_globsym_save_CFLAGS
-	else
-	  echo "cannot find nm_test_func in $nlist" >&5
-	fi
-      else
-	echo "cannot find nm_test_var in $nlist" >&5
-      fi
-    else
-      echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5
-    fi
-  else
-    echo "$progname: failed program was:" >&5
-    cat conftest.$ac_ext >&5
-  fi
-  rm -rf conftest* conftst*
-
-  # Do not use the global_symbol_pipe unless it works.
-  if test "$pipe_works" = yes; then
-    break
-  else
-    lt_cv_sys_global_symbol_pipe=
-  fi
-done
-
-fi
-
-if test -z "$lt_cv_sys_global_symbol_pipe"; then
-  lt_cv_sys_global_symbol_to_cdecl=
-fi
-if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5
-$as_echo "failed" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5
-$as_echo "ok" >&6; }
-fi
-
-# Response file support.
-if test "$lt_cv_nm_interface" = "MS dumpbin"; then
-  nm_file_list_spec='@'
-elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then
-  nm_file_list_spec='@'
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5
-$as_echo_n "checking for sysroot... " >&6; }
-
-# Check whether --with-sysroot was given.
-if test "${with_sysroot+set}" = set; then :
-  withval=$with_sysroot;
-else
-  with_sysroot=no
-fi
-
-
-lt_sysroot=
-case ${with_sysroot} in #(
- yes)
-   if test "$GCC" = yes; then
-     lt_sysroot=`$CC --print-sysroot 2>/dev/null`
-   fi
-   ;; #(
- /*)
-   lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"`
-   ;; #(
- no|'')
-   ;; #(
- *)
-   { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5
-$as_echo "${with_sysroot}" >&6; }
-   as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5
-   ;;
-esac
-
- { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5
-$as_echo "${lt_sysroot:-no}" >&6; }
-
-
-
-
-
-# Check whether --enable-libtool-lock was given.
-if test "${enable_libtool_lock+set}" = set; then :
-  enableval=$enable_libtool_lock;
-fi
-
-test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes
-
-# Some flags need to be propagated to the compiler or linker for good
-# libtool support.
-case $host in
-ia64-*-hpux*)
-  # Find out which ABI we are using.
-  echo 'int i;' > conftest.$ac_ext
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-    case `/usr/bin/file conftest.$ac_objext` in
-      *ELF-32*)
-	HPUX_IA64_MODE="32"
-	;;
-      *ELF-64*)
-	HPUX_IA64_MODE="64"
-	;;
-    esac
-  fi
-  rm -rf conftest*
-  ;;
-*-*-irix6*)
-  # Find out which ABI we are using.
-  echo '#line '$LINENO' "configure"' > conftest.$ac_ext
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-    if test "$lt_cv_prog_gnu_ld" = yes; then
-      case `/usr/bin/file conftest.$ac_objext` in
-	*32-bit*)
-	  LD="${LD-ld} -melf32bsmip"
-	  ;;
-	*N32*)
-	  LD="${LD-ld} -melf32bmipn32"
-	  ;;
-	*64-bit*)
-	  LD="${LD-ld} -melf64bmip"
-	;;
-      esac
-    else
-      case `/usr/bin/file conftest.$ac_objext` in
-	*32-bit*)
-	  LD="${LD-ld} -32"
-	  ;;
-	*N32*)
-	  LD="${LD-ld} -n32"
-	  ;;
-	*64-bit*)
-	  LD="${LD-ld} -64"
-	  ;;
-      esac
-    fi
-  fi
-  rm -rf conftest*
-  ;;
-
-x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \
-s390*-*linux*|s390*-*tpf*|sparc*-*linux*)
-  # Find out which ABI we are using.
-  echo 'int i;' > conftest.$ac_ext
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-    case `/usr/bin/file conftest.o` in
-      *32-bit*)
-	case $host in
-	  x86_64-*kfreebsd*-gnu)
-	    LD="${LD-ld} -m elf_i386_fbsd"
-	    ;;
-	  x86_64-*linux*)
-	    LD="${LD-ld} -m elf_i386"
-	    ;;
-	  ppc64-*linux*|powerpc64-*linux*)
-	    LD="${LD-ld} -m elf32ppclinux"
-	    ;;
-	  s390x-*linux*)
-	    LD="${LD-ld} -m elf_s390"
-	    ;;
-	  sparc64-*linux*)
-	    LD="${LD-ld} -m elf32_sparc"
-	    ;;
-	esac
-	;;
-      *64-bit*)
-	case $host in
-	  x86_64-*kfreebsd*-gnu)
-	    LD="${LD-ld} -m elf_x86_64_fbsd"
-	    ;;
-	  x86_64-*linux*)
-	    LD="${LD-ld} -m elf_x86_64"
-	    ;;
-	  ppc*-*linux*|powerpc*-*linux*)
-	    LD="${LD-ld} -m elf64ppc"
-	    ;;
-	  s390*-*linux*|s390*-*tpf*)
-	    LD="${LD-ld} -m elf64_s390"
-	    ;;
-	  sparc*-*linux*)
-	    LD="${LD-ld} -m elf64_sparc"
-	    ;;
-	esac
-	;;
-    esac
-  fi
-  rm -rf conftest*
-  ;;
-
-*-*-sco3.2v5*)
-  # On SCO OpenServer 5, we need -belf to get full-featured binaries.
-  SAVE_CFLAGS="$CFLAGS"
-  CFLAGS="$CFLAGS -belf"
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5
-$as_echo_n "checking whether the C compiler needs -belf... " >&6; }
-if ${lt_cv_cc_needs_belf+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-     cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  lt_cv_cc_needs_belf=yes
-else
-  lt_cv_cc_needs_belf=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-     ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5
-$as_echo "$lt_cv_cc_needs_belf" >&6; }
-  if test x"$lt_cv_cc_needs_belf" != x"yes"; then
-    # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf
-    CFLAGS="$SAVE_CFLAGS"
-  fi
-  ;;
-*-*solaris*)
-  # Find out which ABI we are using.
-  echo 'int i;' > conftest.$ac_ext
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-    case `/usr/bin/file conftest.o` in
-    *64-bit*)
-      case $lt_cv_prog_gnu_ld in
-      yes*)
-        case $host in
-        i?86-*-solaris*)
-          LD="${LD-ld} -m elf_x86_64"
-          ;;
-        sparc*-*-solaris*)
-          LD="${LD-ld} -m elf64_sparc"
-          ;;
-        esac
-        # GNU ld 2.21 introduced _sol2 emulations.  Use them if available.
-        if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then
-          LD="${LD-ld}_sol2"
-        fi
-        ;;
-      *)
-	if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then
-	  LD="${LD-ld} -64"
-	fi
-	;;
-      esac
-      ;;
-    esac
-  fi
-  rm -rf conftest*
-  ;;
-esac
-
-need_locks="$enable_libtool_lock"
-
-if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args.
-set dummy ${ac_tool_prefix}mt; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_MANIFEST_TOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$MANIFEST_TOOL"; then
-  ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL
-if test -n "$MANIFEST_TOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5
-$as_echo "$MANIFEST_TOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_MANIFEST_TOOL"; then
-  ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL
-  # Extract the first word of "mt", so it can be a program name with args.
-set dummy mt; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_MANIFEST_TOOL"; then
-  ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_MANIFEST_TOOL="mt"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL
-if test -n "$ac_ct_MANIFEST_TOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5
-$as_echo "$ac_ct_MANIFEST_TOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_MANIFEST_TOOL" = x; then
-    MANIFEST_TOOL=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL
-  fi
-else
-  MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL"
-fi
-
-test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5
-$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; }
-if ${lt_cv_path_mainfest_tool+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_path_mainfest_tool=no
-  echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5
-  $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out
-  cat conftest.err >&5
-  if $GREP 'Manifest Tool' conftest.out > /dev/null; then
-    lt_cv_path_mainfest_tool=yes
-  fi
-  rm -f conftest*
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5
-$as_echo "$lt_cv_path_mainfest_tool" >&6; }
-if test "x$lt_cv_path_mainfest_tool" != xyes; then
-  MANIFEST_TOOL=:
-fi
-
-
-
-
-
-
-  case $host_os in
-    rhapsody* | darwin*)
-    if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args.
-set dummy ${ac_tool_prefix}dsymutil; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_DSYMUTIL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$DSYMUTIL"; then
-  ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-DSYMUTIL=$ac_cv_prog_DSYMUTIL
-if test -n "$DSYMUTIL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5
-$as_echo "$DSYMUTIL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_DSYMUTIL"; then
-  ac_ct_DSYMUTIL=$DSYMUTIL
-  # Extract the first word of "dsymutil", so it can be a program name with args.
-set dummy dsymutil; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_DSYMUTIL"; then
-  ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_DSYMUTIL="dsymutil"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL
-if test -n "$ac_ct_DSYMUTIL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5
-$as_echo "$ac_ct_DSYMUTIL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_DSYMUTIL" = x; then
-    DSYMUTIL=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    DSYMUTIL=$ac_ct_DSYMUTIL
-  fi
-else
-  DSYMUTIL="$ac_cv_prog_DSYMUTIL"
-fi
-
-    if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args.
-set dummy ${ac_tool_prefix}nmedit; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_NMEDIT+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$NMEDIT"; then
-  ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-NMEDIT=$ac_cv_prog_NMEDIT
-if test -n "$NMEDIT"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5
-$as_echo "$NMEDIT" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_NMEDIT"; then
-  ac_ct_NMEDIT=$NMEDIT
-  # Extract the first word of "nmedit", so it can be a program name with args.
-set dummy nmedit; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_NMEDIT"; then
-  ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_NMEDIT="nmedit"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT
-if test -n "$ac_ct_NMEDIT"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5
-$as_echo "$ac_ct_NMEDIT" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_NMEDIT" = x; then
-    NMEDIT=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    NMEDIT=$ac_ct_NMEDIT
-  fi
-else
-  NMEDIT="$ac_cv_prog_NMEDIT"
-fi
-
-    if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args.
-set dummy ${ac_tool_prefix}lipo; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_LIPO+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$LIPO"; then
-  ac_cv_prog_LIPO="$LIPO" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_LIPO="${ac_tool_prefix}lipo"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-LIPO=$ac_cv_prog_LIPO
-if test -n "$LIPO"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5
-$as_echo "$LIPO" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_LIPO"; then
-  ac_ct_LIPO=$LIPO
-  # Extract the first word of "lipo", so it can be a program name with args.
-set dummy lipo; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_LIPO+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_LIPO"; then
-  ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_LIPO="lipo"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO
-if test -n "$ac_ct_LIPO"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5
-$as_echo "$ac_ct_LIPO" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_LIPO" = x; then
-    LIPO=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    LIPO=$ac_ct_LIPO
-  fi
-else
-  LIPO="$ac_cv_prog_LIPO"
-fi
-
-    if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args.
-set dummy ${ac_tool_prefix}otool; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OTOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$OTOOL"; then
-  ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_OTOOL="${ac_tool_prefix}otool"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-OTOOL=$ac_cv_prog_OTOOL
-if test -n "$OTOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5
-$as_echo "$OTOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_OTOOL"; then
-  ac_ct_OTOOL=$OTOOL
-  # Extract the first word of "otool", so it can be a program name with args.
-set dummy otool; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OTOOL+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_OTOOL"; then
-  ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_OTOOL="otool"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL
-if test -n "$ac_ct_OTOOL"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5
-$as_echo "$ac_ct_OTOOL" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_OTOOL" = x; then
-    OTOOL=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    OTOOL=$ac_ct_OTOOL
-  fi
-else
-  OTOOL="$ac_cv_prog_OTOOL"
-fi
-
-    if test -n "$ac_tool_prefix"; then
-  # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args.
-set dummy ${ac_tool_prefix}otool64; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_OTOOL64+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$OTOOL64"; then
-  ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-OTOOL64=$ac_cv_prog_OTOOL64
-if test -n "$OTOOL64"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5
-$as_echo "$OTOOL64" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-fi
-if test -z "$ac_cv_prog_OTOOL64"; then
-  ac_ct_OTOOL64=$OTOOL64
-  # Extract the first word of "otool64", so it can be a program name with args.
-set dummy otool64; ac_word=$2
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5
-$as_echo_n "checking for $ac_word... " >&6; }
-if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  if test -n "$ac_ct_OTOOL64"; then
-  ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    for ac_exec_ext in '' $ac_executable_extensions; do
-  if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then
-    ac_cv_prog_ac_ct_OTOOL64="otool64"
-    $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-  done
-IFS=$as_save_IFS
-
-fi
-fi
-ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64
-if test -n "$ac_ct_OTOOL64"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5
-$as_echo "$ac_ct_OTOOL64" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-  if test "x$ac_ct_OTOOL64" = x; then
-    OTOOL64=":"
-  else
-    case $cross_compiling:$ac_tool_warned in
-yes:)
-{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5
-$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;}
-ac_tool_warned=yes ;;
-esac
-    OTOOL64=$ac_ct_OTOOL64
-  fi
-else
-  OTOOL64="$ac_cv_prog_OTOOL64"
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5
-$as_echo_n "checking for -single_module linker flag... " >&6; }
-if ${lt_cv_apple_cc_single_mod+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_apple_cc_single_mod=no
-      if test -z "${LT_MULTI_MODULE}"; then
-	# By default we will add the -single_module flag. You can override
-	# by either setting the environment variable LT_MULTI_MODULE
-	# non-empty at configure time, or by adding -multi_module to the
-	# link flags.
-	rm -rf libconftest.dylib*
-	echo "int foo(void){return 1;}" > conftest.c
-	echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
--dynamiclib -Wl,-single_module conftest.c" >&5
-	$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \
-	  -dynamiclib -Wl,-single_module conftest.c 2>conftest.err
-        _lt_result=$?
-	# If there is a non-empty error log, and "single_module"
-	# appears in it, assume the flag caused a linker warning
-        if test -s conftest.err && $GREP single_module conftest.err; then
-	  cat conftest.err >&5
-	# Otherwise, if the output was created with a 0 exit code from
-	# the compiler, it worked.
-	elif test -f libconftest.dylib && test $_lt_result -eq 0; then
-	  lt_cv_apple_cc_single_mod=yes
-	else
-	  cat conftest.err >&5
-	fi
-	rm -rf libconftest.dylib*
-	rm -f conftest.*
-      fi
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5
-$as_echo "$lt_cv_apple_cc_single_mod" >&6; }
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5
-$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; }
-if ${lt_cv_ld_exported_symbols_list+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_ld_exported_symbols_list=no
-      save_LDFLAGS=$LDFLAGS
-      echo "_main" > conftest.sym
-      LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym"
-      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  lt_cv_ld_exported_symbols_list=yes
-else
-  lt_cv_ld_exported_symbols_list=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-	LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5
-$as_echo "$lt_cv_ld_exported_symbols_list" >&6; }
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5
-$as_echo_n "checking for -force_load linker flag... " >&6; }
-if ${lt_cv_ld_force_load+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_ld_force_load=no
-      cat > conftest.c << _LT_EOF
-int forced_loaded() { return 2;}
-_LT_EOF
-      echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5
-      $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5
-      echo "$AR cru libconftest.a conftest.o" >&5
-      $AR cru libconftest.a conftest.o 2>&5
-      echo "$RANLIB libconftest.a" >&5
-      $RANLIB libconftest.a 2>&5
-      cat > conftest.c << _LT_EOF
-int main() { return 0;}
-_LT_EOF
-      echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5
-      $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err
-      _lt_result=$?
-      if test -s conftest.err && $GREP force_load conftest.err; then
-	cat conftest.err >&5
-      elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then
-	lt_cv_ld_force_load=yes
-      else
-	cat conftest.err >&5
-      fi
-        rm -f conftest.err libconftest.a conftest conftest.c
-        rm -rf conftest.dSYM
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5
-$as_echo "$lt_cv_ld_force_load" >&6; }
-    case $host_os in
-    rhapsody* | darwin1.[012])
-      _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;;
-    darwin1.*)
-      _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
-    darwin*) # darwin 5.x on
-      # if running on 10.5 or later, the deployment target defaults
-      # to the OS version, if on x86, and 10.4, the deployment
-      # target defaults to 10.4. Don't you love it?
-      case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in
-	10.0,*86*-darwin8*|10.0,*-darwin[91]*)
-	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
-	10.[012]*)
-	  _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;;
-	10.*)
-	  _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;;
-      esac
-    ;;
-  esac
-    if test "$lt_cv_apple_cc_single_mod" = "yes"; then
-      _lt_dar_single_mod='$single_module'
-    fi
-    if test "$lt_cv_ld_exported_symbols_list" = "yes"; then
-      _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym'
-    else
-      _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}'
-    fi
-    if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then
-      _lt_dsymutil='~$DSYMUTIL $lib || :'
-    else
-      _lt_dsymutil=
-    fi
-    ;;
-  esac
-
-for ac_header in dlfcn.h
-do :
-  ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default
-"
-if test "x$ac_cv_header_dlfcn_h" = xyes; then :
-  cat >>confdefs.h <<_ACEOF
-#define HAVE_DLFCN_H 1
-_ACEOF
-
-fi
-
-done
-
-
-
-func_stripname_cnf ()
-{
-  case ${2} in
-  .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;;
-  *)  func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;;
-  esac
-} # func_stripname_cnf
-
-
-
-
-
-
-# Set options
-
-
-
-        enable_dlopen=no
-
-
-  enable_win32_dll=no
-
-
-            # Check whether --enable-shared was given.
-if test "${enable_shared+set}" = set; then :
-  enableval=$enable_shared; p=${PACKAGE-default}
-    case $enableval in
-    yes) enable_shared=yes ;;
-    no) enable_shared=no ;;
-    *)
-      enable_shared=no
-      # Look at the argument we got.  We use all the common list separators.
-      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
-      for pkg in $enableval; do
-	IFS="$lt_save_ifs"
-	if test "X$pkg" = "X$p"; then
-	  enable_shared=yes
-	fi
-      done
-      IFS="$lt_save_ifs"
-      ;;
-    esac
-else
-  enable_shared=yes
-fi
-
-
-
-
-
-
-
-
-
-  # Check whether --enable-static was given.
-if test "${enable_static+set}" = set; then :
-  enableval=$enable_static; p=${PACKAGE-default}
-    case $enableval in
-    yes) enable_static=yes ;;
-    no) enable_static=no ;;
-    *)
-     enable_static=no
-      # Look at the argument we got.  We use all the common list separators.
-      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
-      for pkg in $enableval; do
-	IFS="$lt_save_ifs"
-	if test "X$pkg" = "X$p"; then
-	  enable_static=yes
-	fi
-      done
-      IFS="$lt_save_ifs"
-      ;;
-    esac
-else
-  enable_static=yes
-fi
-
-
-
-
-
-
-
-
-
-
-# Check whether --with-pic was given.
-if test "${with_pic+set}" = set; then :
-  withval=$with_pic; lt_p=${PACKAGE-default}
-    case $withval in
-    yes|no) pic_mode=$withval ;;
-    *)
-      pic_mode=default
-      # Look at the argument we got.  We use all the common list separators.
-      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
-      for lt_pkg in $withval; do
-	IFS="$lt_save_ifs"
-	if test "X$lt_pkg" = "X$lt_p"; then
-	  pic_mode=yes
-	fi
-      done
-      IFS="$lt_save_ifs"
-      ;;
-    esac
-else
-  pic_mode=default
-fi
-
-
-test -z "$pic_mode" && pic_mode=default
-
-
-
-
-
-
-
-  # Check whether --enable-fast-install was given.
-if test "${enable_fast_install+set}" = set; then :
-  enableval=$enable_fast_install; p=${PACKAGE-default}
-    case $enableval in
-    yes) enable_fast_install=yes ;;
-    no) enable_fast_install=no ;;
-    *)
-      enable_fast_install=no
-      # Look at the argument we got.  We use all the common list separators.
-      lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR,"
-      for pkg in $enableval; do
-	IFS="$lt_save_ifs"
-	if test "X$pkg" = "X$p"; then
-	  enable_fast_install=yes
-	fi
-      done
-      IFS="$lt_save_ifs"
-      ;;
-    esac
-else
-  enable_fast_install=yes
-fi
-
-
-
-
-
-
-
-
-
-
-
-# This can be used to rebuild libtool when needed
-LIBTOOL_DEPS="$ltmain"
-
-# Always use our own libtool.
-LIBTOOL='$(SHELL) $(top_builddir)/libtool'
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-test -z "$LN_S" && LN_S="ln -s"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-if test -n "${ZSH_VERSION+set}" ; then
-   setopt NO_GLOB_SUBST
-fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5
-$as_echo_n "checking for objdir... " >&6; }
-if ${lt_cv_objdir+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  rm -f .libs 2>/dev/null
-mkdir .libs 2>/dev/null
-if test -d .libs; then
-  lt_cv_objdir=.libs
-else
-  # MS-DOS does not allow filenames that begin with a dot.
-  lt_cv_objdir=_libs
-fi
-rmdir .libs 2>/dev/null
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5
-$as_echo "$lt_cv_objdir" >&6; }
-objdir=$lt_cv_objdir
-
-
-
-
-
-cat >>confdefs.h <<_ACEOF
-#define LT_OBJDIR "$lt_cv_objdir/"
-_ACEOF
-
-
-
-
-case $host_os in
-aix3*)
-  # AIX sometimes has problems with the GCC collect2 program.  For some
-  # reason, if we set the COLLECT_NAMES environment variable, the problems
-  # vanish in a puff of smoke.
-  if test "X${COLLECT_NAMES+set}" != Xset; then
-    COLLECT_NAMES=
-    export COLLECT_NAMES
-  fi
-  ;;
-esac
-
-# Global variables:
-ofile=libtool
-can_build_shared=yes
-
-# All known linkers require a `.a' archive for static linking (except MSVC,
-# which needs '.lib').
-libext=a
-
-with_gnu_ld="$lt_cv_prog_gnu_ld"
-
-old_CC="$CC"
-old_CFLAGS="$CFLAGS"
-
-# Set sane defaults for various variables
-test -z "$CC" && CC=cc
-test -z "$LTCC" && LTCC=$CC
-test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS
-test -z "$LD" && LD=ld
-test -z "$ac_objext" && ac_objext=o
-
-for cc_temp in $compiler""; do
-  case $cc_temp in
-    compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
-    distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
-    \-*) ;;
-    *) break;;
-  esac
-done
-cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
-
-
-# Only perform the check for file, if the check method requires it
-test -z "$MAGIC_CMD" && MAGIC_CMD=file
-case $deplibs_check_method in
-file_magic*)
-  if test "$file_magic_cmd" = '$MAGIC_CMD'; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5
-$as_echo_n "checking for ${ac_tool_prefix}file... " >&6; }
-if ${lt_cv_path_MAGIC_CMD+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  case $MAGIC_CMD in
-[\\/*] |  ?:[\\/]*)
-  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
-  ;;
-*)
-  lt_save_MAGIC_CMD="$MAGIC_CMD"
-  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-  ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
-  for ac_dir in $ac_dummy; do
-    IFS="$lt_save_ifs"
-    test -z "$ac_dir" && ac_dir=.
-    if test -f $ac_dir/${ac_tool_prefix}file; then
-      lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file"
-      if test -n "$file_magic_test_file"; then
-	case $deplibs_check_method in
-	"file_magic "*)
-	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
-	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
-	    $EGREP "$file_magic_regex" > /dev/null; then
-	    :
-	  else
-	    cat <<_LT_EOF 1>&2
-
-*** Warning: the command libtool uses to detect shared libraries,
-*** $file_magic_cmd, produces output that libtool cannot recognize.
-*** The result is that libtool may fail to recognize shared libraries
-*** as such.  This will affect the creation of libtool libraries that
-*** depend on shared libraries, but programs linked with such libtool
-*** libraries will work regardless of this problem.  Nevertheless, you
-*** may want to report the problem to your system manager and/or to
-*** bug-libtool@gnu.org
-
-_LT_EOF
-	  fi ;;
-	esac
-      fi
-      break
-    fi
-  done
-  IFS="$lt_save_ifs"
-  MAGIC_CMD="$lt_save_MAGIC_CMD"
-  ;;
-esac
-fi
-
-MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-if test -n "$MAGIC_CMD"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
-$as_echo "$MAGIC_CMD" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-
-
-
-if test -z "$lt_cv_path_MAGIC_CMD"; then
-  if test -n "$ac_tool_prefix"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5
-$as_echo_n "checking for file... " >&6; }
-if ${lt_cv_path_MAGIC_CMD+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  case $MAGIC_CMD in
-[\\/*] |  ?:[\\/]*)
-  lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path.
-  ;;
-*)
-  lt_save_MAGIC_CMD="$MAGIC_CMD"
-  lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR
-  ac_dummy="/usr/bin$PATH_SEPARATOR$PATH"
-  for ac_dir in $ac_dummy; do
-    IFS="$lt_save_ifs"
-    test -z "$ac_dir" && ac_dir=.
-    if test -f $ac_dir/file; then
-      lt_cv_path_MAGIC_CMD="$ac_dir/file"
-      if test -n "$file_magic_test_file"; then
-	case $deplibs_check_method in
-	"file_magic "*)
-	  file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"`
-	  MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-	  if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null |
-	    $EGREP "$file_magic_regex" > /dev/null; then
-	    :
-	  else
-	    cat <<_LT_EOF 1>&2
-
-*** Warning: the command libtool uses to detect shared libraries,
-*** $file_magic_cmd, produces output that libtool cannot recognize.
-*** The result is that libtool may fail to recognize shared libraries
-*** as such.  This will affect the creation of libtool libraries that
-*** depend on shared libraries, but programs linked with such libtool
-*** libraries will work regardless of this problem.  Nevertheless, you
-*** may want to report the problem to your system manager and/or to
-*** bug-libtool@gnu.org
-
-_LT_EOF
-	  fi ;;
-	esac
-      fi
-      break
-    fi
-  done
-  IFS="$lt_save_ifs"
-  MAGIC_CMD="$lt_save_MAGIC_CMD"
-  ;;
-esac
-fi
-
-MAGIC_CMD="$lt_cv_path_MAGIC_CMD"
-if test -n "$MAGIC_CMD"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5
-$as_echo "$MAGIC_CMD" >&6; }
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-fi
-
-
-  else
-    MAGIC_CMD=:
-  fi
-fi
-
-  fi
-  ;;
-esac
-
-# Use C for the default configuration in the libtool script
-
-lt_save_CC="$CC"
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-# Source file extension for C test sources.
-ac_ext=c
-
-# Object file extension for compiled C test sources.
-objext=o
-objext=$objext
-
-# Code to be used in simple compile tests
-lt_simple_compile_test_code="int some_variable = 0;"
-
-# Code to be used in simple link tests
-lt_simple_link_test_code='int main(){return(0);}'
-
-
-
-
-
-
-
-# If no C compiler was specified, use CC.
-LTCC=${LTCC-"$CC"}
-
-# If no C compiler flags were specified, use CFLAGS.
-LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
-
-# Allow CC to be a program name with arguments.
-compiler=$CC
-
-# Save the default compiler, since it gets overwritten when the other
-# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
-compiler_DEFAULT=$CC
-
-# save warnings/boilerplate of simple test code
-ac_outfile=conftest.$ac_objext
-echo "$lt_simple_compile_test_code" >conftest.$ac_ext
-eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_compiler_boilerplate=`cat conftest.err`
-$RM conftest*
-
-ac_outfile=conftest.$ac_objext
-echo "$lt_simple_link_test_code" >conftest.$ac_ext
-eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_linker_boilerplate=`cat conftest.err`
-$RM -r conftest*
-
-
-## CAVEAT EMPTOR:
-## There is no encapsulation within the following macros, do not change
-## the running order or otherwise move them around unless you know exactly
-## what you are doing...
-if test -n "$compiler"; then
-
-lt_prog_compiler_no_builtin_flag=
-
-if test "$GCC" = yes; then
-  case $cc_basename in
-  nvcc*)
-    lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;;
-  *)
-    lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;;
-  esac
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5
-$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; }
-if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_rtti_exceptions=no
-   ac_outfile=conftest.$ac_objext
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-   lt_compiler_flag="-fno-rtti -fno-exceptions"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   # The option is referenced via a variable to avoid confusing sed.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>conftest.err)
-   ac_status=$?
-   cat conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s "$ac_outfile"; then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings other than the usual output.
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
-     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_rtti_exceptions=yes
-     fi
-   fi
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5
-$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; }
-
-if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then
-    lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions"
-else
-    :
-fi
-
-fi
-
-
-
-
-
-
-  lt_prog_compiler_wl=
-lt_prog_compiler_pic=
-lt_prog_compiler_static=
-
-
-  if test "$GCC" = yes; then
-    lt_prog_compiler_wl='-Wl,'
-    lt_prog_compiler_static='-static'
-
-    case $host_os in
-      aix*)
-      # All AIX code is PIC.
-      if test "$host_cpu" = ia64; then
-	# AIX 5 now supports IA64 processor
-	lt_prog_compiler_static='-Bstatic'
-      fi
-      ;;
-
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            lt_prog_compiler_pic='-fPIC'
-        ;;
-      m68k)
-            # FIXME: we need at least 68020 code to build shared libraries, but
-            # adding the `-m68020' flag to GCC prevents building anything better,
-            # like `-m68040'.
-            lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4'
-        ;;
-      esac
-      ;;
-
-    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
-      # PIC is the default for these OSes.
-      ;;
-
-    mingw* | cygwin* | pw32* | os2* | cegcc*)
-      # This hack is so that the source file can tell whether it is being
-      # built for inclusion in a dll (and should export symbols for example).
-      # Although the cygwin gcc ignores -fPIC, still need this for old-style
-      # (--disable-auto-import) libraries
-      lt_prog_compiler_pic='-DDLL_EXPORT'
-      ;;
-
-    darwin* | rhapsody*)
-      # PIC is the default on this platform
-      # Common symbols not allowed in MH_DYLIB files
-      lt_prog_compiler_pic='-fno-common'
-      ;;
-
-    haiku*)
-      # PIC is the default for Haiku.
-      # The "-static" flag exists, but is broken.
-      lt_prog_compiler_static=
-      ;;
-
-    hpux*)
-      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
-      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
-      # sets the default TLS model and affects inlining.
-      case $host_cpu in
-      hppa*64*)
-	# +Z the default
-	;;
-      *)
-	lt_prog_compiler_pic='-fPIC'
-	;;
-      esac
-      ;;
-
-    interix[3-9]*)
-      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
-      # Instead, we relocate shared libraries at runtime.
-      ;;
-
-    msdosdjgpp*)
-      # Just because we use GCC doesn't mean we suddenly get shared libraries
-      # on systems that don't support them.
-      lt_prog_compiler_can_build_shared=no
-      enable_shared=no
-      ;;
-
-    *nto* | *qnx*)
-      # QNX uses GNU C++, but need to define -shared option too, otherwise
-      # it will coredump.
-      lt_prog_compiler_pic='-fPIC -shared'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec; then
-	lt_prog_compiler_pic=-Kconform_pic
-      fi
-      ;;
-
-    *)
-      lt_prog_compiler_pic='-fPIC'
-      ;;
-    esac
-
-    case $cc_basename in
-    nvcc*) # Cuda Compiler Driver 2.2
-      lt_prog_compiler_wl='-Xlinker '
-      if test -n "$lt_prog_compiler_pic"; then
-        lt_prog_compiler_pic="-Xcompiler $lt_prog_compiler_pic"
-      fi
-      ;;
-    esac
-  else
-    # PORTME Check for flag to pass linker flags through the system compiler.
-    case $host_os in
-    aix*)
-      lt_prog_compiler_wl='-Wl,'
-      if test "$host_cpu" = ia64; then
-	# AIX 5 now supports IA64 processor
-	lt_prog_compiler_static='-Bstatic'
-      else
-	lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp'
-      fi
-      ;;
-
-    mingw* | cygwin* | pw32* | os2* | cegcc*)
-      # This hack is so that the source file can tell whether it is being
-      # built for inclusion in a dll (and should export symbols for example).
-      lt_prog_compiler_pic='-DDLL_EXPORT'
-      ;;
-
-    hpux9* | hpux10* | hpux11*)
-      lt_prog_compiler_wl='-Wl,'
-      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
-      # not for PA HP-UX.
-      case $host_cpu in
-      hppa*64*|ia64*)
-	# +Z the default
-	;;
-      *)
-	lt_prog_compiler_pic='+Z'
-	;;
-      esac
-      # Is there a better lt_prog_compiler_static that works with the bundled CC?
-      lt_prog_compiler_static='${wl}-a ${wl}archive'
-      ;;
-
-    irix5* | irix6* | nonstopux*)
-      lt_prog_compiler_wl='-Wl,'
-      # PIC (with -KPIC) is the default.
-      lt_prog_compiler_static='-non_shared'
-      ;;
-
-    linux* | k*bsd*-gnu | kopensolaris*-gnu)
-      case $cc_basename in
-      # old Intel for x86_64 which still supported -KPIC.
-      ecc*)
-	lt_prog_compiler_wl='-Wl,'
-	lt_prog_compiler_pic='-KPIC'
-	lt_prog_compiler_static='-static'
-        ;;
-      # icc used to be incompatible with GCC.
-      # ICC 10 doesn't accept -KPIC any more.
-      icc* | ifort*)
-	lt_prog_compiler_wl='-Wl,'
-	lt_prog_compiler_pic='-fPIC'
-	lt_prog_compiler_static='-static'
-        ;;
-      # Lahey Fortran 8.1.
-      lf95*)
-	lt_prog_compiler_wl='-Wl,'
-	lt_prog_compiler_pic='--shared'
-	lt_prog_compiler_static='--static'
-	;;
-      nagfor*)
-	# NAG Fortran compiler
-	lt_prog_compiler_wl='-Wl,-Wl,,'
-	lt_prog_compiler_pic='-PIC'
-	lt_prog_compiler_static='-Bstatic'
-	;;
-      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
-        # Portland Group compilers (*not* the Pentium gcc compiler,
-	# which looks to be a dead project)
-	lt_prog_compiler_wl='-Wl,'
-	lt_prog_compiler_pic='-fpic'
-	lt_prog_compiler_static='-Bstatic'
-        ;;
-      ccc*)
-        lt_prog_compiler_wl='-Wl,'
-        # All Alpha code is PIC.
-        lt_prog_compiler_static='-non_shared'
-        ;;
-      xl* | bgxl* | bgf* | mpixl*)
-	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
-	lt_prog_compiler_wl='-Wl,'
-	lt_prog_compiler_pic='-qpic'
-	lt_prog_compiler_static='-qstaticlink'
-	;;
-      *)
-	case `$CC -V 2>&1 | sed 5q` in
-	*Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*)
-	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
-	  lt_prog_compiler_pic='-KPIC'
-	  lt_prog_compiler_static='-Bstatic'
-	  lt_prog_compiler_wl=''
-	  ;;
-	*Sun\ F* | *Sun*Fortran*)
-	  lt_prog_compiler_pic='-KPIC'
-	  lt_prog_compiler_static='-Bstatic'
-	  lt_prog_compiler_wl='-Qoption ld '
-	  ;;
-	*Sun\ C*)
-	  # Sun C 5.9
-	  lt_prog_compiler_pic='-KPIC'
-	  lt_prog_compiler_static='-Bstatic'
-	  lt_prog_compiler_wl='-Wl,'
-	  ;;
-        *Intel*\ [CF]*Compiler*)
-	  lt_prog_compiler_wl='-Wl,'
-	  lt_prog_compiler_pic='-fPIC'
-	  lt_prog_compiler_static='-static'
-	  ;;
-	*Portland\ Group*)
-	  lt_prog_compiler_wl='-Wl,'
-	  lt_prog_compiler_pic='-fpic'
-	  lt_prog_compiler_static='-Bstatic'
-	  ;;
-	esac
-	;;
-      esac
-      ;;
-
-    newsos6)
-      lt_prog_compiler_pic='-KPIC'
-      lt_prog_compiler_static='-Bstatic'
-      ;;
-
-    *nto* | *qnx*)
-      # QNX uses GNU C++, but need to define -shared option too, otherwise
-      # it will coredump.
-      lt_prog_compiler_pic='-fPIC -shared'
-      ;;
-
-    osf3* | osf4* | osf5*)
-      lt_prog_compiler_wl='-Wl,'
-      # All OSF/1 code is PIC.
-      lt_prog_compiler_static='-non_shared'
-      ;;
-
-    rdos*)
-      lt_prog_compiler_static='-non_shared'
-      ;;
-
-    solaris*)
-      lt_prog_compiler_pic='-KPIC'
-      lt_prog_compiler_static='-Bstatic'
-      case $cc_basename in
-      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
-	lt_prog_compiler_wl='-Qoption ld ';;
-      *)
-	lt_prog_compiler_wl='-Wl,';;
-      esac
-      ;;
-
-    sunos4*)
-      lt_prog_compiler_wl='-Qoption ld '
-      lt_prog_compiler_pic='-PIC'
-      lt_prog_compiler_static='-Bstatic'
-      ;;
-
-    sysv4 | sysv4.2uw2* | sysv4.3*)
-      lt_prog_compiler_wl='-Wl,'
-      lt_prog_compiler_pic='-KPIC'
-      lt_prog_compiler_static='-Bstatic'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec ;then
-	lt_prog_compiler_pic='-Kconform_pic'
-	lt_prog_compiler_static='-Bstatic'
-      fi
-      ;;
-
-    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
-      lt_prog_compiler_wl='-Wl,'
-      lt_prog_compiler_pic='-KPIC'
-      lt_prog_compiler_static='-Bstatic'
-      ;;
-
-    unicos*)
-      lt_prog_compiler_wl='-Wl,'
-      lt_prog_compiler_can_build_shared=no
-      ;;
-
-    uts4*)
-      lt_prog_compiler_pic='-pic'
-      lt_prog_compiler_static='-Bstatic'
-      ;;
-
-    *)
-      lt_prog_compiler_can_build_shared=no
-      ;;
-    esac
-  fi
-
-case $host_os in
-  # For platforms which do not support PIC, -DPIC is meaningless:
-  *djgpp*)
-    lt_prog_compiler_pic=
-    ;;
-  *)
-    lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC"
-    ;;
-esac
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-if ${lt_cv_prog_compiler_pic+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_pic=$lt_prog_compiler_pic
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5
-$as_echo "$lt_cv_prog_compiler_pic" >&6; }
-lt_prog_compiler_pic=$lt_cv_prog_compiler_pic
-
-#
-# Check to make sure the PIC flag actually works.
-#
-if test -n "$lt_prog_compiler_pic"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5
-$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... " >&6; }
-if ${lt_cv_prog_compiler_pic_works+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_pic_works=no
-   ac_outfile=conftest.$ac_objext
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-   lt_compiler_flag="$lt_prog_compiler_pic -DPIC"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   # The option is referenced via a variable to avoid confusing sed.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>conftest.err)
-   ac_status=$?
-   cat conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s "$ac_outfile"; then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings other than the usual output.
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
-     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_pic_works=yes
-     fi
-   fi
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5
-$as_echo "$lt_cv_prog_compiler_pic_works" >&6; }
-
-if test x"$lt_cv_prog_compiler_pic_works" = xyes; then
-    case $lt_prog_compiler_pic in
-     "" | " "*) ;;
-     *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;;
-     esac
-else
-    lt_prog_compiler_pic=
-     lt_prog_compiler_can_build_shared=no
-fi
-
-fi
-
-
-
-
-
-
-
-
-
-
-
-#
-# Check to make sure the static flag actually works.
-#
-wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\"
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5
-$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; }
-if ${lt_cv_prog_compiler_static_works+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_static_works=no
-   save_LDFLAGS="$LDFLAGS"
-   LDFLAGS="$LDFLAGS $lt_tmp_static_flag"
-   echo "$lt_simple_link_test_code" > conftest.$ac_ext
-   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
-     # The linker can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     if test -s conftest.err; then
-       # Append any errors to the config.log.
-       cat conftest.err 1>&5
-       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
-       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-       if diff conftest.exp conftest.er2 >/dev/null; then
-         lt_cv_prog_compiler_static_works=yes
-       fi
-     else
-       lt_cv_prog_compiler_static_works=yes
-     fi
-   fi
-   $RM -r conftest*
-   LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5
-$as_echo "$lt_cv_prog_compiler_static_works" >&6; }
-
-if test x"$lt_cv_prog_compiler_static_works" = xyes; then
-    :
-else
-    lt_prog_compiler_static=
-fi
-
-
-
-
-
-
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_c_o=no
-   $RM -r conftest 2>/dev/null
-   mkdir conftest
-   cd conftest
-   mkdir out
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-   lt_compiler_flag="-o out/conftest2.$ac_objext"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>out/conftest.err)
-   ac_status=$?
-   cat out/conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s out/conftest2.$ac_objext
-   then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
-     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
-     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_c_o=yes
-     fi
-   fi
-   chmod u+w . 2>&5
-   $RM conftest*
-   # SGI C++ compiler will create directory out/ii_files/ for
-   # template instantiation
-   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
-   $RM out/* && rmdir out
-   cd ..
-   $RM -r conftest
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
-$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
-
-
-
-
-
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_c_o=no
-   $RM -r conftest 2>/dev/null
-   mkdir conftest
-   cd conftest
-   mkdir out
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-   lt_compiler_flag="-o out/conftest2.$ac_objext"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>out/conftest.err)
-   ac_status=$?
-   cat out/conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s out/conftest2.$ac_objext
-   then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
-     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
-     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_c_o=yes
-     fi
-   fi
-   chmod u+w . 2>&5
-   $RM conftest*
-   # SGI C++ compiler will create directory out/ii_files/ for
-   # template instantiation
-   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
-   $RM out/* && rmdir out
-   cd ..
-   $RM -r conftest
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5
-$as_echo "$lt_cv_prog_compiler_c_o" >&6; }
-
-
-
-
-hard_links="nottested"
-if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then
-  # do not overwrite the value of need_locks provided by the user
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5
-$as_echo_n "checking if we can lock with hard links... " >&6; }
-  hard_links=yes
-  $RM conftest*
-  ln conftest.a conftest.b 2>/dev/null && hard_links=no
-  touch conftest.a
-  ln conftest.a conftest.b 2>&5 || hard_links=no
-  ln conftest.a conftest.b 2>/dev/null && hard_links=no
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5
-$as_echo "$hard_links" >&6; }
-  if test "$hard_links" = no; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5
-$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;}
-    need_locks=warn
-  fi
-else
-  need_locks=no
-fi
-
-
-
-
-
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5
-$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
-
-  runpath_var=
-  allow_undefined_flag=
-  always_export_symbols=no
-  archive_cmds=
-  archive_expsym_cmds=
-  compiler_needs_object=no
-  enable_shared_with_static_runtimes=no
-  export_dynamic_flag_spec=
-  export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
-  hardcode_automatic=no
-  hardcode_direct=no
-  hardcode_direct_absolute=no
-  hardcode_libdir_flag_spec=
-  hardcode_libdir_separator=
-  hardcode_minus_L=no
-  hardcode_shlibpath_var=unsupported
-  inherit_rpath=no
-  link_all_deplibs=unknown
-  module_cmds=
-  module_expsym_cmds=
-  old_archive_from_new_cmds=
-  old_archive_from_expsyms_cmds=
-  thread_safe_flag_spec=
-  whole_archive_flag_spec=
-  # include_expsyms should be a list of space-separated symbols to be *always*
-  # included in the symbol list
-  include_expsyms=
-  # exclude_expsyms can be an extended regexp of symbols to exclude
-  # it will be wrapped by ` (' and `)$', so one must not match beginning or
-  # end of line.  Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
-  # as well as any symbol that contains `d'.
-  exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
-  # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out
-  # platforms (ab)use it in PIC code, but their linkers get confused if
-  # the symbol is explicitly referenced.  Since portable code cannot
-  # rely on this symbol name, it's probably fine to never include it in
-  # preloaded symbol tables.
-  # Exclude shared library initialization/finalization symbols.
-  extract_expsyms_cmds=
-
-  case $host_os in
-  cygwin* | mingw* | pw32* | cegcc*)
-    # FIXME: the MSVC++ port hasn't been tested in a loooong time
-    # When not using gcc, we currently assume that we are using
-    # Microsoft Visual C++.
-    if test "$GCC" != yes; then
-      with_gnu_ld=no
-    fi
-    ;;
-  interix*)
-    # we just hope/assume this is gcc and not c89 (= MSVC++)
-    with_gnu_ld=yes
-    ;;
-  openbsd*)
-    with_gnu_ld=no
-    ;;
-  esac
-
-  ld_shlibs=yes
-
-  # On some targets, GNU ld is compatible enough with the native linker
-  # that we're better off using the native interface for both.
-  lt_use_gnu_ld_interface=no
-  if test "$with_gnu_ld" = yes; then
-    case $host_os in
-      aix*)
-	# The AIX port of GNU ld has always aspired to compatibility
-	# with the native linker.  However, as the warning in the GNU ld
-	# block says, versions before 2.19.5* couldn't really create working
-	# shared libraries, regardless of the interface used.
-	case `$LD -v 2>&1` in
-	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
-	  *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;;
-	  *\ \(GNU\ Binutils\)\ [3-9]*) ;;
-	  *)
-	    lt_use_gnu_ld_interface=yes
-	    ;;
-	esac
-	;;
-      *)
-	lt_use_gnu_ld_interface=yes
-	;;
-    esac
-  fi
-
-  if test "$lt_use_gnu_ld_interface" = yes; then
-    # If archive_cmds runs LD, not CC, wlarc should be empty
-    wlarc='${wl}'
-
-    # Set some defaults for GNU ld with shared library support. These
-    # are reset later if shared libraries are not supported. Putting them
-    # here allows them to be overridden if necessary.
-    runpath_var=LD_RUN_PATH
-    hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-    export_dynamic_flag_spec='${wl}--export-dynamic'
-    # ancient GNU ld didn't support --whole-archive et. al.
-    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
-      whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
-    else
-      whole_archive_flag_spec=
-    fi
-    supports_anon_versioning=no
-    case `$LD -v 2>&1` in
-      *GNU\ gold*) supports_anon_versioning=yes ;;
-      *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11
-      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
-      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
-      *\ 2.11.*) ;; # other 2.11 versions
-      *) supports_anon_versioning=yes ;;
-    esac
-
-    # See if GNU ld supports shared libraries.
-    case $host_os in
-    aix[3-9]*)
-      # On AIX/PPC, the GNU linker is very broken
-      if test "$host_cpu" != ia64; then
-	ld_shlibs=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: the GNU linker, at least up to release 2.19, is reported
-*** to be unable to reliably create shared libraries on AIX.
-*** Therefore, libtool is disabling shared libraries support.  If you
-*** really care for shared libraries, you may want to install binutils
-*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
-*** You will then need to restart the configuration process.
-
-_LT_EOF
-      fi
-      ;;
-
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-            archive_expsym_cmds=''
-        ;;
-      m68k)
-            archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
-            hardcode_libdir_flag_spec='-L$libdir'
-            hardcode_minus_L=yes
-        ;;
-      esac
-      ;;
-
-    beos*)
-      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	allow_undefined_flag=unsupported
-	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
-	# support --undefined.  This deserves some investigation.  FIXME
-	archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-      else
-	ld_shlibs=no
-      fi
-      ;;
-
-    cygwin* | mingw* | pw32* | cegcc*)
-      # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless,
-      # as there is no search path for DLLs.
-      hardcode_libdir_flag_spec='-L$libdir'
-      export_dynamic_flag_spec='${wl}--export-all-symbols'
-      allow_undefined_flag=unsupported
-      always_export_symbols=no
-      enable_shared_with_static_runtimes=yes
-      export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-      exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
-      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
-        archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-	# If the export-symbols file already is a .def file (1st line
-	# is EXPORTS), use it as is; otherwise, prepend...
-	archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-	  cp $export_symbols $output_objdir/$soname.def;
-	else
-	  echo EXPORTS > $output_objdir/$soname.def;
-	  cat $export_symbols >> $output_objdir/$soname.def;
-	fi~
-	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-      else
-	ld_shlibs=no
-      fi
-      ;;
-
-    haiku*)
-      archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-      link_all_deplibs=yes
-      ;;
-
-    interix[3-9]*)
-      hardcode_direct=no
-      hardcode_shlibpath_var=no
-      hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
-      export_dynamic_flag_spec='${wl}-E'
-      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
-      # Instead, shared libraries are loaded at an image base (0x10000000 by
-      # default) and relocated if they conflict, which is a slow very memory
-      # consuming and fragmenting process.  To avoid this, we pick a random,
-      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
-      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
-      archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
-      archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
-      ;;
-
-    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
-      tmp_diet=no
-      if test "$host_os" = linux-dietlibc; then
-	case $cc_basename in
-	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
-	esac
-      fi
-      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
-	 && test "$tmp_diet" = no
-      then
-	tmp_addflag=' $pic_flag'
-	tmp_sharedflag='-shared'
-	case $cc_basename,$host_cpu in
-        pgcc*)				# Portland Group C compiler
-	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  tmp_addflag=' $pic_flag'
-	  ;;
-	pgf77* | pgf90* | pgf95* | pgfortran*)
-					# Portland Group f77 and f90 compilers
-	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  tmp_addflag=' $pic_flag -Mnomain' ;;
-	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
-	  tmp_addflag=' -i_dynamic' ;;
-	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
-	  tmp_addflag=' -i_dynamic -nofor_main' ;;
-	ifc* | ifort*)			# Intel Fortran compiler
-	  tmp_addflag=' -nofor_main' ;;
-	lf95*)				# Lahey Fortran 8.1
-	  whole_archive_flag_spec=
-	  tmp_sharedflag='--shared' ;;
-	xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below)
-	  tmp_sharedflag='-qmkshrobj'
-	  tmp_addflag= ;;
-	nvcc*)	# Cuda Compiler Driver 2.2
-	  whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  compiler_needs_object=yes
-	  ;;
-	esac
-	case `$CC -V 2>&1 | sed 5q` in
-	*Sun\ C*)			# Sun C 5.9
-	  whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  compiler_needs_object=yes
-	  tmp_sharedflag='-G' ;;
-	*Sun\ F*)			# Sun Fortran 8.3
-	  tmp_sharedflag='-G' ;;
-	esac
-	archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-
-        if test "x$supports_anon_versioning" = xyes; then
-          archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
-	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
-	    echo "local: *; };" >> $output_objdir/$libname.ver~
-	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
-        fi
-
-	case $cc_basename in
-	xlf* | bgf* | bgxlf* | mpixlf*)
-	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
-	  whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive'
-	  hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-	  archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
-	  if test "x$supports_anon_versioning" = xyes; then
-	    archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~
-	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
-	      echo "local: *; };" >> $output_objdir/$libname.ver~
-	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-	  fi
-	  ;;
-	esac
-      else
-        ld_shlibs=no
-      fi
-      ;;
-
-    netbsd*)
-      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-	archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
-	wlarc=
-      else
-	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      fi
-      ;;
-
-    solaris*)
-      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
-	ld_shlibs=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: The releases 2.8.* of the GNU linker cannot reliably
-*** create shared libraries on Solaris systems.  Therefore, libtool
-*** is disabling shared libraries support.  We urge you to upgrade GNU
-*** binutils to release 2.9.1 or newer.  Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
-      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      else
-	ld_shlibs=no
-      fi
-      ;;
-
-    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
-      case `$LD -v 2>&1` in
-        *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*)
-	ld_shlibs=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not
-*** reliably create shared libraries on SCO systems.  Therefore, libtool
-*** is disabling shared libraries support.  We urge you to upgrade GNU
-*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
-	;;
-	*)
-	  # For security reasons, it is highly recommended that you always
-	  # use absolute paths for naming shared libraries, and exclude the
-	  # DT_RUNPATH tag from executables and libraries.  But doing so
-	  # requires that you compile everything twice, which is a pain.
-	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	    hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-	    archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	    archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-	  else
-	    ld_shlibs=no
-	  fi
-	;;
-      esac
-      ;;
-
-    sunos4*)
-      archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-      wlarc=
-      hardcode_direct=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    *)
-      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      else
-	ld_shlibs=no
-      fi
-      ;;
-    esac
-
-    if test "$ld_shlibs" = no; then
-      runpath_var=
-      hardcode_libdir_flag_spec=
-      export_dynamic_flag_spec=
-      whole_archive_flag_spec=
-    fi
-  else
-    # PORTME fill in a description of your system's linker (not GNU ld)
-    case $host_os in
-    aix3*)
-      allow_undefined_flag=unsupported
-      always_export_symbols=yes
-      archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
-      # Note: this linker hardcodes the directories in LIBPATH if there
-      # are no directories specified by -L.
-      hardcode_minus_L=yes
-      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
-	# Neither direct hardcoding nor static linking is supported with a
-	# broken collect2.
-	hardcode_direct=unsupported
-      fi
-      ;;
-
-    aix[4-9]*)
-      if test "$host_cpu" = ia64; then
-	# On IA64, the linker does run time linking by default, so we don't
-	# have to do anything special.
-	aix_use_runtimelinking=no
-	exp_sym_flag='-Bexport'
-	no_entry_flag=""
-      else
-	# If we're using GNU nm, then we don't want the "-C" option.
-	# -C means demangle to AIX nm, but means don't demangle with GNU nm
-	# Also, AIX nm treats weak defined symbols like other global
-	# defined symbols, whereas GNU nm marks them as "W".
-	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
-	  export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
-	else
-	  export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
-	fi
-	aix_use_runtimelinking=no
-
-	# Test if we are trying to use run time linking or normal
-	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
-	# need to do runtime linking.
-	case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*)
-	  for ld_flag in $LDFLAGS; do
-	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
-	    aix_use_runtimelinking=yes
-	    break
-	  fi
-	  done
-	  ;;
-	esac
-
-	exp_sym_flag='-bexport'
-	no_entry_flag='-bnoentry'
-      fi
-
-      # When large executables or shared objects are built, AIX ld can
-      # have problems creating the table of contents.  If linking a library
-      # or program results in "error TOC overflow" add -mminimal-toc to
-      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
-      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
-
-      archive_cmds=''
-      hardcode_direct=yes
-      hardcode_direct_absolute=yes
-      hardcode_libdir_separator=':'
-      link_all_deplibs=yes
-      file_list_spec='${wl}-f,'
-
-      if test "$GCC" = yes; then
-	case $host_os in aix4.[012]|aix4.[012].*)
-	# We only want to do this on AIX 4.2 and lower, the check
-	# below for broken collect2 doesn't work under 4.3+
-	  collect2name=`${CC} -print-prog-name=collect2`
-	  if test -f "$collect2name" &&
-	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
-	  then
-	  # We have reworked collect2
-	  :
-	  else
-	  # We have old collect2
-	  hardcode_direct=unsupported
-	  # It fails to find uninstalled libraries when the uninstalled
-	  # path is not listed in the libpath.  Setting hardcode_minus_L
-	  # to unsupported forces relinking
-	  hardcode_minus_L=yes
-	  hardcode_libdir_flag_spec='-L$libdir'
-	  hardcode_libdir_separator=
-	  fi
-	  ;;
-	esac
-	shared_flag='-shared'
-	if test "$aix_use_runtimelinking" = yes; then
-	  shared_flag="$shared_flag "'${wl}-G'
-	fi
-      else
-	# not using gcc
-	if test "$host_cpu" = ia64; then
-	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
-	# chokes on -Wl,-G. The following line is correct:
-	  shared_flag='-G'
-	else
-	  if test "$aix_use_runtimelinking" = yes; then
-	    shared_flag='${wl}-G'
-	  else
-	    shared_flag='${wl}-bM:SRE'
-	  fi
-	fi
-      fi
-
-      export_dynamic_flag_spec='${wl}-bexpall'
-      # It seems that -bexpall does not export symbols beginning with
-      # underscore (_), so it is better to generate a list of symbols to export.
-      always_export_symbols=yes
-      if test "$aix_use_runtimelinking" = yes; then
-	# Warning - without using the other runtime loading flags (-brtl),
-	# -berok will link without error, but may produce a broken library.
-	allow_undefined_flag='-berok'
-        # Determine the default libpath from the value encoded in an
-        # empty executable.
-        if test "${lt_cv_aix_libpath+set}" = set; then
-  aix_libpath=$lt_cv_aix_libpath
-else
-  if ${lt_cv_aix_libpath_+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-
-  lt_aix_libpath_sed='
-      /Import File Strings/,/^$/ {
-	  /^0/ {
-	      s/^0  *\([^ ]*\) *$/\1/
-	      p
-	  }
-      }'
-  lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  # Check for a 64-bit object if we didn't find anything.
-  if test -z "$lt_cv_aix_libpath_"; then
-    lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-  if test -z "$lt_cv_aix_libpath_"; then
-    lt_cv_aix_libpath_="/usr/lib:/lib"
-  fi
-
-fi
-
-  aix_libpath=$lt_cv_aix_libpath_
-fi
-
-        hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
-        archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-      else
-	if test "$host_cpu" = ia64; then
-	  hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib'
-	  allow_undefined_flag="-z nodefs"
-	  archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
-	else
-	 # Determine the default libpath from the value encoded in an
-	 # empty executable.
-	 if test "${lt_cv_aix_libpath+set}" = set; then
-  aix_libpath=$lt_cv_aix_libpath
-else
-  if ${lt_cv_aix_libpath_+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-
-  lt_aix_libpath_sed='
-      /Import File Strings/,/^$/ {
-	  /^0/ {
-	      s/^0  *\([^ ]*\) *$/\1/
-	      p
-	  }
-      }'
-  lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  # Check for a 64-bit object if we didn't find anything.
-  if test -z "$lt_cv_aix_libpath_"; then
-    lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-  if test -z "$lt_cv_aix_libpath_"; then
-    lt_cv_aix_libpath_="/usr/lib:/lib"
-  fi
-
-fi
-
-  aix_libpath=$lt_cv_aix_libpath_
-fi
-
-	 hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath"
-	  # Warning - without using the other run time loading flags,
-	  # -berok will link without error, but may produce a broken library.
-	  no_undefined_flag=' ${wl}-bernotok'
-	  allow_undefined_flag=' ${wl}-berok'
-	  if test "$with_gnu_ld" = yes; then
-	    # We only use this code for GNU lds that support --whole-archive.
-	    whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
-	  else
-	    # Exported symbols can be pulled into shared objects from archives
-	    whole_archive_flag_spec='$convenience'
-	  fi
-	  archive_cmds_need_lc=yes
-	  # This is similar to how AIX traditionally builds its shared libraries.
-	  archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
-	fi
-      fi
-      ;;
-
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-            archive_expsym_cmds=''
-        ;;
-      m68k)
-            archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
-            hardcode_libdir_flag_spec='-L$libdir'
-            hardcode_minus_L=yes
-        ;;
-      esac
-      ;;
-
-    bsdi[45]*)
-      export_dynamic_flag_spec=-rdynamic
-      ;;
-
-    cygwin* | mingw* | pw32* | cegcc*)
-      # When not using gcc, we currently assume that we are using
-      # Microsoft Visual C++.
-      # hardcode_libdir_flag_spec is actually meaningless, as there is
-      # no search path for DLLs.
-      case $cc_basename in
-      cl*)
-	# Native MSVC
-	hardcode_libdir_flag_spec=' '
-	allow_undefined_flag=unsupported
-	always_export_symbols=yes
-	file_list_spec='@'
-	# Tell ltmain to make .lib files, not .a files.
-	libext=lib
-	# Tell ltmain to make .dll files, not .so files.
-	shrext_cmds=".dll"
-	# FIXME: Setting linknames here is a bad hack.
-	archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-	archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-	    sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-	  else
-	    sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-	  fi~
-	  $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-	  linknames='
-	# The linker will not automatically build a static lib if we build a DLL.
-	# _LT_TAGVAR(old_archive_from_new_cmds, )='true'
-	enable_shared_with_static_runtimes=yes
-	exclude_expsyms='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
-	export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-	# Don't use ranlib
-	old_postinstall_cmds='chmod 644 $oldlib'
-	postlink_cmds='lt_outputfile="@OUTPUT@"~
-	  lt_tool_outputfile="@TOOL_OUTPUT@"~
-	  case $lt_outputfile in
-	    *.exe|*.EXE) ;;
-	    *)
-	      lt_outputfile="$lt_outputfile.exe"
-	      lt_tool_outputfile="$lt_tool_outputfile.exe"
-	      ;;
-	  esac~
-	  if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-	    $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-	    $RM "$lt_outputfile.manifest";
-	  fi'
-	;;
-      *)
-	# Assume MSVC wrapper
-	hardcode_libdir_flag_spec=' '
-	allow_undefined_flag=unsupported
-	# Tell ltmain to make .lib files, not .a files.
-	libext=lib
-	# Tell ltmain to make .dll files, not .so files.
-	shrext_cmds=".dll"
-	# FIXME: Setting linknames here is a bad hack.
-	archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-	# The linker will automatically build a .lib file if we build a DLL.
-	old_archive_from_new_cmds='true'
-	# FIXME: Should let the user specify the lib program.
-	old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs'
-	enable_shared_with_static_runtimes=yes
-	;;
-      esac
-      ;;
-
-    darwin* | rhapsody*)
-
-
-  archive_cmds_need_lc=no
-  hardcode_direct=no
-  hardcode_automatic=yes
-  hardcode_shlibpath_var=unsupported
-  if test "$lt_cv_ld_force_load" = "yes"; then
-    whole_archive_flag_spec='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
-
-  else
-    whole_archive_flag_spec=''
-  fi
-  link_all_deplibs=yes
-  allow_undefined_flag="$_lt_dar_allow_undefined"
-  case $cc_basename in
-     ifort*) _lt_dar_can_shared=yes ;;
-     *) _lt_dar_can_shared=$GCC ;;
-  esac
-  if test "$_lt_dar_can_shared" = "yes"; then
-    output_verbose_link_cmd=func_echo_all
-    archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
-    module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
-    archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
-    module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
-
-  else
-  ld_shlibs=no
-  fi
-
-      ;;
-
-    dgux*)
-      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_libdir_flag_spec='-L$libdir'
-      hardcode_shlibpath_var=no
-      ;;
-
-    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
-    # support.  Future versions do this automatically, but an explicit c++rt0.o
-    # does not break anything, and helps significantly (at the cost of a little
-    # extra space).
-    freebsd2.2*)
-      archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
-      hardcode_libdir_flag_spec='-R$libdir'
-      hardcode_direct=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
-    freebsd2.*)
-      archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_direct=yes
-      hardcode_minus_L=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
-    freebsd* | dragonfly*)
-      archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-      hardcode_libdir_flag_spec='-R$libdir'
-      hardcode_direct=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    hpux9*)
-      if test "$GCC" = yes; then
-	archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-      else
-	archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-      fi
-      hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
-      hardcode_libdir_separator=:
-      hardcode_direct=yes
-
-      # hardcode_minus_L: Not really in the search PATH,
-      # but as the default location of the library.
-      hardcode_minus_L=yes
-      export_dynamic_flag_spec='${wl}-E'
-      ;;
-
-    hpux10*)
-      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-	archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
-      fi
-      if test "$with_gnu_ld" = no; then
-	hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
-	hardcode_libdir_separator=:
-	hardcode_direct=yes
-	hardcode_direct_absolute=yes
-	export_dynamic_flag_spec='${wl}-E'
-	# hardcode_minus_L: Not really in the search PATH,
-	# but as the default location of the library.
-	hardcode_minus_L=yes
-      fi
-      ;;
-
-    hpux11*)
-      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-	case $host_cpu in
-	hppa*64*)
-	  archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	ia64*)
-	  archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	*)
-	  archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	esac
-      else
-	case $host_cpu in
-	hppa*64*)
-	  archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	ia64*)
-	  archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	*)
-
-	  # Older versions of the 11.00 compiler do not understand -b yet
-	  # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does)
-	  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5
-$as_echo_n "checking if $CC understands -b... " >&6; }
-if ${lt_cv_prog_compiler__b+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler__b=no
-   save_LDFLAGS="$LDFLAGS"
-   LDFLAGS="$LDFLAGS -b"
-   echo "$lt_simple_link_test_code" > conftest.$ac_ext
-   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
-     # The linker can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     if test -s conftest.err; then
-       # Append any errors to the config.log.
-       cat conftest.err 1>&5
-       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
-       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-       if diff conftest.exp conftest.er2 >/dev/null; then
-         lt_cv_prog_compiler__b=yes
-       fi
-     else
-       lt_cv_prog_compiler__b=yes
-     fi
-   fi
-   $RM -r conftest*
-   LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5
-$as_echo "$lt_cv_prog_compiler__b" >&6; }
-
-if test x"$lt_cv_prog_compiler__b" = xyes; then
-    archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-else
-    archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
-fi
-
-	  ;;
-	esac
-      fi
-      if test "$with_gnu_ld" = no; then
-	hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir'
-	hardcode_libdir_separator=:
-
-	case $host_cpu in
-	hppa*64*|ia64*)
-	  hardcode_direct=no
-	  hardcode_shlibpath_var=no
-	  ;;
-	*)
-	  hardcode_direct=yes
-	  hardcode_direct_absolute=yes
-	  export_dynamic_flag_spec='${wl}-E'
-
-	  # hardcode_minus_L: Not really in the search PATH,
-	  # but as the default location of the library.
-	  hardcode_minus_L=yes
-	  ;;
-	esac
-      fi
-      ;;
-
-    irix5* | irix6* | nonstopux*)
-      if test "$GCC" = yes; then
-	archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-	# Try to use the -exported_symbol ld option, if it does not
-	# work, assume that -exports_file does not work either and
-	# implicitly export all symbols.
-	# This should be the same for all languages, so no per-tag cache variable.
-	{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-if ${lt_cv_irix_exported_symbol+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  save_LDFLAGS="$LDFLAGS"
-	   LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-	   cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-int foo (void) { return 0; }
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  lt_cv_irix_exported_symbol=yes
-else
-  lt_cv_irix_exported_symbol=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-           LDFLAGS="$save_LDFLAGS"
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-	if test "$lt_cv_irix_exported_symbol" = yes; then
-          archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-	fi
-      else
-	archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-	archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-      fi
-      archive_cmds_need_lc='no'
-      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator=:
-      inherit_rpath=yes
-      link_all_deplibs=yes
-      ;;
-
-    netbsd*)
-      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-	archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
-      else
-	archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
-      fi
-      hardcode_libdir_flag_spec='-R$libdir'
-      hardcode_direct=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    newsos6)
-      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_direct=yes
-      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator=:
-      hardcode_shlibpath_var=no
-      ;;
-
-    *nto* | *qnx*)
-      ;;
-
-    openbsd*)
-      if test -f /usr/libexec/ld.so; then
-	hardcode_direct=yes
-	hardcode_shlibpath_var=no
-	hardcode_direct_absolute=yes
-	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
-	  archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-	  archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
-	  hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
-	  export_dynamic_flag_spec='${wl}-E'
-	else
-	  case $host_os in
-	   openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*)
-	     archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-	     hardcode_libdir_flag_spec='-R$libdir'
-	     ;;
-	   *)
-	     archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-	     hardcode_libdir_flag_spec='${wl}-rpath,$libdir'
-	     ;;
-	  esac
-	fi
-      else
-	ld_shlibs=no
-      fi
-      ;;
-
-    os2*)
-      hardcode_libdir_flag_spec='-L$libdir'
-      hardcode_minus_L=yes
-      allow_undefined_flag=unsupported
-      archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
-      old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
-      ;;
-
-    osf3*)
-      if test "$GCC" = yes; then
-	allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-      else
-	allow_undefined_flag=' -expect_unresolved \*'
-	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-      fi
-      archive_cmds_need_lc='no'
-      hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator=:
-      ;;
-
-    osf4* | osf5*)	# as osf3* with the addition of -msym flag
-      if test "$GCC" = yes; then
-	allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*'
-	archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-	hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir'
-      else
-	allow_undefined_flag=' -expect_unresolved \*'
-	archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-	archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
-	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
-
-	# Both c and cxx compiler support -rpath directly
-	hardcode_libdir_flag_spec='-rpath $libdir'
-      fi
-      archive_cmds_need_lc='no'
-      hardcode_libdir_separator=:
-      ;;
-
-    solaris*)
-      no_undefined_flag=' -z defs'
-      if test "$GCC" = yes; then
-	wlarc='${wl}'
-	archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-      else
-	case `$CC -V 2>&1` in
-	*"Compilers 5.0"*)
-	  wlarc=''
-	  archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
-	  ;;
-	*)
-	  wlarc='${wl}'
-	  archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
-	  archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-	  ;;
-	esac
-      fi
-      hardcode_libdir_flag_spec='-R$libdir'
-      hardcode_shlibpath_var=no
-      case $host_os in
-      solaris2.[0-5] | solaris2.[0-5].*) ;;
-      *)
-	# The compiler driver will combine and reorder linker options,
-	# but understands `-z linker_flag'.  GCC discards it without `$wl',
-	# but is careful enough not to reorder.
-	# Supported since Solaris 2.6 (maybe 2.5.1?)
-	if test "$GCC" = yes; then
-	  whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
-	else
-	  whole_archive_flag_spec='-z allextract$convenience -z defaultextract'
-	fi
-	;;
-      esac
-      link_all_deplibs=yes
-      ;;
-
-    sunos4*)
-      if test "x$host_vendor" = xsequent; then
-	# Use $CC to link under sequent, because it throws in some extra .o
-	# files that make .init and .fini sections work.
-	archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
-      fi
-      hardcode_libdir_flag_spec='-L$libdir'
-      hardcode_direct=yes
-      hardcode_minus_L=yes
-      hardcode_shlibpath_var=no
-      ;;
-
-    sysv4)
-      case $host_vendor in
-	sni)
-	  archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  hardcode_direct=yes # is this really true???
-	;;
-	siemens)
-	  ## LD is ld it makes a PLAMLIB
-	  ## CC just makes a GrossModule.
-	  archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags'
-	  reload_cmds='$CC -r -o $output$reload_objs'
-	  hardcode_direct=no
-        ;;
-	motorola)
-	  archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  hardcode_direct=no #Motorola manual says yes, but my tests say they lie
-	;;
-      esac
-      runpath_var='LD_RUN_PATH'
-      hardcode_shlibpath_var=no
-      ;;
-
-    sysv4.3*)
-      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_shlibpath_var=no
-      export_dynamic_flag_spec='-Bexport'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec; then
-	archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	hardcode_shlibpath_var=no
-	runpath_var=LD_RUN_PATH
-	hardcode_runpath_var=yes
-	ld_shlibs=yes
-      fi
-      ;;
-
-    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*)
-      no_undefined_flag='${wl}-z,text'
-      archive_cmds_need_lc=no
-      hardcode_shlibpath_var=no
-      runpath_var='LD_RUN_PATH'
-
-      if test "$GCC" = yes; then
-	archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      fi
-      ;;
-
-    sysv5* | sco3.2v5* | sco5v6*)
-      # Note: We can NOT use -z defs as we might desire, because we do not
-      # link with -lc, and that would cause any symbols used from libc to
-      # always be unresolved, which means just about no library would
-      # ever link correctly.  If we're not using GNU ld we use -z text
-      # though, which does catch some bad symbols but isn't as heavy-handed
-      # as -z defs.
-      no_undefined_flag='${wl}-z,text'
-      allow_undefined_flag='${wl}-z,nodefs'
-      archive_cmds_need_lc=no
-      hardcode_shlibpath_var=no
-      hardcode_libdir_flag_spec='${wl}-R,$libdir'
-      hardcode_libdir_separator=':'
-      link_all_deplibs=yes
-      export_dynamic_flag_spec='${wl}-Bexport'
-      runpath_var='LD_RUN_PATH'
-
-      if test "$GCC" = yes; then
-	archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      fi
-      ;;
-
-    uts4*)
-      archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_libdir_flag_spec='-L$libdir'
-      hardcode_shlibpath_var=no
-      ;;
-
-    *)
-      ld_shlibs=no
-      ;;
-    esac
-
-    if test x$host_vendor = xsni; then
-      case $host in
-      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
-	export_dynamic_flag_spec='${wl}-Blargedynsym'
-	;;
-      esac
-    fi
-  fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5
-$as_echo "$ld_shlibs" >&6; }
-test "$ld_shlibs" = no && can_build_shared=no
-
-with_gnu_ld=$with_gnu_ld
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-#
-# Do we need to explicitly link libc?
-#
-case "x$archive_cmds_need_lc" in
-x|xyes)
-  # Assume -lc should be added
-  archive_cmds_need_lc=yes
-
-  if test "$enable_shared" = yes && test "$GCC" = yes; then
-    case $archive_cmds in
-    *'~'*)
-      # FIXME: we may have to deal with multi-command sequences.
-      ;;
-    '$CC '*)
-      # Test whether the compiler implicitly links with -lc since on some
-      # systems, -lgcc has to come before -lc. If gcc already passes -lc
-      # to ld, don't add -lc before -lgcc.
-      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5
-$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; }
-if ${lt_cv_archive_cmds_need_lc+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  $RM conftest*
-	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-	if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } 2>conftest.err; then
-	  soname=conftest
-	  lib=conftest
-	  libobjs=conftest.$ac_objext
-	  deplibs=
-	  wl=$lt_prog_compiler_wl
-	  pic_flag=$lt_prog_compiler_pic
-	  compiler_flags=-v
-	  linker_flags=-v
-	  verstring=
-	  output_objdir=.
-	  libname=conftest
-	  lt_save_allow_undefined_flag=$allow_undefined_flag
-	  allow_undefined_flag=
-	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5
-  (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-	  then
-	    lt_cv_archive_cmds_need_lc=no
-	  else
-	    lt_cv_archive_cmds_need_lc=yes
-	  fi
-	  allow_undefined_flag=$lt_save_allow_undefined_flag
-	else
-	  cat conftest.err 1>&5
-	fi
-	$RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5
-$as_echo "$lt_cv_archive_cmds_need_lc" >&6; }
-      archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc
-      ;;
-    esac
-  fi
-  ;;
-esac
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5
-$as_echo_n "checking dynamic linker characteristics... " >&6; }
-
-if test "$GCC" = yes; then
-  case $host_os in
-    darwin*) lt_awk_arg="/^libraries:/,/LR/" ;;
-    *) lt_awk_arg="/^libraries:/" ;;
-  esac
-  case $host_os in
-    mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;;
-    *) lt_sed_strip_eq="s,=/,/,g" ;;
-  esac
-  lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq`
-  case $lt_search_path_spec in
-  *\;*)
-    # if the path contains ";" then we assume it to be the separator
-    # otherwise default to the standard path separator (i.e. ":") - it is
-    # assumed that no part of a normal pathname contains ";" but that should
-    # okay in the real world where ";" in dirpaths is itself problematic.
-    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'`
-    ;;
-  *)
-    lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"`
-    ;;
-  esac
-  # Ok, now we have the path, separated by spaces, we can step through it
-  # and add multilib dir if necessary.
-  lt_tmp_lt_search_path_spec=
-  lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null`
-  for lt_sys_path in $lt_search_path_spec; do
-    if test -d "$lt_sys_path/$lt_multi_os_dir"; then
-      lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir"
-    else
-      test -d "$lt_sys_path" && \
-	lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path"
-    fi
-  done
-  lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk '
-BEGIN {RS=" "; FS="/|\n";} {
-  lt_foo="";
-  lt_count=0;
-  for (lt_i = NF; lt_i > 0; lt_i--) {
-    if ($lt_i != "" && $lt_i != ".") {
-      if ($lt_i == "..") {
-        lt_count++;
-      } else {
-        if (lt_count == 0) {
-          lt_foo="/" $lt_i lt_foo;
-        } else {
-          lt_count--;
-        }
-      }
-    }
-  }
-  if (lt_foo != "") { lt_freq[lt_foo]++; }
-  if (lt_freq[lt_foo] == 1) { print lt_foo; }
-}'`
-  # AWK program above erroneously prepends '/' to C:/dos/paths
-  # for these hosts.
-  case $host_os in
-    mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\
-      $SED 's,/\([A-Za-z]:\),\1,g'` ;;
-  esac
-  sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP`
-else
-  sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib"
-fi
-library_names_spec=
-libname_spec='lib$name'
-soname_spec=
-shrext_cmds=".so"
-postinstall_cmds=
-postuninstall_cmds=
-finish_cmds=
-finish_eval=
-shlibpath_var=
-shlibpath_overrides_runpath=unknown
-version_type=none
-dynamic_linker="$host_os ld.so"
-sys_lib_dlsearch_path_spec="/lib /usr/lib"
-need_lib_prefix=unknown
-hardcode_into_libs=no
-
-# when you set need_version to no, make sure it does not cause -set_version
-# flags to be left without arguments
-need_version=unknown
-
-case $host_os in
-aix3*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
-  shlibpath_var=LIBPATH
-
-  # AIX 3 has no versioning support, so we append a major version to the name.
-  soname_spec='${libname}${release}${shared_ext}$major'
-  ;;
-
-aix[4-9]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  hardcode_into_libs=yes
-  if test "$host_cpu" = ia64; then
-    # AIX 5 supports IA64
-    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
-    shlibpath_var=LD_LIBRARY_PATH
-  else
-    # With GCC up to 2.95.x, collect2 would create an import file
-    # for dependence libraries.  The import file would start with
-    # the line `#! .'.  This would cause the generated library to
-    # depend on `.', always an invalid library.  This was fixed in
-    # development snapshots of GCC prior to 3.0.
-    case $host_os in
-      aix4 | aix4.[01] | aix4.[01].*)
-      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
-	   echo ' yes '
-	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
-	:
-      else
-	can_build_shared=no
-      fi
-      ;;
-    esac
-    # AIX (on Power*) has no versioning support, so currently we can not hardcode correct
-    # soname into executable. Probably we can add versioning support to
-    # collect2, so additional links can be useful in future.
-    if test "$aix_use_runtimelinking" = yes; then
-      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
-      # instead of lib<name>.a to let people know that these are not
-      # typical AIX shared libraries.
-      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    else
-      # We preserve .a as extension for shared libraries through AIX4.2
-      # and later when we are not doing run time linking.
-      library_names_spec='${libname}${release}.a $libname.a'
-      soname_spec='${libname}${release}${shared_ext}$major'
-    fi
-    shlibpath_var=LIBPATH
-  fi
-  ;;
-
-amigaos*)
-  case $host_cpu in
-  powerpc)
-    # Since July 2007 AmigaOS4 officially supports .so libraries.
-    # When compiling the executable, add -use-dynld -Lsobjs: to the compileline.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    ;;
-  m68k)
-    library_names_spec='$libname.ixlibrary $libname.a'
-    # Create ${libname}_ixlibrary.a entries in /sys/libs.
-    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
-    ;;
-  esac
-  ;;
-
-beos*)
-  library_names_spec='${libname}${shared_ext}'
-  dynamic_linker="$host_os ld.so"
-  shlibpath_var=LIBRARY_PATH
-  ;;
-
-bsdi[45]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
-  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
-  # the default ld.so.conf also contains /usr/contrib/lib and
-  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
-  # libtool to hard-code these into programs
-  ;;
-
-cygwin* | mingw* | pw32* | cegcc*)
-  version_type=windows
-  shrext_cmds=".dll"
-  need_version=no
-  need_lib_prefix=no
-
-  case $GCC,$cc_basename in
-  yes,*)
-    # gcc
-    library_names_spec='$libname.dll.a'
-    # DLL is installed to $(libdir)/../bin by postinstall_cmds
-    postinstall_cmds='base_file=`basename \${file}`~
-      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-      dldir=$destdir/`dirname \$dlpath`~
-      test -d \$dldir || mkdir -p \$dldir~
-      $install_prog $dir/$dlname \$dldir/$dlname~
-      chmod a+x \$dldir/$dlname~
-      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
-        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
-      fi'
-    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-      dlpath=$dir/\$dldll~
-       $RM \$dlpath'
-    shlibpath_overrides_runpath=yes
-
-    case $host_os in
-    cygwin*)
-      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
-      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-
-      sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"
-      ;;
-    mingw* | cegcc*)
-      # MinGW DLLs use traditional 'lib' prefix
-      soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-      ;;
-    pw32*)
-      # pw32 DLLs use 'pw' prefix rather than 'lib'
-      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-      ;;
-    esac
-    dynamic_linker='Win32 ld.exe'
-    ;;
-
-  *,cl*)
-    # Native MSVC
-    libname_spec='$name'
-    soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-    library_names_spec='${libname}.dll.lib'
-
-    case $build_os in
-    mingw*)
-      sys_lib_search_path_spec=
-      lt_save_ifs=$IFS
-      IFS=';'
-      for lt_path in $LIB
-      do
-        IFS=$lt_save_ifs
-        # Let DOS variable expansion print the short 8.3 style file name.
-        lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-        sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-      done
-      IFS=$lt_save_ifs
-      # Convert to MSYS style.
-      sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-      ;;
-    cygwin*)
-      # Convert to unix form, then to dos form, then back to unix form
-      # but this time dos style (no spaces!) so that the unix form looks
-      # like /cygdrive/c/PROGRA~1:/cygdr...
-      sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-      sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-      sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-      ;;
-    *)
-      sys_lib_search_path_spec="$LIB"
-      if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-        # It is most probably a Windows format PATH.
-        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-      else
-        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-      fi
-      # FIXME: find the short name or the path components, as spaces are
-      # common. (e.g. "Program Files" -> "PROGRA~1")
-      ;;
-    esac
-
-    # DLL is installed to $(libdir)/../bin by postinstall_cmds
-    postinstall_cmds='base_file=`basename \${file}`~
-      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-      dldir=$destdir/`dirname \$dlpath`~
-      test -d \$dldir || mkdir -p \$dldir~
-      $install_prog $dir/$dlname \$dldir/$dlname'
-    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-      dlpath=$dir/\$dldll~
-       $RM \$dlpath'
-    shlibpath_overrides_runpath=yes
-    dynamic_linker='Win32 link.exe'
-    ;;
-
-  *)
-    # Assume MSVC wrapper
-    library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-    dynamic_linker='Win32 ld.exe'
-    ;;
-  esac
-  # FIXME: first we should search . and the directory the executable is in
-  shlibpath_var=PATH
-  ;;
-
-darwin* | rhapsody*)
-  dynamic_linker="$host_os dyld"
-  version_type=darwin
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
-  soname_spec='${libname}${release}${major}$shared_ext'
-  shlibpath_overrides_runpath=yes
-  shlibpath_var=DYLD_LIBRARY_PATH
-  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
-
-  sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"
-  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
-  ;;
-
-dgux*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  ;;
-
-freebsd* | dragonfly*)
-  # DragonFly does not have aout.  When/if they implement a new
-  # versioning mechanism, adjust this.
-  if test -x /usr/bin/objformat; then
-    objformat=`/usr/bin/objformat`
-  else
-    case $host_os in
-    freebsd[23].*) objformat=aout ;;
-    *) objformat=elf ;;
-    esac
-  fi
-  version_type=freebsd-$objformat
-  case $version_type in
-    freebsd-elf*)
-      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
-      need_version=no
-      need_lib_prefix=no
-      ;;
-    freebsd-*)
-      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
-      need_version=yes
-      ;;
-  esac
-  shlibpath_var=LD_LIBRARY_PATH
-  case $host_os in
-  freebsd2.*)
-    shlibpath_overrides_runpath=yes
-    ;;
-  freebsd3.[01]* | freebsdelf3.[01]*)
-    shlibpath_overrides_runpath=yes
-    hardcode_into_libs=yes
-    ;;
-  freebsd3.[2-9]* | freebsdelf3.[2-9]* | \
-  freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1)
-    shlibpath_overrides_runpath=no
-    hardcode_into_libs=yes
-    ;;
-  *) # from 4.6 on, and DragonFly
-    shlibpath_overrides_runpath=yes
-    hardcode_into_libs=yes
-    ;;
-  esac
-  ;;
-
-gnu*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-haiku*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  dynamic_linker="$host_os runtime_loader"
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
-  hardcode_into_libs=yes
-  ;;
-
-hpux9* | hpux10* | hpux11*)
-  # Give a soname corresponding to the major version so that dld.sl refuses to
-  # link against other versions.
-  version_type=sunos
-  need_lib_prefix=no
-  need_version=no
-  case $host_cpu in
-  ia64*)
-    shrext_cmds='.so'
-    hardcode_into_libs=yes
-    dynamic_linker="$host_os dld.so"
-    shlibpath_var=LD_LIBRARY_PATH
-    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    if test "X$HPUX_IA64_MODE" = X32; then
-      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
-    else
-      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
-    fi
-    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
-    ;;
-  hppa*64*)
-    shrext_cmds='.sl'
-    hardcode_into_libs=yes
-    dynamic_linker="$host_os dld.sl"
-    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
-    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
-    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
-    ;;
-  *)
-    shrext_cmds='.sl'
-    dynamic_linker="$host_os dld.sl"
-    shlibpath_var=SHLIB_PATH
-    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    ;;
-  esac
-  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
-  postinstall_cmds='chmod 555 $lib'
-  # or fails outright, so override atomically:
-  install_override_mode=555
-  ;;
-
-interix[3-9]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-irix5* | irix6* | nonstopux*)
-  case $host_os in
-    nonstopux*) version_type=nonstopux ;;
-    *)
-	if test "$lt_cv_prog_gnu_ld" = yes; then
-		version_type=linux # correct to gnu/linux during the next big refactor
-	else
-		version_type=irix
-	fi ;;
-  esac
-  need_lib_prefix=no
-  need_version=no
-  soname_spec='${libname}${release}${shared_ext}$major'
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
-  case $host_os in
-  irix5* | nonstopux*)
-    libsuff= shlibsuff=
-    ;;
-  *)
-    case $LD in # libtool.m4 will add one of these switches to LD
-    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
-      libsuff= shlibsuff= libmagic=32-bit;;
-    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
-      libsuff=32 shlibsuff=N32 libmagic=N32;;
-    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
-      libsuff=64 shlibsuff=64 libmagic=64-bit;;
-    *) libsuff= shlibsuff= libmagic=never-match;;
-    esac
-    ;;
-  esac
-  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
-  shlibpath_overrides_runpath=no
-  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
-  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
-  hardcode_into_libs=yes
-  ;;
-
-# No shared lib support for Linux oldld, aout, or coff.
-linux*oldld* | linux*aout* | linux*coff*)
-  dynamic_linker=no
-  ;;
-
-# This must be glibc/ELF.
-linux* | k*bsd*-gnu | kopensolaris*-gnu)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-
-  # Some binutils ld are patched to set DT_RUNPATH
-  if ${lt_cv_shlibpath_overrides_runpath+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_shlibpath_overrides_runpath=no
-    save_LDFLAGS=$LDFLAGS
-    save_libdir=$libdir
-    eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \
-	 LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\""
-    cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  if  ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then :
-  lt_cv_shlibpath_overrides_runpath=yes
-fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-    LDFLAGS=$save_LDFLAGS
-    libdir=$save_libdir
-
-fi
-
-  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
-
-  # This implies no fast_install, which is unacceptable.
-  # Some rework will be needed to allow for fast_install
-  # before this can be enabled.
-  hardcode_into_libs=yes
-
-  # Append ld.so.conf contents to the search path
-  if test -f /etc/ld.so.conf; then
-    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
-    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
-  fi
-
-  # We used to test for /lib/ld.so.1 and disable shared libraries on
-  # powerpc, because MkLinux only supported shared libraries with the
-  # GNU dynamic linker.  Since this was broken with cross compilers,
-  # most powerpc-linux boxes support dynamic linking these days and
-  # people can always --disable-shared, the test was removed, and we
-  # assume the GNU/Linux dynamic linker is in use.
-  dynamic_linker='GNU/Linux ld.so'
-  ;;
-
-netbsd*)
-  version_type=sunos
-  need_lib_prefix=no
-  need_version=no
-  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
-    dynamic_linker='NetBSD (a.out) ld.so'
-  else
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    dynamic_linker='NetBSD ld.elf_so'
-  fi
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  ;;
-
-newsos6)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  ;;
-
-*nto* | *qnx*)
-  version_type=qnx
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  dynamic_linker='ldqnx.so'
-  ;;
-
-openbsd*)
-  version_type=sunos
-  sys_lib_dlsearch_path_spec="/usr/lib"
-  need_lib_prefix=no
-  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
-  case $host_os in
-    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
-    *)				need_version=no  ;;
-  esac
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
-    case $host_os in
-      openbsd2.[89] | openbsd2.[89].*)
-	shlibpath_overrides_runpath=no
-	;;
-      *)
-	shlibpath_overrides_runpath=yes
-	;;
-      esac
-  else
-    shlibpath_overrides_runpath=yes
-  fi
-  ;;
-
-os2*)
-  libname_spec='$name'
-  shrext_cmds=".dll"
-  need_lib_prefix=no
-  library_names_spec='$libname${shared_ext} $libname.a'
-  dynamic_linker='OS/2 ld.exe'
-  shlibpath_var=LIBPATH
-  ;;
-
-osf3* | osf4* | osf5*)
-  version_type=osf
-  need_lib_prefix=no
-  need_version=no
-  soname_spec='${libname}${release}${shared_ext}$major'
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
-  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
-  ;;
-
-rdos*)
-  dynamic_linker=no
-  ;;
-
-solaris*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  # ldd complains unless libraries are executable
-  postinstall_cmds='chmod +x $lib'
-  ;;
-
-sunos4*)
-  version_type=sunos
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  if test "$with_gnu_ld" = yes; then
-    need_lib_prefix=no
-  fi
-  need_version=yes
-  ;;
-
-sysv4 | sysv4.3*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  case $host_vendor in
-    sni)
-      shlibpath_overrides_runpath=no
-      need_lib_prefix=no
-      runpath_var=LD_RUN_PATH
-      ;;
-    siemens)
-      need_lib_prefix=no
-      ;;
-    motorola)
-      need_lib_prefix=no
-      need_version=no
-      shlibpath_overrides_runpath=no
-      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
-      ;;
-  esac
-  ;;
-
-sysv4*MP*)
-  if test -d /usr/nec ;then
-    version_type=linux # correct to gnu/linux during the next big refactor
-    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
-    soname_spec='$libname${shared_ext}.$major'
-    shlibpath_var=LD_LIBRARY_PATH
-  fi
-  ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
-  version_type=freebsd-elf
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  if test "$with_gnu_ld" = yes; then
-    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
-  else
-    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
-    case $host_os in
-      sco3.2v5*)
-        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
-	;;
-    esac
-  fi
-  sys_lib_dlsearch_path_spec='/usr/lib'
-  ;;
-
-tpf*)
-  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-uts4*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  ;;
-
-*)
-  dynamic_linker=no
-  ;;
-esac
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5
-$as_echo "$dynamic_linker" >&6; }
-test "$dynamic_linker" = no && can_build_shared=no
-
-variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
-if test "$GCC" = yes; then
-  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
-fi
-
-if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
-  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
-fi
-if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
-  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5
-$as_echo_n "checking how to hardcode library paths into programs... " >&6; }
-hardcode_action=
-if test -n "$hardcode_libdir_flag_spec" ||
-   test -n "$runpath_var" ||
-   test "X$hardcode_automatic" = "Xyes" ; then
-
-  # We can hardcode non-existent directories.
-  if test "$hardcode_direct" != no &&
-     # If the only mechanism to avoid hardcoding is shlibpath_var, we
-     # have to relink, otherwise we might link with an installed library
-     # when we should be linking with a yet-to-be-installed one
-     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no &&
-     test "$hardcode_minus_L" != no; then
-    # Linking always hardcodes the temporary library directory.
-    hardcode_action=relink
-  else
-    # We can link without hardcoding, and we can hardcode nonexisting dirs.
-    hardcode_action=immediate
-  fi
-else
-  # We cannot hardcode anything, or else we can only hardcode existing
-  # directories.
-  hardcode_action=unsupported
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5
-$as_echo "$hardcode_action" >&6; }
-
-if test "$hardcode_action" = relink ||
-   test "$inherit_rpath" = yes; then
-  # Fast installation is not supported
-  enable_fast_install=no
-elif test "$shlibpath_overrides_runpath" = yes ||
-     test "$enable_shared" = no; then
-  # Fast installation is not necessary
-  enable_fast_install=needless
-fi
-
-
-
-
-
-
-  if test "x$enable_dlopen" != xyes; then
-  enable_dlopen=unknown
-  enable_dlopen_self=unknown
-  enable_dlopen_self_static=unknown
-else
-  lt_cv_dlopen=no
-  lt_cv_dlopen_libs=
-
-  case $host_os in
-  beos*)
-    lt_cv_dlopen="load_add_on"
-    lt_cv_dlopen_libs=
-    lt_cv_dlopen_self=yes
-    ;;
-
-  mingw* | pw32* | cegcc*)
-    lt_cv_dlopen="LoadLibrary"
-    lt_cv_dlopen_libs=
-    ;;
-
-  cygwin*)
-    lt_cv_dlopen="dlopen"
-    lt_cv_dlopen_libs=
-    ;;
-
-  darwin*)
-  # if libdl is installed we need to link against it
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
-$as_echo_n "checking for dlopen in -ldl... " >&6; }
-if ${ac_cv_lib_dl_dlopen+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldl  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dlopen ();
-int
-main ()
-{
-return dlopen ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_dl_dlopen=yes
-else
-  ac_cv_lib_dl_dlopen=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
-$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
-if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
-  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
-else
-
-    lt_cv_dlopen="dyld"
-    lt_cv_dlopen_libs=
-    lt_cv_dlopen_self=yes
-
-fi
-
-    ;;
-
-  *)
-    ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load"
-if test "x$ac_cv_func_shl_load" = xyes; then :
-  lt_cv_dlopen="shl_load"
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5
-$as_echo_n "checking for shl_load in -ldld... " >&6; }
-if ${ac_cv_lib_dld_shl_load+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldld  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char shl_load ();
-int
-main ()
-{
-return shl_load ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_dld_shl_load=yes
-else
-  ac_cv_lib_dld_shl_load=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5
-$as_echo "$ac_cv_lib_dld_shl_load" >&6; }
-if test "x$ac_cv_lib_dld_shl_load" = xyes; then :
-  lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"
-else
-  ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen"
-if test "x$ac_cv_func_dlopen" = xyes; then :
-  lt_cv_dlopen="dlopen"
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5
-$as_echo_n "checking for dlopen in -ldl... " >&6; }
-if ${ac_cv_lib_dl_dlopen+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldl  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dlopen ();
-int
-main ()
-{
-return dlopen ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_dl_dlopen=yes
-else
-  ac_cv_lib_dl_dlopen=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5
-$as_echo "$ac_cv_lib_dl_dlopen" >&6; }
-if test "x$ac_cv_lib_dl_dlopen" = xyes; then :
-  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5
-$as_echo_n "checking for dlopen in -lsvld... " >&6; }
-if ${ac_cv_lib_svld_dlopen+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-lsvld  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dlopen ();
-int
-main ()
-{
-return dlopen ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_svld_dlopen=yes
-else
-  ac_cv_lib_svld_dlopen=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5
-$as_echo "$ac_cv_lib_svld_dlopen" >&6; }
-if test "x$ac_cv_lib_svld_dlopen" = xyes; then :
-  lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"
-else
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5
-$as_echo_n "checking for dld_link in -ldld... " >&6; }
-if ${ac_cv_lib_dld_dld_link+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  ac_check_lib_save_LIBS=$LIBS
-LIBS="-ldld  $LIBS"
-cat confdefs.h - <<_ACEOF >conftest.$ac_ext
-/* end confdefs.h.  */
-
-/* Override any GCC internal prototype to avoid an error.
-   Use char because int might match the return type of a GCC
-   builtin and then its argument prototype would still apply.  */
-#ifdef __cplusplus
-extern "C"
-#endif
-char dld_link ();
-int
-main ()
-{
-return dld_link ();
-  ;
-  return 0;
-}
-_ACEOF
-if ac_fn_c_try_link "$LINENO"; then :
-  ac_cv_lib_dld_dld_link=yes
-else
-  ac_cv_lib_dld_dld_link=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-LIBS=$ac_check_lib_save_LIBS
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5
-$as_echo "$ac_cv_lib_dld_dld_link" >&6; }
-if test "x$ac_cv_lib_dld_dld_link" = xyes; then :
-  lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"
-fi
-
-
-fi
-
-
-fi
-
-
-fi
-
-
-fi
-
-
-fi
-
-    ;;
-  esac
-
-  if test "x$lt_cv_dlopen" != xno; then
-    enable_dlopen=yes
-  else
-    enable_dlopen=no
-  fi
-
-  case $lt_cv_dlopen in
-  dlopen)
-    save_CPPFLAGS="$CPPFLAGS"
-    test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H"
-
-    save_LDFLAGS="$LDFLAGS"
-    wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\"
-
-    save_LIBS="$LIBS"
-    LIBS="$lt_cv_dlopen_libs $LIBS"
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5
-$as_echo_n "checking whether a program can dlopen itself... " >&6; }
-if ${lt_cv_dlopen_self+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-     if test "$cross_compiling" = yes; then :
-  lt_cv_dlopen_self=cross
-else
-  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
-  lt_status=$lt_dlunknown
-  cat > conftest.$ac_ext <<_LT_EOF
-#line $LINENO "configure"
-#include "confdefs.h"
-
-#if HAVE_DLFCN_H
-#include <dlfcn.h>
-#endif
-
-#include <stdio.h>
-
-#ifdef RTLD_GLOBAL
-#  define LT_DLGLOBAL		RTLD_GLOBAL
-#else
-#  ifdef DL_GLOBAL
-#    define LT_DLGLOBAL		DL_GLOBAL
-#  else
-#    define LT_DLGLOBAL		0
-#  endif
-#endif
-
-/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
-   find out it does not work in some platform. */
-#ifndef LT_DLLAZY_OR_NOW
-#  ifdef RTLD_LAZY
-#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
-#  else
-#    ifdef DL_LAZY
-#      define LT_DLLAZY_OR_NOW		DL_LAZY
-#    else
-#      ifdef RTLD_NOW
-#        define LT_DLLAZY_OR_NOW	RTLD_NOW
-#      else
-#        ifdef DL_NOW
-#          define LT_DLLAZY_OR_NOW	DL_NOW
-#        else
-#          define LT_DLLAZY_OR_NOW	0
-#        endif
-#      endif
-#    endif
-#  endif
-#endif
-
-/* When -fvisbility=hidden is used, assume the code has been annotated
-   correspondingly for the symbols needed.  */
-#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
-int fnord () __attribute__((visibility("default")));
-#endif
-
-int fnord () { return 42; }
-int main ()
-{
-  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-  int status = $lt_dlunknown;
-
-  if (self)
-    {
-      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
-      else
-        {
-	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
-          else puts (dlerror ());
-	}
-      /* dlclose (self); */
-    }
-  else
-    puts (dlerror ());
-
-  return status;
-}
-_LT_EOF
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-  (eval $ac_link) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
-    (./conftest; exit; ) >&5 2>/dev/null
-    lt_status=$?
-    case x$lt_status in
-      x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;;
-      x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;;
-      x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;;
-    esac
-  else :
-    # compilation failed
-    lt_cv_dlopen_self=no
-  fi
-fi
-rm -fr conftest*
-
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5
-$as_echo "$lt_cv_dlopen_self" >&6; }
-
-    if test "x$lt_cv_dlopen_self" = xyes; then
-      wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\"
-      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5
-$as_echo_n "checking whether a statically linked program can dlopen itself... " >&6; }
-if ${lt_cv_dlopen_self_static+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-     if test "$cross_compiling" = yes; then :
-  lt_cv_dlopen_self_static=cross
-else
-  lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2
-  lt_status=$lt_dlunknown
-  cat > conftest.$ac_ext <<_LT_EOF
-#line $LINENO "configure"
-#include "confdefs.h"
-
-#if HAVE_DLFCN_H
-#include <dlfcn.h>
-#endif
-
-#include <stdio.h>
-
-#ifdef RTLD_GLOBAL
-#  define LT_DLGLOBAL		RTLD_GLOBAL
-#else
-#  ifdef DL_GLOBAL
-#    define LT_DLGLOBAL		DL_GLOBAL
-#  else
-#    define LT_DLGLOBAL		0
-#  endif
-#endif
-
-/* We may have to define LT_DLLAZY_OR_NOW in the command line if we
-   find out it does not work in some platform. */
-#ifndef LT_DLLAZY_OR_NOW
-#  ifdef RTLD_LAZY
-#    define LT_DLLAZY_OR_NOW		RTLD_LAZY
-#  else
-#    ifdef DL_LAZY
-#      define LT_DLLAZY_OR_NOW		DL_LAZY
-#    else
-#      ifdef RTLD_NOW
-#        define LT_DLLAZY_OR_NOW	RTLD_NOW
-#      else
-#        ifdef DL_NOW
-#          define LT_DLLAZY_OR_NOW	DL_NOW
-#        else
-#          define LT_DLLAZY_OR_NOW	0
-#        endif
-#      endif
-#    endif
-#  endif
-#endif
-
-/* When -fvisbility=hidden is used, assume the code has been annotated
-   correspondingly for the symbols needed.  */
-#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3))
-int fnord () __attribute__((visibility("default")));
-#endif
-
-int fnord () { return 42; }
-int main ()
-{
-  void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW);
-  int status = $lt_dlunknown;
-
-  if (self)
-    {
-      if (dlsym (self,"fnord"))       status = $lt_dlno_uscore;
-      else
-        {
-	  if (dlsym( self,"_fnord"))  status = $lt_dlneed_uscore;
-          else puts (dlerror ());
-	}
-      /* dlclose (self); */
-    }
-  else
-    puts (dlerror ());
-
-  return status;
-}
-_LT_EOF
-  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5
-  (eval $ac_link) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then
-    (./conftest; exit; ) >&5 2>/dev/null
-    lt_status=$?
-    case x$lt_status in
-      x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;;
-      x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;;
-      x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;;
-    esac
-  else :
-    # compilation failed
-    lt_cv_dlopen_self_static=no
-  fi
-fi
-rm -fr conftest*
-
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5
-$as_echo "$lt_cv_dlopen_self_static" >&6; }
-    fi
-
-    CPPFLAGS="$save_CPPFLAGS"
-    LDFLAGS="$save_LDFLAGS"
-    LIBS="$save_LIBS"
-    ;;
-  esac
-
-  case $lt_cv_dlopen_self in
-  yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;;
-  *) enable_dlopen_self=unknown ;;
-  esac
-
-  case $lt_cv_dlopen_self_static in
-  yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;;
-  *) enable_dlopen_self_static=unknown ;;
-  esac
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-striplib=
-old_striplib=
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5
-$as_echo_n "checking whether stripping libraries is possible... " >&6; }
-if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then
-  test -z "$old_striplib" && old_striplib="$STRIP --strip-debug"
-  test -z "$striplib" && striplib="$STRIP --strip-unneeded"
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-else
-# FIXME - insert some real tests, host_os isn't really good enough
-  case $host_os in
-  darwin*)
-    if test -n "$STRIP" ; then
-      striplib="$STRIP -x"
-      old_striplib="$STRIP -S"
-      { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5
-$as_echo "yes" >&6; }
-    else
-      { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-    fi
-    ;;
-  *)
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5
-$as_echo "no" >&6; }
-    ;;
-  esac
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-  # Report which library types will actually be built
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5
-$as_echo_n "checking if libtool supports shared libraries... " >&6; }
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5
-$as_echo "$can_build_shared" >&6; }
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5
-$as_echo_n "checking whether to build shared libraries... " >&6; }
-  test "$can_build_shared" = "no" && enable_shared=no
-
-  # On AIX, shared libraries and static libraries use the same namespace, and
-  # are all built from PIC.
-  case $host_os in
-  aix3*)
-    test "$enable_shared" = yes && enable_static=no
-    if test -n "$RANLIB"; then
-      archive_cmds="$archive_cmds~\$RANLIB \$lib"
-      postinstall_cmds='$RANLIB $lib'
-    fi
-    ;;
-
-  aix[4-9]*)
-    if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
-      test "$enable_shared" = yes && enable_static=no
-    fi
-    ;;
-  esac
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5
-$as_echo "$enable_shared" >&6; }
-
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5
-$as_echo_n "checking whether to build static libraries... " >&6; }
-  # Make sure either enable_shared or enable_static is yes.
-  test "$enable_shared" = yes || enable_static=yes
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5
-$as_echo "$enable_static" >&6; }
-
-
-
-
-fi
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-CC="$lt_save_CC"
-
-
-
-
-
-      ac_ext=${ac_fc_srcext-f}
-ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5'
-ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_fc_compiler_gnu
-
-
-if test -z "$FC" || test "X$FC" = "Xno"; then
-  _lt_disable_FC=yes
-fi
-
-archive_cmds_need_lc_FC=no
-allow_undefined_flag_FC=
-always_export_symbols_FC=no
-archive_expsym_cmds_FC=
-export_dynamic_flag_spec_FC=
-hardcode_direct_FC=no
-hardcode_direct_absolute_FC=no
-hardcode_libdir_flag_spec_FC=
-hardcode_libdir_separator_FC=
-hardcode_minus_L_FC=no
-hardcode_automatic_FC=no
-inherit_rpath_FC=no
-module_cmds_FC=
-module_expsym_cmds_FC=
-link_all_deplibs_FC=unknown
-old_archive_cmds_FC=$old_archive_cmds
-reload_flag_FC=$reload_flag
-reload_cmds_FC=$reload_cmds
-no_undefined_flag_FC=
-whole_archive_flag_spec_FC=
-enable_shared_with_static_runtimes_FC=no
-
-# Source file extension for fc test sources.
-ac_ext=${ac_fc_srcext-f}
-
-# Object file extension for compiled fc test sources.
-objext=o
-objext_FC=$objext
-
-# No sense in running all these tests if we already determined that
-# the FC compiler isn't working.  Some variables (like enable_shared)
-# are currently assumed to apply to all compilers on this platform,
-# and will be corrupted by setting them based on a non-working compiler.
-if test "$_lt_disable_FC" != yes; then
-  # Code to be used in simple compile tests
-  lt_simple_compile_test_code="\
-      subroutine t
-      return
-      end
-"
-
-  # Code to be used in simple link tests
-  lt_simple_link_test_code="\
-      program t
-      end
-"
-
-  # ltmain only uses $CC for tagged configurations so make sure $CC is set.
-
-
-
-
-
-
-# If no C compiler was specified, use CC.
-LTCC=${LTCC-"$CC"}
-
-# If no C compiler flags were specified, use CFLAGS.
-LTCFLAGS=${LTCFLAGS-"$CFLAGS"}
-
-# Allow CC to be a program name with arguments.
-compiler=$CC
-
-
-  # save warnings/boilerplate of simple test code
-  ac_outfile=conftest.$ac_objext
-echo "$lt_simple_compile_test_code" >conftest.$ac_ext
-eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_compiler_boilerplate=`cat conftest.err`
-$RM conftest*
-
-  ac_outfile=conftest.$ac_objext
-echo "$lt_simple_link_test_code" >conftest.$ac_ext
-eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err
-_lt_linker_boilerplate=`cat conftest.err`
-$RM -r conftest*
-
-
-  # Allow CC to be a program name with arguments.
-  lt_save_CC="$CC"
-  lt_save_GCC=$GCC
-  lt_save_CFLAGS=$CFLAGS
-  CC=${FC-"f95"}
-  CFLAGS=$FCFLAGS
-  compiler=$CC
-  GCC=$ac_cv_fc_compiler_gnu
-
-  compiler_FC=$CC
-  for cc_temp in $compiler""; do
-  case $cc_temp in
-    compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
-    distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
-    \-*) ;;
-    *) break;;
-  esac
-done
-cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"`
-
-
-  if test -n "$compiler"; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5
-$as_echo_n "checking if libtool supports shared libraries... " >&6; }
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5
-$as_echo "$can_build_shared" >&6; }
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5
-$as_echo_n "checking whether to build shared libraries... " >&6; }
-    test "$can_build_shared" = "no" && enable_shared=no
-
-    # On AIX, shared libraries and static libraries use the same namespace, and
-    # are all built from PIC.
-    case $host_os in
-      aix3*)
-        test "$enable_shared" = yes && enable_static=no
-        if test -n "$RANLIB"; then
-          archive_cmds="$archive_cmds~\$RANLIB \$lib"
-          postinstall_cmds='$RANLIB $lib'
-        fi
-        ;;
-      aix[4-9]*)
-	if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then
-	  test "$enable_shared" = yes && enable_static=no
-	fi
-        ;;
-    esac
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5
-$as_echo "$enable_shared" >&6; }
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5
-$as_echo_n "checking whether to build static libraries... " >&6; }
-    # Make sure either enable_shared or enable_static is yes.
-    test "$enable_shared" = yes || enable_static=yes
-    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5
-$as_echo "$enable_static" >&6; }
-
-    GCC_FC="$ac_cv_fc_compiler_gnu"
-    LD_FC="$LD"
-
-    ## CAVEAT EMPTOR:
-    ## There is no encapsulation within the following macros, do not change
-    ## the running order or otherwise move them around unless you know exactly
-    ## what you are doing...
-    # Dependencies to place before and after the object being linked:
-predep_objects_FC=
-postdep_objects_FC=
-predeps_FC=
-postdeps_FC=
-compiler_lib_search_path_FC=
-
-cat > conftest.$ac_ext <<_LT_EOF
-      subroutine foo
-      implicit none
-      integer a
-      a=0
-      return
-      end
-_LT_EOF
-
-
-_lt_libdeps_save_CFLAGS=$CFLAGS
-case "$CC $CFLAGS " in #(
-*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
-*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
-*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;;
-esac
-
-if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }; then
-  # Parse the compiler output and extract the necessary
-  # objects, libraries and library flags.
-
-  # Sentinel used to keep track of whether or not we are before
-  # the conftest object file.
-  pre_test_object_deps_done=no
-
-  for p in `eval "$output_verbose_link_cmd"`; do
-    case ${prev}${p} in
-
-    -L* | -R* | -l*)
-       # Some compilers place space between "-{L,R}" and the path.
-       # Remove the space.
-       if test $p = "-L" ||
-          test $p = "-R"; then
-	 prev=$p
-	 continue
-       fi
-
-       # Expand the sysroot to ease extracting the directories later.
-       if test -z "$prev"; then
-         case $p in
-         -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
-         -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
-         -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
-         esac
-       fi
-       case $p in
-       =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
-       esac
-       if test "$pre_test_object_deps_done" = no; then
-	 case ${prev} in
-	 -L | -R)
-	   # Internal compiler library paths should come after those
-	   # provided the user.  The postdeps already come after the
-	   # user supplied libs so there is no need to process them.
-	   if test -z "$compiler_lib_search_path_FC"; then
-	     compiler_lib_search_path_FC="${prev}${p}"
-	   else
-	     compiler_lib_search_path_FC="${compiler_lib_search_path_FC} ${prev}${p}"
-	   fi
-	   ;;
-	 # The "-l" case would never come before the object being
-	 # linked, so don't bother handling this case.
-	 esac
-       else
-	 if test -z "$postdeps_FC"; then
-	   postdeps_FC="${prev}${p}"
-	 else
-	   postdeps_FC="${postdeps_FC} ${prev}${p}"
-	 fi
-       fi
-       prev=
-       ;;
-
-    *.lto.$objext) ;; # Ignore GCC LTO objects
-    *.$objext)
-       # This assumes that the test object file only shows up
-       # once in the compiler output.
-       if test "$p" = "conftest.$objext"; then
-	 pre_test_object_deps_done=yes
-	 continue
-       fi
-
-       if test "$pre_test_object_deps_done" = no; then
-	 if test -z "$predep_objects_FC"; then
-	   predep_objects_FC="$p"
-	 else
-	   predep_objects_FC="$predep_objects_FC $p"
-	 fi
-       else
-	 if test -z "$postdep_objects_FC"; then
-	   postdep_objects_FC="$p"
-	 else
-	   postdep_objects_FC="$postdep_objects_FC $p"
-	 fi
-       fi
-       ;;
-
-    *) ;; # Ignore the rest.
-
-    esac
-  done
-
-  # Clean up.
-  rm -f a.out a.exe
-else
-  echo "libtool.m4: error: problem compiling FC test program"
-fi
-
-$RM -f confest.$objext
-CFLAGS=$_lt_libdeps_save_CFLAGS
-
-# PORTME: override above test on systems where it is broken
-
-
-case " $postdeps_FC " in
-*" -lc "*) archive_cmds_need_lc_FC=no ;;
-esac
- compiler_lib_search_dirs_FC=
-if test -n "${compiler_lib_search_path_FC}"; then
- compiler_lib_search_dirs_FC=`echo " ${compiler_lib_search_path_FC}" | ${SED} -e 's! -L! !g' -e 's!^ !!'`
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    lt_prog_compiler_wl_FC=
-lt_prog_compiler_pic_FC=
-lt_prog_compiler_static_FC=
-
-
-  if test "$GCC" = yes; then
-    lt_prog_compiler_wl_FC='-Wl,'
-    lt_prog_compiler_static_FC='-static'
-
-    case $host_os in
-      aix*)
-      # All AIX code is PIC.
-      if test "$host_cpu" = ia64; then
-	# AIX 5 now supports IA64 processor
-	lt_prog_compiler_static_FC='-Bstatic'
-      fi
-      ;;
-
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            lt_prog_compiler_pic_FC='-fPIC'
-        ;;
-      m68k)
-            # FIXME: we need at least 68020 code to build shared libraries, but
-            # adding the `-m68020' flag to GCC prevents building anything better,
-            # like `-m68040'.
-            lt_prog_compiler_pic_FC='-m68020 -resident32 -malways-restore-a4'
-        ;;
-      esac
-      ;;
-
-    beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*)
-      # PIC is the default for these OSes.
-      ;;
-
-    mingw* | cygwin* | pw32* | os2* | cegcc*)
-      # This hack is so that the source file can tell whether it is being
-      # built for inclusion in a dll (and should export symbols for example).
-      # Although the cygwin gcc ignores -fPIC, still need this for old-style
-      # (--disable-auto-import) libraries
-      lt_prog_compiler_pic_FC='-DDLL_EXPORT'
-      ;;
-
-    darwin* | rhapsody*)
-      # PIC is the default on this platform
-      # Common symbols not allowed in MH_DYLIB files
-      lt_prog_compiler_pic_FC='-fno-common'
-      ;;
-
-    haiku*)
-      # PIC is the default for Haiku.
-      # The "-static" flag exists, but is broken.
-      lt_prog_compiler_static_FC=
-      ;;
-
-    hpux*)
-      # PIC is the default for 64-bit PA HP-UX, but not for 32-bit
-      # PA HP-UX.  On IA64 HP-UX, PIC is the default but the pic flag
-      # sets the default TLS model and affects inlining.
-      case $host_cpu in
-      hppa*64*)
-	# +Z the default
-	;;
-      *)
-	lt_prog_compiler_pic_FC='-fPIC'
-	;;
-      esac
-      ;;
-
-    interix[3-9]*)
-      # Interix 3.x gcc -fpic/-fPIC options generate broken code.
-      # Instead, we relocate shared libraries at runtime.
-      ;;
-
-    msdosdjgpp*)
-      # Just because we use GCC doesn't mean we suddenly get shared libraries
-      # on systems that don't support them.
-      lt_prog_compiler_can_build_shared_FC=no
-      enable_shared=no
-      ;;
-
-    *nto* | *qnx*)
-      # QNX uses GNU C++, but need to define -shared option too, otherwise
-      # it will coredump.
-      lt_prog_compiler_pic_FC='-fPIC -shared'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec; then
-	lt_prog_compiler_pic_FC=-Kconform_pic
-      fi
-      ;;
-
-    *)
-      lt_prog_compiler_pic_FC='-fPIC'
-      ;;
-    esac
-
-    case $cc_basename in
-    nvcc*) # Cuda Compiler Driver 2.2
-      lt_prog_compiler_wl_FC='-Xlinker '
-      if test -n "$lt_prog_compiler_pic_FC"; then
-        lt_prog_compiler_pic_FC="-Xcompiler $lt_prog_compiler_pic_FC"
-      fi
-      ;;
-    esac
-  else
-    # PORTME Check for flag to pass linker flags through the system compiler.
-    case $host_os in
-    aix*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      if test "$host_cpu" = ia64; then
-	# AIX 5 now supports IA64 processor
-	lt_prog_compiler_static_FC='-Bstatic'
-      else
-	lt_prog_compiler_static_FC='-bnso -bI:/lib/syscalls.exp'
-      fi
-      ;;
-
-    mingw* | cygwin* | pw32* | os2* | cegcc*)
-      # This hack is so that the source file can tell whether it is being
-      # built for inclusion in a dll (and should export symbols for example).
-      lt_prog_compiler_pic_FC='-DDLL_EXPORT'
-      ;;
-
-    hpux9* | hpux10* | hpux11*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but
-      # not for PA HP-UX.
-      case $host_cpu in
-      hppa*64*|ia64*)
-	# +Z the default
-	;;
-      *)
-	lt_prog_compiler_pic_FC='+Z'
-	;;
-      esac
-      # Is there a better lt_prog_compiler_static that works with the bundled CC?
-      lt_prog_compiler_static_FC='${wl}-a ${wl}archive'
-      ;;
-
-    irix5* | irix6* | nonstopux*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      # PIC (with -KPIC) is the default.
-      lt_prog_compiler_static_FC='-non_shared'
-      ;;
-
-    linux* | k*bsd*-gnu | kopensolaris*-gnu)
-      case $cc_basename in
-      # old Intel for x86_64 which still supported -KPIC.
-      ecc*)
-	lt_prog_compiler_wl_FC='-Wl,'
-	lt_prog_compiler_pic_FC='-KPIC'
-	lt_prog_compiler_static_FC='-static'
-        ;;
-      # icc used to be incompatible with GCC.
-      # ICC 10 doesn't accept -KPIC any more.
-      icc* | ifort*)
-	lt_prog_compiler_wl_FC='-Wl,'
-	lt_prog_compiler_pic_FC='-fPIC'
-	lt_prog_compiler_static_FC='-static'
-        ;;
-      # Lahey Fortran 8.1.
-      lf95*)
-	lt_prog_compiler_wl_FC='-Wl,'
-	lt_prog_compiler_pic_FC='--shared'
-	lt_prog_compiler_static_FC='--static'
-	;;
-      nagfor*)
-	# NAG Fortran compiler
-	lt_prog_compiler_wl_FC='-Wl,-Wl,,'
-	lt_prog_compiler_pic_FC='-PIC'
-	lt_prog_compiler_static_FC='-Bstatic'
-	;;
-      pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*)
-        # Portland Group compilers (*not* the Pentium gcc compiler,
-	# which looks to be a dead project)
-	lt_prog_compiler_wl_FC='-Wl,'
-	lt_prog_compiler_pic_FC='-fpic'
-	lt_prog_compiler_static_FC='-Bstatic'
-        ;;
-      ccc*)
-        lt_prog_compiler_wl_FC='-Wl,'
-        # All Alpha code is PIC.
-        lt_prog_compiler_static_FC='-non_shared'
-        ;;
-      xl* | bgxl* | bgf* | mpixl*)
-	# IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene
-	lt_prog_compiler_wl_FC='-Wl,'
-	lt_prog_compiler_pic_FC='-qpic'
-	lt_prog_compiler_static_FC='-qstaticlink'
-	;;
-      *)
-	case `$CC -V 2>&1 | sed 5q` in
-	*Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*)
-	  # Sun Fortran 8.3 passes all unrecognized flags to the linker
-	  lt_prog_compiler_pic_FC='-KPIC'
-	  lt_prog_compiler_static_FC='-Bstatic'
-	  lt_prog_compiler_wl_FC=''
-	  ;;
-	*Sun\ F* | *Sun*Fortran*)
-	  lt_prog_compiler_pic_FC='-KPIC'
-	  lt_prog_compiler_static_FC='-Bstatic'
-	  lt_prog_compiler_wl_FC='-Qoption ld '
-	  ;;
-	*Sun\ C*)
-	  # Sun C 5.9
-	  lt_prog_compiler_pic_FC='-KPIC'
-	  lt_prog_compiler_static_FC='-Bstatic'
-	  lt_prog_compiler_wl_FC='-Wl,'
-	  ;;
-        *Intel*\ [CF]*Compiler*)
-	  lt_prog_compiler_wl_FC='-Wl,'
-	  lt_prog_compiler_pic_FC='-fPIC'
-	  lt_prog_compiler_static_FC='-static'
-	  ;;
-	*Portland\ Group*)
-	  lt_prog_compiler_wl_FC='-Wl,'
-	  lt_prog_compiler_pic_FC='-fpic'
-	  lt_prog_compiler_static_FC='-Bstatic'
-	  ;;
-	esac
-	;;
-      esac
-      ;;
-
-    newsos6)
-      lt_prog_compiler_pic_FC='-KPIC'
-      lt_prog_compiler_static_FC='-Bstatic'
-      ;;
-
-    *nto* | *qnx*)
-      # QNX uses GNU C++, but need to define -shared option too, otherwise
-      # it will coredump.
-      lt_prog_compiler_pic_FC='-fPIC -shared'
-      ;;
-
-    osf3* | osf4* | osf5*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      # All OSF/1 code is PIC.
-      lt_prog_compiler_static_FC='-non_shared'
-      ;;
-
-    rdos*)
-      lt_prog_compiler_static_FC='-non_shared'
-      ;;
-
-    solaris*)
-      lt_prog_compiler_pic_FC='-KPIC'
-      lt_prog_compiler_static_FC='-Bstatic'
-      case $cc_basename in
-      f77* | f90* | f95* | sunf77* | sunf90* | sunf95*)
-	lt_prog_compiler_wl_FC='-Qoption ld ';;
-      *)
-	lt_prog_compiler_wl_FC='-Wl,';;
-      esac
-      ;;
-
-    sunos4*)
-      lt_prog_compiler_wl_FC='-Qoption ld '
-      lt_prog_compiler_pic_FC='-PIC'
-      lt_prog_compiler_static_FC='-Bstatic'
-      ;;
-
-    sysv4 | sysv4.2uw2* | sysv4.3*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      lt_prog_compiler_pic_FC='-KPIC'
-      lt_prog_compiler_static_FC='-Bstatic'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec ;then
-	lt_prog_compiler_pic_FC='-Kconform_pic'
-	lt_prog_compiler_static_FC='-Bstatic'
-      fi
-      ;;
-
-    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      lt_prog_compiler_pic_FC='-KPIC'
-      lt_prog_compiler_static_FC='-Bstatic'
-      ;;
-
-    unicos*)
-      lt_prog_compiler_wl_FC='-Wl,'
-      lt_prog_compiler_can_build_shared_FC=no
-      ;;
-
-    uts4*)
-      lt_prog_compiler_pic_FC='-pic'
-      lt_prog_compiler_static_FC='-Bstatic'
-      ;;
-
-    *)
-      lt_prog_compiler_can_build_shared_FC=no
-      ;;
-    esac
-  fi
-
-case $host_os in
-  # For platforms which do not support PIC, -DPIC is meaningless:
-  *djgpp*)
-    lt_prog_compiler_pic_FC=
-    ;;
-  *)
-    lt_prog_compiler_pic_FC="$lt_prog_compiler_pic_FC"
-    ;;
-esac
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5
-$as_echo_n "checking for $compiler option to produce PIC... " >&6; }
-if ${lt_cv_prog_compiler_pic_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_pic_FC=$lt_prog_compiler_pic_FC
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_FC" >&5
-$as_echo "$lt_cv_prog_compiler_pic_FC" >&6; }
-lt_prog_compiler_pic_FC=$lt_cv_prog_compiler_pic_FC
-
-#
-# Check to make sure the PIC flag actually works.
-#
-if test -n "$lt_prog_compiler_pic_FC"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic_FC works" >&5
-$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic_FC works... " >&6; }
-if ${lt_cv_prog_compiler_pic_works_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_pic_works_FC=no
-   ac_outfile=conftest.$ac_objext
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-   lt_compiler_flag="$lt_prog_compiler_pic_FC"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   # The option is referenced via a variable to avoid confusing sed.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>conftest.err)
-   ac_status=$?
-   cat conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s "$ac_outfile"; then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings other than the usual output.
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp
-     $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-     if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_pic_works_FC=yes
-     fi
-   fi
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_FC" >&5
-$as_echo "$lt_cv_prog_compiler_pic_works_FC" >&6; }
-
-if test x"$lt_cv_prog_compiler_pic_works_FC" = xyes; then
-    case $lt_prog_compiler_pic_FC in
-     "" | " "*) ;;
-     *) lt_prog_compiler_pic_FC=" $lt_prog_compiler_pic_FC" ;;
-     esac
-else
-    lt_prog_compiler_pic_FC=
-     lt_prog_compiler_can_build_shared_FC=no
-fi
-
-fi
-
-
-
-
-
-#
-# Check to make sure the static flag actually works.
-#
-wl=$lt_prog_compiler_wl_FC eval lt_tmp_static_flag=\"$lt_prog_compiler_static_FC\"
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5
-$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; }
-if ${lt_cv_prog_compiler_static_works_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_static_works_FC=no
-   save_LDFLAGS="$LDFLAGS"
-   LDFLAGS="$LDFLAGS $lt_tmp_static_flag"
-   echo "$lt_simple_link_test_code" > conftest.$ac_ext
-   if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then
-     # The linker can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     if test -s conftest.err; then
-       # Append any errors to the config.log.
-       cat conftest.err 1>&5
-       $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp
-       $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2
-       if diff conftest.exp conftest.er2 >/dev/null; then
-         lt_cv_prog_compiler_static_works_FC=yes
-       fi
-     else
-       lt_cv_prog_compiler_static_works_FC=yes
-     fi
-   fi
-   $RM -r conftest*
-   LDFLAGS="$save_LDFLAGS"
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_FC" >&5
-$as_echo "$lt_cv_prog_compiler_static_works_FC" >&6; }
-
-if test x"$lt_cv_prog_compiler_static_works_FC" = xyes; then
-    :
-else
-    lt_prog_compiler_static_FC=
-fi
-
-
-
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_c_o_FC=no
-   $RM -r conftest 2>/dev/null
-   mkdir conftest
-   cd conftest
-   mkdir out
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-   lt_compiler_flag="-o out/conftest2.$ac_objext"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>out/conftest.err)
-   ac_status=$?
-   cat out/conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s out/conftest2.$ac_objext
-   then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
-     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
-     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_c_o_FC=yes
-     fi
-   fi
-   chmod u+w . 2>&5
-   $RM conftest*
-   # SGI C++ compiler will create directory out/ii_files/ for
-   # template instantiation
-   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
-   $RM out/* && rmdir out
-   cd ..
-   $RM -r conftest
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_FC" >&5
-$as_echo "$lt_cv_prog_compiler_c_o_FC" >&6; }
-
-
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5
-$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; }
-if ${lt_cv_prog_compiler_c_o_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_prog_compiler_c_o_FC=no
-   $RM -r conftest 2>/dev/null
-   mkdir conftest
-   cd conftest
-   mkdir out
-   echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-   lt_compiler_flag="-o out/conftest2.$ac_objext"
-   # Insert the option either (1) after the last *FLAGS variable, or
-   # (2) before a word containing "conftest.", or (3) at the end.
-   # Note that $ac_compile itself does not contain backslashes and begins
-   # with a dollar sign (not a hyphen), so the echo should work correctly.
-   lt_compile=`echo "$ac_compile" | $SED \
-   -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \
-   -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \
-   -e 's:$: $lt_compiler_flag:'`
-   (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5)
-   (eval "$lt_compile" 2>out/conftest.err)
-   ac_status=$?
-   cat out/conftest.err >&5
-   echo "$as_me:$LINENO: \$? = $ac_status" >&5
-   if (exit $ac_status) && test -s out/conftest2.$ac_objext
-   then
-     # The compiler can only warn and ignore the option if not recognized
-     # So say no if there are warnings
-     $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp
-     $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2
-     if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then
-       lt_cv_prog_compiler_c_o_FC=yes
-     fi
-   fi
-   chmod u+w . 2>&5
-   $RM conftest*
-   # SGI C++ compiler will create directory out/ii_files/ for
-   # template instantiation
-   test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files
-   $RM out/* && rmdir out
-   cd ..
-   $RM -r conftest
-   $RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_FC" >&5
-$as_echo "$lt_cv_prog_compiler_c_o_FC" >&6; }
-
-
-
-
-hard_links="nottested"
-if test "$lt_cv_prog_compiler_c_o_FC" = no && test "$need_locks" != no; then
-  # do not overwrite the value of need_locks provided by the user
-  { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5
-$as_echo_n "checking if we can lock with hard links... " >&6; }
-  hard_links=yes
-  $RM conftest*
-  ln conftest.a conftest.b 2>/dev/null && hard_links=no
-  touch conftest.a
-  ln conftest.a conftest.b 2>&5 || hard_links=no
-  ln conftest.a conftest.b 2>/dev/null && hard_links=no
-  { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5
-$as_echo "$hard_links" >&6; }
-  if test "$hard_links" = no; then
-    { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5
-$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;}
-    need_locks=warn
-  fi
-else
-  need_locks=no
-fi
-
-
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5
-$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; }
-
-  runpath_var=
-  allow_undefined_flag_FC=
-  always_export_symbols_FC=no
-  archive_cmds_FC=
-  archive_expsym_cmds_FC=
-  compiler_needs_object_FC=no
-  enable_shared_with_static_runtimes_FC=no
-  export_dynamic_flag_spec_FC=
-  export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols'
-  hardcode_automatic_FC=no
-  hardcode_direct_FC=no
-  hardcode_direct_absolute_FC=no
-  hardcode_libdir_flag_spec_FC=
-  hardcode_libdir_separator_FC=
-  hardcode_minus_L_FC=no
-  hardcode_shlibpath_var_FC=unsupported
-  inherit_rpath_FC=no
-  link_all_deplibs_FC=unknown
-  module_cmds_FC=
-  module_expsym_cmds_FC=
-  old_archive_from_new_cmds_FC=
-  old_archive_from_expsyms_cmds_FC=
-  thread_safe_flag_spec_FC=
-  whole_archive_flag_spec_FC=
-  # include_expsyms should be a list of space-separated symbols to be *always*
-  # included in the symbol list
-  include_expsyms_FC=
-  # exclude_expsyms can be an extended regexp of symbols to exclude
-  # it will be wrapped by ` (' and `)$', so one must not match beginning or
-  # end of line.  Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc',
-  # as well as any symbol that contains `d'.
-  exclude_expsyms_FC='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'
-  # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out
-  # platforms (ab)use it in PIC code, but their linkers get confused if
-  # the symbol is explicitly referenced.  Since portable code cannot
-  # rely on this symbol name, it's probably fine to never include it in
-  # preloaded symbol tables.
-  # Exclude shared library initialization/finalization symbols.
-  extract_expsyms_cmds=
-
-  case $host_os in
-  cygwin* | mingw* | pw32* | cegcc*)
-    # FIXME: the MSVC++ port hasn't been tested in a loooong time
-    # When not using gcc, we currently assume that we are using
-    # Microsoft Visual C++.
-    if test "$GCC" != yes; then
-      with_gnu_ld=no
-    fi
-    ;;
-  interix*)
-    # we just hope/assume this is gcc and not c89 (= MSVC++)
-    with_gnu_ld=yes
-    ;;
-  openbsd*)
-    with_gnu_ld=no
-    ;;
-  esac
-
-  ld_shlibs_FC=yes
-
-  # On some targets, GNU ld is compatible enough with the native linker
-  # that we're better off using the native interface for both.
-  lt_use_gnu_ld_interface=no
-  if test "$with_gnu_ld" = yes; then
-    case $host_os in
-      aix*)
-	# The AIX port of GNU ld has always aspired to compatibility
-	# with the native linker.  However, as the warning in the GNU ld
-	# block says, versions before 2.19.5* couldn't really create working
-	# shared libraries, regardless of the interface used.
-	case `$LD -v 2>&1` in
-	  *\ \(GNU\ Binutils\)\ 2.19.5*) ;;
-	  *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;;
-	  *\ \(GNU\ Binutils\)\ [3-9]*) ;;
-	  *)
-	    lt_use_gnu_ld_interface=yes
-	    ;;
-	esac
-	;;
-      *)
-	lt_use_gnu_ld_interface=yes
-	;;
-    esac
-  fi
-
-  if test "$lt_use_gnu_ld_interface" = yes; then
-    # If archive_cmds runs LD, not CC, wlarc should be empty
-    wlarc='${wl}'
-
-    # Set some defaults for GNU ld with shared library support. These
-    # are reset later if shared libraries are not supported. Putting them
-    # here allows them to be overridden if necessary.
-    runpath_var=LD_RUN_PATH
-    hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-    export_dynamic_flag_spec_FC='${wl}--export-dynamic'
-    # ancient GNU ld didn't support --whole-archive et. al.
-    if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then
-      whole_archive_flag_spec_FC="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive'
-    else
-      whole_archive_flag_spec_FC=
-    fi
-    supports_anon_versioning=no
-    case `$LD -v 2>&1` in
-      *GNU\ gold*) supports_anon_versioning=yes ;;
-      *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11
-      *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ...
-      *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ...
-      *\ 2.11.*) ;; # other 2.11 versions
-      *) supports_anon_versioning=yes ;;
-    esac
-
-    # See if GNU ld supports shared libraries.
-    case $host_os in
-    aix[3-9]*)
-      # On AIX/PPC, the GNU linker is very broken
-      if test "$host_cpu" != ia64; then
-	ld_shlibs_FC=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: the GNU linker, at least up to release 2.19, is reported
-*** to be unable to reliably create shared libraries on AIX.
-*** Therefore, libtool is disabling shared libraries support.  If you
-*** really care for shared libraries, you may want to install binutils
-*** 2.20 or above, or modify your PATH so that a non-GNU linker is found.
-*** You will then need to restart the configuration process.
-
-_LT_EOF
-      fi
-      ;;
-
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-            archive_expsym_cmds_FC=''
-        ;;
-      m68k)
-            archive_cmds_FC='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
-            hardcode_libdir_flag_spec_FC='-L$libdir'
-            hardcode_minus_L_FC=yes
-        ;;
-      esac
-      ;;
-
-    beos*)
-      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	allow_undefined_flag_FC=unsupported
-	# Joseph Beckenbach <jrb3@best.com> says some releases of gcc
-	# support --undefined.  This deserves some investigation.  FIXME
-	archive_cmds_FC='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-      else
-	ld_shlibs_FC=no
-      fi
-      ;;
-
-    cygwin* | mingw* | pw32* | cegcc*)
-      # _LT_TAGVAR(hardcode_libdir_flag_spec, FC) is actually meaningless,
-      # as there is no search path for DLLs.
-      hardcode_libdir_flag_spec_FC='-L$libdir'
-      export_dynamic_flag_spec_FC='${wl}--export-all-symbols'
-      allow_undefined_flag_FC=unsupported
-      always_export_symbols_FC=no
-      enable_shared_with_static_runtimes_FC=yes
-      export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols'
-      exclude_expsyms_FC='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'
-
-      if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then
-        archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-	# If the export-symbols file already is a .def file (1st line
-	# is EXPORTS), use it as is; otherwise, prepend...
-	archive_expsym_cmds_FC='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-	  cp $export_symbols $output_objdir/$soname.def;
-	else
-	  echo EXPORTS > $output_objdir/$soname.def;
-	  cat $export_symbols >> $output_objdir/$soname.def;
-	fi~
-	$CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib'
-      else
-	ld_shlibs_FC=no
-      fi
-      ;;
-
-    haiku*)
-      archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-      link_all_deplibs_FC=yes
-      ;;
-
-    interix[3-9]*)
-      hardcode_direct_FC=no
-      hardcode_shlibpath_var_FC=no
-      hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir'
-      export_dynamic_flag_spec_FC='${wl}-E'
-      # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc.
-      # Instead, shared libraries are loaded at an image base (0x10000000 by
-      # default) and relocated if they conflict, which is a slow very memory
-      # consuming and fragmenting process.  To avoid this, we pick a random,
-      # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link
-      # time.  Moving up from 0x10000000 also allows more sbrk(2) space.
-      archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
-      archive_expsym_cmds_FC='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib'
-      ;;
-
-    gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu)
-      tmp_diet=no
-      if test "$host_os" = linux-dietlibc; then
-	case $cc_basename in
-	  diet\ *) tmp_diet=yes;;	# linux-dietlibc with static linking (!diet-dyn)
-	esac
-      fi
-      if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \
-	 && test "$tmp_diet" = no
-      then
-	tmp_addflag=' $pic_flag'
-	tmp_sharedflag='-shared'
-	case $cc_basename,$host_cpu in
-        pgcc*)				# Portland Group C compiler
-	  whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  tmp_addflag=' $pic_flag'
-	  ;;
-	pgf77* | pgf90* | pgf95* | pgfortran*)
-					# Portland Group f77 and f90 compilers
-	  whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  tmp_addflag=' $pic_flag -Mnomain' ;;
-	ecc*,ia64* | icc*,ia64*)	# Intel C compiler on ia64
-	  tmp_addflag=' -i_dynamic' ;;
-	efc*,ia64* | ifort*,ia64*)	# Intel Fortran compiler on ia64
-	  tmp_addflag=' -i_dynamic -nofor_main' ;;
-	ifc* | ifort*)			# Intel Fortran compiler
-	  tmp_addflag=' -nofor_main' ;;
-	lf95*)				# Lahey Fortran 8.1
-	  whole_archive_flag_spec_FC=
-	  tmp_sharedflag='--shared' ;;
-	xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below)
-	  tmp_sharedflag='-qmkshrobj'
-	  tmp_addflag= ;;
-	nvcc*)	# Cuda Compiler Driver 2.2
-	  whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  compiler_needs_object_FC=yes
-	  ;;
-	esac
-	case `$CC -V 2>&1 | sed 5q` in
-	*Sun\ C*)			# Sun C 5.9
-	  whole_archive_flag_spec_FC='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive'
-	  compiler_needs_object_FC=yes
-	  tmp_sharedflag='-G' ;;
-	*Sun\ F*)			# Sun Fortran 8.3
-	  tmp_sharedflag='-G' ;;
-	esac
-	archive_cmds_FC='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-
-        if test "x$supports_anon_versioning" = xyes; then
-          archive_expsym_cmds_FC='echo "{ global:" > $output_objdir/$libname.ver~
-	    cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
-	    echo "local: *; };" >> $output_objdir/$libname.ver~
-	    $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib'
-        fi
-
-	case $cc_basename in
-	xlf* | bgf* | bgxlf* | mpixlf*)
-	  # IBM XL Fortran 10.1 on PPC cannot create shared libs itself
-	  whole_archive_flag_spec_FC='--whole-archive$convenience --no-whole-archive'
-	  hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-	  archive_cmds_FC='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib'
-	  if test "x$supports_anon_versioning" = xyes; then
-	    archive_expsym_cmds_FC='echo "{ global:" > $output_objdir/$libname.ver~
-	      cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~
-	      echo "local: *; };" >> $output_objdir/$libname.ver~
-	      $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib'
-	  fi
-	  ;;
-	esac
-      else
-        ld_shlibs_FC=no
-      fi
-      ;;
-
-    netbsd*)
-      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-	archive_cmds_FC='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib'
-	wlarc=
-      else
-	archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      fi
-      ;;
-
-    solaris*)
-      if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then
-	ld_shlibs_FC=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: The releases 2.8.* of the GNU linker cannot reliably
-*** create shared libraries on Solaris systems.  Therefore, libtool
-*** is disabling shared libraries support.  We urge you to upgrade GNU
-*** binutils to release 2.9.1 or newer.  Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
-      elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      else
-	ld_shlibs_FC=no
-      fi
-      ;;
-
-    sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*)
-      case `$LD -v 2>&1` in
-        *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*)
-	ld_shlibs_FC=no
-	cat <<_LT_EOF 1>&2
-
-*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not
-*** reliably create shared libraries on SCO systems.  Therefore, libtool
-*** is disabling shared libraries support.  We urge you to upgrade GNU
-*** binutils to release 2.16.91.0.3 or newer.  Another option is to modify
-*** your PATH or compiler configuration so that the native linker is
-*** used, and then restart.
-
-_LT_EOF
-	;;
-	*)
-	  # For security reasons, it is highly recommended that you always
-	  # use absolute paths for naming shared libraries, and exclude the
-	  # DT_RUNPATH tag from executables and libraries.  But doing so
-	  # requires that you compile everything twice, which is a pain.
-	  if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	    hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-	    archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	    archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-	  else
-	    ld_shlibs_FC=no
-	  fi
-	;;
-      esac
-      ;;
-
-    sunos4*)
-      archive_cmds_FC='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-      wlarc=
-      hardcode_direct_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    *)
-      if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then
-	archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-	archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib'
-      else
-	ld_shlibs_FC=no
-      fi
-      ;;
-    esac
-
-    if test "$ld_shlibs_FC" = no; then
-      runpath_var=
-      hardcode_libdir_flag_spec_FC=
-      export_dynamic_flag_spec_FC=
-      whole_archive_flag_spec_FC=
-    fi
-  else
-    # PORTME fill in a description of your system's linker (not GNU ld)
-    case $host_os in
-    aix3*)
-      allow_undefined_flag_FC=unsupported
-      always_export_symbols_FC=yes
-      archive_expsym_cmds_FC='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname'
-      # Note: this linker hardcodes the directories in LIBPATH if there
-      # are no directories specified by -L.
-      hardcode_minus_L_FC=yes
-      if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then
-	# Neither direct hardcoding nor static linking is supported with a
-	# broken collect2.
-	hardcode_direct_FC=unsupported
-      fi
-      ;;
-
-    aix[4-9]*)
-      if test "$host_cpu" = ia64; then
-	# On IA64, the linker does run time linking by default, so we don't
-	# have to do anything special.
-	aix_use_runtimelinking=no
-	exp_sym_flag='-Bexport'
-	no_entry_flag=""
-      else
-	# If we're using GNU nm, then we don't want the "-C" option.
-	# -C means demangle to AIX nm, but means don't demangle with GNU nm
-	# Also, AIX nm treats weak defined symbols like other global
-	# defined symbols, whereas GNU nm marks them as "W".
-	if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then
-	  export_symbols_cmds_FC='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
-	else
-	  export_symbols_cmds_FC='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols'
-	fi
-	aix_use_runtimelinking=no
-
-	# Test if we are trying to use run time linking or normal
-	# AIX style linking. If -brtl is somewhere in LDFLAGS, we
-	# need to do runtime linking.
-	case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*)
-	  for ld_flag in $LDFLAGS; do
-	  if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then
-	    aix_use_runtimelinking=yes
-	    break
-	  fi
-	  done
-	  ;;
-	esac
-
-	exp_sym_flag='-bexport'
-	no_entry_flag='-bnoentry'
-      fi
-
-      # When large executables or shared objects are built, AIX ld can
-      # have problems creating the table of contents.  If linking a library
-      # or program results in "error TOC overflow" add -mminimal-toc to
-      # CXXFLAGS/CFLAGS for g++/gcc.  In the cases where that is not
-      # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS.
-
-      archive_cmds_FC=''
-      hardcode_direct_FC=yes
-      hardcode_direct_absolute_FC=yes
-      hardcode_libdir_separator_FC=':'
-      link_all_deplibs_FC=yes
-      file_list_spec_FC='${wl}-f,'
-
-      if test "$GCC" = yes; then
-	case $host_os in aix4.[012]|aix4.[012].*)
-	# We only want to do this on AIX 4.2 and lower, the check
-	# below for broken collect2 doesn't work under 4.3+
-	  collect2name=`${CC} -print-prog-name=collect2`
-	  if test -f "$collect2name" &&
-	   strings "$collect2name" | $GREP resolve_lib_name >/dev/null
-	  then
-	  # We have reworked collect2
-	  :
-	  else
-	  # We have old collect2
-	  hardcode_direct_FC=unsupported
-	  # It fails to find uninstalled libraries when the uninstalled
-	  # path is not listed in the libpath.  Setting hardcode_minus_L
-	  # to unsupported forces relinking
-	  hardcode_minus_L_FC=yes
-	  hardcode_libdir_flag_spec_FC='-L$libdir'
-	  hardcode_libdir_separator_FC=
-	  fi
-	  ;;
-	esac
-	shared_flag='-shared'
-	if test "$aix_use_runtimelinking" = yes; then
-	  shared_flag="$shared_flag "'${wl}-G'
-	fi
-      else
-	# not using gcc
-	if test "$host_cpu" = ia64; then
-	# VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release
-	# chokes on -Wl,-G. The following line is correct:
-	  shared_flag='-G'
-	else
-	  if test "$aix_use_runtimelinking" = yes; then
-	    shared_flag='${wl}-G'
-	  else
-	    shared_flag='${wl}-bM:SRE'
-	  fi
-	fi
-      fi
-
-      export_dynamic_flag_spec_FC='${wl}-bexpall'
-      # It seems that -bexpall does not export symbols beginning with
-      # underscore (_), so it is better to generate a list of symbols to export.
-      always_export_symbols_FC=yes
-      if test "$aix_use_runtimelinking" = yes; then
-	# Warning - without using the other runtime loading flags (-brtl),
-	# -berok will link without error, but may produce a broken library.
-	allow_undefined_flag_FC='-berok'
-        # Determine the default libpath from the value encoded in an
-        # empty executable.
-        if test "${lt_cv_aix_libpath+set}" = set; then
-  aix_libpath=$lt_cv_aix_libpath
-else
-  if ${lt_cv_aix_libpath__FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat > conftest.$ac_ext <<_ACEOF
-      program main
-
-      end
-_ACEOF
-if ac_fn_fc_try_link "$LINENO"; then :
-
-  lt_aix_libpath_sed='
-      /Import File Strings/,/^$/ {
-	  /^0/ {
-	      s/^0  *\([^ ]*\) *$/\1/
-	      p
-	  }
-      }'
-  lt_cv_aix_libpath__FC=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  # Check for a 64-bit object if we didn't find anything.
-  if test -z "$lt_cv_aix_libpath__FC"; then
-    lt_cv_aix_libpath__FC=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-  if test -z "$lt_cv_aix_libpath__FC"; then
-    lt_cv_aix_libpath__FC="/usr/lib:/lib"
-  fi
-
-fi
-
-  aix_libpath=$lt_cv_aix_libpath__FC
-fi
-
-        hardcode_libdir_flag_spec_FC='${wl}-blibpath:$libdir:'"$aix_libpath"
-        archive_expsym_cmds_FC='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag"
-      else
-	if test "$host_cpu" = ia64; then
-	  hardcode_libdir_flag_spec_FC='${wl}-R $libdir:/usr/lib:/lib'
-	  allow_undefined_flag_FC="-z nodefs"
-	  archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols"
-	else
-	 # Determine the default libpath from the value encoded in an
-	 # empty executable.
-	 if test "${lt_cv_aix_libpath+set}" = set; then
-  aix_libpath=$lt_cv_aix_libpath
-else
-  if ${lt_cv_aix_libpath__FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  cat > conftest.$ac_ext <<_ACEOF
-      program main
-
-      end
-_ACEOF
-if ac_fn_fc_try_link "$LINENO"; then :
-
-  lt_aix_libpath_sed='
-      /Import File Strings/,/^$/ {
-	  /^0/ {
-	      s/^0  *\([^ ]*\) *$/\1/
-	      p
-	  }
-      }'
-  lt_cv_aix_libpath__FC=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  # Check for a 64-bit object if we didn't find anything.
-  if test -z "$lt_cv_aix_libpath__FC"; then
-    lt_cv_aix_libpath__FC=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"`
-  fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-  if test -z "$lt_cv_aix_libpath__FC"; then
-    lt_cv_aix_libpath__FC="/usr/lib:/lib"
-  fi
-
-fi
-
-  aix_libpath=$lt_cv_aix_libpath__FC
-fi
-
-	 hardcode_libdir_flag_spec_FC='${wl}-blibpath:$libdir:'"$aix_libpath"
-	  # Warning - without using the other run time loading flags,
-	  # -berok will link without error, but may produce a broken library.
-	  no_undefined_flag_FC=' ${wl}-bernotok'
-	  allow_undefined_flag_FC=' ${wl}-berok'
-	  if test "$with_gnu_ld" = yes; then
-	    # We only use this code for GNU lds that support --whole-archive.
-	    whole_archive_flag_spec_FC='${wl}--whole-archive$convenience ${wl}--no-whole-archive'
-	  else
-	    # Exported symbols can be pulled into shared objects from archives
-	    whole_archive_flag_spec_FC='$convenience'
-	  fi
-	  archive_cmds_need_lc_FC=yes
-	  # This is similar to how AIX traditionally builds its shared libraries.
-	  archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname'
-	fi
-      fi
-      ;;
-
-    amigaos*)
-      case $host_cpu in
-      powerpc)
-            # see comment about AmigaOS4 .so support
-            archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib'
-            archive_expsym_cmds_FC=''
-        ;;
-      m68k)
-            archive_cmds_FC='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)'
-            hardcode_libdir_flag_spec_FC='-L$libdir'
-            hardcode_minus_L_FC=yes
-        ;;
-      esac
-      ;;
-
-    bsdi[45]*)
-      export_dynamic_flag_spec_FC=-rdynamic
-      ;;
-
-    cygwin* | mingw* | pw32* | cegcc*)
-      # When not using gcc, we currently assume that we are using
-      # Microsoft Visual C++.
-      # hardcode_libdir_flag_spec is actually meaningless, as there is
-      # no search path for DLLs.
-      case $cc_basename in
-      cl*)
-	# Native MSVC
-	hardcode_libdir_flag_spec_FC=' '
-	allow_undefined_flag_FC=unsupported
-	always_export_symbols_FC=yes
-	file_list_spec_FC='@'
-	# Tell ltmain to make .lib files, not .a files.
-	libext=lib
-	# Tell ltmain to make .dll files, not .so files.
-	shrext_cmds=".dll"
-	# FIXME: Setting linknames here is a bad hack.
-	archive_cmds_FC='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames='
-	archive_expsym_cmds_FC='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then
-	    sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp;
-	  else
-	    sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp;
-	  fi~
-	  $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~
-	  linknames='
-	# The linker will not automatically build a static lib if we build a DLL.
-	# _LT_TAGVAR(old_archive_from_new_cmds, FC)='true'
-	enable_shared_with_static_runtimes_FC=yes
-	exclude_expsyms_FC='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*'
-	export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols'
-	# Don't use ranlib
-	old_postinstall_cmds_FC='chmod 644 $oldlib'
-	postlink_cmds_FC='lt_outputfile="@OUTPUT@"~
-	  lt_tool_outputfile="@TOOL_OUTPUT@"~
-	  case $lt_outputfile in
-	    *.exe|*.EXE) ;;
-	    *)
-	      lt_outputfile="$lt_outputfile.exe"
-	      lt_tool_outputfile="$lt_tool_outputfile.exe"
-	      ;;
-	  esac~
-	  if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then
-	    $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1;
-	    $RM "$lt_outputfile.manifest";
-	  fi'
-	;;
-      *)
-	# Assume MSVC wrapper
-	hardcode_libdir_flag_spec_FC=' '
-	allow_undefined_flag_FC=unsupported
-	# Tell ltmain to make .lib files, not .a files.
-	libext=lib
-	# Tell ltmain to make .dll files, not .so files.
-	shrext_cmds=".dll"
-	# FIXME: Setting linknames here is a bad hack.
-	archive_cmds_FC='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames='
-	# The linker will automatically build a .lib file if we build a DLL.
-	old_archive_from_new_cmds_FC='true'
-	# FIXME: Should let the user specify the lib program.
-	old_archive_cmds_FC='lib -OUT:$oldlib$oldobjs$old_deplibs'
-	enable_shared_with_static_runtimes_FC=yes
-	;;
-      esac
-      ;;
-
-    darwin* | rhapsody*)
-
-
-  archive_cmds_need_lc_FC=no
-  hardcode_direct_FC=no
-  hardcode_automatic_FC=yes
-  hardcode_shlibpath_var_FC=unsupported
-  if test "$lt_cv_ld_force_load" = "yes"; then
-    whole_archive_flag_spec_FC='`for conv in $convenience\"\"; do test  -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`'
-    compiler_needs_object_FC=yes
-  else
-    whole_archive_flag_spec_FC=''
-  fi
-  link_all_deplibs_FC=yes
-  allow_undefined_flag_FC="$_lt_dar_allow_undefined"
-  case $cc_basename in
-     ifort*) _lt_dar_can_shared=yes ;;
-     *) _lt_dar_can_shared=$GCC ;;
-  esac
-  if test "$_lt_dar_can_shared" = "yes"; then
-    output_verbose_link_cmd=func_echo_all
-    archive_cmds_FC="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}"
-    module_cmds_FC="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}"
-    archive_expsym_cmds_FC="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}"
-    module_expsym_cmds_FC="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}"
-
-  else
-  ld_shlibs_FC=no
-  fi
-
-      ;;
-
-    dgux*)
-      archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_libdir_flag_spec_FC='-L$libdir'
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor
-    # support.  Future versions do this automatically, but an explicit c++rt0.o
-    # does not break anything, and helps significantly (at the cost of a little
-    # extra space).
-    freebsd2.2*)
-      archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o'
-      hardcode_libdir_flag_spec_FC='-R$libdir'
-      hardcode_direct_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    # Unfortunately, older versions of FreeBSD 2 do not have this feature.
-    freebsd2.*)
-      archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_direct_FC=yes
-      hardcode_minus_L_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    # FreeBSD 3 and greater uses gcc -shared to do shared libraries.
-    freebsd* | dragonfly*)
-      archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-      hardcode_libdir_flag_spec_FC='-R$libdir'
-      hardcode_direct_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    hpux9*)
-      if test "$GCC" = yes; then
-	archive_cmds_FC='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-      else
-	archive_cmds_FC='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib'
-      fi
-      hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir'
-      hardcode_libdir_separator_FC=:
-      hardcode_direct_FC=yes
-
-      # hardcode_minus_L: Not really in the search PATH,
-      # but as the default location of the library.
-      hardcode_minus_L_FC=yes
-      export_dynamic_flag_spec_FC='${wl}-E'
-      ;;
-
-    hpux10*)
-      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-	archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds_FC='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'
-      fi
-      if test "$with_gnu_ld" = no; then
-	hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir'
-	hardcode_libdir_separator_FC=:
-	hardcode_direct_FC=yes
-	hardcode_direct_absolute_FC=yes
-	export_dynamic_flag_spec_FC='${wl}-E'
-	# hardcode_minus_L: Not really in the search PATH,
-	# but as the default location of the library.
-	hardcode_minus_L_FC=yes
-      fi
-      ;;
-
-    hpux11*)
-      if test "$GCC" = yes && test "$with_gnu_ld" = no; then
-	case $host_cpu in
-	hppa*64*)
-	  archive_cmds_FC='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	ia64*)
-	  archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	*)
-	  archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	esac
-      else
-	case $host_cpu in
-	hppa*64*)
-	  archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	ia64*)
-	  archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	*)
-	archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags'
-	  ;;
-	esac
-      fi
-      if test "$with_gnu_ld" = no; then
-	hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir'
-	hardcode_libdir_separator_FC=:
-
-	case $host_cpu in
-	hppa*64*|ia64*)
-	  hardcode_direct_FC=no
-	  hardcode_shlibpath_var_FC=no
-	  ;;
-	*)
-	  hardcode_direct_FC=yes
-	  hardcode_direct_absolute_FC=yes
-	  export_dynamic_flag_spec_FC='${wl}-E'
-
-	  # hardcode_minus_L: Not really in the search PATH,
-	  # but as the default location of the library.
-	  hardcode_minus_L_FC=yes
-	  ;;
-	esac
-      fi
-      ;;
-
-    irix5* | irix6* | nonstopux*)
-      if test "$GCC" = yes; then
-	archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-	# Try to use the -exported_symbol ld option, if it does not
-	# work, assume that -exports_file does not work either and
-	# implicitly export all symbols.
-	# This should be the same for all languages, so no per-tag cache variable.
-	{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5
-$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; }
-if ${lt_cv_irix_exported_symbol+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  save_LDFLAGS="$LDFLAGS"
-	   LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null"
-	   cat > conftest.$ac_ext <<_ACEOF
-
-      subroutine foo
-      end
-_ACEOF
-if ac_fn_fc_try_link "$LINENO"; then :
-  lt_cv_irix_exported_symbol=yes
-else
-  lt_cv_irix_exported_symbol=no
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-           LDFLAGS="$save_LDFLAGS"
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5
-$as_echo "$lt_cv_irix_exported_symbol" >&6; }
-	if test "$lt_cv_irix_exported_symbol" = yes; then
-          archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib'
-	fi
-      else
-	archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-	archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib'
-      fi
-      archive_cmds_need_lc_FC='no'
-      hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator_FC=:
-      inherit_rpath_FC=yes
-      link_all_deplibs_FC=yes
-      ;;
-
-    netbsd*)
-      if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-	archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'  # a.out
-      else
-	archive_cmds_FC='$LD -shared -o $lib $libobjs $deplibs $linker_flags'      # ELF
-      fi
-      hardcode_libdir_flag_spec_FC='-R$libdir'
-      hardcode_direct_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    newsos6)
-      archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_direct_FC=yes
-      hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator_FC=:
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    *nto* | *qnx*)
-      ;;
-
-    openbsd*)
-      if test -f /usr/libexec/ld.so; then
-	hardcode_direct_FC=yes
-	hardcode_shlibpath_var_FC=no
-	hardcode_direct_absolute_FC=yes
-	if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
-	  archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-	  archive_expsym_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols'
-	  hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir'
-	  export_dynamic_flag_spec_FC='${wl}-E'
-	else
-	  case $host_os in
-	   openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*)
-	     archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags'
-	     hardcode_libdir_flag_spec_FC='-R$libdir'
-	     ;;
-	   *)
-	     archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags'
-	     hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir'
-	     ;;
-	  esac
-	fi
-      else
-	ld_shlibs_FC=no
-      fi
-      ;;
-
-    os2*)
-      hardcode_libdir_flag_spec_FC='-L$libdir'
-      hardcode_minus_L_FC=yes
-      allow_undefined_flag_FC=unsupported
-      archive_cmds_FC='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def'
-      old_archive_from_new_cmds_FC='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def'
-      ;;
-
-    osf3*)
-      if test "$GCC" = yes; then
-	allow_undefined_flag_FC=' ${wl}-expect_unresolved ${wl}\*'
-	archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-      else
-	allow_undefined_flag_FC=' -expect_unresolved \*'
-	archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-      fi
-      archive_cmds_need_lc_FC='no'
-      hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-      hardcode_libdir_separator_FC=:
-      ;;
-
-    osf4* | osf5*)	# as osf3* with the addition of -msym flag
-      if test "$GCC" = yes; then
-	allow_undefined_flag_FC=' ${wl}-expect_unresolved ${wl}\*'
-	archive_cmds_FC='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib'
-	hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir'
-      else
-	allow_undefined_flag_FC=' -expect_unresolved \*'
-	archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib'
-	archive_expsym_cmds_FC='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~
-	$CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp'
-
-	# Both c and cxx compiler support -rpath directly
-	hardcode_libdir_flag_spec_FC='-rpath $libdir'
-      fi
-      archive_cmds_need_lc_FC='no'
-      hardcode_libdir_separator_FC=:
-      ;;
-
-    solaris*)
-      no_undefined_flag_FC=' -z defs'
-      if test "$GCC" = yes; then
-	wlarc='${wl}'
-	archive_cmds_FC='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-      else
-	case `$CC -V 2>&1` in
-	*"Compilers 5.0"*)
-	  wlarc=''
-	  archive_cmds_FC='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp'
-	  ;;
-	*)
-	  wlarc='${wl}'
-	  archive_cmds_FC='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags'
-	  archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~
-	  $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp'
-	  ;;
-	esac
-      fi
-      hardcode_libdir_flag_spec_FC='-R$libdir'
-      hardcode_shlibpath_var_FC=no
-      case $host_os in
-      solaris2.[0-5] | solaris2.[0-5].*) ;;
-      *)
-	# The compiler driver will combine and reorder linker options,
-	# but understands `-z linker_flag'.  GCC discards it without `$wl',
-	# but is careful enough not to reorder.
-	# Supported since Solaris 2.6 (maybe 2.5.1?)
-	if test "$GCC" = yes; then
-	  whole_archive_flag_spec_FC='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract'
-	else
-	  whole_archive_flag_spec_FC='-z allextract$convenience -z defaultextract'
-	fi
-	;;
-      esac
-      link_all_deplibs_FC=yes
-      ;;
-
-    sunos4*)
-      if test "x$host_vendor" = xsequent; then
-	# Use $CC to link under sequent, because it throws in some extra .o
-	# files that make .init and .fini sections work.
-	archive_cmds_FC='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds_FC='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags'
-      fi
-      hardcode_libdir_flag_spec_FC='-L$libdir'
-      hardcode_direct_FC=yes
-      hardcode_minus_L_FC=yes
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    sysv4)
-      case $host_vendor in
-	sni)
-	  archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  hardcode_direct_FC=yes # is this really true???
-	;;
-	siemens)
-	  ## LD is ld it makes a PLAMLIB
-	  ## CC just makes a GrossModule.
-	  archive_cmds_FC='$LD -G -o $lib $libobjs $deplibs $linker_flags'
-	  reload_cmds_FC='$CC -r -o $output$reload_objs'
-	  hardcode_direct_FC=no
-        ;;
-	motorola)
-	  archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	  hardcode_direct_FC=no #Motorola manual says yes, but my tests say they lie
-	;;
-      esac
-      runpath_var='LD_RUN_PATH'
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    sysv4.3*)
-      archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_shlibpath_var_FC=no
-      export_dynamic_flag_spec_FC='-Bexport'
-      ;;
-
-    sysv4*MP*)
-      if test -d /usr/nec; then
-	archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-	hardcode_shlibpath_var_FC=no
-	runpath_var=LD_RUN_PATH
-	hardcode_runpath_var=yes
-	ld_shlibs_FC=yes
-      fi
-      ;;
-
-    sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*)
-      no_undefined_flag_FC='${wl}-z,text'
-      archive_cmds_need_lc_FC=no
-      hardcode_shlibpath_var_FC=no
-      runpath_var='LD_RUN_PATH'
-
-      if test "$GCC" = yes; then
-	archive_cmds_FC='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds_FC='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds_FC='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds_FC='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      fi
-      ;;
-
-    sysv5* | sco3.2v5* | sco5v6*)
-      # Note: We can NOT use -z defs as we might desire, because we do not
-      # link with -lc, and that would cause any symbols used from libc to
-      # always be unresolved, which means just about no library would
-      # ever link correctly.  If we're not using GNU ld we use -z text
-      # though, which does catch some bad symbols but isn't as heavy-handed
-      # as -z defs.
-      no_undefined_flag_FC='${wl}-z,text'
-      allow_undefined_flag_FC='${wl}-z,nodefs'
-      archive_cmds_need_lc_FC=no
-      hardcode_shlibpath_var_FC=no
-      hardcode_libdir_flag_spec_FC='${wl}-R,$libdir'
-      hardcode_libdir_separator_FC=':'
-      link_all_deplibs_FC=yes
-      export_dynamic_flag_spec_FC='${wl}-Bexport'
-      runpath_var='LD_RUN_PATH'
-
-      if test "$GCC" = yes; then
-	archive_cmds_FC='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds_FC='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      else
-	archive_cmds_FC='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-	archive_expsym_cmds_FC='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags'
-      fi
-      ;;
-
-    uts4*)
-      archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags'
-      hardcode_libdir_flag_spec_FC='-L$libdir'
-      hardcode_shlibpath_var_FC=no
-      ;;
-
-    *)
-      ld_shlibs_FC=no
-      ;;
-    esac
-
-    if test x$host_vendor = xsni; then
-      case $host in
-      sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*)
-	export_dynamic_flag_spec_FC='${wl}-Blargedynsym'
-	;;
-      esac
-    fi
-  fi
-
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_FC" >&5
-$as_echo "$ld_shlibs_FC" >&6; }
-test "$ld_shlibs_FC" = no && can_build_shared=no
-
-with_gnu_ld_FC=$with_gnu_ld
-
-
-
-
-
-
-#
-# Do we need to explicitly link libc?
-#
-case "x$archive_cmds_need_lc_FC" in
-x|xyes)
-  # Assume -lc should be added
-  archive_cmds_need_lc_FC=yes
-
-  if test "$enable_shared" = yes && test "$GCC" = yes; then
-    case $archive_cmds_FC in
-    *'~'*)
-      # FIXME: we may have to deal with multi-command sequences.
-      ;;
-    '$CC '*)
-      # Test whether the compiler implicitly links with -lc since on some
-      # systems, -lgcc has to come before -lc. If gcc already passes -lc
-      # to ld, don't add -lc before -lgcc.
-      { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5
-$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; }
-if ${lt_cv_archive_cmds_need_lc_FC+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  $RM conftest*
-	echo "$lt_simple_compile_test_code" > conftest.$ac_ext
-
-	if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5
-  (eval $ac_compile) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; } 2>conftest.err; then
-	  soname=conftest
-	  lib=conftest
-	  libobjs=conftest.$ac_objext
-	  deplibs=
-	  wl=$lt_prog_compiler_wl_FC
-	  pic_flag=$lt_prog_compiler_pic_FC
-	  compiler_flags=-v
-	  linker_flags=-v
-	  verstring=
-	  output_objdir=.
-	  libname=conftest
-	  lt_save_allow_undefined_flag=$allow_undefined_flag_FC
-	  allow_undefined_flag_FC=
-	  if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds_FC 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5
-  (eval $archive_cmds_FC 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5
-  ac_status=$?
-  $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5
-  test $ac_status = 0; }
-	  then
-	    lt_cv_archive_cmds_need_lc_FC=no
-	  else
-	    lt_cv_archive_cmds_need_lc_FC=yes
-	  fi
-	  allow_undefined_flag_FC=$lt_save_allow_undefined_flag
-	else
-	  cat conftest.err 1>&5
-	fi
-	$RM conftest*
-
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc_FC" >&5
-$as_echo "$lt_cv_archive_cmds_need_lc_FC" >&6; }
-      archive_cmds_need_lc_FC=$lt_cv_archive_cmds_need_lc_FC
-      ;;
-    esac
-  fi
-  ;;
-esac
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5
-$as_echo_n "checking dynamic linker characteristics... " >&6; }
-
-library_names_spec=
-libname_spec='lib$name'
-soname_spec=
-shrext_cmds=".so"
-postinstall_cmds=
-postuninstall_cmds=
-finish_cmds=
-finish_eval=
-shlibpath_var=
-shlibpath_overrides_runpath=unknown
-version_type=none
-dynamic_linker="$host_os ld.so"
-sys_lib_dlsearch_path_spec="/lib /usr/lib"
-need_lib_prefix=unknown
-hardcode_into_libs=no
-
-# when you set need_version to no, make sure it does not cause -set_version
-# flags to be left without arguments
-need_version=unknown
-
-case $host_os in
-aix3*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a'
-  shlibpath_var=LIBPATH
-
-  # AIX 3 has no versioning support, so we append a major version to the name.
-  soname_spec='${libname}${release}${shared_ext}$major'
-  ;;
-
-aix[4-9]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  hardcode_into_libs=yes
-  if test "$host_cpu" = ia64; then
-    # AIX 5 supports IA64
-    library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}'
-    shlibpath_var=LD_LIBRARY_PATH
-  else
-    # With GCC up to 2.95.x, collect2 would create an import file
-    # for dependence libraries.  The import file would start with
-    # the line `#! .'.  This would cause the generated library to
-    # depend on `.', always an invalid library.  This was fixed in
-    # development snapshots of GCC prior to 3.0.
-    case $host_os in
-      aix4 | aix4.[01] | aix4.[01].*)
-      if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)'
-	   echo ' yes '
-	   echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then
-	:
-      else
-	can_build_shared=no
-      fi
-      ;;
-    esac
-    # AIX (on Power*) has no versioning support, so currently we can not hardcode correct
-    # soname into executable. Probably we can add versioning support to
-    # collect2, so additional links can be useful in future.
-    if test "$aix_use_runtimelinking" = yes; then
-      # If using run time linking (on AIX 4.2 or later) use lib<name>.so
-      # instead of lib<name>.a to let people know that these are not
-      # typical AIX shared libraries.
-      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    else
-      # We preserve .a as extension for shared libraries through AIX4.2
-      # and later when we are not doing run time linking.
-      library_names_spec='${libname}${release}.a $libname.a'
-      soname_spec='${libname}${release}${shared_ext}$major'
-    fi
-    shlibpath_var=LIBPATH
-  fi
-  ;;
-
-amigaos*)
-  case $host_cpu in
-  powerpc)
-    # Since July 2007 AmigaOS4 officially supports .so libraries.
-    # When compiling the executable, add -use-dynld -Lsobjs: to the compileline.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    ;;
-  m68k)
-    library_names_spec='$libname.ixlibrary $libname.a'
-    # Create ${libname}_ixlibrary.a entries in /sys/libs.
-    finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done'
-    ;;
-  esac
-  ;;
-
-beos*)
-  library_names_spec='${libname}${shared_ext}'
-  dynamic_linker="$host_os ld.so"
-  shlibpath_var=LIBRARY_PATH
-  ;;
-
-bsdi[45]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib"
-  sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib"
-  # the default ld.so.conf also contains /usr/contrib/lib and
-  # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow
-  # libtool to hard-code these into programs
-  ;;
-
-cygwin* | mingw* | pw32* | cegcc*)
-  version_type=windows
-  shrext_cmds=".dll"
-  need_version=no
-  need_lib_prefix=no
-
-  case $GCC,$cc_basename in
-  yes,*)
-    # gcc
-    library_names_spec='$libname.dll.a'
-    # DLL is installed to $(libdir)/../bin by postinstall_cmds
-    postinstall_cmds='base_file=`basename \${file}`~
-      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-      dldir=$destdir/`dirname \$dlpath`~
-      test -d \$dldir || mkdir -p \$dldir~
-      $install_prog $dir/$dlname \$dldir/$dlname~
-      chmod a+x \$dldir/$dlname~
-      if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then
-        eval '\''$striplib \$dldir/$dlname'\'' || exit \$?;
-      fi'
-    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-      dlpath=$dir/\$dldll~
-       $RM \$dlpath'
-    shlibpath_overrides_runpath=yes
-
-    case $host_os in
-    cygwin*)
-      # Cygwin DLLs use 'cyg' prefix rather than 'lib'
-      soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-
-      ;;
-    mingw* | cegcc*)
-      # MinGW DLLs use traditional 'lib' prefix
-      soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-      ;;
-    pw32*)
-      # pw32 DLLs use 'pw' prefix rather than 'lib'
-      library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-      ;;
-    esac
-    dynamic_linker='Win32 ld.exe'
-    ;;
-
-  *,cl*)
-    # Native MSVC
-    libname_spec='$name'
-    soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}'
-    library_names_spec='${libname}.dll.lib'
-
-    case $build_os in
-    mingw*)
-      sys_lib_search_path_spec=
-      lt_save_ifs=$IFS
-      IFS=';'
-      for lt_path in $LIB
-      do
-        IFS=$lt_save_ifs
-        # Let DOS variable expansion print the short 8.3 style file name.
-        lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"`
-        sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path"
-      done
-      IFS=$lt_save_ifs
-      # Convert to MSYS style.
-      sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'`
-      ;;
-    cygwin*)
-      # Convert to unix form, then to dos form, then back to unix form
-      # but this time dos style (no spaces!) so that the unix form looks
-      # like /cygdrive/c/PROGRA~1:/cygdr...
-      sys_lib_search_path_spec=`cygpath --path --unix "$LIB"`
-      sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null`
-      sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-      ;;
-    *)
-      sys_lib_search_path_spec="$LIB"
-      if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then
-        # It is most probably a Windows format PATH.
-        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'`
-      else
-        sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"`
-      fi
-      # FIXME: find the short name or the path components, as spaces are
-      # common. (e.g. "Program Files" -> "PROGRA~1")
-      ;;
-    esac
-
-    # DLL is installed to $(libdir)/../bin by postinstall_cmds
-    postinstall_cmds='base_file=`basename \${file}`~
-      dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~
-      dldir=$destdir/`dirname \$dlpath`~
-      test -d \$dldir || mkdir -p \$dldir~
-      $install_prog $dir/$dlname \$dldir/$dlname'
-    postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~
-      dlpath=$dir/\$dldll~
-       $RM \$dlpath'
-    shlibpath_overrides_runpath=yes
-    dynamic_linker='Win32 link.exe'
-    ;;
-
-  *)
-    # Assume MSVC wrapper
-    library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib'
-    dynamic_linker='Win32 ld.exe'
-    ;;
-  esac
-  # FIXME: first we should search . and the directory the executable is in
-  shlibpath_var=PATH
-  ;;
-
-darwin* | rhapsody*)
-  dynamic_linker="$host_os dyld"
-  version_type=darwin
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext'
-  soname_spec='${libname}${release}${major}$shared_ext'
-  shlibpath_overrides_runpath=yes
-  shlibpath_var=DYLD_LIBRARY_PATH
-  shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`'
-
-  sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib'
-  ;;
-
-dgux*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  ;;
-
-freebsd* | dragonfly*)
-  # DragonFly does not have aout.  When/if they implement a new
-  # versioning mechanism, adjust this.
-  if test -x /usr/bin/objformat; then
-    objformat=`/usr/bin/objformat`
-  else
-    case $host_os in
-    freebsd[23].*) objformat=aout ;;
-    *) objformat=elf ;;
-    esac
-  fi
-  version_type=freebsd-$objformat
-  case $version_type in
-    freebsd-elf*)
-      library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
-      need_version=no
-      need_lib_prefix=no
-      ;;
-    freebsd-*)
-      library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix'
-      need_version=yes
-      ;;
-  esac
-  shlibpath_var=LD_LIBRARY_PATH
-  case $host_os in
-  freebsd2.*)
-    shlibpath_overrides_runpath=yes
-    ;;
-  freebsd3.[01]* | freebsdelf3.[01]*)
-    shlibpath_overrides_runpath=yes
-    hardcode_into_libs=yes
-    ;;
-  freebsd3.[2-9]* | freebsdelf3.[2-9]* | \
-  freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1)
-    shlibpath_overrides_runpath=no
-    hardcode_into_libs=yes
-    ;;
-  *) # from 4.6 on, and DragonFly
-    shlibpath_overrides_runpath=yes
-    hardcode_into_libs=yes
-    ;;
-  esac
-  ;;
-
-gnu*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-haiku*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  dynamic_linker="$host_os runtime_loader"
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib'
-  hardcode_into_libs=yes
-  ;;
-
-hpux9* | hpux10* | hpux11*)
-  # Give a soname corresponding to the major version so that dld.sl refuses to
-  # link against other versions.
-  version_type=sunos
-  need_lib_prefix=no
-  need_version=no
-  case $host_cpu in
-  ia64*)
-    shrext_cmds='.so'
-    hardcode_into_libs=yes
-    dynamic_linker="$host_os dld.so"
-    shlibpath_var=LD_LIBRARY_PATH
-    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    if test "X$HPUX_IA64_MODE" = X32; then
-      sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib"
-    else
-      sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64"
-    fi
-    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
-    ;;
-  hppa*64*)
-    shrext_cmds='.sl'
-    hardcode_into_libs=yes
-    dynamic_linker="$host_os dld.sl"
-    shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH
-    shlibpath_overrides_runpath=yes # Unless +noenvvar is specified.
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64"
-    sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec
-    ;;
-  *)
-    shrext_cmds='.sl'
-    dynamic_linker="$host_os dld.sl"
-    shlibpath_var=SHLIB_PATH
-    shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    ;;
-  esac
-  # HP-UX runs *really* slowly unless shared libraries are mode 555, ...
-  postinstall_cmds='chmod 555 $lib'
-  # or fails outright, so override atomically:
-  install_override_mode=555
-  ;;
-
-interix[3-9]*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-irix5* | irix6* | nonstopux*)
-  case $host_os in
-    nonstopux*) version_type=nonstopux ;;
-    *)
-	if test "$lt_cv_prog_gnu_ld" = yes; then
-		version_type=linux # correct to gnu/linux during the next big refactor
-	else
-		version_type=irix
-	fi ;;
-  esac
-  need_lib_prefix=no
-  need_version=no
-  soname_spec='${libname}${release}${shared_ext}$major'
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}'
-  case $host_os in
-  irix5* | nonstopux*)
-    libsuff= shlibsuff=
-    ;;
-  *)
-    case $LD in # libtool.m4 will add one of these switches to LD
-    *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ")
-      libsuff= shlibsuff= libmagic=32-bit;;
-    *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ")
-      libsuff=32 shlibsuff=N32 libmagic=N32;;
-    *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ")
-      libsuff=64 shlibsuff=64 libmagic=64-bit;;
-    *) libsuff= shlibsuff= libmagic=never-match;;
-    esac
-    ;;
-  esac
-  shlibpath_var=LD_LIBRARY${shlibsuff}_PATH
-  shlibpath_overrides_runpath=no
-  sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}"
-  sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}"
-  hardcode_into_libs=yes
-  ;;
-
-# No shared lib support for Linux oldld, aout, or coff.
-linux*oldld* | linux*aout* | linux*coff*)
-  dynamic_linker=no
-  ;;
-
-# This must be glibc/ELF.
-linux* | k*bsd*-gnu | kopensolaris*-gnu)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-
-  # Some binutils ld are patched to set DT_RUNPATH
-  if ${lt_cv_shlibpath_overrides_runpath+:} false; then :
-  $as_echo_n "(cached) " >&6
-else
-  lt_cv_shlibpath_overrides_runpath=no
-    save_LDFLAGS=$LDFLAGS
-    save_libdir=$libdir
-    eval "libdir=/foo; wl=\"$lt_prog_compiler_wl_FC\"; \
-	 LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec_FC\""
-    cat > conftest.$ac_ext <<_ACEOF
-      program main
-
-      end
-_ACEOF
-if ac_fn_fc_try_link "$LINENO"; then :
-  if  ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then :
-  lt_cv_shlibpath_overrides_runpath=yes
-fi
-fi
-rm -f core conftest.err conftest.$ac_objext \
-    conftest$ac_exeext conftest.$ac_ext
-    LDFLAGS=$save_LDFLAGS
-    libdir=$save_libdir
-
-fi
-
-  shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath
-
-  # This implies no fast_install, which is unacceptable.
-  # Some rework will be needed to allow for fast_install
-  # before this can be enabled.
-  hardcode_into_libs=yes
-
-  # Append ld.so.conf contents to the search path
-  if test -f /etc/ld.so.conf; then
-    lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[	 ]*hwcap[	 ]/d;s/[:,	]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '`
-    sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra"
-  fi
-
-  # We used to test for /lib/ld.so.1 and disable shared libraries on
-  # powerpc, because MkLinux only supported shared libraries with the
-  # GNU dynamic linker.  Since this was broken with cross compilers,
-  # most powerpc-linux boxes support dynamic linking these days and
-  # people can always --disable-shared, the test was removed, and we
-  # assume the GNU/Linux dynamic linker is in use.
-  dynamic_linker='GNU/Linux ld.so'
-  ;;
-
-netbsd*)
-  version_type=sunos
-  need_lib_prefix=no
-  need_version=no
-  if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-    finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
-    dynamic_linker='NetBSD (a.out) ld.so'
-  else
-    library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}'
-    soname_spec='${libname}${release}${shared_ext}$major'
-    dynamic_linker='NetBSD ld.elf_so'
-  fi
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  ;;
-
-newsos6)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  ;;
-
-*nto* | *qnx*)
-  version_type=qnx
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  dynamic_linker='ldqnx.so'
-  ;;
-
-openbsd*)
-  version_type=sunos
-  sys_lib_dlsearch_path_spec="/usr/lib"
-  need_lib_prefix=no
-  # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs.
-  case $host_os in
-    openbsd3.3 | openbsd3.3.*)	need_version=yes ;;
-    *)				need_version=no  ;;
-  esac
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-  finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then
-    case $host_os in
-      openbsd2.[89] | openbsd2.[89].*)
-	shlibpath_overrides_runpath=no
-	;;
-      *)
-	shlibpath_overrides_runpath=yes
-	;;
-      esac
-  else
-    shlibpath_overrides_runpath=yes
-  fi
-  ;;
-
-os2*)
-  libname_spec='$name'
-  shrext_cmds=".dll"
-  need_lib_prefix=no
-  library_names_spec='$libname${shared_ext} $libname.a'
-  dynamic_linker='OS/2 ld.exe'
-  shlibpath_var=LIBPATH
-  ;;
-
-osf3* | osf4* | osf5*)
-  version_type=osf
-  need_lib_prefix=no
-  need_version=no
-  soname_spec='${libname}${release}${shared_ext}$major'
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib"
-  sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec"
-  ;;
-
-rdos*)
-  dynamic_linker=no
-  ;;
-
-solaris*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  # ldd complains unless libraries are executable
-  postinstall_cmds='chmod +x $lib'
-  ;;
-
-sunos4*)
-  version_type=sunos
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix'
-  finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  if test "$with_gnu_ld" = yes; then
-    need_lib_prefix=no
-  fi
-  need_version=yes
-  ;;
-
-sysv4 | sysv4.3*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  case $host_vendor in
-    sni)
-      shlibpath_overrides_runpath=no
-      need_lib_prefix=no
-      runpath_var=LD_RUN_PATH
-      ;;
-    siemens)
-      need_lib_prefix=no
-      ;;
-    motorola)
-      need_lib_prefix=no
-      need_version=no
-      shlibpath_overrides_runpath=no
-      sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib'
-      ;;
-  esac
-  ;;
-
-sysv4*MP*)
-  if test -d /usr/nec ;then
-    version_type=linux # correct to gnu/linux during the next big refactor
-    library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}'
-    soname_spec='$libname${shared_ext}.$major'
-    shlibpath_var=LD_LIBRARY_PATH
-  fi
-  ;;
-
-sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*)
-  version_type=freebsd-elf
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=yes
-  hardcode_into_libs=yes
-  if test "$with_gnu_ld" = yes; then
-    sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib'
-  else
-    sys_lib_search_path_spec='/usr/ccs/lib /usr/lib'
-    case $host_os in
-      sco3.2v5*)
-        sys_lib_search_path_spec="$sys_lib_search_path_spec /lib"
-	;;
-    esac
-  fi
-  sys_lib_dlsearch_path_spec='/usr/lib'
-  ;;
-
-tpf*)
-  # TPF is a cross-target only.  Preferred cross-host = GNU/Linux.
-  version_type=linux # correct to gnu/linux during the next big refactor
-  need_lib_prefix=no
-  need_version=no
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  shlibpath_var=LD_LIBRARY_PATH
-  shlibpath_overrides_runpath=no
-  hardcode_into_libs=yes
-  ;;
-
-uts4*)
-  version_type=linux # correct to gnu/linux during the next big refactor
-  library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}'
-  soname_spec='${libname}${release}${shared_ext}$major'
-  shlibpath_var=LD_LIBRARY_PATH
-  ;;
-
-*)
-  dynamic_linker=no
-  ;;
-esac
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5
-$as_echo "$dynamic_linker" >&6; }
-test "$dynamic_linker" = no && can_build_shared=no
-
-variables_saved_for_relink="PATH $shlibpath_var $runpath_var"
-if test "$GCC" = yes; then
-  variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH"
-fi
-
-if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then
-  sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec"
-fi
-if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then
-  sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec"
-fi
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5
-$as_echo_n "checking how to hardcode library paths into programs... " >&6; }
-hardcode_action_FC=
-if test -n "$hardcode_libdir_flag_spec_FC" ||
-   test -n "$runpath_var_FC" ||
-   test "X$hardcode_automatic_FC" = "Xyes" ; then
-
-  # We can hardcode non-existent directories.
-  if test "$hardcode_direct_FC" != no &&
-     # If the only mechanism to avoid hardcoding is shlibpath_var, we
-     # have to relink, otherwise we might link with an installed library
-     # when we should be linking with a yet-to-be-installed one
-     ## test "$_LT_TAGVAR(hardcode_shlibpath_var, FC)" != no &&
-     test "$hardcode_minus_L_FC" != no; then
-    # Linking always hardcodes the temporary library directory.
-    hardcode_action_FC=relink
-  else
-    # We can link without hardcoding, and we can hardcode nonexisting dirs.
-    hardcode_action_FC=immediate
-  fi
-else
-  # We cannot hardcode anything, or else we can only hardcode existing
-  # directories.
-  hardcode_action_FC=unsupported
-fi
-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_FC" >&5
-$as_echo "$hardcode_action_FC" >&6; }
-
-if test "$hardcode_action_FC" = relink ||
-   test "$inherit_rpath_FC" = yes; then
-  # Fast installation is not supported
-  enable_fast_install=no
-elif test "$shlibpath_overrides_runpath" = yes ||
-     test "$enable_shared" = no; then
-  # Fast installation is not necessary
-  enable_fast_install=needless
-fi
-
-
-
-
-
-
-
-  fi # test -n "$compiler"
-
-  GCC=$lt_save_GCC
-  CC=$lt_save_CC
-  CFLAGS=$lt_save_CFLAGS
-fi # test "$_lt_disable_FC" != yes
-
-ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-
-
-
-
-
-
-
-
-
-        ac_config_commands="$ac_config_commands libtool"
-
-
-
-
-# Only expand once:
-
-
-
-ac_config_files="$ac_config_files Makefile lib/Makefile lib/xmlFailures/Makefile lib/xmlSuccesses/Makefile prog/Makefile"
-
-cat >confcache <<\_ACEOF
-# This file is a shell script that caches the results of configure
-# tests run on this system so they can be shared between configure
-# scripts and configure runs, see configure's option --config-cache.
-# It is not useful on other systems.  If it contains results you don't
-# want to keep, you may remove or edit it.
-#
-# config.status only pays attention to the cache file if you give it
-# the --recheck option to rerun configure.
-#
-# `ac_cv_env_foo' variables (set or unset) will be overridden when
-# loading this file, other *unset* `ac_cv_foo' will be assigned the
-# following values.
-
-_ACEOF
-
-# The following way of writing the cache mishandles newlines in values,
-# but we know of no workaround that is simple, portable, and efficient.
-# So, we kill variables containing newlines.
-# Ultrix sh set writes to stderr and can't be redirected directly,
-# and sets the high bit in the cache file unless we assign to the vars.
-(
-  for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do
-    eval ac_val=\$$ac_var
-    case $ac_val in #(
-    *${as_nl}*)
-      case $ac_var in #(
-      *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5
-$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;;
-      esac
-      case $ac_var in #(
-      _ | IFS | as_nl) ;; #(
-      BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #(
-      *) { eval $ac_var=; unset $ac_var;} ;;
-      esac ;;
-    esac
-  done
-
-  (set) 2>&1 |
-    case $as_nl`(ac_space=' '; set) 2>&1` in #(
-    *${as_nl}ac_space=\ *)
-      # `set' does not quote correctly, so add quotes: double-quote
-      # substitution turns \\\\ into \\, and sed turns \\ into \.
-      sed -n \
-	"s/'/'\\\\''/g;
-	  s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p"
-      ;; #(
-    *)
-      # `set' quotes correctly as required by POSIX, so do not add quotes.
-      sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p"
-      ;;
-    esac |
-    sort
-) |
-  sed '
-     /^ac_cv_env_/b end
-     t clear
-     :clear
-     s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/
-     t end
-     s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/
-     :end' >>confcache
-if diff "$cache_file" confcache >/dev/null 2>&1; then :; else
-  if test -w "$cache_file"; then
-    if test "x$cache_file" != "x/dev/null"; then
-      { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5
-$as_echo "$as_me: updating cache $cache_file" >&6;}
-      if test ! -f "$cache_file" || test -h "$cache_file"; then
-	cat confcache >"$cache_file"
-      else
-        case $cache_file in #(
-        */* | ?:*)
-	  mv -f confcache "$cache_file"$$ &&
-	  mv -f "$cache_file"$$ "$cache_file" ;; #(
-        *)
-	  mv -f confcache "$cache_file" ;;
-	esac
-      fi
-    fi
-  else
-    { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5
-$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;}
-  fi
-fi
-rm -f confcache
-
-test "x$prefix" = xNONE && prefix=$ac_default_prefix
-# Let make expand exec_prefix.
-test "x$exec_prefix" = xNONE && exec_prefix='${prefix}'
-
-DEFS=-DHAVE_CONFIG_H
-
-ac_libobjs=
-ac_ltlibobjs=
-U=
-for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue
-  # 1. Remove the extension, and $U if already installed.
-  ac_script='s/\$U\././;s/\.o$//;s/\.obj$//'
-  ac_i=`$as_echo "$ac_i" | sed "$ac_script"`
-  # 2. Prepend LIBOBJDIR.  When used with automake>=1.10 LIBOBJDIR
-  #    will be set to the directory where LIBOBJS objects are built.
-  as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext"
-  as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo'
-done
-LIBOBJS=$ac_libobjs
-
-LTLIBOBJS=$ac_ltlibobjs
-
-
- if test -n "$EXEEXT"; then
-  am__EXEEXT_TRUE=
-  am__EXEEXT_FALSE='#'
-else
-  am__EXEEXT_TRUE='#'
-  am__EXEEXT_FALSE=
-fi
-
-if test -z "${DEBUG_TRUE}" && test -z "${DEBUG_FALSE}"; then
-  as_fn_error $? "conditional \"DEBUG\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${ENABLE_UDUNITS_1_TRUE}" && test -z "${ENABLE_UDUNITS_1_FALSE}"; then
-  as_fn_error $? "conditional \"ENABLE_UDUNITS_1\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${ENABLE_UDUNITS_1_TRUE}" && test -z "${ENABLE_UDUNITS_1_FALSE}"; then
-  as_fn_error $? "conditional \"ENABLE_UDUNITS_1\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then
-  as_fn_error $? "conditional \"AMDEP\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then
-  as_fn_error $? "conditional \"am__fastdepCC\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-if test -z "${HAVE_CUNIT_TRUE}" && test -z "${HAVE_CUNIT_FALSE}"; then
-  as_fn_error $? "conditional \"HAVE_CUNIT\" was never defined.
-Usually this means the macro was only invoked conditionally." "$LINENO" 5
-fi
-
-: "${CONFIG_STATUS=./config.status}"
-ac_write_fail=0
-ac_clean_files_save=$ac_clean_files
-ac_clean_files="$ac_clean_files $CONFIG_STATUS"
-{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5
-$as_echo "$as_me: creating $CONFIG_STATUS" >&6;}
-as_write_fail=0
-cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1
-#! $SHELL
-# Generated by $as_me.
-# Run this file to recreate the current configuration.
-# Compiler output produced by configure, useful for debugging
-# configure, is in config.log if it exists.
-
-debug=false
-ac_cs_recheck=false
-ac_cs_silent=false
-
-SHELL=\${CONFIG_SHELL-$SHELL}
-export SHELL
-_ASEOF
-cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1
-## -------------------- ##
-## M4sh Initialization. ##
-## -------------------- ##
-
-# Be more Bourne compatible
-DUALCASE=1; export DUALCASE # for MKS sh
-if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then :
-  emulate sh
-  NULLCMD=:
-  # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which
-  # is contrary to our usage.  Disable this feature.
-  alias -g '${1+"$@"}'='"$@"'
-  setopt NO_GLOB_SUBST
-else
-  case `(set -o) 2>/dev/null` in #(
-  *posix*) :
-    set -o posix ;; #(
-  *) :
-     ;;
-esac
-fi
-
-
-as_nl='
-'
-export as_nl
-# Printing a long string crashes Solaris 7 /usr/bin/printf.
-as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\'
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo
-as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo
-# Prefer a ksh shell builtin over an external printf program on Solaris,
-# but without wasting forks for bash or zsh.
-if test -z "$BASH_VERSION$ZSH_VERSION" \
-    && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then
-  as_echo='print -r --'
-  as_echo_n='print -rn --'
-elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then
-  as_echo='printf %s\n'
-  as_echo_n='printf %s'
-else
-  if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then
-    as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"'
-    as_echo_n='/usr/ucb/echo -n'
-  else
-    as_echo_body='eval expr "X$1" : "X\\(.*\\)"'
-    as_echo_n_body='eval
-      arg=$1;
-      case $arg in #(
-      *"$as_nl"*)
-	expr "X$arg" : "X\\(.*\\)$as_nl";
-	arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;;
-      esac;
-      expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl"
-    '
-    export as_echo_n_body
-    as_echo_n='sh -c $as_echo_n_body as_echo'
-  fi
-  export as_echo_body
-  as_echo='sh -c $as_echo_body as_echo'
-fi
-
-# The user is always right.
-if test "${PATH_SEPARATOR+set}" != set; then
-  PATH_SEPARATOR=:
-  (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && {
-    (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 ||
-      PATH_SEPARATOR=';'
-  }
-fi
-
-
-# IFS
-# We need space, tab and new line, in precisely that order.  Quoting is
-# there to prevent editors from complaining about space-tab.
-# (If _AS_PATH_WALK were called with IFS unset, it would disable word
-# splitting by setting IFS to empty value.)
-IFS=" ""	$as_nl"
-
-# Find who we are.  Look in the path if we contain no directory separator.
-as_myself=
-case $0 in #((
-  *[\\/]* ) as_myself=$0 ;;
-  *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-    test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break
-  done
-IFS=$as_save_IFS
-
-     ;;
-esac
-# We did not find ourselves, most probably we were run as `sh COMMAND'
-# in which case we are not to be found in the path.
-if test "x$as_myself" = x; then
-  as_myself=$0
-fi
-if test ! -f "$as_myself"; then
-  $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2
-  exit 1
-fi
-
-# Unset variables that we do not need and which cause bugs (e.g. in
-# pre-3.0 UWIN ksh).  But do not cause bugs in bash 2.01; the "|| exit 1"
-# suppresses any "Segmentation fault" message there.  '((' could
-# trigger a bug in pdksh 5.2.14.
-for as_var in BASH_ENV ENV MAIL MAILPATH
-do eval test x\${$as_var+set} = xset \
-  && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || :
-done
-PS1='$ '
-PS2='> '
-PS4='+ '
-
-# NLS nuisances.
-LC_ALL=C
-export LC_ALL
-LANGUAGE=C
-export LANGUAGE
-
-# CDPATH.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-
-# as_fn_error STATUS ERROR [LINENO LOG_FD]
-# ----------------------------------------
-# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are
-# provided, also output the error to LOG_FD, referencing LINENO. Then exit the
-# script with STATUS, using 1 if that was 0.
-as_fn_error ()
-{
-  as_status=$1; test $as_status -eq 0 && as_status=1
-  if test "$4"; then
-    as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack
-    $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4
-  fi
-  $as_echo "$as_me: error: $2" >&2
-  as_fn_exit $as_status
-} # as_fn_error
-
-
-# as_fn_set_status STATUS
-# -----------------------
-# Set $? to STATUS, without forking.
-as_fn_set_status ()
-{
-  return $1
-} # as_fn_set_status
-
-# as_fn_exit STATUS
-# -----------------
-# Exit the shell with STATUS, even in a "trap 0" or "set -e" context.
-as_fn_exit ()
-{
-  set +e
-  as_fn_set_status $1
-  exit $1
-} # as_fn_exit
-
-# as_fn_unset VAR
-# ---------------
-# Portably unset VAR.
-as_fn_unset ()
-{
-  { eval $1=; unset $1;}
-}
-as_unset=as_fn_unset
-# as_fn_append VAR VALUE
-# ----------------------
-# Append the text in VALUE to the end of the definition contained in VAR. Take
-# advantage of any shell optimizations that allow amortized linear growth over
-# repeated appends, instead of the typical quadratic growth present in naive
-# implementations.
-if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then :
-  eval 'as_fn_append ()
-  {
-    eval $1+=\$2
-  }'
-else
-  as_fn_append ()
-  {
-    eval $1=\$$1\$2
-  }
-fi # as_fn_append
-
-# as_fn_arith ARG...
-# ------------------
-# Perform arithmetic evaluation on the ARGs, and store the result in the
-# global $as_val. Take advantage of shells that can avoid forks. The arguments
-# must be portable across $(()) and expr.
-if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then :
-  eval 'as_fn_arith ()
-  {
-    as_val=$(( $* ))
-  }'
-else
-  as_fn_arith ()
-  {
-    as_val=`expr "$@" || test $? -eq 1`
-  }
-fi # as_fn_arith
-
-
-if expr a : '\(a\)' >/dev/null 2>&1 &&
-   test "X`expr 00001 : '.*\(...\)'`" = X001; then
-  as_expr=expr
-else
-  as_expr=false
-fi
-
-if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then
-  as_basename=basename
-else
-  as_basename=false
-fi
-
-if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then
-  as_dirname=dirname
-else
-  as_dirname=false
-fi
-
-as_me=`$as_basename -- "$0" ||
-$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \
-	 X"$0" : 'X\(//\)$' \| \
-	 X"$0" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X/"$0" |
-    sed '/^.*\/\([^/][^/]*\)\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\/\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\/\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-
-# Avoid depending upon Character Ranges.
-as_cr_letters='abcdefghijklmnopqrstuvwxyz'
-as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ'
-as_cr_Letters=$as_cr_letters$as_cr_LETTERS
-as_cr_digits='0123456789'
-as_cr_alnum=$as_cr_Letters$as_cr_digits
-
-ECHO_C= ECHO_N= ECHO_T=
-case `echo -n x` in #(((((
--n*)
-  case `echo 'xy\c'` in
-  *c*) ECHO_T='	';;	# ECHO_T is single tab character.
-  xy)  ECHO_C='\c';;
-  *)   echo `echo ksh88 bug on AIX 6.1` > /dev/null
-       ECHO_T='	';;
-  esac;;
-*)
-  ECHO_N='-n';;
-esac
-
-rm -f conf$$ conf$$.exe conf$$.file
-if test -d conf$$.dir; then
-  rm -f conf$$.dir/conf$$.file
-else
-  rm -f conf$$.dir
-  mkdir conf$$.dir 2>/dev/null
-fi
-if (echo >conf$$.file) 2>/dev/null; then
-  if ln -s conf$$.file conf$$ 2>/dev/null; then
-    as_ln_s='ln -s'
-    # ... but there are two gotchas:
-    # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail.
-    # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable.
-    # In both cases, we have to default to `cp -p'.
-    ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe ||
-      as_ln_s='cp -p'
-  elif ln conf$$.file conf$$ 2>/dev/null; then
-    as_ln_s=ln
-  else
-    as_ln_s='cp -p'
-  fi
-else
-  as_ln_s='cp -p'
-fi
-rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file
-rmdir conf$$.dir 2>/dev/null
-
-
-# as_fn_mkdir_p
-# -------------
-# Create "$as_dir" as a directory, including parents if necessary.
-as_fn_mkdir_p ()
-{
-
-  case $as_dir in #(
-  -*) as_dir=./$as_dir;;
-  esac
-  test -d "$as_dir" || eval $as_mkdir_p || {
-    as_dirs=
-    while :; do
-      case $as_dir in #(
-      *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'(
-      *) as_qdir=$as_dir;;
-      esac
-      as_dirs="'$as_qdir' $as_dirs"
-      as_dir=`$as_dirname -- "$as_dir" ||
-$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$as_dir" : 'X\(//\)[^/]' \| \
-	 X"$as_dir" : 'X\(//\)$' \| \
-	 X"$as_dir" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$as_dir" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-      test -d "$as_dir" && break
-    done
-    test -z "$as_dirs" || eval "mkdir $as_dirs"
-  } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir"
-
-
-} # as_fn_mkdir_p
-if mkdir -p . 2>/dev/null; then
-  as_mkdir_p='mkdir -p "$as_dir"'
-else
-  test -d ./-p && rmdir ./-p
-  as_mkdir_p=false
-fi
-
-if test -x / >/dev/null 2>&1; then
-  as_test_x='test -x'
-else
-  if ls -dL / >/dev/null 2>&1; then
-    as_ls_L_option=L
-  else
-    as_ls_L_option=
-  fi
-  as_test_x='
-    eval sh -c '\''
-      if test -d "$1"; then
-	test -d "$1/.";
-      else
-	case $1 in #(
-	-*)set "./$1";;
-	esac;
-	case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #((
-	???[sx]*):;;*)false;;esac;fi
-    '\'' sh
-  '
-fi
-as_executable_p=$as_test_x
-
-# Sed expression to map a string onto a valid CPP name.
-as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'"
-
-# Sed expression to map a string onto a valid variable name.
-as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'"
-
-
-exec 6>&1
-## ----------------------------------- ##
-## Main body of $CONFIG_STATUS script. ##
-## ----------------------------------- ##
-_ASEOF
-test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-# Save the log message, to keep $0 and so on meaningful, and to
-# report actual input values of CONFIG_FILES etc. instead of their
-# values after options handling.
-ac_log="
-This file was extended by UDUNITS $as_me 2.2.17, which was
-generated by GNU Autoconf 2.68.  Invocation command line was
-
-  CONFIG_FILES    = $CONFIG_FILES
-  CONFIG_HEADERS  = $CONFIG_HEADERS
-  CONFIG_LINKS    = $CONFIG_LINKS
-  CONFIG_COMMANDS = $CONFIG_COMMANDS
-  $ $0 $@
-
-on `(hostname || uname -n) 2>/dev/null | sed 1q`
-"
-
-_ACEOF
-
-case $ac_config_files in *"
-"*) set x $ac_config_files; shift; ac_config_files=$*;;
-esac
-
-case $ac_config_headers in *"
-"*) set x $ac_config_headers; shift; ac_config_headers=$*;;
-esac
-
-
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-# Files that config.status was made for.
-config_files="$ac_config_files"
-config_headers="$ac_config_headers"
-config_commands="$ac_config_commands"
-
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-ac_cs_usage="\
-\`$as_me' instantiates files and other configuration actions
-from templates according to the current configuration.  Unless the files
-and actions are specified as TAGs, all are instantiated by default.
-
-Usage: $0 [OPTION]... [TAG]...
-
-  -h, --help       print this help, then exit
-  -V, --version    print version number and configuration settings, then exit
-      --config     print configuration, then exit
-  -q, --quiet, --silent
-                   do not print progress messages
-  -d, --debug      don't remove temporary files
-      --recheck    update $as_me by reconfiguring in the same conditions
-      --file=FILE[:TEMPLATE]
-                   instantiate the configuration file FILE
-      --header=FILE[:TEMPLATE]
-                   instantiate the configuration header FILE
-
-Configuration files:
-$config_files
-
-Configuration headers:
-$config_headers
-
-Configuration commands:
-$config_commands
-
-Report bugs to <support-udunits@unidata.ucar.edu>."
-
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
-ac_cs_version="\\
-UDUNITS config.status 2.2.17
-configured by $0, generated by GNU Autoconf 2.68,
-  with options \\"\$ac_cs_config\\"
-
-Copyright (C) 2010 Free Software Foundation, Inc.
-This config.status script is free software; the Free Software Foundation
-gives unlimited permission to copy, distribute and modify it."
-
-ac_pwd='$ac_pwd'
-srcdir='$srcdir'
-INSTALL='$INSTALL'
-MKDIR_P='$MKDIR_P'
-AWK='$AWK'
-test -n "\$AWK" || AWK=awk
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-# The default lists apply if the user does not specify any file.
-ac_need_defaults=:
-while test $# != 0
-do
-  case $1 in
-  --*=?*)
-    ac_option=`expr "X$1" : 'X\([^=]*\)='`
-    ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'`
-    ac_shift=:
-    ;;
-  --*=)
-    ac_option=`expr "X$1" : 'X\([^=]*\)='`
-    ac_optarg=
-    ac_shift=:
-    ;;
-  *)
-    ac_option=$1
-    ac_optarg=$2
-    ac_shift=shift
-    ;;
-  esac
-
-  case $ac_option in
-  # Handling of the options.
-  -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r)
-    ac_cs_recheck=: ;;
-  --version | --versio | --versi | --vers | --ver | --ve | --v | -V )
-    $as_echo "$ac_cs_version"; exit ;;
-  --config | --confi | --conf | --con | --co | --c )
-    $as_echo "$ac_cs_config"; exit ;;
-  --debug | --debu | --deb | --de | --d | -d )
-    debug=: ;;
-  --file | --fil | --fi | --f )
-    $ac_shift
-    case $ac_optarg in
-    *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
-    '') as_fn_error $? "missing file argument" ;;
-    esac
-    as_fn_append CONFIG_FILES " '$ac_optarg'"
-    ac_need_defaults=false;;
-  --header | --heade | --head | --hea )
-    $ac_shift
-    case $ac_optarg in
-    *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;;
-    esac
-    as_fn_append CONFIG_HEADERS " '$ac_optarg'"
-    ac_need_defaults=false;;
-  --he | --h)
-    # Conflict between --help and --header
-    as_fn_error $? "ambiguous option: \`$1'
-Try \`$0 --help' for more information.";;
-  --help | --hel | -h )
-    $as_echo "$ac_cs_usage"; exit ;;
-  -q | -quiet | --quiet | --quie | --qui | --qu | --q \
-  | -silent | --silent | --silen | --sile | --sil | --si | --s)
-    ac_cs_silent=: ;;
-
-  # This is an error.
-  -*) as_fn_error $? "unrecognized option: \`$1'
-Try \`$0 --help' for more information." ;;
-
-  *) as_fn_append ac_config_targets " $1"
-     ac_need_defaults=false ;;
-
-  esac
-  shift
-done
-
-ac_configure_extra_args=
-
-if $ac_cs_silent; then
-  exec 6>/dev/null
-  ac_configure_extra_args="$ac_configure_extra_args --silent"
-fi
-
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-if \$ac_cs_recheck; then
-  set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion
-  shift
-  \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6
-  CONFIG_SHELL='$SHELL'
-  export CONFIG_SHELL
-  exec "\$@"
-fi
-
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-exec 5>>config.log
-{
-  echo
-  sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX
-## Running $as_me. ##
-_ASBOX
-  $as_echo "$ac_log"
-} >&5
-
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-#
-# INIT-COMMANDS
-#
-AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"
-
-
-# The HP-UX ksh and POSIX shell print the target directory to stdout
-# if CDPATH is set.
-(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
-
-sed_quote_subst='$sed_quote_subst'
-double_quote_subst='$double_quote_subst'
-delay_variable_subst='$delay_variable_subst'
-macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`'
-macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`'
-enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`'
-enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`'
-pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`'
-enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`'
-SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`'
-ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`'
-PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`'
-host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`'
-host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`'
-host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`'
-build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`'
-build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`'
-build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`'
-SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`'
-Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`'
-GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`'
-EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`'
-FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`'
-LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`'
-NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`'
-LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`'
-max_cmd_len='`$ECHO "$max_cmd_len" | $SED "$delay_single_quote_subst"`'
-ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`'
-exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`'
-lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`'
-lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`'
-lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`'
-lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`'
-lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`'
-reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`'
-reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`'
-OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`'
-deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`'
-file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`'
-file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`'
-want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`'
-DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`'
-sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`'
-AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`'
-AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`'
-archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`'
-STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`'
-RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`'
-old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`'
-old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`'
-lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`'
-CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`'
-CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`'
-compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`'
-GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`'
-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`'
-nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`'
-lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`'
-objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`'
-MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`'
-lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`'
-need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`'
-MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`'
-DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`'
-NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`'
-LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`'
-OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`'
-OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`'
-libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`'
-shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`'
-extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED "$delay_single_quote_subst"`'
-archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`'
-enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`'
-export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`'
-whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`'
-compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`'
-old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`'
-old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`'
-archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`'
-archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`'
-module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`'
-module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`'
-with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`'
-allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`'
-no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`'
-hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`'
-hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`'
-hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`'
-hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`'
-hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`'
-hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`'
-hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`'
-inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`'
-link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`'
-always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`'
-export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`'
-exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`'
-include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`'
-prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`'
-postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`'
-file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`'
-variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`'
-need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`'
-need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`'
-version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`'
-runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`'
-shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`'
-shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`'
-libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`'
-library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`'
-soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`'
-install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`'
-postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`'
-postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED "$delay_single_quote_subst"`'
-finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`'
-finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`'
-hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`'
-sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`'
-sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`'
-hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`'
-enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`'
-enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`'
-enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`'
-old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`'
-striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`'
-compiler_lib_search_dirs='`$ECHO "$compiler_lib_search_dirs" | $SED "$delay_single_quote_subst"`'
-predep_objects='`$ECHO "$predep_objects" | $SED "$delay_single_quote_subst"`'
-postdep_objects='`$ECHO "$postdep_objects" | $SED "$delay_single_quote_subst"`'
-predeps='`$ECHO "$predeps" | $SED "$delay_single_quote_subst"`'
-postdeps='`$ECHO "$postdeps" | $SED "$delay_single_quote_subst"`'
-compiler_lib_search_path='`$ECHO "$compiler_lib_search_path" | $SED "$delay_single_quote_subst"`'
-LD_FC='`$ECHO "$LD_FC" | $SED "$delay_single_quote_subst"`'
-reload_flag_FC='`$ECHO "$reload_flag_FC" | $SED "$delay_single_quote_subst"`'
-reload_cmds_FC='`$ECHO "$reload_cmds_FC" | $SED "$delay_single_quote_subst"`'
-old_archive_cmds_FC='`$ECHO "$old_archive_cmds_FC" | $SED "$delay_single_quote_subst"`'
-compiler_FC='`$ECHO "$compiler_FC" | $SED "$delay_single_quote_subst"`'
-GCC_FC='`$ECHO "$GCC_FC" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_no_builtin_flag_FC='`$ECHO "$lt_prog_compiler_no_builtin_flag_FC" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_pic_FC='`$ECHO "$lt_prog_compiler_pic_FC" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_wl_FC='`$ECHO "$lt_prog_compiler_wl_FC" | $SED "$delay_single_quote_subst"`'
-lt_prog_compiler_static_FC='`$ECHO "$lt_prog_compiler_static_FC" | $SED "$delay_single_quote_subst"`'
-lt_cv_prog_compiler_c_o_FC='`$ECHO "$lt_cv_prog_compiler_c_o_FC" | $SED "$delay_single_quote_subst"`'
-archive_cmds_need_lc_FC='`$ECHO "$archive_cmds_need_lc_FC" | $SED "$delay_single_quote_subst"`'
-enable_shared_with_static_runtimes_FC='`$ECHO "$enable_shared_with_static_runtimes_FC" | $SED "$delay_single_quote_subst"`'
-export_dynamic_flag_spec_FC='`$ECHO "$export_dynamic_flag_spec_FC" | $SED "$delay_single_quote_subst"`'
-whole_archive_flag_spec_FC='`$ECHO "$whole_archive_flag_spec_FC" | $SED "$delay_single_quote_subst"`'
-compiler_needs_object_FC='`$ECHO "$compiler_needs_object_FC" | $SED "$delay_single_quote_subst"`'
-old_archive_from_new_cmds_FC='`$ECHO "$old_archive_from_new_cmds_FC" | $SED "$delay_single_quote_subst"`'
-old_archive_from_expsyms_cmds_FC='`$ECHO "$old_archive_from_expsyms_cmds_FC" | $SED "$delay_single_quote_subst"`'
-archive_cmds_FC='`$ECHO "$archive_cmds_FC" | $SED "$delay_single_quote_subst"`'
-archive_expsym_cmds_FC='`$ECHO "$archive_expsym_cmds_FC" | $SED "$delay_single_quote_subst"`'
-module_cmds_FC='`$ECHO "$module_cmds_FC" | $SED "$delay_single_quote_subst"`'
-module_expsym_cmds_FC='`$ECHO "$module_expsym_cmds_FC" | $SED "$delay_single_quote_subst"`'
-with_gnu_ld_FC='`$ECHO "$with_gnu_ld_FC" | $SED "$delay_single_quote_subst"`'
-allow_undefined_flag_FC='`$ECHO "$allow_undefined_flag_FC" | $SED "$delay_single_quote_subst"`'
-no_undefined_flag_FC='`$ECHO "$no_undefined_flag_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_libdir_flag_spec_FC='`$ECHO "$hardcode_libdir_flag_spec_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_libdir_separator_FC='`$ECHO "$hardcode_libdir_separator_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_direct_FC='`$ECHO "$hardcode_direct_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_direct_absolute_FC='`$ECHO "$hardcode_direct_absolute_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_minus_L_FC='`$ECHO "$hardcode_minus_L_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_shlibpath_var_FC='`$ECHO "$hardcode_shlibpath_var_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_automatic_FC='`$ECHO "$hardcode_automatic_FC" | $SED "$delay_single_quote_subst"`'
-inherit_rpath_FC='`$ECHO "$inherit_rpath_FC" | $SED "$delay_single_quote_subst"`'
-link_all_deplibs_FC='`$ECHO "$link_all_deplibs_FC" | $SED "$delay_single_quote_subst"`'
-always_export_symbols_FC='`$ECHO "$always_export_symbols_FC" | $SED "$delay_single_quote_subst"`'
-export_symbols_cmds_FC='`$ECHO "$export_symbols_cmds_FC" | $SED "$delay_single_quote_subst"`'
-exclude_expsyms_FC='`$ECHO "$exclude_expsyms_FC" | $SED "$delay_single_quote_subst"`'
-include_expsyms_FC='`$ECHO "$include_expsyms_FC" | $SED "$delay_single_quote_subst"`'
-prelink_cmds_FC='`$ECHO "$prelink_cmds_FC" | $SED "$delay_single_quote_subst"`'
-postlink_cmds_FC='`$ECHO "$postlink_cmds_FC" | $SED "$delay_single_quote_subst"`'
-file_list_spec_FC='`$ECHO "$file_list_spec_FC" | $SED "$delay_single_quote_subst"`'
-hardcode_action_FC='`$ECHO "$hardcode_action_FC" | $SED "$delay_single_quote_subst"`'
-compiler_lib_search_dirs_FC='`$ECHO "$compiler_lib_search_dirs_FC" | $SED "$delay_single_quote_subst"`'
-predep_objects_FC='`$ECHO "$predep_objects_FC" | $SED "$delay_single_quote_subst"`'
-postdep_objects_FC='`$ECHO "$postdep_objects_FC" | $SED "$delay_single_quote_subst"`'
-predeps_FC='`$ECHO "$predeps_FC" | $SED "$delay_single_quote_subst"`'
-postdeps_FC='`$ECHO "$postdeps_FC" | $SED "$delay_single_quote_subst"`'
-compiler_lib_search_path_FC='`$ECHO "$compiler_lib_search_path_FC" | $SED "$delay_single_quote_subst"`'
-
-LTCC='$LTCC'
-LTCFLAGS='$LTCFLAGS'
-compiler='$compiler_DEFAULT'
-
-# A function that is used when there is no print builtin or printf.
-func_fallback_echo ()
-{
-  eval 'cat <<_LTECHO_EOF
-\$1
-_LTECHO_EOF'
-}
-
-# Quote evaled strings.
-for var in SHELL \
-ECHO \
-PATH_SEPARATOR \
-SED \
-GREP \
-EGREP \
-FGREP \
-LD \
-NM \
-LN_S \
-lt_SP2NL \
-lt_NL2SP \
-reload_flag \
-OBJDUMP \
-deplibs_check_method \
-file_magic_cmd \
-file_magic_glob \
-want_nocaseglob \
-DLLTOOL \
-sharedlib_from_linklib_cmd \
-AR \
-AR_FLAGS \
-archiver_list_spec \
-STRIP \
-RANLIB \
-CC \
-CFLAGS \
-compiler \
-lt_cv_sys_global_symbol_pipe \
-lt_cv_sys_global_symbol_to_cdecl \
-lt_cv_sys_global_symbol_to_c_name_address \
-lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \
-nm_file_list_spec \
-lt_prog_compiler_no_builtin_flag \
-lt_prog_compiler_pic \
-lt_prog_compiler_wl \
-lt_prog_compiler_static \
-lt_cv_prog_compiler_c_o \
-need_locks \
-MANIFEST_TOOL \
-DSYMUTIL \
-NMEDIT \
-LIPO \
-OTOOL \
-OTOOL64 \
-shrext_cmds \
-export_dynamic_flag_spec \
-whole_archive_flag_spec \
-compiler_needs_object \
-with_gnu_ld \
-allow_undefined_flag \
-no_undefined_flag \
-hardcode_libdir_flag_spec \
-hardcode_libdir_separator \
-exclude_expsyms \
-include_expsyms \
-file_list_spec \
-variables_saved_for_relink \
-libname_spec \
-library_names_spec \
-soname_spec \
-install_override_mode \
-finish_eval \
-old_striplib \
-striplib \
-compiler_lib_search_dirs \
-predep_objects \
-postdep_objects \
-predeps \
-postdeps \
-compiler_lib_search_path \
-LD_FC \
-reload_flag_FC \
-compiler_FC \
-lt_prog_compiler_no_builtin_flag_FC \
-lt_prog_compiler_pic_FC \
-lt_prog_compiler_wl_FC \
-lt_prog_compiler_static_FC \
-lt_cv_prog_compiler_c_o_FC \
-export_dynamic_flag_spec_FC \
-whole_archive_flag_spec_FC \
-compiler_needs_object_FC \
-with_gnu_ld_FC \
-allow_undefined_flag_FC \
-no_undefined_flag_FC \
-hardcode_libdir_flag_spec_FC \
-hardcode_libdir_separator_FC \
-exclude_expsyms_FC \
-include_expsyms_FC \
-file_list_spec_FC \
-compiler_lib_search_dirs_FC \
-predep_objects_FC \
-postdep_objects_FC \
-predeps_FC \
-postdeps_FC \
-compiler_lib_search_path_FC; do
-    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
-    *[\\\\\\\`\\"\\\$]*)
-      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\""
-      ;;
-    *)
-      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
-      ;;
-    esac
-done
-
-# Double-quote double-evaled strings.
-for var in reload_cmds \
-old_postinstall_cmds \
-old_postuninstall_cmds \
-old_archive_cmds \
-extract_expsyms_cmds \
-old_archive_from_new_cmds \
-old_archive_from_expsyms_cmds \
-archive_cmds \
-archive_expsym_cmds \
-module_cmds \
-module_expsym_cmds \
-export_symbols_cmds \
-prelink_cmds \
-postlink_cmds \
-postinstall_cmds \
-postuninstall_cmds \
-finish_cmds \
-sys_lib_search_path_spec \
-sys_lib_dlsearch_path_spec \
-reload_cmds_FC \
-old_archive_cmds_FC \
-old_archive_from_new_cmds_FC \
-old_archive_from_expsyms_cmds_FC \
-archive_cmds_FC \
-archive_expsym_cmds_FC \
-module_cmds_FC \
-module_expsym_cmds_FC \
-export_symbols_cmds_FC \
-prelink_cmds_FC \
-postlink_cmds_FC; do
-    case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in
-    *[\\\\\\\`\\"\\\$]*)
-      eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\""
-      ;;
-    *)
-      eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\""
-      ;;
-    esac
-done
-
-ac_aux_dir='$ac_aux_dir'
-xsi_shell='$xsi_shell'
-lt_shell_append='$lt_shell_append'
-
-# See if we are running on zsh, and set the options which allow our
-# commands through without removal of \ escapes INIT.
-if test -n "\${ZSH_VERSION+set}" ; then
-   setopt NO_GLOB_SUBST
-fi
-
-
-    PACKAGE='$PACKAGE'
-    VERSION='$VERSION'
-    TIMESTAMP='$TIMESTAMP'
-    RM='$RM'
-    ofile='$ofile'
-
-
-
-
-
-
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-
-# Handling of arguments.
-for ac_config_target in $ac_config_targets
-do
-  case $ac_config_target in
-    "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;;
-    "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;;
-    "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;;
-    "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;;
-    "lib/Makefile") CONFIG_FILES="$CONFIG_FILES lib/Makefile" ;;
-    "lib/xmlFailures/Makefile") CONFIG_FILES="$CONFIG_FILES lib/xmlFailures/Makefile" ;;
-    "lib/xmlSuccesses/Makefile") CONFIG_FILES="$CONFIG_FILES lib/xmlSuccesses/Makefile" ;;
-    "prog/Makefile") CONFIG_FILES="$CONFIG_FILES prog/Makefile" ;;
-
-  *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;;
-  esac
-done
-
-
-# If the user did not use the arguments to specify the items to instantiate,
-# then the envvar interface is used.  Set only those that are not.
-# We use the long form for the default assignment because of an extremely
-# bizarre bug on SunOS 4.1.3.
-if $ac_need_defaults; then
-  test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files
-  test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers
-  test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands
-fi
-
-# Have a temporary directory for convenience.  Make it in the build tree
-# simply because there is no reason against having it here, and in addition,
-# creating and moving files from /tmp can sometimes cause problems.
-# Hook for its removal unless debugging.
-# Note that there is a small window in which the directory will not be cleaned:
-# after its creation but before its name has been assigned to `$tmp'.
-$debug ||
-{
-  tmp= ac_tmp=
-  trap 'exit_status=$?
-  : "${ac_tmp:=$tmp}"
-  { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status
-' 0
-  trap 'as_fn_exit 1' 1 2 13 15
-}
-# Create a (secure) tmp directory for tmp files.
-
-{
-  tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` &&
-  test -d "$tmp"
-}  ||
-{
-  tmp=./conf$$-$RANDOM
-  (umask 077 && mkdir "$tmp")
-} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5
-ac_tmp=$tmp
-
-# Set up the scripts for CONFIG_FILES section.
-# No need to generate them if there are no CONFIG_FILES.
-# This happens for instance with `./config.status config.h'.
-if test -n "$CONFIG_FILES"; then
-
-
-ac_cr=`echo X | tr X '\015'`
-# On cygwin, bash can eat \r inside `` if the user requested igncr.
-# But we know of no other shell where ac_cr would be empty at this
-# point, so we can use a bashism as a fallback.
-if test "x$ac_cr" = x; then
-  eval ac_cr=\$\'\\r\'
-fi
-ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' </dev/null 2>/dev/null`
-if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then
-  ac_cs_awk_cr='\\r'
-else
-  ac_cs_awk_cr=$ac_cr
-fi
-
-echo 'BEGIN {' >"$ac_tmp/subs1.awk" &&
-_ACEOF
-
-
-{
-  echo "cat >conf$$subs.awk <<_ACEOF" &&
-  echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' &&
-  echo "_ACEOF"
-} >conf$$subs.sh ||
-  as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
-ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'`
-ac_delim='%!_!# '
-for ac_last_try in false false false false false :; do
-  . ./conf$$subs.sh ||
-    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
-
-  ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X`
-  if test $ac_delim_n = $ac_delim_num; then
-    break
-  elif $ac_last_try; then
-    as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5
-  else
-    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
-  fi
-done
-rm -f conf$$subs.sh
-
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK &&
-_ACEOF
-sed -n '
-h
-s/^/S["/; s/!.*/"]=/
-p
-g
-s/^[^!]*!//
-:repl
-t repl
-s/'"$ac_delim"'$//
-t delim
-:nl
-h
-s/\(.\{148\}\)..*/\1/
-t more1
-s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/
-p
-n
-b repl
-:more1
-s/["\\]/\\&/g; s/^/"/; s/$/"\\/
-p
-g
-s/.\{148\}//
-t nl
-:delim
-h
-s/\(.\{148\}\)..*/\1/
-t more2
-s/["\\]/\\&/g; s/^/"/; s/$/"/
-p
-b
-:more2
-s/["\\]/\\&/g; s/^/"/; s/$/"\\/
-p
-g
-s/.\{148\}//
-t delim
-' <conf$$subs.awk | sed '
-/^[^""]/{
-  N
-  s/\n//
-}
-' >>$CONFIG_STATUS || ac_write_fail=1
-rm -f conf$$subs.awk
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-_ACAWK
-cat >>"\$ac_tmp/subs1.awk" <<_ACAWK &&
-  for (key in S) S_is_set[key] = 1
-  FS = ""
-
-}
-{
-  line = $ 0
-  nfields = split(line, field, "@")
-  substed = 0
-  len = length(field[1])
-  for (i = 2; i < nfields; i++) {
-    key = field[i]
-    keylen = length(key)
-    if (S_is_set[key]) {
-      value = S[key]
-      line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3)
-      len += length(value) + length(field[++i])
-      substed = 1
-    } else
-      len += 1 + keylen
-  }
-
-  print line
-}
-
-_ACAWK
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then
-  sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g"
-else
-  cat
-fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \
-  || as_fn_error $? "could not setup config files machinery" "$LINENO" 5
-_ACEOF
-
-# VPATH may cause trouble with some makes, so we remove sole $(srcdir),
-# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and
-# trailing colons and then remove the whole line if VPATH becomes empty
-# (actually we leave an empty line to preserve line numbers).
-if test "x$srcdir" = x.; then
-  ac_vpsub='/^[	 ]*VPATH[	 ]*=[	 ]*/{
-h
-s///
-s/^/:/
-s/[	 ]*$/:/
-s/:\$(srcdir):/:/g
-s/:\${srcdir}:/:/g
-s/:@srcdir@:/:/g
-s/^:*//
-s/:*$//
-x
-s/\(=[	 ]*\).*/\1/
-G
-s/\n//
-s/^[^=]*=[	 ]*$//
-}'
-fi
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-fi # test -n "$CONFIG_FILES"
-
-# Set up the scripts for CONFIG_HEADERS section.
-# No need to generate them if there are no CONFIG_HEADERS.
-# This happens for instance with `./config.status Makefile'.
-if test -n "$CONFIG_HEADERS"; then
-cat >"$ac_tmp/defines.awk" <<\_ACAWK ||
-BEGIN {
-_ACEOF
-
-# Transform confdefs.h into an awk script `defines.awk', embedded as
-# here-document in config.status, that substitutes the proper values into
-# config.h.in to produce config.h.
-
-# Create a delimiter string that does not exist in confdefs.h, to ease
-# handling of long lines.
-ac_delim='%!_!# '
-for ac_last_try in false false :; do
-  ac_tt=`sed -n "/$ac_delim/p" confdefs.h`
-  if test -z "$ac_tt"; then
-    break
-  elif $ac_last_try; then
-    as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5
-  else
-    ac_delim="$ac_delim!$ac_delim _$ac_delim!! "
-  fi
-done
-
-# For the awk script, D is an array of macro values keyed by name,
-# likewise P contains macro parameters if any.  Preserve backslash
-# newline sequences.
-
-ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]*
-sed -n '
-s/.\{148\}/&'"$ac_delim"'/g
-t rset
-:rset
-s/^[	 ]*#[	 ]*define[	 ][	 ]*/ /
-t def
-d
-:def
-s/\\$//
-t bsnl
-s/["\\]/\\&/g
-s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
-D["\1"]=" \3"/p
-s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2"/p
-d
-:bsnl
-s/["\\]/\\&/g
-s/^ \('"$ac_word_re"'\)\(([^()]*)\)[	 ]*\(.*\)/P["\1"]="\2"\
-D["\1"]=" \3\\\\\\n"\\/p
-t cont
-s/^ \('"$ac_word_re"'\)[	 ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p
-t cont
-d
-:cont
-n
-s/.\{148\}/&'"$ac_delim"'/g
-t clear
-:clear
-s/\\$//
-t bsnlc
-s/["\\]/\\&/g; s/^/"/; s/$/"/p
-d
-:bsnlc
-s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p
-b cont
-' <confdefs.h | sed '
-s/'"$ac_delim"'/"\\\
-"/g' >>$CONFIG_STATUS || ac_write_fail=1
-
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-  for (key in D) D_is_set[key] = 1
-  FS = ""
-}
-/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ {
-  line = \$ 0
-  split(line, arg, " ")
-  if (arg[1] == "#") {
-    defundef = arg[2]
-    mac1 = arg[3]
-  } else {
-    defundef = substr(arg[1], 2)
-    mac1 = arg[2]
-  }
-  split(mac1, mac2, "(") #)
-  macro = mac2[1]
-  prefix = substr(line, 1, index(line, defundef) - 1)
-  if (D_is_set[macro]) {
-    # Preserve the white space surrounding the "#".
-    print prefix "define", macro P[macro] D[macro]
-    next
-  } else {
-    # Replace #undef with comments.  This is necessary, for example,
-    # in the case of _POSIX_SOURCE, which is predefined and required
-    # on some systems where configure will not decide to define it.
-    if (defundef == "undef") {
-      print "/*", prefix defundef, macro, "*/"
-      next
-    }
-  }
-}
-{ print }
-_ACAWK
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-  as_fn_error $? "could not setup config headers machinery" "$LINENO" 5
-fi # test -n "$CONFIG_HEADERS"
-
-
-eval set X "  :F $CONFIG_FILES  :H $CONFIG_HEADERS    :C $CONFIG_COMMANDS"
-shift
-for ac_tag
-do
-  case $ac_tag in
-  :[FHLC]) ac_mode=$ac_tag; continue;;
-  esac
-  case $ac_mode$ac_tag in
-  :[FHL]*:*);;
-  :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5;;
-  :[FH]-) ac_tag=-:-;;
-  :[FH]*) ac_tag=$ac_tag:$ac_tag.in;;
-  esac
-  ac_save_IFS=$IFS
-  IFS=:
-  set x $ac_tag
-  IFS=$ac_save_IFS
-  shift
-  ac_file=$1
-  shift
-
-  case $ac_mode in
-  :L) ac_source=$1;;
-  :[FH])
-    ac_file_inputs=
-    for ac_f
-    do
-      case $ac_f in
-      -) ac_f="$ac_tmp/stdin";;
-      *) # Look for the file first in the build tree, then in the source tree
-	 # (if the path is not absolute).  The absolute path cannot be DOS-style,
-	 # because $ac_f cannot contain `:'.
-	 test -f "$ac_f" ||
-	   case $ac_f in
-	   [\\/$]*) false;;
-	   *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";;
-	   esac ||
-	   as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;;
-      esac
-      case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac
-      as_fn_append ac_file_inputs " '$ac_f'"
-    done
-
-    # Let's still pretend it is `configure' which instantiates (i.e., don't
-    # use $as_me), people would be surprised to read:
-    #    /* config.h.  Generated by config.status.  */
-    configure_input='Generated from '`
-	  $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g'
-	`' by configure.'
-    if test x"$ac_file" != x-; then
-      configure_input="$ac_file.  $configure_input"
-      { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5
-$as_echo "$as_me: creating $ac_file" >&6;}
-    fi
-    # Neutralize special characters interpreted by sed in replacement strings.
-    case $configure_input in #(
-    *\&* | *\|* | *\\* )
-       ac_sed_conf_input=`$as_echo "$configure_input" |
-       sed 's/[\\\\&|]/\\\\&/g'`;; #(
-    *) ac_sed_conf_input=$configure_input;;
-    esac
-
-    case $ac_tag in
-    *:-:* | *:-) cat >"$ac_tmp/stdin" \
-      || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;;
-    esac
-    ;;
-  esac
-
-  ac_dir=`$as_dirname -- "$ac_file" ||
-$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$ac_file" : 'X\(//\)[^/]' \| \
-	 X"$ac_file" : 'X\(//\)$' \| \
-	 X"$ac_file" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$ac_file" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-  as_dir="$ac_dir"; as_fn_mkdir_p
-  ac_builddir=.
-
-case "$ac_dir" in
-.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;;
-*)
-  ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'`
-  # A ".." for each directory in $ac_dir_suffix.
-  ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'`
-  case $ac_top_builddir_sub in
-  "") ac_top_builddir_sub=. ac_top_build_prefix= ;;
-  *)  ac_top_build_prefix=$ac_top_builddir_sub/ ;;
-  esac ;;
-esac
-ac_abs_top_builddir=$ac_pwd
-ac_abs_builddir=$ac_pwd$ac_dir_suffix
-# for backward compatibility:
-ac_top_builddir=$ac_top_build_prefix
-
-case $srcdir in
-  .)  # We are building in place.
-    ac_srcdir=.
-    ac_top_srcdir=$ac_top_builddir_sub
-    ac_abs_top_srcdir=$ac_pwd ;;
-  [\\/]* | ?:[\\/]* )  # Absolute name.
-    ac_srcdir=$srcdir$ac_dir_suffix;
-    ac_top_srcdir=$srcdir
-    ac_abs_top_srcdir=$srcdir ;;
-  *) # Relative name.
-    ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix
-    ac_top_srcdir=$ac_top_build_prefix$srcdir
-    ac_abs_top_srcdir=$ac_pwd/$srcdir ;;
-esac
-ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix
-
-
-  case $ac_mode in
-  :F)
-  #
-  # CONFIG_FILE
-  #
-
-  case $INSTALL in
-  [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;;
-  *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;;
-  esac
-  ac_MKDIR_P=$MKDIR_P
-  case $MKDIR_P in
-  [\\/$]* | ?:[\\/]* ) ;;
-  */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;;
-  esac
-_ACEOF
-
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-# If the template does not know about datarootdir, expand it.
-# FIXME: This hack should be removed a few years after 2.60.
-ac_datarootdir_hack=; ac_datarootdir_seen=
-ac_sed_dataroot='
-/datarootdir/ {
-  p
-  q
-}
-/@datadir@/p
-/@docdir@/p
-/@infodir@/p
-/@localedir@/p
-/@mandir@/p'
-case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in
-*datarootdir*) ac_datarootdir_seen=yes;;
-*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*)
-  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5
-$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;}
-_ACEOF
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-  ac_datarootdir_hack='
-  s&@datadir@&$datadir&g
-  s&@docdir@&$docdir&g
-  s&@infodir@&$infodir&g
-  s&@localedir@&$localedir&g
-  s&@mandir@&$mandir&g
-  s&\\\${datarootdir}&$datarootdir&g' ;;
-esac
-_ACEOF
-
-# Neutralize VPATH when `$srcdir' = `.'.
-# Shell code in configure.ac might set extrasub.
-# FIXME: do we really want to maintain this feature?
-cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
-ac_sed_extra="$ac_vpsub
-$extrasub
-_ACEOF
-cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
-:t
-/@[a-zA-Z_][a-zA-Z_0-9]*@/!b
-s|@configure_input@|$ac_sed_conf_input|;t t
-s&@top_builddir@&$ac_top_builddir_sub&;t t
-s&@top_build_prefix@&$ac_top_build_prefix&;t t
-s&@srcdir@&$ac_srcdir&;t t
-s&@abs_srcdir@&$ac_abs_srcdir&;t t
-s&@top_srcdir@&$ac_top_srcdir&;t t
-s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t
-s&@builddir@&$ac_builddir&;t t
-s&@abs_builddir@&$ac_abs_builddir&;t t
-s&@abs_top_builddir@&$ac_abs_top_builddir&;t t
-s&@INSTALL@&$ac_INSTALL&;t t
-s&@MKDIR_P@&$ac_MKDIR_P&;t t
-$ac_datarootdir_hack
-"
-eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \
-  >$ac_tmp/out || as_fn_error $? "could not create $ac_file" "$LINENO" 5
-
-test -z "$ac_datarootdir_hack$ac_datarootdir_seen" &&
-  { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } &&
-  { ac_out=`sed -n '/^[	 ]*datarootdir[	 ]*:*=/p' \
-      "$ac_tmp/out"`; test -z "$ac_out"; } &&
-  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir'
-which seems to be undefined.  Please make sure it is defined" >&5
-$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir'
-which seems to be undefined.  Please make sure it is defined" >&2;}
-
-  rm -f "$ac_tmp/stdin"
-  case $ac_file in
-  -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";;
-  *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";;
-  esac \
-  || as_fn_error $? "could not create $ac_file" "$LINENO" 5
- ;;
-  :H)
-  #
-  # CONFIG_HEADER
-  #
-  if test x"$ac_file" != x-; then
-    {
-      $as_echo "/* $configure_input  */" \
-      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs"
-    } >"$ac_tmp/config.h" \
-      || as_fn_error $? "could not create $ac_file" "$LINENO" 5
-    if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then
-      { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5
-$as_echo "$as_me: $ac_file is unchanged" >&6;}
-    else
-      rm -f "$ac_file"
-      mv "$ac_tmp/config.h" "$ac_file" \
-	|| as_fn_error $? "could not create $ac_file" "$LINENO" 5
-    fi
-  else
-    $as_echo "/* $configure_input  */" \
-      && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \
-      || as_fn_error $? "could not create -" "$LINENO" 5
-  fi
-# Compute "$ac_file"'s index in $config_headers.
-_am_arg="$ac_file"
-_am_stamp_count=1
-for _am_header in $config_headers :; do
-  case $_am_header in
-    $_am_arg | $_am_arg:* )
-      break ;;
-    * )
-      _am_stamp_count=`expr $_am_stamp_count + 1` ;;
-  esac
-done
-echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" ||
-$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$_am_arg" : 'X\(//\)[^/]' \| \
-	 X"$_am_arg" : 'X\(//\)$' \| \
-	 X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$_am_arg" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`/stamp-h$_am_stamp_count
- ;;
-
-  :C)  { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5
-$as_echo "$as_me: executing $ac_file commands" >&6;}
- ;;
-  esac
-
-
-  case $ac_file$ac_mode in
-    "depfiles":C) test x"$AMDEP_TRUE" != x"" || {
-  # Autoconf 2.62 quotes --file arguments for eval, but not when files
-  # are listed without --file.  Let's play safe and only enable the eval
-  # if we detect the quoting.
-  case $CONFIG_FILES in
-  *\'*) eval set x "$CONFIG_FILES" ;;
-  *)   set x $CONFIG_FILES ;;
-  esac
-  shift
-  for mf
-  do
-    # Strip MF so we end up with the name of the file.
-    mf=`echo "$mf" | sed -e 's/:.*$//'`
-    # Check whether this is an Automake generated Makefile or not.
-    # We used to match only the files named `Makefile.in', but
-    # some people rename them; so instead we look at the file content.
-    # Grep'ing the first line is not enough: some people post-process
-    # each Makefile.in and add a new line on top of each file to say so.
-    # Grep'ing the whole file is not good either: AIX grep has a line
-    # limit of 2048, but all sed's we know have understand at least 4000.
-    if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then
-      dirpart=`$as_dirname -- "$mf" ||
-$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$mf" : 'X\(//\)[^/]' \| \
-	 X"$mf" : 'X\(//\)$' \| \
-	 X"$mf" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$mf" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-    else
-      continue
-    fi
-    # Extract the definition of DEPDIR, am__include, and am__quote
-    # from the Makefile without running `make'.
-    DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"`
-    test -z "$DEPDIR" && continue
-    am__include=`sed -n 's/^am__include = //p' < "$mf"`
-    test -z "am__include" && continue
-    am__quote=`sed -n 's/^am__quote = //p' < "$mf"`
-    # When using ansi2knr, U may be empty or an underscore; expand it
-    U=`sed -n 's/^U = //p' < "$mf"`
-    # Find all dependency output files, they are included files with
-    # $(DEPDIR) in their names.  We invoke sed twice because it is the
-    # simplest approach to changing $(DEPDIR) to its actual value in the
-    # expansion.
-    for file in `sed -n "
-      s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \
-	 sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do
-      # Make sure the directory exists.
-      test -f "$dirpart/$file" && continue
-      fdir=`$as_dirname -- "$file" ||
-$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \
-	 X"$file" : 'X\(//\)[^/]' \| \
-	 X"$file" : 'X\(//\)$' \| \
-	 X"$file" : 'X\(/\)' \| . 2>/dev/null ||
-$as_echo X"$file" |
-    sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)[^/].*/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\/\)$/{
-	    s//\1/
-	    q
-	  }
-	  /^X\(\/\).*/{
-	    s//\1/
-	    q
-	  }
-	  s/.*/./; q'`
-      as_dir=$dirpart/$fdir; as_fn_mkdir_p
-      # echo "creating $dirpart/$file"
-      echo '# dummy' > "$dirpart/$file"
-    done
-  done
-}
- ;;
-    "libtool":C)
-
-    # See if we are running on zsh, and set the options which allow our
-    # commands through without removal of \ escapes.
-    if test -n "${ZSH_VERSION+set}" ; then
-      setopt NO_GLOB_SUBST
-    fi
-
-    cfgfile="${ofile}T"
-    trap "$RM \"$cfgfile\"; exit 1" 1 2 15
-    $RM "$cfgfile"
-
-    cat <<_LT_EOF >> "$cfgfile"
-#! $SHELL
-
-# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services.
-# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION
-# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`:
-# NOTE: Changes made to this file will be lost: look at ltmain.sh.
-#
-#   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
-#                 2006, 2007, 2008, 2009, 2010, 2011 Free Software
-#                 Foundation, Inc.
-#   Written by Gordon Matzigkeit, 1996
-#
-#   This file is part of GNU Libtool.
-#
-# GNU Libtool is free software; you can redistribute it and/or
-# modify it under the terms of the GNU General Public License as
-# published by the Free Software Foundation; either version 2 of
-# the License, or (at your option) any later version.
-#
-# As a special exception to the GNU General Public License,
-# if you distribute this file as part of a program or library that
-# is built using GNU Libtool, you may include this file under the
-# same distribution terms that you use for the rest of that program.
-#
-# GNU Libtool is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with GNU Libtool; see the file COPYING.  If not, a copy
-# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
-# obtained by writing to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-
-
-# The names of the tagged configurations supported by this script.
-available_tags="FC "
-
-# ### BEGIN LIBTOOL CONFIG
-
-# Which release of libtool.m4 was used?
-macro_version=$macro_version
-macro_revision=$macro_revision
-
-# Whether or not to build shared libraries.
-build_libtool_libs=$enable_shared
-
-# Whether or not to build static libraries.
-build_old_libs=$enable_static
-
-# What type of objects to build.
-pic_mode=$pic_mode
-
-# Whether or not to optimize for fast installation.
-fast_install=$enable_fast_install
-
-# Shell to use when invoking shell scripts.
-SHELL=$lt_SHELL
-
-# An echo program that protects backslashes.
-ECHO=$lt_ECHO
-
-# The PATH separator for the build system.
-PATH_SEPARATOR=$lt_PATH_SEPARATOR
-
-# The host system.
-host_alias=$host_alias
-host=$host
-host_os=$host_os
-
-# The build system.
-build_alias=$build_alias
-build=$build
-build_os=$build_os
-
-# A sed program that does not truncate output.
-SED=$lt_SED
-
-# Sed that helps us avoid accidentally triggering echo(1) options like -n.
-Xsed="\$SED -e 1s/^X//"
-
-# A grep program that handles long lines.
-GREP=$lt_GREP
-
-# An ERE matcher.
-EGREP=$lt_EGREP
-
-# A literal string matcher.
-FGREP=$lt_FGREP
-
-# A BSD- or MS-compatible name lister.
-NM=$lt_NM
-
-# Whether we need soft or hard links.
-LN_S=$lt_LN_S
-
-# What is the maximum length of a command?
-max_cmd_len=$max_cmd_len
-
-# Object file suffix (normally "o").
-objext=$ac_objext
-
-# Executable file suffix (normally "").
-exeext=$exeext
-
-# whether the shell understands "unset".
-lt_unset=$lt_unset
-
-# turn spaces into newlines.
-SP2NL=$lt_lt_SP2NL
-
-# turn newlines into spaces.
-NL2SP=$lt_lt_NL2SP
-
-# convert \$build file names to \$host format.
-to_host_file_cmd=$lt_cv_to_host_file_cmd
-
-# convert \$build files to toolchain format.
-to_tool_file_cmd=$lt_cv_to_tool_file_cmd
-
-# An object symbol dumper.
-OBJDUMP=$lt_OBJDUMP
-
-# Method to check whether dependent libraries are shared objects.
-deplibs_check_method=$lt_deplibs_check_method
-
-# Command to use when deplibs_check_method = "file_magic".
-file_magic_cmd=$lt_file_magic_cmd
-
-# How to find potential files when deplibs_check_method = "file_magic".
-file_magic_glob=$lt_file_magic_glob
-
-# Find potential files using nocaseglob when deplibs_check_method = "file_magic".
-want_nocaseglob=$lt_want_nocaseglob
-
-# DLL creation program.
-DLLTOOL=$lt_DLLTOOL
-
-# Command to associate shared and link libraries.
-sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd
-
-# The archiver.
-AR=$lt_AR
-
-# Flags to create an archive.
-AR_FLAGS=$lt_AR_FLAGS
-
-# How to feed a file listing to the archiver.
-archiver_list_spec=$lt_archiver_list_spec
-
-# A symbol stripping program.
-STRIP=$lt_STRIP
-
-# Commands used to install an old-style archive.
-RANLIB=$lt_RANLIB
-old_postinstall_cmds=$lt_old_postinstall_cmds
-old_postuninstall_cmds=$lt_old_postuninstall_cmds
-
-# Whether to use a lock for old archive extraction.
-lock_old_archive_extraction=$lock_old_archive_extraction
-
-# A C compiler.
-LTCC=$lt_CC
-
-# LTCC compiler flags.
-LTCFLAGS=$lt_CFLAGS
-
-# Take the output of nm and produce a listing of raw symbols and C names.
-global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe
-
-# Transform the output of nm in a proper C declaration.
-global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl
-
-# Transform the output of nm in a C name address pair.
-global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address
-
-# Transform the output of nm in a C name address pair when lib prefix is needed.
-global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix
-
-# Specify filename containing input files for \$NM.
-nm_file_list_spec=$lt_nm_file_list_spec
-
-# The root where to search for dependent libraries,and in which our libraries should be installed.
-lt_sysroot=$lt_sysroot
-
-# The name of the directory that contains temporary libtool files.
-objdir=$objdir
-
-# Used to examine libraries when file_magic_cmd begins with "file".
-MAGIC_CMD=$MAGIC_CMD
-
-# Must we lock files when doing compilation?
-need_locks=$lt_need_locks
-
-# Manifest tool.
-MANIFEST_TOOL=$lt_MANIFEST_TOOL
-
-# Tool to manipulate archived DWARF debug symbol files on Mac OS X.
-DSYMUTIL=$lt_DSYMUTIL
-
-# Tool to change global to local symbols on Mac OS X.
-NMEDIT=$lt_NMEDIT
-
-# Tool to manipulate fat objects and archives on Mac OS X.
-LIPO=$lt_LIPO
-
-# ldd/readelf like tool for Mach-O binaries on Mac OS X.
-OTOOL=$lt_OTOOL
-
-# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4.
-OTOOL64=$lt_OTOOL64
-
-# Old archive suffix (normally "a").
-libext=$libext
-
-# Shared library suffix (normally ".so").
-shrext_cmds=$lt_shrext_cmds
-
-# The commands to extract the exported symbol list from a shared archive.
-extract_expsyms_cmds=$lt_extract_expsyms_cmds
-
-# Variables whose values should be saved in libtool wrapper scripts and
-# restored at link time.
-variables_saved_for_relink=$lt_variables_saved_for_relink
-
-# Do we need the "lib" prefix for modules?
-need_lib_prefix=$need_lib_prefix
-
-# Do we need a version for libraries?
-need_version=$need_version
-
-# Library versioning type.
-version_type=$version_type
-
-# Shared library runtime path variable.
-runpath_var=$runpath_var
-
-# Shared library path variable.
-shlibpath_var=$shlibpath_var
-
-# Is shlibpath searched before the hard-coded library search path?
-shlibpath_overrides_runpath=$shlibpath_overrides_runpath
-
-# Format of library name prefix.
-libname_spec=$lt_libname_spec
-
-# List of archive names.  First name is the real one, the rest are links.
-# The last name is the one that the linker finds with -lNAME
-library_names_spec=$lt_library_names_spec
-
-# The coded name of the library, if different from the real name.
-soname_spec=$lt_soname_spec
-
-# Permission mode override for installation of shared libraries.
-install_override_mode=$lt_install_override_mode
-
-# Command to use after installation of a shared archive.
-postinstall_cmds=$lt_postinstall_cmds
-
-# Command to use after uninstallation of a shared archive.
-postuninstall_cmds=$lt_postuninstall_cmds
-
-# Commands used to finish a libtool library installation in a directory.
-finish_cmds=$lt_finish_cmds
-
-# As "finish_cmds", except a single script fragment to be evaled but
-# not shown.
-finish_eval=$lt_finish_eval
-
-# Whether we should hardcode library paths into libraries.
-hardcode_into_libs=$hardcode_into_libs
-
-# Compile-time system search path for libraries.
-sys_lib_search_path_spec=$lt_sys_lib_search_path_spec
-
-# Run-time system search path for libraries.
-sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec
-
-# Whether dlopen is supported.
-dlopen_support=$enable_dlopen
-
-# Whether dlopen of programs is supported.
-dlopen_self=$enable_dlopen_self
-
-# Whether dlopen of statically linked programs is supported.
-dlopen_self_static=$enable_dlopen_self_static
-
-# Commands to strip libraries.
-old_striplib=$lt_old_striplib
-striplib=$lt_striplib
-
-
-# The linker used to build libraries.
-LD=$lt_LD
-
-# How to create reloadable object files.
-reload_flag=$lt_reload_flag
-reload_cmds=$lt_reload_cmds
-
-# Commands used to build an old-style archive.
-old_archive_cmds=$lt_old_archive_cmds
-
-# A language specific compiler.
-CC=$lt_compiler
-
-# Is the compiler the GNU compiler?
-with_gcc=$GCC
-
-# Compiler flag to turn off builtin functions.
-no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag
-
-# Additional compiler flags for building library objects.
-pic_flag=$lt_lt_prog_compiler_pic
-
-# How to pass a linker flag through the compiler.
-wl=$lt_lt_prog_compiler_wl
-
-# Compiler flag to prevent dynamic linking.
-link_static_flag=$lt_lt_prog_compiler_static
-
-# Does compiler simultaneously support -c and -o options?
-compiler_c_o=$lt_lt_cv_prog_compiler_c_o
-
-# Whether or not to add -lc for building shared libraries.
-build_libtool_need_lc=$archive_cmds_need_lc
-
-# Whether or not to disallow shared libs when runtime libs are static.
-allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes
-
-# Compiler flag to allow reflexive dlopens.
-export_dynamic_flag_spec=$lt_export_dynamic_flag_spec
-
-# Compiler flag to generate shared objects directly from archives.
-whole_archive_flag_spec=$lt_whole_archive_flag_spec
-
-# Whether the compiler copes with passing no objects directly.
-compiler_needs_object=$lt_compiler_needs_object
-
-# Create an old-style archive from a shared archive.
-old_archive_from_new_cmds=$lt_old_archive_from_new_cmds
-
-# Create a temporary old-style archive to link instead of a shared archive.
-old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds
-
-# Commands used to build a shared archive.
-archive_cmds=$lt_archive_cmds
-archive_expsym_cmds=$lt_archive_expsym_cmds
-
-# Commands used to build a loadable module if different from building
-# a shared archive.
-module_cmds=$lt_module_cmds
-module_expsym_cmds=$lt_module_expsym_cmds
-
-# Whether we are building with GNU ld or not.
-with_gnu_ld=$lt_with_gnu_ld
-
-# Flag that allows shared libraries with undefined symbols to be built.
-allow_undefined_flag=$lt_allow_undefined_flag
-
-# Flag that enforces no undefined symbols.
-no_undefined_flag=$lt_no_undefined_flag
-
-# Flag to hardcode \$libdir into a binary during linking.
-# This must work even if \$libdir does not exist
-hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec
-
-# Whether we need a single "-rpath" flag with a separated argument.
-hardcode_libdir_separator=$lt_hardcode_libdir_separator
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary.
-hardcode_direct=$hardcode_direct
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary and the resulting library dependency is
-# "absolute",i.e impossible to change by setting \${shlibpath_var} if the
-# library is relocated.
-hardcode_direct_absolute=$hardcode_direct_absolute
-
-# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
-# into the resulting binary.
-hardcode_minus_L=$hardcode_minus_L
-
-# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
-# into the resulting binary.
-hardcode_shlibpath_var=$hardcode_shlibpath_var
-
-# Set to "yes" if building a shared library automatically hardcodes DIR
-# into the library and all subsequent libraries and executables linked
-# against it.
-hardcode_automatic=$hardcode_automatic
-
-# Set to yes if linker adds runtime paths of dependent libraries
-# to runtime path list.
-inherit_rpath=$inherit_rpath
-
-# Whether libtool must link a program against all its dependency libraries.
-link_all_deplibs=$link_all_deplibs
-
-# Set to "yes" if exported symbols are required.
-always_export_symbols=$always_export_symbols
-
-# The commands to list exported symbols.
-export_symbols_cmds=$lt_export_symbols_cmds
-
-# Symbols that should not be listed in the preloaded symbols.
-exclude_expsyms=$lt_exclude_expsyms
-
-# Symbols that must always be exported.
-include_expsyms=$lt_include_expsyms
-
-# Commands necessary for linking programs (against libraries) with templates.
-prelink_cmds=$lt_prelink_cmds
-
-# Commands necessary for finishing linking programs.
-postlink_cmds=$lt_postlink_cmds
-
-# Specify filename containing input files.
-file_list_spec=$lt_file_list_spec
-
-# How to hardcode a shared library path into an executable.
-hardcode_action=$hardcode_action
-
-# The directories searched by this compiler when creating a shared library.
-compiler_lib_search_dirs=$lt_compiler_lib_search_dirs
-
-# Dependencies to place before and after the objects being linked to
-# create a shared library.
-predep_objects=$lt_predep_objects
-postdep_objects=$lt_postdep_objects
-predeps=$lt_predeps
-postdeps=$lt_postdeps
-
-# The library search path used internally by the compiler when linking
-# a shared library.
-compiler_lib_search_path=$lt_compiler_lib_search_path
-
-# ### END LIBTOOL CONFIG
-
-_LT_EOF
-
-  case $host_os in
-  aix3*)
-    cat <<\_LT_EOF >> "$cfgfile"
-# AIX sometimes has problems with the GCC collect2 program.  For some
-# reason, if we set the COLLECT_NAMES environment variable, the problems
-# vanish in a puff of smoke.
-if test "X${COLLECT_NAMES+set}" != Xset; then
-  COLLECT_NAMES=
-  export COLLECT_NAMES
-fi
-_LT_EOF
-    ;;
-  esac
-
-
-ltmain="$ac_aux_dir/ltmain.sh"
-
-
-  # We use sed instead of cat because bash on DJGPP gets confused if
-  # if finds mixed CR/LF and LF-only lines.  Since sed operates in
-  # text mode, it properly converts lines to CR/LF.  This bash problem
-  # is reportedly fixed, but why not run on old versions too?
-  sed '$q' "$ltmain" >> "$cfgfile" \
-     || (rm -f "$cfgfile"; exit 1)
-
-  if test x"$xsi_shell" = xyes; then
-  sed -e '/^func_dirname ()$/,/^} # func_dirname /c\
-func_dirname ()\
-{\
-\    case ${1} in\
-\      */*) func_dirname_result="${1%/*}${2}" ;;\
-\      *  ) func_dirname_result="${3}" ;;\
-\    esac\
-} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_basename ()$/,/^} # func_basename /c\
-func_basename ()\
-{\
-\    func_basename_result="${1##*/}"\
-} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\
-func_dirname_and_basename ()\
-{\
-\    case ${1} in\
-\      */*) func_dirname_result="${1%/*}${2}" ;;\
-\      *  ) func_dirname_result="${3}" ;;\
-\    esac\
-\    func_basename_result="${1##*/}"\
-} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_stripname ()$/,/^} # func_stripname /c\
-func_stripname ()\
-{\
-\    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\
-\    # positional parameters, so assign one to ordinary parameter first.\
-\    func_stripname_result=${3}\
-\    func_stripname_result=${func_stripname_result#"${1}"}\
-\    func_stripname_result=${func_stripname_result%"${2}"}\
-} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\
-func_split_long_opt ()\
-{\
-\    func_split_long_opt_name=${1%%=*}\
-\    func_split_long_opt_arg=${1#*=}\
-} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\
-func_split_short_opt ()\
-{\
-\    func_split_short_opt_arg=${1#??}\
-\    func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\
-} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\
-func_lo2o ()\
-{\
-\    case ${1} in\
-\      *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\
-\      *)    func_lo2o_result=${1} ;;\
-\    esac\
-} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_xform ()$/,/^} # func_xform /c\
-func_xform ()\
-{\
-    func_xform_result=${1%.*}.lo\
-} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_arith ()$/,/^} # func_arith /c\
-func_arith ()\
-{\
-    func_arith_result=$(( $* ))\
-} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_len ()$/,/^} # func_len /c\
-func_len ()\
-{\
-    func_len_result=${#1}\
-} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-fi
-
-if test x"$lt_shell_append" = xyes; then
-  sed -e '/^func_append ()$/,/^} # func_append /c\
-func_append ()\
-{\
-    eval "${1}+=\\${2}"\
-} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\
-func_append_quoted ()\
-{\
-\    func_quote_for_eval "${2}"\
-\    eval "${1}+=\\\\ \\$func_quote_for_eval_result"\
-} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \
-  && mv -f "$cfgfile.tmp" "$cfgfile" \
-    || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-test 0 -eq $? || _lt_function_replace_fail=:
-
-
-  # Save a `func_append' function call where possible by direct use of '+='
-  sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \
-    && mv -f "$cfgfile.tmp" "$cfgfile" \
-      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-  test 0 -eq $? || _lt_function_replace_fail=:
-else
-  # Save a `func_append' function call even when '+=' is not available
-  sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \
-    && mv -f "$cfgfile.tmp" "$cfgfile" \
-      || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp")
-  test 0 -eq $? || _lt_function_replace_fail=:
-fi
-
-if test x"$_lt_function_replace_fail" = x":"; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5
-$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;}
-fi
-
-
-   mv -f "$cfgfile" "$ofile" ||
-    (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile")
-  chmod +x "$ofile"
-
-
-    cat <<_LT_EOF >> "$ofile"
-
-# ### BEGIN LIBTOOL TAG CONFIG: FC
-
-# The linker used to build libraries.
-LD=$lt_LD_FC
-
-# How to create reloadable object files.
-reload_flag=$lt_reload_flag_FC
-reload_cmds=$lt_reload_cmds_FC
-
-# Commands used to build an old-style archive.
-old_archive_cmds=$lt_old_archive_cmds_FC
-
-# A language specific compiler.
-CC=$lt_compiler_FC
-
-# Is the compiler the GNU compiler?
-with_gcc=$GCC_FC
-
-# Compiler flag to turn off builtin functions.
-no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_FC
-
-# Additional compiler flags for building library objects.
-pic_flag=$lt_lt_prog_compiler_pic_FC
-
-# How to pass a linker flag through the compiler.
-wl=$lt_lt_prog_compiler_wl_FC
-
-# Compiler flag to prevent dynamic linking.
-link_static_flag=$lt_lt_prog_compiler_static_FC
-
-# Does compiler simultaneously support -c and -o options?
-compiler_c_o=$lt_lt_cv_prog_compiler_c_o_FC
-
-# Whether or not to add -lc for building shared libraries.
-build_libtool_need_lc=$archive_cmds_need_lc_FC
-
-# Whether or not to disallow shared libs when runtime libs are static.
-allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_FC
-
-# Compiler flag to allow reflexive dlopens.
-export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_FC
-
-# Compiler flag to generate shared objects directly from archives.
-whole_archive_flag_spec=$lt_whole_archive_flag_spec_FC
-
-# Whether the compiler copes with passing no objects directly.
-compiler_needs_object=$lt_compiler_needs_object_FC
-
-# Create an old-style archive from a shared archive.
-old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_FC
-
-# Create a temporary old-style archive to link instead of a shared archive.
-old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_FC
-
-# Commands used to build a shared archive.
-archive_cmds=$lt_archive_cmds_FC
-archive_expsym_cmds=$lt_archive_expsym_cmds_FC
-
-# Commands used to build a loadable module if different from building
-# a shared archive.
-module_cmds=$lt_module_cmds_FC
-module_expsym_cmds=$lt_module_expsym_cmds_FC
-
-# Whether we are building with GNU ld or not.
-with_gnu_ld=$lt_with_gnu_ld_FC
-
-# Flag that allows shared libraries with undefined symbols to be built.
-allow_undefined_flag=$lt_allow_undefined_flag_FC
-
-# Flag that enforces no undefined symbols.
-no_undefined_flag=$lt_no_undefined_flag_FC
-
-# Flag to hardcode \$libdir into a binary during linking.
-# This must work even if \$libdir does not exist
-hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_FC
-
-# Whether we need a single "-rpath" flag with a separated argument.
-hardcode_libdir_separator=$lt_hardcode_libdir_separator_FC
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary.
-hardcode_direct=$hardcode_direct_FC
-
-# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes
-# DIR into the resulting binary and the resulting library dependency is
-# "absolute",i.e impossible to change by setting \${shlibpath_var} if the
-# library is relocated.
-hardcode_direct_absolute=$hardcode_direct_absolute_FC
-
-# Set to "yes" if using the -LDIR flag during linking hardcodes DIR
-# into the resulting binary.
-hardcode_minus_L=$hardcode_minus_L_FC
-
-# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR
-# into the resulting binary.
-hardcode_shlibpath_var=$hardcode_shlibpath_var_FC
-
-# Set to "yes" if building a shared library automatically hardcodes DIR
-# into the library and all subsequent libraries and executables linked
-# against it.
-hardcode_automatic=$hardcode_automatic_FC
-
-# Set to yes if linker adds runtime paths of dependent libraries
-# to runtime path list.
-inherit_rpath=$inherit_rpath_FC
-
-# Whether libtool must link a program against all its dependency libraries.
-link_all_deplibs=$link_all_deplibs_FC
-
-# Set to "yes" if exported symbols are required.
-always_export_symbols=$always_export_symbols_FC
-
-# The commands to list exported symbols.
-export_symbols_cmds=$lt_export_symbols_cmds_FC
-
-# Symbols that should not be listed in the preloaded symbols.
-exclude_expsyms=$lt_exclude_expsyms_FC
-
-# Symbols that must always be exported.
-include_expsyms=$lt_include_expsyms_FC
-
-# Commands necessary for linking programs (against libraries) with templates.
-prelink_cmds=$lt_prelink_cmds_FC
-
-# Commands necessary for finishing linking programs.
-postlink_cmds=$lt_postlink_cmds_FC
-
-# Specify filename containing input files.
-file_list_spec=$lt_file_list_spec_FC
-
-# How to hardcode a shared library path into an executable.
-hardcode_action=$hardcode_action_FC
-
-# The directories searched by this compiler when creating a shared library.
-compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_FC
-
-# Dependencies to place before and after the objects being linked to
-# create a shared library.
-predep_objects=$lt_predep_objects_FC
-postdep_objects=$lt_postdep_objects_FC
-predeps=$lt_predeps_FC
-postdeps=$lt_postdeps_FC
-
-# The library search path used internally by the compiler when linking
-# a shared library.
-compiler_lib_search_path=$lt_compiler_lib_search_path_FC
-
-# ### END LIBTOOL TAG CONFIG: FC
-_LT_EOF
-
- ;;
-
-  esac
-done # for ac_tag
-
-
-as_fn_exit 0
-_ACEOF
-ac_clean_files=$ac_clean_files_save
-
-test $ac_write_fail = 0 ||
-  as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5
-
-
-# configure is writing to config.log, and then calls config.status.
-# config.status does its own redirection, appending to config.log.
-# Unfortunately, on DOS this fails, as config.log is still kept open
-# by configure, so config.status won't be able to write to it; its
-# output is simply discarded.  So we exec the FD to /dev/null,
-# effectively closing config.log, so it can be properly (re)opened and
-# appended to by config.status.  When coming back to configure, we
-# need to make the FD available again.
-if test "$no_create" != yes; then
-  ac_cs_success=:
-  ac_config_status_args=
-  test "$silent" = yes &&
-    ac_config_status_args="$ac_config_status_args --quiet"
-  exec 5>/dev/null
-  $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false
-  exec 5>>config.log
-  # Use ||, not &&, to avoid exiting from the if with $? = 1, which
-  # would make configure fail if this is the last instruction.
-  $ac_cs_success || as_fn_exit 1
-fi
-if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then
-  { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5
-$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;}
-fi
-
diff --git a/CMake/cdat_modules_extra/uvcdat.in b/CMake/cdat_modules_extra/uvcdat.in
deleted file mode 100755
index 8b1a3f238..000000000
--- a/CMake/cdat_modules_extra/uvcdat.in
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash
-# source is not portable whereas . is
-. "@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh"
-
-# Used in event of -o "log_location"; grabs the next arg and puts it in target
-capture=false
-# The location we'll be logging to
-target="$HOME/.uvcdat/uvcdatsession.log"
-# Whether or not we're redirecting the stdout/stderr
-redirect=true
-
-for var in "$@"
-do
-    if [ $capture = true ]; then
-        # -o was found, grabbing the next value
-        target=$var
-        if [ "$target" = "" ]; then
-            # This is the way we can redirect output to stdout
-            # Do not redirect output
-            redirect=false
-        fi
-        # Don't need to capture anything else
-        capture=false
-        continue
-    fi
-
-    case $var in
-        # Trigger above block on the next arg
-    -o) capture=true;
-            ;;
-                # Parse the target out of the = section
-    --output=*) target=`sed "s/--output=\(.*\)/\1/" <<< $var`
-        if [ "$target" = "" ]; then
-            # Do not redirect output
-            redirect=false
-        fi
-            ;;
-                  # Do not redirect output
-    --output-std) redirect=false
-            ;;
-            # Shouldn't redirect for help
-    --help) redirect=false
-            ;;
-    *)      ;;
-    esac
-done
-
-if [ $redirect = false ]  ;then
-    python@PYVER@ "@CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py" "$@"
-else
-    # Replace all uses of ~ with $HOME
-    target="${target/#\~/$HOME}"
-
-    # Check if path exists
-    target_dir="$(dirname $target)"
-    if [ ! -d "$target_dir" ] ;then
-        mkdir -p $target_dir
-    fi
-
-    # Make sure the file exists and that we have write privileges
-    touch $target
-    # Launch with redirection
-    python@PYVER@ "@CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py" "$@" >>$target 2>&1
-fi
diff --git a/CMake/cdat_modules_extra/uvcdat.mac.in b/CMake/cdat_modules_extra/uvcdat.mac.in
deleted file mode 100755
index 14a394f37..000000000
--- a/CMake/cdat_modules_extra/uvcdat.mac.in
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-# source is not portable where as . is
-BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-. $BASEDIR/setup_runtime.sh
-python@PYVER@ $BASEDIR/../vistrails/vistrails/uvcdat.py
-
diff --git a/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt b/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt
deleted file mode 100644
index e61b4896d..000000000
--- a/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt
+++ /dev/null
@@ -1,232 +0,0 @@
-82848263d3f9032b41bc02f758cb0bed  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-01.nc
-09c2f48312305fef59ee571fe1c3a84a  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-02.nc
-3274cb2d7cccffac20059f564a97998e  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-03.nc
-5e677beb0eccfe8c94ec9e18460c2581  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-04.nc
-cd565477d7d8555566e16bf5ff4bfe44  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-05.nc
-d6038ef39f33b6a6d06a3554531a1ed2  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-06.nc
-97bf73768c9f50068ffa7399fc0a1e0a  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-07.nc
-705147cb320524d8257dcee8b450aec3  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-08.nc
-164861198d2cb1897713afbeebf9eb62  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-09.nc
-0b342120b940679cab8a2204e6b9f0d0  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-10.nc
-6a12f3a02fc2607afe871f1d4aff7ea2  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-11.nc
-0d642c1b3697ff3c45d07b7a90a07fab  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-12.nc
-8b3e27df842aba7dc88b4c13266cc4ed  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-01.nc
-38a7850265356a9b49ab78172b121927  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-02.nc
-30ab14ec20e9ee54ff9ba3bd0332c490  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-03.nc
-2d4c0cf37429c5a1d97be1acc5b907b1  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-04.nc
-72ed71d9937b77e9c01f35ec3924e478  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-05.nc
-62c85090e8b93a0caedebae52a6feddf  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-06.nc
-7c9a70dfc28d7a9eb052f281738adb55  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-07.nc
-d505af09b431fcfb2255fbabcae16ce0  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-08.nc
-2875586ec0f21efd94a4fca640ef7f59  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-09.nc
-6bc3b40018820413633a07c4d8278e50  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-10.nc
-6a56554e98908dbcb1ad04f8129b7e8d  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-11.nc
-610ad7ff458a87c863fc2d792e69dc2f  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-12.nc
-e17b3f827c0162c246df0a3aabe4ce9d  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-01.nc
-79a551fdfb44b88c64fb6552048f4dc5  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-02.nc
-e8c38da3ad16c7866b3b3b540647a5da  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-03.nc
-59de1e4fedabf0976590af6e470ceec1  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-04.nc
-147389dbf5bfb479d09a8982d6690e8b  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-05.nc
-c6f3843a3f716de98693c11bc807c206  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-06.nc
-be02c6100e317dd037ad0cccf9d8a8cf  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-07.nc
-109b769371207a503ac9039b37fd4dad  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-08.nc
-629f86af7dbe6f3b379450f951e3e1b2  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-09.nc
-02c3a536f6025ebde38bee00bc69aa09  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-10.nc
-a661f1ce9b87e46865b489fde9752edf  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-11.nc
-7de08765c4e2f9a34e21ba8024453adc  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-12.nc
-28441278df2af93f9ebfa1b51ef21007  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-01.nc
-1576faec4df27627c3eb975e7c6f5fef  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-02.nc
-abccaf09316d0f1705557dd752d359af  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-03.nc
-cde766ef10310253fc3baaa4d5ca8761  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-04.nc
-fd58a1f7d6d2a6037df183e0fca9ff5f  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-05.nc
-73a0b57991b798ca2b52e56afcf4f630  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-06.nc
-f45485c533798bb53b4452469a5bc678  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-07.nc
-149bfade64fe7b0b984059954e88ce97  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-08.nc
-ada05ce9162160c9a6c02d9d335c9349  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-09.nc
-aca027b6b88bfa17059ff22945cd393f  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-10.nc
-54e738cdb234fcec78d86a49790fafdc  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-11.nc
-151f3e6f7c5a8cbfd31abada8df36dd2  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-12.nc
-91b73bdb596231c604d4c76db55bce5e  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-01.nc
-5446fed21e3700d9d90f212ddbdbedc4  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-02.nc
-8f69e20b5993613eb473a904cb3c5cfd  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-03.nc
-6d984999d23f93c2434960f43381556d  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-04.nc
-8be183c391e859bc36a8215f276bdd1b  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-05.nc
-6e610ae6499ec706940ce81b3ee5df85  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-06.nc
-2be1078885df583b0a1ee929ef663846  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-07.nc
-493969c7aef835400219722322276ec5  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-08.nc
-055d76ef47600f3b0e0142d6cb4db758  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-09.nc
-12ec6242e2e3269b180c4a2367963327  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-10.nc
-a857e9ae0696c33c38171c7d92791181  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-11.nc
-42097c573ac657ec44bde9aabfa98afd  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-12.nc
-b7198ad93b6eae51fcfd49fb3f9877a9  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-01.nc
-09d6b9c23bf272f7ad8e6eba37e45edb  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-02.nc
-b3ab42c5083df9f901dde9c7fe90bf26  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-03.nc
-4a63c5b704fa1e8fefab025c4e8c46aa  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-04.nc
-4608c9358aa5754352eb9b87d85e7a1c  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-05.nc
-4eff1ec373b9beb820e5e1e4113498aa  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-06.nc
-cffdc3aab308d233c956720d80671b95  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-07.nc
-8dfcd2ecac7d37c12ac0adef4825c67f  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-08.nc
-0a196de01ca67ce291a026e755b9921d  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-09.nc
-e6931415ab36579fff13f4933a6bf1f5  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-10.nc
-526fbd9987a6d5faf927106bf048aa2b  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-11.nc
-839301c709e5a7b3eb271e75782979af  acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-12.nc
-6d86e5edd0a92931226ac99d13167980  acme_lores_atm_climo/F1850.g37_bench_ANN_climo.nc
-f3c5c5a4d91d6e3e0cc4d2df362b0503  acme_lores_atm_climo/F1850.g37_bench_DJF_climo.nc
-06e0cc5f50cd7f2c1e2f30c4c4278b47  acme_lores_cam_climo/F1850.g37_bench_ANN_climo.nc
-712d887975121e81168ab03a535cadba  acme_lores_cam_climo/F1850.g37_bench_APR_climo.nc
-e2e5505205c326eea69574226d881359  acme_lores_cam_climo/F1850.g37_bench_DJF_climo.nc
-d36b0d4f7fb27c3897668131bdec05aa  acme_lores_cam_climo/F1850.g37_bench_JAN_climo.nc
-39342297493a616eb8988ef0a3a9c988  acme_lores_cam_climo/F1850.g37_bench_JJA_climo.nc
-0a8cbf9b41f2cc752800a584f6356cbd  acme_lores_cam_climo/F1850.g37_bench_JUL_climo.nc
-7b2da1926acf2c0f9ffad80497775bb6  acme_lores_cam_climo/F1850.g37_bench_MAM_climo.nc
-7df286b070640d0074c556560edc6a73  acme_lores_cam_climo/F1850.g37_bench_SON_climo.nc
-77d7b6de33467bdebe1a05700f03cae7  acme_lores_clm_climo/ANN_climo.nc
-a075f9d88b0b29b9f6a706f56bc628fa  acme_lores_clm_climo/APR_climo.nc
-f0694a365f88bef9f2ae34169afcd99b  acme_lores_clm_climo/AUG_climo.nc
-3928a8108bed42d5035bb9e9ef06a227  acme_lores_clm_climo/DEC_climo.nc
-5cd00312d791f34b1d33ca336d874473  acme_lores_clm_climo/DJF_climo.nc
-5e2849739943108c549c6724c6927ccd  acme_lores_clm_climo/FEB_climo.nc
-4badd8e20c7e45d8156f0677416d4f85  acme_lores_clm_climo/JAN_climo.nc
-bcc44d7f27938f1f21cf3c34d29dfe0d  acme_lores_clm_climo/JJA_climo.nc
-c11b441acebdf5e7dac696485abd31b8  acme_lores_clm_climo/JUL_climo.nc
-1740586484d8e59b18bf97d89658cd97  acme_lores_clm_climo/JUN_climo.nc
-6aca924e7541a42f37c189934912d4bb  acme_lores_clm_climo/MAM_climo.nc
-16c8c8d84c30d2f72b1bafd7929841a5  acme_lores_clm_climo/MAR_climo.nc
-eb483652fc0b0b069761659262d1d111  acme_lores_clm_climo/MAY_climo.nc
-e3e52b82e64357c50fe42aed7e0ba56c  acme_lores_clm_climo/NOV_climo.nc
-8969b2045cd430d03cebaccb91995f3d  acme_lores_clm_climo/OCT_climo.nc
-4a1d44b3ab16645aef032006be8b4af3  acme_lores_clm_climo/SEP_climo.nc
-f57a1c82229d2985894ef643e0392135  acme_lores_clm_climo/SON_climo.nc
-2a40dbd588429cbefb6317fc48076bb9  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-01.nc
-176fbe665aa0ea9ee3ba63d2df780537  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-02.nc
-cc857575c3b7e81520be03a20fd5fc4c  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-03.nc
-1a01b328a240435c32ea7f4dcc880db6  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-04.nc
-14b1ed3abf5c37c7d3611b57111123a8  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-05.nc
-a2cf201b629578dc40a1a6c8c2ebfdd4  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-06.nc
-3ba6118cecded5739d20ef78d2e75458  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-07.nc
-a42132db7da5c17b9a69aee42951ae3d  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-08.nc
-ee65c00602bc7e0de884e09be4b2bb1d  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-09.nc
-1909f013d84b298eeff19b5250f61daa  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-10.nc
-4b96d62be06f31b8be94388ce59dbeb7  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-11.nc
-486218898744c21420a24ab36121520d  acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-12.nc
-950360fe5f5334d3026ba44850c539a6  cam35_data/cam3_5_01_climo.nc
-fc869f4e9c79960f2f0766905379c4c3  cam35_data/cam3_5_02_climo.nc
-c11b0b3283b726318d84edc8ad042714  cam35_data/cam3_5_03_climo.nc
-4d1bfc12f358026addd34d47eca1b52c  cam35_data/cam3_5_04_climo.nc
-da9d0149d3e81d7bdae96076e07daf39  cam35_data/cam3_5_05_climo.nc
-a5526dbaac0a0da52ca04bc5b9c71c56  cam35_data/cam3_5_06_climo.nc
-00662c2eafcb297cf8aabf8c51456d0b  cam35_data/cam3_5_07_climo.nc
-ba72017189a80edd5181d639ae6204e9  cam35_data/cam3_5_08_climo.nc
-b23c87bbf00d39b0966e3a6d072c0abc  cam35_data/cam3_5_09_climo.nc
-4f5d4e732e97c163f63ed1430858c5e3  cam35_data/cam3_5_10_climo.nc
-6abc0b942e43cf5fbadbead8ea2aac26  cam35_data/cam3_5_11_climo.nc
-c9ecb1cbabcc60196263f0a8b488d1e1  cam35_data/cam3_5_12_climo.nc
-84204a1bc34f41f71ed613278b29a57f  cam35_data_smaller/cam3_5_01_climo.nc
-9fcd1364523a26f4fa833a89fc14bae9  cam35_data_smaller/cam3_5_02_climo.nc
-d53f58834cf9053f3255818e441c735a  cam35_data_smaller/cam3_5_03_climo.nc
-7c848ac7acf21552d93273b0ba4817e5  cam35_data_smaller/cam3_5_04_climo.nc
-96248cd867434a51d160ada6af4d0f4f  cam35_data_smaller/cam3_5_05_climo.nc
-155a163a204538164980a1425f4aa301  cam35_data_smaller/cam3_5_06_climo.nc
-b33bf096521235e9fec1a64479438568  cam35_data_smaller/cam3_5_07_climo.nc
-6fe5fcd5a4221dc4ae711ab6631b9cea  cam35_data_smaller/cam3_5_08_climo.nc
-7f2b52b2807e52ab0cdb94e892cec986  cam35_data_smaller/cam3_5_09_climo.nc
-a5121dec5eb93415d8988fb3ae1f279e  cam35_data_smaller/cam3_5_10_climo.nc
-36183ada10292e09053a6573f0d493b6  cam35_data_smaller/cam3_5_11_climo.nc
-018e37b4e760d92edfafcb035173db3d  cam35_data_smaller/cam3_5_12_climo.nc
-8cd47baae6710a9373ebaba96a6e262b  cam_output/c_t_b30.009.cam2.h0.0600-01.nc
-82731ab10329e5cdacfa78ea3da520f2  cam_output/c_t_b30.009.cam2.h0.0600-02.nc
-146a578b04623773ad0e98e930d1a5e5  cam_output/c_t_b30.009.cam2.h0.0600-03.nc
-e6ce8ea3580b3266bd93fc73dcad9adc  cam_output/c_t_b30.009.cam2.h0.0600-04.nc
-a5698548a26c40c514adcadd9623eb27  cam_output/c_t_b30.009.cam2.h0.0600-05.nc
-848918d62382e94bad56a2cc2cd07fd8  cam_output/c_t_b30.009.cam2.h0.0600-06.nc
-bf447ef80bef314a5e2b2003d741a529  cam_output/c_t_b30.009.cam2.h0.0600-07.nc
-be548db39e7607d4153f73e4b5657aa1  cam_output/c_t_b30.009.cam2.h0.0600-08.nc
-0f7764b3aaf5412bdcd70943129026d6  cam_output/c_t_b30.009.cam2.h0.0600-09.nc
-f0ac64dfbf1e5ccb97a167d0f6c75672  cam_output/c_t_b30.009.cam2.h0.0600-10.nc
-7bf5f3401a0fbe8263bac61ca113e7d8  cam_output/c_t_b30.009.cam2.h0.0600-11.nc
-cf83e939285b29ff808ed41544d7df92  cam_output/c_t_b30.009.cam2.h0.0600-12.nc
-6e8cdaf575f9101921d11c571334842f  cam_output/c_t_b30.009.cam2.h0.0601-01.nc
-999693e6583eb4ed322151b68dda4e72  cam_output/c_t_b30.009.cam2.h0.0601-02.nc
-e6d09f6db4fcf81ce68c935277fb110f  cam_output/c_t_b30.009.cam2.h0.0601-03.nc
-635be9948c7e7cecf82c76f953ed0624  cam_output/c_t_b30.009.cam2.h0.0601-04.nc
-a2c14b3f0602aa9ad3b43316f11ae5ff  cam_output/c_t_b30.009.cam2.h0.0601-05.nc
-fbbb8c51f858fe89f4880a41b5f17d04  cam_output/c_t_b30.009.cam2.h0.0601-06.nc
-1e5b7508a062d6aeb16afbf98045a5de  cam_output/c_t_b30.009.cam2.h0.0601-07.nc
-fc30abee308e251bde7be642fa0c3f7a  cam_output/c_t_b30.009.cam2.h0.0601-08.nc
-beafa07dc0c98b09984fd7830eb99f52  cam_output/c_t_b30.009.cam2.h0.0601-09.nc
-4f36607badf32ee9d2c5234a58e779ad  cam_output/c_t_b30.009.cam2.h0.0601-10.nc
-039b724f844a15b936bfe7ee00e79a6e  cam_output/c_t_b30.009.cam2.h0.0601-11.nc
-da7fb4fcc052983bd7e5ac8a63a6a451  cam_output/c_t_b30.009.cam2.h0.0601-12.nc
-f7a5944e246ca97ec722ed72d2e53315  model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-01.nc
-c4ad68141d351aea55ce1e9bf0859798  model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-04.nc
-bf0b2ef03cd280f5e635870b2ccda8d9  model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-07.nc
-6893d78c8c5541999043f19d2dcee035  model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-10.nc
-e241fc465279e7126e0e59789d9baedf  obs/NCEP_01_climo.nc
-cd1f8016b4f575c4b2a08a69c78b041a  obs/NCEP_02_climo.nc
-fae4c3bfa51707a9329b274e8de3633e  obs/NCEP_03_climo.nc
-49d418806a382eb17ae1c7cfa5295355  obs/NCEP_04_climo.nc
-97fa9532455053987f1a92645c42ef71  obs/NCEP_05_climo.nc
-078fece9cf0a1730ee13a18211cefa05  obs/NCEP_06_climo.nc
-039a8dd4c98b2e2332699e750f72e2b2  obs/NCEP_07_climo.nc
-4bc14d3447ff3af8c0fec1a19c7cd7b3  obs/NCEP_08_climo.nc
-ef45a99e527f5a36b4a145d9919ac628  obs/NCEP_09_climo.nc
-6673a7bcbf1476015dad7b5106a4213f  obs/NCEP_10_climo.nc
-9e86a777517ad6f4b392f7d63d8e98f7  obs/NCEP_11_climo.nc
-5f4ec5821d1ebb9e5a73c9a46666291a  obs/NCEP_12_climo.nc
-578dcbfb4979cd3cbee2bde42a52d5c7  obs/NCEP_ANN_climo.nc
-78c01194a72dc3da7b25c1ce402dfe7b  obs/NCEP_DJF_climo.nc
-dcd392831c5c0628fde4f92e2f704c18  obs/NCEP_JJA_climo.nc
-185a376e3e6403191d42dbef55b72928  obs_atmos/c_CRU_ANN_climo.nc
-9c754380f93e4305c5ed40b67d7282e5  obs_atmos/c_CRU_DJF_climo.nc
-a8b02bd2ea54d089db13005e7a9b4999  obs_atmos/c_CRU_JJA_climo.nc
-ef18dbf141367c0d7cf3990d7e10d64c  obs_atmos/c_t_NCEP_01_climo.nc
-10c09087712b3b283765381c78002154  obs_atmos/c_t_NCEP_02_climo.nc
-3bcec656166614c11ad1f436129b4922  obs_atmos/c_t_NCEP_03_climo.nc
-bf326d77aceedcdf7197b6ca4d7624df  obs_atmos/c_t_NCEP_04_climo.nc
-631dadd9a88b46a47506fa2b2cc0cc1e  obs_atmos/c_t_NCEP_05_climo.nc
-3b65eb064433b28d9e23aaf260994768  obs_atmos/c_t_NCEP_06_climo.nc
-dd2962224eb21be51dd2e1d38d4d7bfc  obs_atmos/c_t_NCEP_07_climo.nc
-a7f0f0a58959c30f4342a643537d5791  obs_atmos/c_t_NCEP_08_climo.nc
-16f1fb6a6fd60428a24821dfdbf9ba3f  obs_atmos/c_t_NCEP_09_climo.nc
-c1c5580c10e6017d7a1b4c844f4bee95  obs_atmos/c_t_NCEP_10_climo.nc
-58ca74759be8e809e6113309163eb87e  obs_atmos/c_t_NCEP_11_climo.nc
-0a34a591d117471b83ec15d41ca4de5e  obs_atmos/c_t_NCEP_12_climo.nc
-53a07928fd5bb8282e3b00707c30d352  obs_atmos/c_t_NCEP_ANN_climo.nc
-07fbdfe7c5ac96dca4d5b30cf0ffca4d  obs_atmos/c_t_NCEP_DJF_climo.nc
-bba7b95da836594ba56eccc5cc735953  obs_atmos/c_t_NCEP_JJA_climo.nc
-ded2539f0946958f20946211ec6de7c6  obs_data_12/._RAOBS.nc
-2df5c553f24cf4e51a826a34075a6122  obs_data_12/RAOBS.nc
-3057f458f2eea7e29b5df6622b71c5c6  obs_data_13/ISCCPCOSP_01_climo.nc
-863fdc036ca6c8bc181b68934fb5f334  obs_data_13/ISCCPCOSP_02_climo.nc
-44d91325876baa34dd53a3d5fdebc8a5  obs_data_13/ISCCPCOSP_03_climo.nc
-2821ea5e0d7d1ab2e32486e6336c07b5  obs_data_13/ISCCPCOSP_04_climo.nc
-dc5823c8971136e536c1f7c7d8f8452f  obs_data_13/ISCCPCOSP_05_climo.nc
-b0fb19767ddf330a4dd37a429810b9d9  obs_data_13/ISCCPCOSP_06_climo.nc
-a07c2a2e6adfed391c53a0aff0c436ab  obs_data_13/ISCCPCOSP_07_climo.nc
-ca089074a4f3d1fe7f6897c0c88b1b6b  obs_data_13/ISCCPCOSP_08_climo.nc
-9f9c9897dc8e09e18f155fe5355d1ed8  obs_data_13/ISCCPCOSP_09_climo.nc
-d74abae2b663ea67cf95de9b5f4e8485  obs_data_13/ISCCPCOSP_10_climo.nc
-ba01b312ad7fc2f936299798c963114c  obs_data_13/ISCCPCOSP_11_climo.nc
-0a20a6f6220e941ad84e75347d044ff0  obs_data_13/ISCCPCOSP_12_climo.nc
-f422c02f76cfd8ffdc3d664f7df29fa5  obs_data_13/ISCCPCOSP_ANN_climo.nc
-c0c6e18ef0202b8da755210ff5bab6d0  obs_data_13/ISCCPCOSP_DJF_climo.nc
-a52e9a734e34d3b6198f836c407a834b  obs_data_13/ISCCPCOSP_JJA_climo.nc
-0692a353d71f86e3b008f5b7136fead4  obs_data_13/ISCCPCOSP_MAM_climo.nc
-65790f602a139f5e7ac561c0f50073a6  obs_data_13/ISCCPCOSP_SON_climo.nc
-25da719f4a94f073b344d463ef46dd5c  obs_data_5.6/ERS_01_climo.nc
-82938151479416212514ea92f5c8944d  obs_data_5.6/ERS_02_climo.nc
-4474e171bc3ed010bc4cf85f2156331c  obs_data_5.6/ERS_03_climo.nc
-5928149aaa7e20e8e021051e4c1cf8af  obs_data_5.6/ERS_04_climo.nc
-8ba71cabf16409ec359250137313e1fc  obs_data_5.6/ERS_05_climo.nc
-7173b6c6ad21ebba3faae364bb0e2abd  obs_data_5.6/ERS_06_climo.nc
-4a4dce6ec29ff746e6ca438a1144e2f9  obs_data_5.6/ERS_07_climo.nc
-89b82d69760e786d4c5cd6007e67ad8e  obs_data_5.6/ERS_08_climo.nc
-703d8a3c2bca30d721db74e4a9607991  obs_data_5.6/ERS_09_climo.nc
-6be5b6eaacbd4bfee413b0432a3822bd  obs_data_5.6/ERS_10_climo.nc
-3aab5e306b45952d4bc538cf09733d36  obs_data_5.6/ERS_11_climo.nc
-b7d52d062f54e6c28b73c1630866eb8f  obs_data_5.6/ERS_12_climo.nc
-257874570e3aeeda6cbd55accf60f6c9  obs_data_5.6/ERS_ANN_climo.nc
-d7fc6bbb9a2dfdb0fa44d7835f94a3d4  obs_data_5.6/ERS_DJF_climo.nc
-3cce9af23687f27d3b134f60039ebdce  obs_data_5.6/ERS_JJA_climo.nc
-aaedba911f145e711d05b6430e13ce4e  obs_data_5.6/ERS_MAM_climo.nc
-e40f05dfec15f145e9623290d5142705  obs_data_5.6/ERS_SON_climo.nc
diff --git a/CMake/cdat_modules_extra/vacumm_build_step.cmake.in b/CMake/cdat_modules_extra/vacumm_build_step.cmake.in
deleted file mode 100644
index 8ef121b84..000000000
--- a/CMake/cdat_modules_extra/vacumm_build_step.cmake.in
+++ /dev/null
@@ -1,6 +0,0 @@
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY "@vacumm_source_dir@"
-  )
diff --git a/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in b/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in
deleted file mode 100644
index 9d7ba552b..000000000
--- a/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in
+++ /dev/null
@@ -1,36 +0,0 @@
-
-set(ENV${CC} "@CMAKE_C_COMPILER@")
-set(ENV${CXX} "@CMAKE_CXX_COMPILER@")
-set(ENV${CPP} "@CMAKE_CXX_COMPILER@")
-
-set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}")
-set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib")
-set(ENV{CFLAGS} "@cdat_osx_flags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CFLAGS@")
-set(ENV{CPPFLAGS} "@cdat_osx_cppflags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CPPFLAGS@")
-set(ENV{CXXFLAGS} "@cdat_osx_cxxflags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CXXFLAGS@")
-
-set(ENV{EXTERNALS} "@cdat_EXTERNALS@")
-
-execute_process(
-  COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@
-  WORKING_DIRECTORY @cdat_BINARY_DIR@/build/ParaView-build/VTK/Wrapping/Python
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in VTK Python Install")
-endif()
-
-if (APPLE)
- message("We are on a Mac, need to relink all libraries")
- execute_process(
-  COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" @cdat_CMAKE_SOURCE_DIR@/fixlink.py
-  WORKING_DIRECTORY @cdat_BINARY_DIR@
-  OUTPUT_VARIABLE out
-  ERROR_VARIABLE err
-  RESULT_VARIABLE res)
- message("got: "${res})
-endif ()
-
-message("Install succeeded.")
-
diff --git a/CMake/cdat_modules_extra/xgks_configure_step.cmake.in b/CMake/cdat_modules_extra/xgks_configure_step.cmake.in
deleted file mode 100644
index 0abcb5c96..000000000
--- a/CMake/cdat_modules_extra/xgks_configure_step.cmake.in
+++ /dev/null
@@ -1,13 +0,0 @@
-
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake)
-
-execute_process(
-  COMMAND env FC="" sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS}
-  WORKING_DIRECTORY "${WORKING_DIR}"
-  RESULT_VARIABLE res)
-
-if(NOT ${res} EQUAL 0)
-  message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}")
-  message(FATAL_ERROR "Error in config")
-endif()
-message("Config succeeded.")
diff --git a/CMake/curses_gcc5.patch b/CMake/curses_gcc5.patch
deleted file mode 100644
index a1ee0240b..000000000
--- a/CMake/curses_gcc5.patch
+++ /dev/null
@@ -1,30 +0,0 @@
-index d8cc3c9..b91398c 100755
---- a/ncurses/base/MKlib_gen.sh
-+++ b/ncurses/base/MKlib_gen.sh
-@@ -491,11 +492,22 @@ sed -n -f $ED1 \
-	-e 's/gen_$//' \
-	-e 's/  / /g' >>$TMP
-
-+cat >$ED1 <<EOF
-+s/  / /g
-+s/^ //
-+s/ $//
-+s/P_NCURSES_BOOL/NCURSES_BOOL/g
-+EOF
-+
-+# A patch discussed here:
-+#	https://gcc.gnu.org/ml/gcc-patches/2014-06/msg02185.html
-+# introduces spurious #line markers.  Work around that by ignoring the system's
-+# attempt to define "bool" and using our own symbol here.
-+sed -e 's/bool/P_NCURSES_BOOL/g' $TMP > $ED2
-+cat $ED2 >$TMP
-+
- $preprocessor $TMP 2>/dev/null \
--| sed \
--	-e 's/  / /g' \
--	-e 's/^ //' \
--	-e 's/_Bool/NCURSES_BOOL/g' \
-+| sed -f $ED1 \
- | $AWK -f $AW2 \
- | sed -f $ED3 \
- | sed \
diff --git a/CMake/dummy.f90 b/CMake/dummy.f90
deleted file mode 100644
index 4bbd9fbdc..000000000
--- a/CMake/dummy.f90
+++ /dev/null
@@ -1,4 +0,0 @@
-        PROGRAM dummy
-
-        print*, "Hi"
-        END
diff --git a/CMake/fixName.py b/CMake/fixName.py
deleted file mode 100644
index 17f2a0606..000000000
--- a/CMake/fixName.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env python
-
-
-import sys
-
-fnm = sys.prefix+"/Resources/Python.app/Contents/Info.plist"
-
-f=open(fnm)
-s=f.read()
-pat="<key>CFBundleName</key>"
-i=s.find(pat)#<string>Python</string>")
-s2=s[:i+len(pat)]+s[i+len(pat):].replace("Python","UV-CDAT",1)
-f=open(fnm,'w')
-f.write(s2)
-f.close()
diff --git a/CMake/fix_install_name.py.in b/CMake/fix_install_name.py.in
deleted file mode 100644
index 787490a48..000000000
--- a/CMake/fix_install_name.py.in
+++ /dev/null
@@ -1,33 +0,0 @@
-import os
-import sys
-import fnmatch
-
-def find_files(directory, pattern):
-    for root, dirs, files in os.walk(directory):
-        for basename in files:
-            if fnmatch.fnmatch(basename, pattern):
-                filename = os.path.join(root, basename)
-                yield filename
-
-exts = ['*.dylib', '*.so']
-
-# Find all the modules and shared libraries and replace the path contained 
-# if referencing the built ones
-for pattern in exts:
-  for library in find_files("./", pattern):
-    print library
-    cmd = 'otool -L %s' % library
-    print "library is", library
-    deps = os.popen(cmd).readlines()
-    for dep in deps[1:]:
-      dep_name = os.path.split(dep)[1]
-      dep_name = dep_name.split()[0]
-      dep = dep.split()[0]
-      # Replace the ones that are built by us
-      if fnmatch.fnmatch(dep_name, pattern) and fnmatch.fnmatch(dep, "@CMAKE_INSTALL_PREFIX@*"):
-        print 'dep fullpath ', dep
-        print 'dep name', dep_name
-        cmd = "install_name_tool -change %s %s %s" % (dep, "@rpath/"+"".join(dep_name), library)
-        print 'change cmd is ', cmd
-        lns = os.popen(cmd)
-        print "\t"+"".join(lns)
diff --git a/CMake/fixlink.py b/CMake/fixlink.py
deleted file mode 100644
index 808baa7f2..000000000
--- a/CMake/fixlink.py
+++ /dev/null
@@ -1,49 +0,0 @@
-import os,sys,numpy
-lib = '/usr/local/uvcdat/1.0.alpha/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/paraview/vtk/vtkCommonPython.so'
-bad = 'ParaView-build'
-#bad="System"
-#bad="paraview3.11"
-def change(lib,bad,paraviewPath,sameDir=False):
-    cmd = 'otool -L %s' % lib
-    print "LIB:",lib
-    ln=os.popen(cmd).readlines()
-    for l in ln[1:]:
-        link = l.strip().split()[0]
-        if link.find(bad)>-1:
-            print link,"\t",
-            nm=os.path.split(link)[1]
-            print nm
-            cmd = "install_name_tool -change %s %s/%s %s" % (link,paraviewPath,nm,lib)
-            print "\t",cmd
-            lns = os.popen(cmd)
-            print "\t"+"".join(lns)
-        if sameDir:
-            if link[:6] in ["libvtk","libXdm","libKWC","libQVT","libVPI","libCos","libpro"]:
-                cmd = "install_name_tool -change %s %s/%s %s" % (link,paraviewPath,link,lib)
-                print "\t",cmd
-                lns = os.popen(cmd)
-                print "\t"+"".join(lns)
-            
-
-inpath =  "/".join(numpy.__path__[0].split("/")[:-1]+["paraview",])
-inpath2 =  "/".join(numpy.__path__[0].split("/")[:-1]+["paraview","vtk"])
-inpath3 =  "/".join(numpy.__path__[0].split("/")[:-1]+["vtk"])
-inpath4 =  "/".join(numpy.__path__[0].split("/")[:-1]+["ParaView-3.11.1-py2.7.egg","paraview",])
-inpath5 =  "/".join(numpy.__path__[0].split("/")[:-1]+["ParaView-3.11.1-py2.7.egg","paraview","vtk"])
-inpath6 =  "/".join(numpy.__path__[0].split("/")[:-1]+["VTK-5.9.0-py2.7.egg","vtk"])
-paraviewPath = "/".join(sys.prefix.split("/")[:-5]+["Externals","lib","paraview-3.11"]) #= '/usr/local/uvcdat/1.0.alpha/Externals/lib/paraview-3.11/'
-def doPath(inpath,paraviewPath,sameDir=False):
-    files = os.popen("ls %s" % inpath).readlines()
-    for f in files:
-        lib = inpath+"/"+f.strip()
-        print lib
-        change(lib,bad,paraviewPath,sameDir)
-doPath(inpath,paraviewPath)
-doPath(inpath2,paraviewPath)
-doPath(inpath3,paraviewPath)
-doPath(inpath4,paraviewPath)
-doPath(inpath5,paraviewPath)
-doPath(inpath6,paraviewPath)
-doPath(paraviewPath,paraviewPath,True)
-
-
diff --git a/CMake/install.py b/CMake/install.py
deleted file mode 100644
index 2e37d9791..000000000
--- a/CMake/install.py
+++ /dev/null
@@ -1,10 +0,0 @@
-import os
-
-# The main installation script is installation/install.py
-# However, we need to first check for problems using 1.5.2 syntax only.
-current_dir = os.path.dirname(__file__)
-
-execfile(os.path.join(current_dir, 'test_python_ok.py'))
-
-install_script_path = os.path.join(current_dir, '..', 'installation', 'install.py')
-execfile(install_script_path)
diff --git a/CMake/netcdf_clang.patch b/CMake/netcdf_clang.patch
deleted file mode 100644
index 44729cf6e..000000000
--- a/CMake/netcdf_clang.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-diff --git a/ncgen3/load.c b/git/uvcdat/exsrc/ncgen3_load.c
-index 3da4712..147f4e7 100644
---- a/ncgen3/load.c
-+++ b/git/uvcdat/exsrc/ncgen3_load.c
-@@ -5,6 +5,7 @@
-  *********************************************************************/
- 
- #include <stdio.h>
-+#include <config.h>
- #include <stdlib.h>
- #include <string.h>
- #include <ctype.h>
diff --git a/CMake/pyspharm_setup.patch b/CMake/pyspharm_setup.patch
deleted file mode 100644
index da6437198..000000000
--- a/CMake/pyspharm_setup.patch
+++ /dev/null
@@ -1,19 +0,0 @@
---- setup.py.ok	2015-07-28 15:37:07.000000000 -0700
-+++ setup.py	2015-07-28 15:53:10.000000000 -0700
-@@ -27,12 +27,12 @@
-  the terms of the SPHEREPACK license at
-  http://www2.cisl.ucar.edu/resources/legacy/spherepack/license\n
-  """)
--    download = raw_input('Do you want to download SPHEREPACK now? (yes or no)')
--    if download not in ['Y','y','yes','Yes','YES']:
--        sys.exit(0)
-+    # download = raw_input('Do you want to download SPHEREPACK now? (yes or no)')
-+    # if download not in ['Y','y','yes','Yes','YES']:
-+    #     sys.exit(0)
-     import urllib, tarfile
-     tarfname = 'spherepack3.2.tar'
--    URL="https://www2.cisl.ucar.edu/sites/default/files/"+tarfname
-+    URL="http://uvcdat.llnl.gov/cdat/resources/"+tarfname
-     urllib.urlretrieve(URL,tarfname)
-     if not os.path.isfile(tarfname):
-         raise IOError('Sorry, download failed')
diff --git a/CMake/python_patch_step.cmake.in b/CMake/python_patch_step.cmake.in
deleted file mode 100644
index c1cb47384..000000000
--- a/CMake/python_patch_step.cmake.in
+++ /dev/null
@@ -1,15 +0,0 @@
-execute_process(
-  COMMAND
-  "@CMAKE_COMMAND@" -E copy_if_different @cdat_SOURCE_DIR@/pysrc/src/setup-@PYTHON_VERSION@.py @python_SOURCE_DIR@/setup.py
-)
-
-# Refer: http://bugs.python.org/issue14572
-if(NOT WIN32)
-  execute_process(
-    WORKING_DIRECTORY @python_SOURCE_DIR@
-    COMMAND patch -p1
-    INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/sqlite3_int64_v2.patch
-  )
-endif()
-
-
diff --git a/CMake/sqlite3_int64_v2.patch b/CMake/sqlite3_int64_v2.patch
deleted file mode 100644
index 3a3ab31d6..000000000
--- a/CMake/sqlite3_int64_v2.patch
+++ /dev/null
@@ -1,24 +0,0 @@
-# HG changeset patch
-# Parent 4641d8d99a7dd56c76aa7f769d6d91499113a3b8
-
-diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c
---- a/Modules/_sqlite/connection.c
-+++ b/Modules/_sqlite/connection.c
-@@ -549,7 +549,7 @@
-     } else if (py_val == Py_None) {
-         sqlite3_result_null(context);
-     } else if (PyInt_Check(py_val)) {
--        sqlite3_result_int64(context, (sqlite3_int64)PyInt_AsLong(py_val));
-+        sqlite3_result_int64(context, (sqlite_int64)PyInt_AsLong(py_val));
-     } else if (PyLong_Check(py_val)) {
-         sqlite3_result_int64(context, PyLong_AsLongLong(py_val));
-     } else if (PyFloat_Check(py_val)) {
-@@ -580,7 +580,7 @@
-     sqlite3_value* cur_value;
-     PyObject* cur_py_value;
-     const char* val_str;
--    sqlite3_int64 val_int;
-+    sqlite_int64 val_int;
-     Py_ssize_t buflen;
-     void* raw_buffer;
- 
diff --git a/CMake/test_python_ok.py b/CMake/test_python_ok.py
deleted file mode 100644
index 274e15ac9..000000000
--- a/CMake/test_python_ok.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import sys, os
-# The main installation script is installation/install.py
-# However, we need to first check for problems using 1.5.2 syntax only.
-trouble = 0
-minimum_python_version = (2,5,0,'final',0)
-if not hasattr(sys, 'version_info') or sys.version_info < minimum_python_version:
-    sys.stderr.write("Your Python is too old; please see README.txt.\n")
-    trouble = 1
-for x in ["PYTHONHOME"]:
-    if os.environ.has_key(x):
-        sys.stderr.write('Please undefine ' + x + ' before installation.\n')
-        trouble = 1
-if not os.environ.has_key('HOME'):
-    sys.stderr.write(\
-"Caution: You'll need to set environment variable HOME before using CDAT.\n")
-
-if trouble:
-    raise SystemExit, 1
-print 'Your Python checked OK!'
diff --git a/CMake/travis_build.cmake b/CMake/travis_build.cmake
deleted file mode 100644
index 83c821435..000000000
--- a/CMake/travis_build.cmake
+++ /dev/null
@@ -1,18 +0,0 @@
-set(CTEST_SOURCE_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}")
-set(CTEST_BINARY_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}/../_build")
-
-include(${CTEST_SOURCE_DIRECTORY}/CTestConfig.cmake)
-set(CTEST_SITE "Travis")
-set(CTEST_BUILD_NAME "Linux-$ENV{TRAVIS_BRANCH}")
-set(CTEST_CMAKE_GENERATOR "Unix Makefiles")
-
-ctest_start("Continuous")
-ctest_configure()
-ctest_build()
-#ctest_test(INCLUDE vcs PARALLEL_LEVEL 1 RETURN_VALUE res)
-#ctest_coverage()
-#file(REMOVE ${CTEST_BINARY_DIRECTORY}/coverage.xml)
-
-#if(NOT res EQUAL 0)
-#  message(FATAL_ERROR "Test failures occurred.")
-#endif()
diff --git a/CMake/travis_submit.cmake b/CMake/travis_submit.cmake
deleted file mode 100644
index 285e876e5..000000000
--- a/CMake/travis_submit.cmake
+++ /dev/null
@@ -1,10 +0,0 @@
-set(CTEST_SOURCE_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}")
-set(CTEST_BINARY_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}/../_build")
-
-include(${CTEST_SOURCE_DIRECTORY}/CTestConfig.cmake)
-set(CTEST_SITE "Travis")
-set(CTEST_BUILD_NAME "Linux-$ENV{TRAVIS_BRANCH}")
-set(CTEST_CMAKE_GENERATOR "Unix Makefiles")
-
-ctest_start("Continuous")
-ctest_submit()
diff --git a/CMake/uvcdat.plist b/CMake/uvcdat.plist
deleted file mode 100644
index 496982c35..000000000
--- a/CMake/uvcdat.plist
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
-	<key>CFBundleDevelopmentRegion</key>
-	<string>English</string>
-	<key>CFBundleExecutable</key>
-	<string>uvcdat</string>
-	<key>CFBundleGetInfoString</key>
-	<string></string>
-	<key>CFBundleIconFile</key>
-	<string>uvcdat.icns</string>
-	<key>CFBundleIdentifier</key>
-	<string></string>
-	<key>CFBundleInfoDictionaryVersion</key>
-	<string>6.0</string>
-	<key>CFBundleLongVersionString</key>
-	<string></string>
-	<key>CFBundleName</key>
-	<string>UVCDAT</string>
-	<key>CFBundlePackageType</key>
-	<string>APPL</string>
-	<key>CFBundleShortVersionString</key>
-	<string></string>
-	<key>CFBundleSignature</key>
-	<string>????</string>
-	<key>CFBundleVersion</key>
-	<string></string>
-	<key>CSResourcesFileMapped</key>
-	<true/>
-	<key>LSRequiresCarbon</key>
-	<true/>
-  <key>LSUIElement</key>
-  <string>1</string>
-	<key>NSHumanReadableCopyright</key>
-	<string></string>
-</dict>
-</plist>
-- 
GitLab


From bd749c47392ab0f409a124bf5805cb274588e40f Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 16:13:30 -0700
Subject: [PATCH 148/196] file needed

---
 .../checkout_testdata.cmake                   | 256 ++++++++++++++++++
 1 file changed, 256 insertions(+)
 create mode 100644 CMake/cdat_modules_extra/checkout_testdata.cmake

diff --git a/CMake/cdat_modules_extra/checkout_testdata.cmake b/CMake/cdat_modules_extra/checkout_testdata.cmake
new file mode 100644
index 000000000..d914fa3ca
--- /dev/null
+++ b/CMake/cdat_modules_extra/checkout_testdata.cmake
@@ -0,0 +1,256 @@
+# Usage:
+# cmake -DGIT_EXECUTABLE=[git executable]
+#       -DTESTDATA_URL=[uvcdat-testdata url]
+#       -DTESTDATA_DIR=[local testdata directory]
+#       -DSOURCE_DIR=[uvcdat source root]
+#       -P checkout_testdata.cmake
+#
+# This script creates and syncs a clone of the uvcdat-testdata directory.
+#
+# In detail:
+#
+# 1) Check if the TESTDATA_DIR exists.
+#    If not, clone the repo and exit.
+# 2) Check if the TESTDATA_DIR is a git repo with TESTDATA_URL as its origin.
+#    If not, abort with a warning message.
+# 3) Check if the TESTDATA_DIR repo is clean.
+#    If not, abort with a warning message.
+# 4) Fetch the current git branch name for the SOURCE_DIR repo.
+#    If the current HEAD is not a named branch, use master.
+# 5) Update the remote branches in the TESTDATA_DIR repo.
+# 6) Check if the desired branch exists in TESTDATA_DIR's origin remote.
+# 7) Check if the desired branch exists in TESTDATA_DIR as a local branch.
+# 8) If neither the local nor the remote branch exists, use master.
+# 9) Check out the local <branch> in TESTDATA_DIR repo.
+# 10) If the remote branch exists, or we are using master, run
+#     'git pull origin <branch>:<branch>' to fetch/update the local branch from
+#     the remote.
+#
+# Any failures are handled via non-fatal warnings. This is to allow the project
+# to build when access to the repo is not available.
+
+# 1) Clone and exit if the target directory doesn't exist.
+if(NOT EXISTS "${TESTDATA_DIR}")
+  message("Cloning \"${TESTDATA_URL}\" into \"${TESTDATA_DIR}\"...")
+
+  # Use depth=1 to avoid fetching the full history. Use "git pull --unshallow"
+  # to backfill the history if needed.
+  # --no-single-branch fetches the tip of all remote branches -- this is needed
+  # for auto-updating the testdata when the source branch changes.
+  execute_process(COMMAND
+    "${GIT_EXECUTABLE}"
+      clone --depth=1 --no-single-branch "${TESTDATA_URL}" "${TESTDATA_DIR}"
+    RESULT_VARIABLE RESULT
+    ERROR_VARIABLE OUTPUT
+    OUTPUT_VARIABLE OUTPUT)
+
+  string(STRIP "${OUTPUT}" OUTPUT)
+
+  message("${OUTPUT}")
+
+  if(NOT RESULT EQUAL 0)
+    message("Could not clone test data repo! "
+            "Baseline images will not be available.")
+    return()
+  endif()
+endif()
+
+# bots merge master in and mess the following, always rechecking master
+# bots check out the correct branches
+# following keyword skips the branch checking
+if (CDAT_CHECKOUT_BASELINE_MATCHING_BRANCH)
+    message("[INFO] Trying to find matching branch on baseline repo")
+    # 2) Is TESTDATA_DIR a clone of TESTDATA_URL?
+    execute_process(COMMAND
+      "${GIT_EXECUTABLE}" config --get remote.origin.url
+      WORKING_DIRECTORY "${TESTDATA_DIR}"
+      RESULT_VARIABLE RESULT
+      ERROR_VARIABLE OUTPUT
+      OUTPUT_VARIABLE OUTPUT)
+
+    if(NOT RESULT EQUAL 0)
+      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
+              "Directory exists and is not a git repository. "
+              "Baseline images may be out of date.")
+      return()
+    endif()
+
+    string(STRIP "${OUTPUT}" OUTPUT)
+
+    if(NOT "${TESTDATA_URL}" STREQUAL "${OUTPUT}")
+      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
+              "Directory is a git clone of \"${OUTPUT}\", not \"${TESTDATA_URL}\". "
+              "Baseline images may be out of date.")
+      return()
+    endif()
+
+    # 3) Is the current testdata repo clean? Don't want to clobber any local mods.
+    # Update the index first:
+    execute_process(COMMAND
+      "${GIT_EXECUTABLE}" update-index -q --refresh
+      WORKING_DIRECTORY "${TESTDATA_DIR}"
+      RESULT_VARIABLE RESULT
+      ERROR_VARIABLE OUTPUT
+      OUTPUT_VARIABLE OUTPUT)
+
+    if(NOT RESULT EQUAL 0)
+      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
+              "Error updating current index with 'git update-index -q --refresh':\n."
+              "${OUTPUT}\n"
+              "Baseline images may be out of date.")
+      return()
+    endif()
+
+    # Now check if the index is dirty:
+    execute_process(COMMAND
+      "${GIT_EXECUTABLE}" diff-index --name-only HEAD "--"
+      WORKING_DIRECTORY "${TESTDATA_DIR}"
+      RESULT_VARIABLE RESULT
+      ERROR_VARIABLE OUTPUT
+      OUTPUT_VARIABLE OUTPUT)
+
+    if(NOT RESULT EQUAL 0)
+      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
+              "Error checking current index with 'git diff-index --name-only HEAD --':\n."
+              "${OUTPUT}\n"
+              "Baseline images may be out of date.")
+      return()
+    endif()
+
+    string(STRIP "${OUTPUT}" OUTPUT)
+
+    if(NOT "${OUTPUT}" STREQUAL "")
+      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
+              "Current checkout is not clean. The following files have modifications:\n"
+              "${OUTPUT}\n"
+              "Baseline images may be out of date.")
+      return()
+    endif()
+
+    # 4) Get the current branch name of the source repo.
+    execute_process(COMMAND
+      "${GIT_EXECUTABLE}" rev-parse --abbrev-ref HEAD
+      WORKING_DIRECTORY "${SOURCE_DIR}"
+      RESULT_VARIABLE RESULT
+      ERROR_VARIABLE OUTPUT
+      OUTPUT_VARIABLE OUTPUT)
+
+    if(NOT RESULT EQUAL 0)
+      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
+              "Cannot determine current branch name of source directory. "
+              "Baseline images may be out of date.")
+      return()
+    endif()
+
+    string(STRIP "${OUTPUT}" BRANCH)
+
+    # If BRANCH is "HEAD", we're not on a named branch. Just use master in that
+    # case.
+    if("${BRANCH}" STREQUAL "HEAD")
+      message("The current source directory at '${SOURCE_DIR}' is not on a named "
+              "branch. Using the 'master' branch of the testdata repo.")
+      set(BRANCH "master")
+    endif()
+
+    # 5) Update the remote branches available on the testdata repo.
+    execute_process(COMMAND
+      "${GIT_EXECUTABLE}" fetch --depth=1
+      WORKING_DIRECTORY "${TESTDATA_DIR}"
+      RESULT_VARIABLE RESULT
+      ERROR_VARIABLE OUTPUT
+      OUTPUT_VARIABLE OUTPUT)
+
+    if(NOT RESULT EQUAL 0)
+      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
+              "Error updating remote branches with "
+              "'git fetch --depth=1':\n."
+              "${OUTPUT}\n"
+              "Baseline images may be out of date.")
+      return()
+    endif()
+
+    # 6) Check if the desired branch exists in TESTDATA_DIR's origin remote.
+    execute_process(COMMAND
+      "${GIT_EXECUTABLE}" branch -a --list "*${BRANCH}"
+      WORKING_DIRECTORY "${TESTDATA_DIR}"
+      RESULT_VARIABLE RESULT
+      ERROR_VARIABLE OUTPUT
+      OUTPUT_VARIABLE OUTPUT)
+
+    if(NOT RESULT EQUAL 0)
+      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
+              "Error obtaining full branch list:\n${OUTPUT}"
+              "Baseline images may be out of date.")
+      return()
+    endif()
+
+    message("Testing if remote branch 'origin/${BRANCH}' exists...")
+    string(FIND "${OUTPUT}" " remotes/origin/${BRANCH}\n" POS)
+    if(NOT POS EQUAL -1)
+      message("Remote branch exists.")
+      set(REMOTE_EXISTS "YES")
+    else()
+      message("Remote branch does not exist.")
+      set(REMOTE_EXISTS "NO")
+    endif()
+
+    # 7) Check if the desired branch exists locally:
+    message("Testing if local branch '${BRANCH}' exists...")
+    string(FIND "${OUTPUT}" " ${BRANCH}\n" POS) # Leading space in regex intended
+    if(NOT POS EQUAL -1)
+      message("Local branch exists.")
+      set(LOCAL_EXISTS "YES")
+    else()
+      message("Local branch does not exist.")
+      set(LOCAL_EXISTS "NO")
+    endif()
+
+    # 8) If neither the local nor the remote branch exists, use master.
+    if(NOT REMOTE_EXISTS AND NOT LOCAL_EXISTS)
+      set(BRANCH "master")
+      set(REMOTE_EXISTS "YES")
+      set(LOCAL_EXISTS "YES")
+    endif()
+
+    # 9) Check out the desired branch in TESTDATA_DIR repo.
+    message("Checking out branch '${BRANCH}' in repo '${TESTDATA_DIR}'.")
+    execute_process(COMMAND
+      "${GIT_EXECUTABLE}" checkout "${BRANCH}"
+      WORKING_DIRECTORY "${TESTDATA_DIR}"
+      RESULT_VARIABLE RESULT
+      ERROR_VARIABLE OUTPUT
+      OUTPUT_VARIABLE OUTPUT)
+
+    if(NOT RESULT EQUAL 0)
+      message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". "
+              "Error executing 'git checkout ${BRANCH}':\n."
+              "${OUTPUT}\n"
+              "Baseline images may be out of date.")
+      return()
+    endif()
+
+    # 10) If the remote branch exists, or we are using master, run
+    #     'git pull origin <branch>:<branch>' to fetch/update the local branch from
+    #     the remote.
+    if(REMOTE_EXISTS)
+      message("Updating \"${TESTDATA_DIR}:${BRANCH}\" from "
+              "\"${TESTDATA_URL}:${BRANCH}\"...")
+      execute_process(COMMAND
+        "${GIT_EXECUTABLE}" pull origin "${BRANCH}:${BRANCH}"
+        WORKING_DIRECTORY "${TESTDATA_DIR}"
+        RESULT_VARIABLE RESULT
+        ERROR_VARIABLE OUTPUT
+        OUTPUT_VARIABLE OUTPUT)
+
+      string(STRIP "${OUTPUT}" OUTPUT)
+
+      message("${OUTPUT}")
+
+      if(NOT RESULT EQUAL 0)
+        message("Error updating testdata repo! "
+                "Baseline images may be out of date.")
+      endif()
+    endif()
+else()
+    message("[INFO] NOT trying to switch branch on baseline (only bots should turn this on)")
+endif()
-- 
GitLab


From 5a811816540985b9d3558ff51030b6b97c40d082 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 7 Jun 2016 16:15:22 -0700
Subject: [PATCH 149/196] pcmdi won't work until rebuild or checkout from
 nightly

---
 testing/CMakeLists.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/testing/CMakeLists.txt b/testing/CMakeLists.txt
index 909790f68..f890a8e22 100644
--- a/testing/CMakeLists.txt
+++ b/testing/CMakeLists.txt
@@ -43,4 +43,4 @@ add_subdirectory(Thermo)
 add_subdirectory(unidata)
 add_subdirectory(cdms2)
 add_subdirectory(xmgrace)
-add_subdirectory(pcmdi)
+#add_subdirectory(pcmdi)
-- 
GitLab


From c22af32985fbc256bca628448ba4f4a13529a8bf Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 8 Jun 2016 00:06:50 -0400
Subject: [PATCH 150/196] Updated text on conda

---
 Packages/vcs/docs/user-guide.rst | 20 ++++++++++++++++----
 1 file changed, 16 insertions(+), 4 deletions(-)

diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index 97a55b7d4..2f87fa221 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -16,13 +16,25 @@ vcs specific entities will be ``formatted like this``.
 Installation
 ------------
 While there are many ways a user can install vcs, installation using conda is
-preferred for the end user. Currently, to install vcs, you need to install the entire uvcdat
-pacakge. ::
+preferred for the end user. To install just vcs or uvcdat, make sure that anaconda
+or miniconda is installed and in path of your shell. Information on how to install conda
+can be found `here <https://www.continuum.io>`_. Verify conda is available on the shell using
+the following command ::
 
-    conda install -c uvcdat
+    conda --help
 
-It is assumed that conda is installed on user's system and is available on the shell.
+To enable conda installation in a tight ssl certificate/security environment try ::
 
+    conda config --set ssl_verify False
+    binstar config --set verify_ssl False
+
+Install uvcdat which will install vcs as well using the following command ::
+
+    conda install uvcdat -c uvcdat
+
+To install only vcs, use the following command ::
+
+    conda install vcs -c uvcdat
 
 Concepts
 --------
-- 
GitLab


From 904bacdac9ab695326bf9e1fd5efb20e56d368b2 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 8 Jun 2016 00:17:02 -0400
Subject: [PATCH 151/196] Fixed note on colormap object

---
 Packages/vcs/docs/user-guide.rst | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst
index 2f87fa221..6e03c0ee3 100644
--- a/Packages/vcs/docs/user-guide.rst
+++ b/Packages/vcs/docs/user-guide.rst
@@ -6,8 +6,7 @@ Document Conventions
 
 This User Guide is written for end-users of vcs, rather than developers. If you
 have suggestions or questions about this documentation, feel free to contact us
-on `UV-CDAT <https://github.com/UV-CDAT/uvcdat>`_,
-`mailing list <uvcdat-users@lists.llnl.gov>`_.
+on `UV-CDAT <https://github.com/UV-CDAT/uvcdat>`_ `mailing list <uvcdat-users@lists.llnl.gov>`_.
 
 vcs specific entities will be ``formatted like this``.
 
@@ -95,7 +94,8 @@ A description of each secondary object is warranted before showing their use and
 The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). The description of the colormap object is as follows:
 
 * ``colormap`` - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is “Cp”.
-Note: VCS colormaps are objects, but they are not referenced like other secondary objects.
+
+.. note:: VCS colormaps are objects, but they are not referenced like other secondary objects.
 
 
 **Fillarea Object**
-- 
GitLab


From fe75ca438dfc532167923b637580e476deb3f758 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 8 Jun 2016 10:40:22 -0400
Subject: [PATCH 152/196] Updated cmake to use new location for legal and
 readme

---
 CMakeLists.txt | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 32b16d453..a3a96dc7b 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -831,8 +831,8 @@ set(CPACK_PACKAGE_VERSION_MAJOR "2")
 set(CPACK_PACKAGE_VERSION_MINOR "3")
 set(CPACK_PACKAGE_VERSION_PATCH "0")
 set(CPACK_PACKAGE_VERSION ${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH})
-set(CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/docs/README.txt")
-set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/docs/Legal.txt")
+set(CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/README.md")
+set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/LEGAL.txt")
 set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Aashish Chaudhary") #required
 set(CPACK_PACKAGING_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX})
 set(CPACK_RPM_PACKAGE_PROVIDES /usr/local/uvcdat/bin/python /usr/local/uvcdat/bin/python2.7)
@@ -883,7 +883,7 @@ install(CODE "
       endif ()
     endif ()
   endforeach()
-  
+
 
   file(INSTALL FILES \${resolved_programs} DESTINATION
     \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\"
@@ -971,7 +971,7 @@ install(CODE "
          PERMISSIONS USE_SOURCE_PERMISSIONS
     )
   endif()
-  
+
   file(INSTALL FILES ${cdat_BINARY_DIR}/build_info.txt DESTINATION ${CMAKE_INSTALL_PREFIX}/info)
 
   # Unset QT_LIB_DIR as we need to use the one in user's environment
-- 
GitLab


From 018a30849668b3897d00cb7c05f7d2f33fe4cc7d Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 8 Jun 2016 07:47:56 -0700
Subject: [PATCH 153/196] flake8 for dv3d

---
 Packages/testing/regression.py | 2 +-
 Packages/vcs/vcs/dv3d.py       | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index 72047380e..aa8efa96b 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -83,7 +83,7 @@ def find_alternates(fname):
     return results
 
 def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
-                       baseline=True, cleanup=True, update_baselines = False):
+                       baseline=True, cleanup=True, update_baselines = True):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
diff --git a/Packages/vcs/vcs/dv3d.py b/Packages/vcs/vcs/dv3d.py
index 2afae29f2..4a30aed8e 100644
--- a/Packages/vcs/vcs/dv3d.py
+++ b/Packages/vcs/vcs/dv3d.py
@@ -158,7 +158,6 @@ class Gfdv3d(object):
         self.plot_attributes['name'] = self.g_name
         self.plot_attributes['template'] = Gfdv3d_name
 
-
     def setProvenanceHandler(self, provenanceHandler):
         self.provenanceHandler = provenanceHandler
 
-- 
GitLab


From 514af9db51d5d446aa112836655be0305642bd68 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 8 Jun 2016 08:24:12 -0700
Subject: [PATCH 154/196] test tweaks

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in   | 2 +-
 testing/regrid/testDistSrc.py                              | 7 ++++---
 testing/vcs/test_vcs_line_patterns.py                      | 2 --
 testing/vcsaddons/test_12_plot_one_leg_per_row.py          | 3 ++-
 testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py   | 2 +-
 .../vcsaddons/test_EzTemplate_12_plots_legd_direction.py   | 3 ++-
 .../test_EzTemplate_12_plots_margins_thickness.py          | 3 ++-
 .../vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py    | 3 ++-
 testing/vcsaddons/test_EzTemplate_12_plots_spacing.py      | 3 ++-
 9 files changed, 16 insertions(+), 12 deletions(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index 78e72d78e..1c9ce4163 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -3,7 +3,7 @@
 conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls
 
 source activate @CONDA_ENVIRONMENT_NAME@
-for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons EzTemplate cdutil unidata xmgrace genutil Thermo WK distarray; do
+for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
     cd @cdat_SOURCE_DIR@/Packages/${pkg}
     rm -rf build
     if [ ${pkg} == "vcs" ]; then
diff --git a/testing/regrid/testDistSrc.py b/testing/regrid/testDistSrc.py
index 183efd498..9da231af5 100644
--- a/testing/regrid/testDistSrc.py
+++ b/testing/regrid/testDistSrc.py
@@ -14,7 +14,7 @@ import regrid2
 import unittest
 import ESMP
 from regrid2 import esmf
-import scipy.io.netcdf
+#import scipy.io.netcdf
 from regrid2 import ESMFRegrid
 import sys
 HAS_MPI = False
@@ -75,7 +75,7 @@ class Test(unittest.TestCase):
 
     def Xtest0_ESMP(self):
 
-        import scipy.io.netcdf
+        #import scipy.io.netcdf
 
         #
         # 1. input
@@ -85,7 +85,8 @@ class Test(unittest.TestCase):
 
         inFile = cdat_info.get_sampledata_path() + \
             '/so_Omon_ACCESS1-0_historical_r1i1p1_185001-185412_2timesteps.nc'
-        srcF = scipy.io.netcdf.netcdf_file(inFile)
+        #srcF = scipy.io.netcdf.netcdf_file(inFile)
+        srcF = cdms2.open(inFile)
         #so = srcF.variables['so'][0, 0,...]
         missing_value = 1.e20
         srcGrd = [srcF.variables['lat'][:], srcF.variables['lon'][:]]
diff --git a/testing/vcs/test_vcs_line_patterns.py b/testing/vcs/test_vcs_line_patterns.py
index 7597403fc..848ebb110 100755
--- a/testing/vcs/test_vcs_line_patterns.py
+++ b/testing/vcs/test_vcs_line_patterns.py
@@ -8,8 +8,6 @@ import testing.regression as regression
 pth = os.path.join(os.path.dirname(__file__), "..")
 sys.path.append(pth)
 
-import checkimage
-
 x = regression.init(bg=1, geometry=(1620, 1080))
 
 f = cdms2.open(vcs.sample_data + "/clt.nc")
diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row.py b/testing/vcsaddons/test_12_plot_one_leg_per_row.py
index 0c0298527..4f836c1d4 100644
--- a/testing/vcsaddons/test_12_plot_one_leg_per_row.py
+++ b/testing/vcsaddons/test_12_plot_one_leg_per_row.py
@@ -1,4 +1,5 @@
-import os, sys, EzTemplate, vcs, testing.regression as regression
+import os, sys, vcs, testing.regression as regression
+from vcsaddons import EzTemplates
 
 ## Initialize VCS
 x = vcs.init()
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py b/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py
index 6a9d50284..141d94e09 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py
@@ -1,5 +1,5 @@
 import vcs
-import EzTemplate
+from vcsaddons import EzTemplate
 
 M=EzTemplate.Multi(rows=2,columns=2)
 
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
index d46e7b9ac..e941fba16 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
@@ -1,4 +1,5 @@
-import os, sys, EzTemplate, vcs, testing.regression as regression
+import os, sys, vcs, testing.regression as regression
+from vcsaddons import EzTemplate
 
 ## Initialize VCS
 x = vcs.init()
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
index 73b7c8dbf..60d354127 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
@@ -1,4 +1,5 @@
-import os, sys, EzTemplate, vcs, testing.regression as regression
+import os, sys, vcs, testing.regression as regression
+from vcsaddons import EzTemplate
 
 ## Initialize VCS
 x = vcs.init()
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
index 6e9398fe8..2e9be2521 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
@@ -1,4 +1,5 @@
-import os, sys, EzTemplate, vcs, testing.regression as regression
+import os, sys, vcs, testing.regression as regression
+from vcsaddons import EzTemplate
 ## Initialize VCS
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
index 5d4cd293b..3b60a8d07 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
@@ -1,4 +1,5 @@
-import os, sys, EzTemplate, vcs, testing.regression as regression
+import os, sys, vcs, testing.regression as regression
+from vcsaddons import EzTemplate
 ## Initialize VCS
 x = vcs.init()
 x.drawlogooff()
-- 
GitLab


From 3df8fa1fed90337e69a8510dc8cc9bff5218299c Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 8 Jun 2016 08:59:22 -0700
Subject: [PATCH 155/196] mac passes now

---
 testing/vcsaddons/test_12_plot_one_leg_per_row.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row.py b/testing/vcsaddons/test_12_plot_one_leg_per_row.py
index 4f836c1d4..9e8f25ab9 100644
--- a/testing/vcsaddons/test_12_plot_one_leg_per_row.py
+++ b/testing/vcsaddons/test_12_plot_one_leg_per_row.py
@@ -1,5 +1,5 @@
 import os, sys, vcs, testing.regression as regression
-from vcsaddons import EzTemplates
+from vcsaddons import EzTemplate
 
 ## Initialize VCS
 x = vcs.init()
-- 
GitLab


From 541a590922b6332eb50d9f10a55ddfd19ba6e563 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Wed, 8 Jun 2016 13:43:07 -0400
Subject: [PATCH 156/196] flake8

---
 Packages/vcsaddons/Lib/polar.py | 25 +++++++++++++++++--------
 1 file changed, 17 insertions(+), 8 deletions(-)

diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py
index 622b0d3a0..900b34916 100644
--- a/Packages/vcsaddons/Lib/polar.py
+++ b/Packages/vcsaddons/Lib/polar.py
@@ -49,7 +49,8 @@ def convert_arrays(var, theta):
                         except AttributeError:
                             names.append(None)
                     else:
-                        raise ValueError("Array is wrong shape; expected 2d array of 2-long elements, got %dd array of %d-long elements." % (len(arr.shape), arr.shape[-1]))
+                        raise ValueError("Array is wrong shape; expected 2d array of 2-long elements,"
+                                         " got %dd array of %d-long elements." % (len(arr.shape), arr.shape[-1]))
                 else:
                     if len(arr) == 2:
                         # Might be just a pair
@@ -62,7 +63,8 @@ def convert_arrays(var, theta):
                     theta_group = []
                     for val in arr:
                         if len(val) != 2:
-                            raise ValueError("List is wrong shape; expected list/tuple of 2 element list/tuples, got %s of %d elements." % (type(val).__name__, len(val)))
+                            raise ValueError("List is wrong shape; expected list/tuple of 2 element list/tuples,"
+                                             " got %s of %d elements." % (type(val).__name__, len(val)))
                         mag_group.append(val[0])
                         theta_group.append(val[1])
                         names.append(None)
@@ -112,7 +114,8 @@ def convert_arrays(var, theta):
                     except AttributeError:
                         names.append(None)
             else:
-                raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape))
+                raise ValueError("Array is wrong shape; expected 1d array or 2d array,"
+                                 " got %dd array." % len(var.shape))
 
         thetas = []
         if isinstance(theta, (list, tuple)):
@@ -126,7 +129,8 @@ def convert_arrays(var, theta):
             elif len(theta.shape) == 2:
                 thetas = [list(theta[i]) for i in range(theta.shape[0])]
             else:
-                raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape))
+                raise ValueError("Array is wrong shape; expected 1d array or 2d array,"
+                                 " got %dd array." % len(var.shape))
         if not names:
             names = [None] * len(var)
     return magnitudes, thetas, names
@@ -335,7 +339,8 @@ class Gpo(vcsaddons.core.VCSaddon):
             m_ticks.y = []
 
             if template.ylabel1.priority > 0:
-                to = self.text_orientation_for_angle(self.magnitude_tick_angle, source=template.ylabel1.textorientation)
+                to = self.text_orientation_for_angle(self.magnitude_tick_angle,
+                                                     source=template.ylabel1.textorientation)
                 m_labels = self.create_text(template.ylabel1.texttable, to)
                 m_labels.x = []
                 m_labels.y = []
@@ -410,7 +415,9 @@ class Gpo(vcsaddons.core.VCSaddon):
                 y0 = center[1] + (ymul * radius * numpy.sin(angle))
                 y1 = center[1]
                 if t_labels is not None:
-                    label = self.create_text(template.xlabel1.texttable, self.text_orientation_for_angle(angle, source=template.xlabel1.textorientation))
+                    label = self.create_text(template.xlabel1.texttable,
+                                             self.text_orientation_for_angle(angle,
+                                                                             source=template.xlabel1.textorientation))
                     label.string = [theta_labels[t]]
                     label.x = [x0]
                     label.y = [y0]
@@ -481,7 +488,8 @@ class Gpo(vcsaddons.core.VCSaddon):
                 scale = theta_ticks
                 vals = theta_flat
 
-            indices = [numpy.where(numpy.logical_and(vals >= scale[i], vals <= scale[i + 1])) for i in range(len(scale) - 1)]
+            indices = [numpy.where(numpy.logical_and(vals >= scale[i], vals <= scale[i + 1]))
+                       for i in range(len(scale) - 1)]
             magnitudes = [mag_flat[inds] for inds in indices]
             thetas = [theta_flat[inds] for inds in indices]
             names = vcs.mklabels(scale, output="list")
@@ -498,7 +506,8 @@ class Gpo(vcsaddons.core.VCSaddon):
                 y.append(ymul * numpy.sin(t) * r + center[1])
 
             if template.legend.priority > 0 and name is not None:
-                lx, ly = template.legend.x1, template.legend.y1 + len(labels.x) / float(label_count) * (template.legend.y2 - template.legend.y1)
+                y_offset = len(labels.x) / float(label_count) * (template.legend.y2 - template.legend.y1)
+                lx, ly = template.legend.x1, template.legend.y1 + y_offset
                 x.append(lx)
                 y.append(ly)
                 labels.x.append(lx + .01)
-- 
GitLab


From b88ea73a81a0536f6219412a953ca775a2a89716 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Wed, 8 Jun 2016 13:44:49 -0400
Subject: [PATCH 157/196] flake8 tests

---
 .../vcsaddons/test_vcs_addons_convert_arrays.py   | 15 ++++++++-------
 .../test_vcs_addons_histogram_defaults.py         |  2 +-
 .../test_vcs_addons_histogram_inherit.py          |  2 +-
 testing/vcsaddons/test_vcs_addons_polar.py        |  2 +-
 .../vcsaddons/test_vcs_addons_polar_degrees.py    |  2 +-
 .../vcsaddons/test_vcs_addons_polar_diurnal.py    |  2 +-
 .../vcsaddons/test_vcs_addons_polar_inherit.py    |  2 +-
 .../vcsaddons/test_vcs_addons_polar_seasonal.py   |  2 +-
 .../test_vcs_addons_polar_semidiurnal.py          |  2 +-
 9 files changed, 16 insertions(+), 15 deletions(-)

diff --git a/testing/vcsaddons/test_vcs_addons_convert_arrays.py b/testing/vcsaddons/test_vcs_addons_convert_arrays.py
index c39bea513..6e784e10b 100644
--- a/testing/vcsaddons/test_vcs_addons_convert_arrays.py
+++ b/testing/vcsaddons/test_vcs_addons_convert_arrays.py
@@ -18,23 +18,24 @@ one_list_tuples = zip(magnitudes, thetas)
 one_list_grouped_tuples = [zip(magnitudes[:2], thetas[:2]), zip(magnitudes[2:], thetas[2:])]
 one_list_of_arrays = [numpy.array(zip(magnitudes[:2], thetas[:2])), numpy.array(zip(magnitudes[2:], thetas[2:]))]
 
+
 def compare(input, expected):
     result = vcsaddons.polar.convert_arrays(*input)
     print "Checking", result[0:2], "vs", expected
     assert result[0] == expected[0]
     assert result[1] == expected[1]
 
-grouped = ([magnitudes[:2], magnitudes[2:]],[thetas[:2], thetas[2:]])
+grouped = ([magnitudes[:2], magnitudes[2:]], [thetas[:2], thetas[2:]])
 
-compare((one_array, None), ([magnitudes],[thetas]))
-compare(two_arrays, ([magnitudes],[thetas]))
+compare((one_array, None), ([magnitudes], [thetas]))
+compare(two_arrays, ([magnitudes], [thetas]))
 compare(two_array_groups, grouped)
 three_d_expected = ([[1, 2], [3, 4]], [[5, 6], [7, 8]])
 compare((three_d_array, None), three_d_expected)
-compare(list_and_array, ([magnitudes],[thetas]))
-compare(two_lists, ([magnitudes],[thetas]))
-compare(lists_of_arrays, ([magnitudes],[thetas]))
-compare(array_and_list, ([magnitudes],[thetas]))
+compare(list_and_array, ([magnitudes], [thetas]))
+compare(two_lists, ([magnitudes], [thetas]))
+compare(lists_of_arrays, ([magnitudes], [thetas]))
+compare(array_and_list, ([magnitudes], [thetas]))
 compare((one_list_tuples, None), ([[i] for i in magnitudes], [[i] for i in thetas]))
 compare((one_list_grouped_tuples, None), grouped)
 compare((one_list_of_arrays, None), grouped)
diff --git a/testing/vcsaddons/test_vcs_addons_histogram_defaults.py b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
index 24af6757b..090aaf33d 100644
--- a/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
+++ b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py
@@ -4,7 +4,7 @@ import testing.regression as regression
 import vcs
 import vcsaddons, numpy
 
-x=regression.init()
+x = regression.init()
 
 numpy.random.seed(seed=12345)
 vals = numpy.random.random_sample(2000) * 100
diff --git a/testing/vcsaddons/test_vcs_addons_histogram_inherit.py b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
index c761c4e05..a977a68bb 100644
--- a/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
+++ b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py
@@ -4,7 +4,7 @@ import testing.regression as regression
 import vcs, cdms2
 import vcsaddons, numpy
 
-x=regression.init()
+x = regression.init()
 
 cdmsfile = cdms2.open(vcs.sample_data + "/clt.nc")
 clt = cdmsfile("clt")
diff --git a/testing/vcsaddons/test_vcs_addons_polar.py b/testing/vcsaddons/test_vcs_addons_polar.py
index 5512d9d52..5fcdc6c95 100644
--- a/testing/vcsaddons/test_vcs_addons_polar.py
+++ b/testing/vcsaddons/test_vcs_addons_polar.py
@@ -4,7 +4,7 @@ import testing.regression as regression
 import vcs
 import vcsaddons, numpy
 
-x=regression.init()
+x = regression.init()
 
 polar = vcsaddons.polar.Gpo()
 polar.markers = ["dot", "circle"]
diff --git a/testing/vcsaddons/test_vcs_addons_polar_degrees.py b/testing/vcsaddons/test_vcs_addons_polar_degrees.py
index 3727dad14..1a44b6b0e 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_degrees.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_degrees.py
@@ -4,7 +4,7 @@ import testing.regression as regression
 import vcs
 import vcsaddons, numpy
 
-x=regression.init()
+x = regression.init()
 
 polar = vcsaddons.getpolar("degrees")
 polar.markers = ["dot", "circle"]
diff --git a/testing/vcsaddons/test_vcs_addons_polar_diurnal.py b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
index 927180e38..24a6f832b 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py
@@ -5,7 +5,7 @@ import vcsaddons, numpy
 import cdms2, cdutil, cdtime
 import testing.regression as regression
 
-x=regression.init()
+x = regression.init()
 
 f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc"))
 temp = f('t')
diff --git a/testing/vcsaddons/test_vcs_addons_polar_inherit.py b/testing/vcsaddons/test_vcs_addons_polar_inherit.py
index 4fc56138d..2eb10b7d8 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_inherit.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_inherit.py
@@ -4,7 +4,7 @@ import testing.regression as regression
 import vcs
 import vcsaddons, numpy
 
-x=regression.init()
+x = regression.init()
 
 gm = vcsaddons.polar.Gpo()
 gm.markers = ["dot", "circle"]
diff --git a/testing/vcsaddons/test_vcs_addons_polar_seasonal.py b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
index 6a6eafd9b..0f5693a35 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py
@@ -5,7 +5,7 @@ import vcs
 import vcsaddons, numpy, MV2
 import cdms2, cdutil, cdtime
 
-x=regression.init()
+x = regression.init()
 
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 # Trim first few months and last month so we have even number of seasons
diff --git a/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
index 3061e8a0d..1ce3f21ef 100644
--- a/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
+++ b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py
@@ -5,7 +5,7 @@ import vcs
 import vcsaddons, numpy
 import cdms2, cdutil, cdtime
 
-x=regression.init()
+x = regression.init()
 
 f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc"))
 temp = f('t')
-- 
GitLab


From 1fce425ab5014bf661aedfc23d45b646098e9089 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 8 Jun 2016 12:02:23 -0700
Subject: [PATCH 158/196] option to add suffix to baselines

---
 Packages/testing/regression.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index aa8efa96b..eeaf3bdf5 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -83,7 +83,7 @@ def find_alternates(fname):
     return results
 
 def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
-                       baseline=True, cleanup=True, update_baselines = True):
+                       baseline=True, cleanup=True, update_baselines = True, suffix="_linux"):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
@@ -134,10 +134,11 @@ def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThresh
     print "All baselines failed! Lowest error (%f) exceeds threshold (%f)."%(bestDiff, threshold)
 
     if update_baselines:
+        bestFilename2=bestFilename[:-4]+suffix+".png"
         print "Update baselines is ON so we are assuming you know what you're doing"
-        print "Replacing baseline %s with new baseline from %s" % (bestFilename, fname)
+        print "Replacing baseline %s with new baseline from %s" % (bestFilename2, fname)
         import shutil
-        shutil.copy2(fname, bestFilename)
+        shutil.copy2(fname, bestFilename2)
 
     sp = fname.split(".")
     diffFilename = ".".join(sp[:-1])+"_diff."+sp[-1]
-- 
GitLab


From e272435ea165167787e635acee68ef16df323502 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 8 Jun 2016 14:07:41 -0700
Subject: [PATCH 159/196] made suffix one otherwise find_alternate won't find
 them

---
 Packages/testing/regression.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index eeaf3bdf5..961d1f75a 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -83,7 +83,7 @@ def find_alternates(fname):
     return results
 
 def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
-                       baseline=True, cleanup=True, update_baselines = True, suffix="_linux"):
+                       baseline=True, cleanup=True, update_baselines = True, suffix="_1"):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
-- 
GitLab


From 42462a26051832015f5f46b1c4bb9e75e4cccbfc Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 8 Jun 2016 16:18:04 -0700
Subject: [PATCH 160/196] slowly fixing test suite

---
 Packages/testing/regression.py                          | 2 +-
 testing/vcs/test_vcs_matplotlib_colormap.py             | 7 ++++++-
 testing/vcsaddons/test_12_plot_one_leg_per_row_right.py | 3 ++-
 3 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index 961d1f75a..8880c402f 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -83,7 +83,7 @@ def find_alternates(fname):
     return results
 
 def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
-                       baseline=True, cleanup=True, update_baselines = True, suffix="_1"):
+                       baseline=True, cleanup=True, update_baselines = True, suffix="_2"):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
diff --git a/testing/vcs/test_vcs_matplotlib_colormap.py b/testing/vcs/test_vcs_matplotlib_colormap.py
index baf0e1bb0..03ec5e8dc 100644
--- a/testing/vcs/test_vcs_matplotlib_colormap.py
+++ b/testing/vcs/test_vcs_matplotlib_colormap.py
@@ -1,4 +1,9 @@
 import os, sys, cdms2, vcs, testing.regression as regression
+import matplotlib
+sp = matplotlib.__version__.split()
+if int(sp[0])*10+int(sp[1])<15:
+    # This only works with matplotlib 1.5 and greater
+    sys.exit()
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,4 +15,4 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
 canvas = regression.init()
 canvas.setcolormap(vcs.matplotlib2vcs("viridis"))
 canvas.plot(clt, bg=1)
-regression.run(canvas, "test_matplotlib_colormap.png")
\ No newline at end of file
+regression.run(canvas, "test_matplotlib_colormap.png")
diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
index b57b76724..6e0e647bc 100644
--- a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
+++ b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
@@ -1,6 +1,7 @@
 import os, sys, testing.regression as regression
 
-import EzTemplate,vcs
+import vcs
+from vcsaddons import EzTemplate
 import cdms,EzTemplate,vcs,sys
 ## 12 plots 1 legend per row on the right
 ## Initialize VCS
-- 
GitLab


From 39da09b1d92984294b3676a6352d921afb6daf5b Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Thu, 9 Jun 2016 10:03:49 -0700
Subject: [PATCH 161/196] turning off baselines update

---
 Packages/testing/regression.py                          | 2 +-
 testing/vcs/test_vcs_matplotlib_colormap.py             | 2 +-
 testing/vcsaddons/test_12_plot_one_leg_per_row_right.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py
index 8880c402f..b0b862993 100644
--- a/Packages/testing/regression.py
+++ b/Packages/testing/regression.py
@@ -83,7 +83,7 @@ def find_alternates(fname):
     return results
 
 def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
-                       baseline=True, cleanup=True, update_baselines = True, suffix="_2"):
+                       baseline=True, cleanup=True, update_baselines = False, suffix="_2"):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
diff --git a/testing/vcs/test_vcs_matplotlib_colormap.py b/testing/vcs/test_vcs_matplotlib_colormap.py
index 03ec5e8dc..b7ba251f5 100644
--- a/testing/vcs/test_vcs_matplotlib_colormap.py
+++ b/testing/vcs/test_vcs_matplotlib_colormap.py
@@ -1,6 +1,6 @@
 import os, sys, cdms2, vcs, testing.regression as regression
 import matplotlib
-sp = matplotlib.__version__.split()
+sp = matplotlib.__version__.split(".")
 if int(sp[0])*10+int(sp[1])<15:
     # This only works with matplotlib 1.5 and greater
     sys.exit()
diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
index 6e0e647bc..8daf50456 100644
--- a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
+++ b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
@@ -2,7 +2,7 @@ import os, sys, testing.regression as regression
 
 import vcs
 from vcsaddons import EzTemplate
-import cdms,EzTemplate,vcs,sys
+import cdms,vcs,sys
 ## 12 plots 1 legend per row on the right
 ## Initialize VCS
 x = vcs.init()
-- 
GitLab


From ce4c96affea8e7da1f02baa3b5625ecaec87fa65 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Thu, 9 Jun 2016 10:36:31 -0700
Subject: [PATCH 162/196] added code to make sure we have conda in path

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +-
 CMakeLists.txt                                           | 8 ++++++++
 2 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index 1c9ce4163..c587fed2d 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls
+@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls
 
 source activate @CONDA_ENVIRONMENT_NAME@
 for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
diff --git a/CMakeLists.txt b/CMakeLists.txt
index a9e2fb3f6..64c6b6e33 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -19,6 +19,14 @@ set(CMAKE_MODULE_PATH
   ${CMAKE_MODULE_PATH}
 )
 
+find_program(CONDA NAMES conda)
+
+if ( ${CONDA} STREQUAL "CONDA-NOTFOUND" ) 
+    message(FATAL_ERROR "Could not locate conda, please make sure conda is installed and in your PATH")
+endif()
+
+message("[INFO] Found conda at: ${CONDA}")
+
 if (DEFINED ENV{UVCDAT_ANONYMOUS_LOG})
   if (($ENV{UVCDAT_ANONYMOUS_LOG} STREQUAL "yes") OR
       ($ENV{UVCDAT_ANONYMOUS_LOG} STREQUAL "no"))
-- 
GitLab


From 4383c8be5a52028fdceaa77937c492b77f19c43e Mon Sep 17 00:00:00 2001
From: Bryce Sampson <sampson.bryce@yahoo.com>
Date: Thu, 9 Jun 2016 10:54:32 -0700
Subject: [PATCH 163/196] fixed bug with extend right not working of levels is
 a list

---
 Packages/vcs/vcs/VCS_validation_functions.py | 21 ++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/Packages/vcs/vcs/VCS_validation_functions.py b/Packages/vcs/vcs/VCS_validation_functions.py
index 7595d900e..11b76ee5d 100644
--- a/Packages/vcs/vcs/VCS_validation_functions.py
+++ b/Packages/vcs/vcs/VCS_validation_functions.py
@@ -1995,7 +1995,7 @@ def add_level_ext_2(self, ext_value):
         if isinstance(self.levels[0], list):  # remove from tuple of lists
             if self.levels[-1][1] > 9.e19:
                 self.levels.pop(-1)
-        if isinstance(self.levels, tuple):       # remove from list
+        if isinstance(self.levels, (tuple, list)):       # remove from list
             ret_tup = []
             for i in range(len(self.levels) - 1):
                 ret_tup.insert(i + 1, self.levels[i])
@@ -2009,15 +2009,16 @@ def add_level_ext_2(self, ext_value):
         return self.levels
 
     # We may need to add extnsion
-    if isinstance(self.levels, tuple):
-        self.levels = list(self.levels)
-    if isinstance(self.levels[-1], list):  # add to tuple of lists
-        if self.levels[-1][1] < 9.e19:
-            self.levels.append([self.levels[-1][1], 1e20])
-    else:
-        if self.levels[-1] < 9.e19:
-            self.levels.append(1.e20)
-    return self.levels
+    if isinstance(self.levels, (list, tuple)):
+        if isinstance(self.levels, tuple):
+            self.levels = list(self.levels)
+        if isinstance(self.levels[-1], list):  # add to tuple of lists
+            if self.levels[-1][1] < 9.e19:
+                self.levels.append([self.levels[-1][1], 1e20])
+        else:
+            if self.levels[-1] < 9.e19:
+                self.levels.append(1.e20)
+        return self.levels
 
 
 def _getext_1(self):
-- 
GitLab


From d0418738bba6412560d37f01c26c869ab04dc39c Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Fri, 10 Jun 2016 08:43:08 -0700
Subject: [PATCH 164/196] fix #1776

let user pass an axis argument, rather than always picking z level, that allows for non cf use
also pep8ed the file
---
 Packages/cdutil/Lib/vertical.py | 292 ++++++++++++++++++--------------
 testing/cdutil/test_vert.py     |  28 +--
 2 files changed, 179 insertions(+), 141 deletions(-)

diff --git a/Packages/cdutil/Lib/vertical.py b/Packages/cdutil/Lib/vertical.py
index 9d6df0ae8..167b7d4cd 100644
--- a/Packages/cdutil/Lib/vertical.py
+++ b/Packages/cdutil/Lib/vertical.py
@@ -4,17 +4,19 @@ import genutil
 import cdms2
 import numpy
 import cdat_info
-def reconstructPressureFromHybrid(ps,A,B,Po):
+
+
+def reconstructPressureFromHybrid(ps, A, B, Po):
     """
     Reconstruct the Pressure field on sigma levels, from the surface pressure
-    
+
     Input
     Ps   : Surface pressure
     A,B,Po: Hybrid Convertion Coefficients, such as: p=B.ps+A.Po
     Ps: surface pressure
     B,A are 1D : sigma levels
     Po and Ps must have same units
-    
+
     Output
     Pressure field
     Such as P=B*Ps+A*Po
@@ -23,200 +25,234 @@ def reconstructPressureFromHybrid(ps,A,B,Po):
     P=reconstructPressureFromHybrid(ps,A,B,Po)
     """
     # Compute the pressure for the sigma levels
-    cdat_info.pingPCMDIdb("cdat","cdutil.vertical.reconstructPressureFromHybrid")
-    ps,B=genutil.grower(ps,B)
-    ps,A=genutil.grower(ps,A)
-    p=ps*B
-    p=p+A*Po
+    cdat_info.pingPCMDIdb(
+        "cdat",
+        "cdutil.vertical.reconstructPressureFromHybrid")
+    ps, B = genutil.grower(ps, B)
+    ps, A = genutil.grower(ps, A)
+    p = ps * B
+    p = p + A * Po
     p.setAxisList(ps.getAxisList())
-    p.id='P'
+    p.id = 'P'
     try:
-      p.units=ps.units
+        p.units = ps.units
     except:
-      pass
-    t=p.getTime()
+        pass
+    t = p.getTime()
     if not t is None:
-      p=p(order='tz...')
+        p = p(order='tz...')
     else:
-     p=p(order='z...')
+        p = p(order='z...')
     return p
-    
-def linearInterpolation(A,I,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000], status=None):
+
+
+def linearInterpolation(
+    A, I, levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000,
+                  30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000], status=None, axis='z'):
     """
     Linear interpolation
     to interpolate a field from some levels to another set of levels
     Value below "surface" are masked
-    
+
     Input
     A :      array to interpolate
     I :      interpolation field (usually Pressure or depth) from TOP (level 0) to BOTTOM (last level), i.e P value going up with each level
     levels : levels to interplate to (same units as I), default levels are:[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000]
+    axis:    axis over which to do the linear interpolation, default is 'z', accepted: '1' '(myaxis)'
 
     I and levels must have same units
 
     Output
     array on new levels (levels)
-    
+
     Examples:
     A=interpolate(A,I,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000])
     """
-    
-    cdat_info.pingPCMDIdb("cdat","cdutil.vertical.linearInterpolation")
+
+    cdat_info.pingPCMDIdb("cdat", "cdutil.vertical.linearInterpolation")
     try:
-        nlev=len(levels)  # Number of pressure levels
+        nlev = len(levels)  # Number of pressure levels
     except:
-        nlev=1  # if only one level len(levels) would breaks
-        levels=[levels,]
-    order=A.getOrder()
-    A=A(order='z...')
-    I=I(order='z...')
-    sh=list(I.shape)
-    nsigma=sh[0] #number of sigma levels
-    sh[0]=nlev
-    t=MV2.zeros(sh,typecode=MV2.float32)
-    sh2=I[0].shape
-    prev=-1
-    for ilev in range(nlev): # loop through pressure levels
+        nlev = 1  # if only one level len(levels) would breaks
+        levels = [levels, ]
+    order = A.getOrder()
+    A = A(order='%s...' % axis)
+    I = I(order='%s...' % axis)
+    sh = list(I.shape)
+    nsigma = sh[0]  # number of sigma levels
+    sh[0] = nlev
+    t = MV2.zeros(sh, typecode=MV2.float32)
+    sh2 = I[0].shape
+    prev = -1
+    for ilev in range(nlev):  # loop through pressure levels
         if status is not None:
-            prev=genutil.statusbar(ilev,nlev-1.,prev)
-        lev=levels[ilev] # get value for the level
-        Iabv=MV2.ones(sh2,MV2.float)
-        Aabv=-1*Iabv # Array on sigma level Above
-        Abel=-1*Iabv # Array on sigma level Below
-        Ibel=-1*Iabv # Pressure on sigma level Below
-        Iabv=-1*Iabv # Pressure on sigma level Above
-        Ieq=MV2.masked_equal(Iabv,-1) # Area where Pressure == levels
-        for i in range(1,nsigma): # loop from second sigma level to last one
-            a = MV2.greater_equal(I[i],  lev) # Where is the pressure greater than lev
-            b =    MV2.less_equal(I[i-1],lev) # Where is the pressure less than lev
+            prev = genutil.statusbar(ilev, nlev - 1., prev)
+        lev = levels[ilev]  # get value for the level
+        Iabv = MV2.ones(sh2, MV2.float)
+        Aabv = -1 * Iabv  # Array on sigma level Above
+        Abel = -1 * Iabv  # Array on sigma level Below
+        Ibel = -1 * Iabv  # Pressure on sigma level Below
+        Iabv = -1 * Iabv  # Pressure on sigma level Above
+        Ieq = MV2.masked_equal(Iabv, -1)  # Area where Pressure == levels
+        for i in range(1, nsigma):  # loop from second sigma level to last one
+            a = MV2.greater_equal(
+                I[i],
+                lev)  # Where is the pressure greater than lev
+            b = MV2.less_equal(
+                I[i - 1],
+                lev)  # Where is the pressure less than lev
             # Now looks if the pressure level is in between the 2 sigma levels
             # If yes, sets Iabv, Ibel and Aabv, Abel
-            a=MV2.logical_and(a,b)
-            Iabv=MV2.where(a,I[i],Iabv) # Pressure on sigma level Above
-            Aabv=MV2.where(a,A[i],Aabv) # Array on sigma level Above
-            Ibel=MV2.where(a,I[i-1],Ibel) # Pressure on sigma level Below
-            Abel=MV2.where(a,A[i-1],Abel) # Array on sigma level Below
-            Ieq= MV2.where(MV2.equal(I[i],lev),A[i],Ieq)
-
-        val=MV2.masked_where(MV2.equal(Ibel,-1.),numpy.ones(Ibel.shape)*lev) # set to missing value if no data below lev if there is
-        
-        tl=(val-Ibel)/(Iabv-Ibel)*(Aabv-Abel)+Abel # Interpolation
+            a = MV2.logical_and(a, b)
+            Iabv = MV2.where(a, I[i], Iabv)  # Pressure on sigma level Above
+            Aabv = MV2.where(a, A[i], Aabv)  # Array on sigma level Above
+            Ibel = MV2.where(
+                a,
+                I[i - 1],
+                Ibel)  # Pressure on sigma level Below
+            Abel = MV2.where(a, A[i - 1], Abel)  # Array on sigma level Below
+            Ieq = MV2.where(MV2.equal(I[i], lev), A[i], Ieq)
+
+        val = MV2.masked_where(
+            MV2.equal(Ibel, -1.), numpy.ones(Ibel.shape) * lev)
+                               # set to missing value if no data below lev if
+                               # there is
+
+        tl = (val - Ibel) / (Iabv - Ibel) * \
+            (Aabv - Abel) + Abel  # Interpolation
         if ((Ieq.mask is None) or (Ieq.mask is MV2.nomask)):
-            tl=Ieq
+            tl = Ieq
         else:
-            tl=MV2.where(1-Ieq.mask,Ieq,tl)
-        t[ilev]=tl.astype(MV2.float32)
+            tl = MV2.where(1 - Ieq.mask, Ieq, tl)
+        t[ilev] = tl.astype(MV2.float32)
 
-    ax=A.getAxisList()
-    autobnds=cdms2.getAutoBounds()
+    ax = A.getAxisList()
+    autobnds = cdms2.getAutoBounds()
     cdms2.setAutoBounds('off')
-    lvl=cdms2.createAxis(MV2.array(levels).filled())
+    lvl = cdms2.createAxis(MV2.array(levels).filled())
     cdms2.setAutoBounds(autobnds)
     try:
-        lvl.units=I.units
+        lvl.units = I.units
     except:
         pass
-    lvl.id='plev'
-    
+    lvl.id = 'plev'
+
     try:
-      t.units=I.units
+        t.units = I.units
     except:
-      pass
-  
-    ax[0]=lvl
+        pass
+
+    ax[0] = lvl
     t.setAxisList(ax)
-    t.id=A.id
+    t.id = A.id
     for att in A.listattributes():
-        setattr(t,att,getattr(A,att))
+        setattr(t, att, getattr(A, att))
     return t(order=order)
 
-def logLinearInterpolation(A,P,levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000],status=None):
+
+def logLinearInterpolation(
+    A, P, levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000,
+                  30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000], status=None, axis='z'):
     """
     Log-linear interpolation
     to convert a field from sigma levels to pressure levels
     Value below surface are masked
-    
+
     Input
-    A :    array on sigma levels
-    P :    pressure field from TOP (level 0) to BOTTOM (last level)
+    A :      array on sigma levels
+    P :      pressure field from TOP (level 0) to BOTTOM (last level)
     levels : pressure levels to interplate to (same units as P), default levels are:[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000]
+    axis:    axis over which to do the linear interpolation, default is 'z', accepted: '1' '(myaxis)'
 
     P and levels must have same units
 
     Output
     array on pressure levels (levels)
-    
+
     Examples:
     A=logLinearInterpolation(A,P),levels=[100000, 92500, 85000, 70000, 60000, 50000, 40000, 30000, 25000, 20000, 15000, 10000, 7000, 5000, 3000, 2000, 1000])
     """
-    
-    cdat_info.pingPCMDIdb("cdat","cdutil.vertical.logLinearInterpolation")
+
+    cdat_info.pingPCMDIdb("cdat", "cdutil.vertical.logLinearInterpolation")
     try:
-        nlev=len(levels)  # Number of pressure levels
+        nlev = len(levels)  # Number of pressure levels
     except:
-        nlev=1  # if only one level len(levels) would breaks
-        levels=[levels,]
-    order=A.getOrder()
-    A=A(order='z...')
-    P=P(order='z...')
-    sh=list(P.shape)
-    nsigma=sh[0] #number of sigma levels
-    sh[0]=nlev
-    t=MV2.zeros(sh,typecode=MV2.float32)
-    sh2=P[0].shape
-    prev=-1
-    for ilev in range(nlev): # loop through pressure levels
+        nlev = 1  # if only one level len(levels) would breaks
+        levels = [levels, ]
+    order = A.getOrder()
+    A = A(order='%s...' % axis)
+    P = P(order='%s...' % axis)
+    sh = list(P.shape)
+    nsigma = sh[0]  # number of sigma levels
+    sh[0] = nlev
+    t = MV2.zeros(sh, typecode=MV2.float32)
+    sh2 = P[0].shape
+    prev = -1
+    for ilev in range(nlev):  # loop through pressure levels
         if status is not None:
-            prev=genutil.statusbar(ilev,nlev-1.,prev)
-        lev=levels[ilev] # get value for the level
-        Pabv=MV2.ones(sh2,MV2.float)
-        Aabv=-1*Pabv # Array on sigma level Above
-        Abel=-1*Pabv # Array on sigma level Below
-        Pbel=-1*Pabv # Pressure on sigma level Below
-        Pabv=-1*Pabv # Pressure on sigma level Above
-        Peq=MV2.masked_equal(Pabv,-1) # Area where Pressure == levels
-        for i in range(1,nsigma): # loop from second sigma level to last one
-            a=MV2.greater_equal(P[i],  lev) # Where is the pressure greater than lev
-            b=   MV2.less_equal(P[i-1],lev) # Where is the pressure less than lev
+            prev = genutil.statusbar(ilev, nlev - 1., prev)
+        lev = levels[ilev]  # get value for the level
+        Pabv = MV2.ones(sh2, MV2.float)
+        Aabv = -1 * Pabv  # Array on sigma level Above
+        Abel = -1 * Pabv  # Array on sigma level Below
+        Pbel = -1 * Pabv  # Pressure on sigma level Below
+        Pabv = -1 * Pabv  # Pressure on sigma level Above
+        Peq = MV2.masked_equal(Pabv, -1)  # Area where Pressure == levels
+        for i in range(1, nsigma):  # loop from second sigma level to last one
+            a = MV2.greater_equal(
+                P[i],
+                lev)  # Where is the pressure greater than lev
+            b = MV2.less_equal(
+                P[i - 1],
+                lev)  # Where is the pressure less than lev
             # Now looks if the pressure level is in between the 2 sigma levels
             # If yes, sets Pabv, Pbel and Aabv, Abel
-            a=MV2.logical_and(a,b)
-            Pabv=MV2.where(a,P[i],Pabv) # Pressure on sigma level Above
-            Aabv=MV2.where(a,A[i],Aabv) # Array on sigma level Above
-            Pbel=MV2.where(a,P[i-1],Pbel) # Pressure on sigma level Below
-            Abel=MV2.where(a,A[i-1],Abel) # Array on sigma level Below
-            Peq= MV2.where(MV2.equal(P[i],lev),A[i],Peq)
-
-        val=MV2.masked_where(MV2.equal(Pbel,-1),numpy.ones(Pbel.shape)*lev) # set to missing value if no data below lev if there is
-        
-        tl=MV2.log(val/Pbel)/MV2.log(Pabv/Pbel)*(Aabv-Abel)+Abel # Interpolation
+            a = MV2.logical_and(a, b)
+            Pabv = MV2.where(a, P[i], Pabv)  # Pressure on sigma level Above
+            Aabv = MV2.where(a, A[i], Aabv)  # Array on sigma level Above
+            Pbel = MV2.where(
+                a,
+                P[i - 1],
+                Pbel)  # Pressure on sigma level Below
+            Abel = MV2.where(a, A[i - 1], Abel)  # Array on sigma level Below
+            Peq = MV2.where(MV2.equal(P[i], lev), A[i], Peq)
+
+        val = MV2.masked_where(
+            MV2.equal(Pbel, -1), numpy.ones(Pbel.shape) * lev)
+                               # set to missing value if no data below lev if
+                               # there is
+
+        tl = MV2.log(
+            val / Pbel) / MV2.log(
+                Pabv / Pbel) * (
+            Aabv - Abel) + Abel  # Interpolation
         if ((Peq.mask is None) or (Peq.mask is MV2.nomask)):
-            tl=Peq
+            tl = Peq
         else:
-            tl=MV2.where(1-Peq.mask,Peq,tl)
-        t[ilev]=tl.astype(MV2.float32)
-        
-    ax=A.getAxisList()
-    autobnds=cdms2.getAutoBounds()
+            tl = MV2.where(1 - Peq.mask, Peq, tl)
+        t[ilev] = tl.astype(MV2.float32)
+
+    ax = A.getAxisList()
+    autobnds = cdms2.getAutoBounds()
     cdms2.setAutoBounds('off')
-    lvl=cdms2.createAxis(MV2.array(levels).filled())
+    lvl = cdms2.createAxis(MV2.array(levels).filled())
     cdms2.setAutoBounds(autobnds)
     try:
-        lvl.units=P.units
+        lvl.units = P.units
     except:
         pass
-    lvl.id='plev'
-    
+    lvl.id = 'plev'
+
     try:
-      t.units=P.units
+        t.units = P.units
     except:
-      pass
-  
-    ax[0]=lvl
+        pass
+
+    ax[0] = lvl
     t.setAxisList(ax)
-    t.id=A.id
+    t.id = A.id
     for att in A.listattributes():
-        setattr(t,att,getattr(A,att))
+        setattr(t, att, getattr(A, att))
     return t(order=order)
-    
-sigma2Pressure=logLinearInterpolation
+
+sigma2Pressure = logLinearInterpolation
diff --git a/testing/cdutil/test_vert.py b/testing/cdutil/test_vert.py
index 591ac6130..54f9c625a 100644
--- a/testing/cdutil/test_vert.py
+++ b/testing/cdutil/test_vert.py
@@ -2,22 +2,24 @@
 # Adapted for numpy/ma/cdms2 by convertcdms.py
 
 
-import cdutil,cdat_info
+import cdutil
+import cdat_info
+import numpy
 
 import cdms2
 import os
 bg = 0
 
-f = cdms2.open(os.path.join(cdat_info.get_sampledata_path(),'vertical.nc'))
-Ps=f('PS')
-U=f('U')
-B=f('hybm')
-A=f('hyam')
-Po=f('variable_2')
-P=cdutil.reconstructPressureFromHybrid(Ps,A,B,Po)
+f = cdms2.open(os.path.join(cdat_info.get_sampledata_path(), 'vertical.nc'))
+Ps = f('PS')
+U = f('U')
+B = f('hybm')
+A = f('hyam')
+Po = f('variable_2')
+P = cdutil.reconstructPressureFromHybrid(Ps, A, B, Po)
 
-U2=cdutil.logLinearInterpolation(U,P)
-
-#x=vcs.init()
-#x.plot(U2,bg=bg)
-#raw_input()
+U2 = cdutil.logLinearInterpolation(U, P)
+U2b = cdutil.logLinearInterpolation(U, P, axis='0')
+assert(numpy.ma.allclose(U2, U2b))
+U2b = cdutil.logLinearInterpolation(U, P, axis='(lev)')
+assert(numpy.ma.allclose(U2, U2b))
-- 
GitLab


From e80d6866dad16a338e547e29382249f79f53efee Mon Sep 17 00:00:00 2001
From: Bryce Sampson <sampson.bryce@yahoo.com>
Date: Fri, 10 Jun 2016 11:50:42 -0700
Subject: [PATCH 165/196] added test for extends and attributes attribute for
 projection

---
 Packages/vcs/vcs/projection.py  | 9 +++++++--
 testing/vcs/CMakeLists.txt      | 6 ++++--
 testing/vcs/test_vcs_extends.py | 0
 3 files changed, 11 insertions(+), 4 deletions(-)
 create mode 100644 testing/vcs/test_vcs_extends.py

diff --git a/Packages/vcs/vcs/projection.py b/Packages/vcs/vcs/projection.py
index 6b19c5e62..cabf461eb 100644
--- a/Packages/vcs/vcs/projection.py
+++ b/Packages/vcs/vcs/projection.py
@@ -327,6 +327,12 @@ class Proj(object):
         print 'name =', self.name
         print 'type =', self.type
         # print 'parameters =',self.parameters
+
+        for att in self.attributes:
+            print att, '=', getattr(self, att)
+
+    @property
+    def attributes(self):
         p = []
         if self._type in [3, 4]:
             p.append('smajor')
@@ -455,8 +461,7 @@ class Proj(object):
             p.append('centerlatitude')
             p.append('falseeasting')
             p.append('falsenorthing')
-        for att in p:
-            print att, '=', getattr(self, att)
+        return p
 
     ##########################################################################
     #                                                                           #
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index 99244cd55..86e8cc3ae 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -29,8 +29,10 @@ foreach(lat_0 45 90)
     ${lat_0}
     )
 endforeach()
-
-
+cdat_add_test(test_vcs_extends
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_extends.py
+)
 cdat_add_test(test_vcs_create_get
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_create_get.py
diff --git a/testing/vcs/test_vcs_extends.py b/testing/vcs/test_vcs_extends.py
new file mode 100644
index 000000000..e69de29bb
-- 
GitLab


From 85b48f851b5c75d13df26f49c2caed6028925d7d Mon Sep 17 00:00:00 2001
From: Bryce Sampson <sampson.bryce@yahoo.com>
Date: Fri, 10 Jun 2016 11:55:31 -0700
Subject: [PATCH 166/196] found file since all code disappeared

---
 testing/vcs/test_vcs_extends.py | 29 +++++++++++++++++++++++++++++
 1 file changed, 29 insertions(+)

diff --git a/testing/vcs/test_vcs_extends.py b/testing/vcs/test_vcs_extends.py
index e69de29bb..4106cecd6 100644
--- a/testing/vcs/test_vcs_extends.py
+++ b/testing/vcs/test_vcs_extends.py
@@ -0,0 +1,29 @@
+import vcs
+import numpy
+
+box = vcs.createboxfill()
+
+box.ext_1 = True
+assert numpy.allclose(box.levels, [1e20] * 2)
+
+box.ext_2 = True
+assert numpy.allclose(box.levels, [1e20] * 2)
+
+box.levels = [1, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
+assert box.ext_1 == False
+assert box.ext_1 == False
+
+box.ext_1 = True
+assert box.levels[0] < -9e19
+
+box.ext_2 = True
+assert box.levels[-1] > 9e19
+
+box.ext_1 = False
+assert box.levels[0] > -9e19
+
+box.ext_2 = False
+assert box.levels[-1] < 9e19
+
+
+
-- 
GitLab


From 64f378e20839dde709c8668d09acae7a415cc857 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Fri, 10 Jun 2016 21:44:01 -0400
Subject: [PATCH 167/196] Fixed style issues

---
 Packages/vcs/vcs/Canvas.py | 1 -
 Packages/vcs/vcs/dv3d.py   | 1 -
 2 files changed, 2 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index eb774505b..8ef04cd8e 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -4920,7 +4920,6 @@ Options:::
             H = tmp
         return W, H
 
-
     def postscript(self, file, mode='r', orientation=None, width=None, height=None,
                    units='inches', textAsPaths=True):
         """
diff --git a/Packages/vcs/vcs/dv3d.py b/Packages/vcs/vcs/dv3d.py
index 2afae29f2..4a30aed8e 100644
--- a/Packages/vcs/vcs/dv3d.py
+++ b/Packages/vcs/vcs/dv3d.py
@@ -158,7 +158,6 @@ class Gfdv3d(object):
         self.plot_attributes['name'] = self.g_name
         self.plot_attributes['template'] = Gfdv3d_name
 
-
     def setProvenanceHandler(self, provenanceHandler):
         self.provenanceHandler = provenanceHandler
 
-- 
GitLab


From fb306a36006b7406460649306d2edb339939a7da Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Fri, 10 Jun 2016 11:05:39 -0400
Subject: [PATCH 168/196] Fixed failing tests

---
 Packages/vcs/vcs/Canvas.py         | 12 ++++++------
 Packages/vcsaddons/Lib/__init__.py | 19 +++++++++++--------
 2 files changed, 17 insertions(+), 14 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 8ef04cd8e..903e22a7a 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -2450,13 +2450,13 @@ Options:::
     def __plot(self, arglist, keyargs):
         import vcsaddons
 
-            # This routine has five arguments in arglist from _determine_arg_list
-            # It adds one for bg and passes those on to Canvas.plot as its sixth
-            # arguments.
+        # This routine has five arguments in arglist from _determine_arg_list
+        # It adds one for bg and passes those on to Canvas.plot as its sixth
+        # arguments.
 
-            # First of all let's remember which elets we have before comin in here
-            # so that anything added (temp objects) can be removed at clear
-            # time
+        # First of all let's remember which elets we have before comin in here
+        # so that anything added (temp objects) can be removed at clear
+        # time
         original_elts = {}
         new_elts = {}
         for k in vcs.elements.keys():
diff --git a/Packages/vcsaddons/Lib/__init__.py b/Packages/vcsaddons/Lib/__init__.py
index c38e5b29e..7136d36aa 100644
--- a/Packages/vcsaddons/Lib/__init__.py
+++ b/Packages/vcsaddons/Lib/__init__.py
@@ -20,16 +20,20 @@ def createusercontinents(name=None,source="default",x=None,template=None):
 
 
 def createpolar(name=None, source="default", x=None, template=None):
+    if "polar_oned" not in gms:
+        init_polar()
     return polar.Gpo(name, source=source, x=x, template=template)
 
 
 def getpolar(name=None):
+    if "polar_oned" not in gms:
+        init_polar()
     if name in gms["polar_oned"]:
         return gms["polar_oned"][name]
     raise KeyError("No Polar GM exists with name '%s'" % name)
 
 
-if "polar_oned" not in gms:
+def init_polar():
     # Create nice polar template
     try:
         t = vcs.createtemplate("polar_oned")
@@ -58,14 +62,14 @@ if "polar_oned" not in gms:
         # Template already exists
         pass
     # Create some nice default polar GMs
-    degree_polar = createpolar("degrees", template="polar_oned")
+    degree_polar = polar.Gpo("degrees", template="polar_oned")
     degree_polar.datawc_x1 = 0
     degree_polar.datawc_x2 = 360
     degree_polar.xticlabels1 = {
         i: str(i) for i in range(0, 360, 45)
     }
 
-    clock_24 = createpolar("diurnal", template="polar_oned")
+    clock_24 = polar.Gpo("diurnal", template="polar_oned")
     clock_24.datawc_x1 = 0
     clock_24.datawc_x2 = 24
     clock_24.clockwise = True
@@ -75,8 +79,7 @@ if "polar_oned" not in gms:
         i: str(i) for i in range(0, 24, 3)
     }
 
-
-    clock_24_meridiem = createpolar("diurnal_12_hour", source="diurnal", template="polar_oned")
+    clock_24_meridiem = polar.Gpo("diurnal_12_hour", source="diurnal", template="polar_oned")
     clock_24_meridiem.xticlabels1 = {
         0: "12 AM",
         3: "3 AM",
@@ -88,7 +91,7 @@ if "polar_oned" not in gms:
         21: "9 PM"
     }
 
-    clock_12 = createpolar("semidiurnal", source="diurnal", template="polar_oned")
+    clock_12 = polar.Gpo("semidiurnal", source="diurnal", template="polar_oned")
     clock_12.datawc_x2 = 12
     clock_12.xticlabels1 = {
         i: str(i) for i in range(3, 13, 3)
@@ -96,7 +99,7 @@ if "polar_oned" not in gms:
     # 3 on the right
     clock_12.theta_offset = -3
 
-    annual_cycle = createpolar("annual_cycle", template="polar_oned")
+    annual_cycle = polar.Gpo("annual_cycle", template="polar_oned")
     annual_cycle.datawc_x1 = 1
     annual_cycle.datawc_x2 = 13
     annual_cycle.clockwise = True
@@ -117,7 +120,7 @@ if "polar_oned" not in gms:
     # Put December on the top
     annual_cycle.theta_offset = -2
 
-    seasonal = createpolar("seasonal", template="polar_oned")
+    seasonal = polar.Gpo("seasonal", template="polar_oned")
     seasonal.datawc_x1 = 0
     seasonal.datawc_x2 = 4
     seasonal.xticlabels1 = {0: "DJF", 1: "MAM", 2: "JJA", 3: "SON"}
-- 
GitLab


From c0ddfc13049d04ea48e81e49b753b3df28be1e28 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Sat, 11 Jun 2016 12:47:23 -0400
Subject: [PATCH 169/196] Fixed another failing test

---
 testing/vcs/test_vcs_read_old_scr.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/testing/vcs/test_vcs_read_old_scr.py b/testing/vcs/test_vcs_read_old_scr.py
index 0a61df61e..4ae04d849 100644
--- a/testing/vcs/test_vcs_read_old_scr.py
+++ b/testing/vcs/test_vcs_read_old_scr.py
@@ -71,3 +71,4 @@ else:
     assert(gm.ymtics1=="lat5")
     assert(gm.fillareastyle == "solid")
     assert(gm.fillareacolors == [30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 35, 36])
+sys.exit(0)
-- 
GitLab


From 97724ba1f34b740532d65d38eeac3be94def7336 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Mon, 13 Jun 2016 12:29:35 -0700
Subject: [PATCH 170/196] added command to clean conda env when we are done
 with it

---
 CMakeLists.txt | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 64c6b6e33..fb0d3b667 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -284,4 +284,13 @@ configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_con
     @ONLY
     )
 
+configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/clean_cdat_from_conda.bash.in
+    ${cdat_CMAKE_BINARY_DIR}/clean_cdat_from_conda.bash
+    @ONLY
+    )
 add_subdirectory(testing)
+
+# Clean conda env
+add_custom_target(clean-conda
+    COMMAND ${cdat_CMAKE_BINARY_DIR}/clean_cdat_from_conda.bash
+    )
-- 
GitLab


From 6811f8b9042d945a72e4d0dc87b19ff4796b414b Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Mon, 13 Jun 2016 12:35:36 -0700
Subject: [PATCH 171/196] added missing clean file

---
 CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in | 5 +++++
 1 file changed, 5 insertions(+)
 create mode 100644 CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in

diff --git a/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in
new file mode 100644
index 000000000..dc5730546
--- /dev/null
+++ b/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in
@@ -0,0 +1,5 @@
+
+#!/usr/bin/env bash
+
+source activate root
+@CONDA@ remove --all -y -n @CONDA_ENVIRONMENT_NAME@
-- 
GitLab


From 562b8b969814c23e90087c146a2aa6f4c8dc4349 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Mon, 13 Jun 2016 14:41:22 -0700
Subject: [PATCH 172/196] chmod clean script

---
 CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 mode change 100644 => 100755 CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in

diff --git a/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in
old mode 100644
new mode 100755
-- 
GitLab


From 567bffd80547eabffb94a7ecc070d04575c86521 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 14 Jun 2016 07:26:25 -0700
Subject: [PATCH 173/196] we used to have two paths for rendering a frame

the one saving pngs was broken and not preserving pngs
two paths are now unified
this also fixes zoom on animations.
fix #1845
---
 Packages/vcs/vcs/VTKAnimate.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/Packages/vcs/vcs/VTKAnimate.py b/Packages/vcs/vcs/VTKAnimate.py
index fe83c6b2b..610c96760 100644
--- a/Packages/vcs/vcs/VTKAnimate.py
+++ b/Packages/vcs/vcs/VTKAnimate.py
@@ -134,7 +134,7 @@ class VTKAnimationPlayback(animate_helper.AnimationPlayback):
             self.controller.draw_frame(
                 allow_static=False,
                 render_offscreen=False,
-                main_window_png=False)
+                main_window_png=True)
 
             self.controller.frame_num += 1
             if self.controller.frame_num >= self.controller.number_of_frames():
@@ -280,8 +280,8 @@ class VTKAnimate(animate_helper.AnimationController):
         be.showGUI()
         be.renWin.Render()
 
-    def draw_frame(self, frame_num=None, render_offscreen=True,
-                   allow_static=True, main_window_png=False):
+    def draw_frame(self, frame_num=None, render_offscreen=False,
+                   allow_static=False, main_window_png=True):
         """
         Draws a frame on the canvas
           frame_num: Which frame to draw- defaults to self.frame_num
@@ -295,8 +295,7 @@ class VTKAnimate(animate_helper.AnimationController):
         else:
             self.frame_num = frame_num
 
-        if render_offscreen or (
-                allow_static and len(self.animation_files) == self.number_of_frames()):
+        if len(self.animation_files) == self.number_of_frames():
             # Attempt to extract the renderers and place them onto the create
             # thread
             self.extract_renderers()
@@ -346,4 +345,5 @@ class VTKAnimate(animate_helper.AnimationController):
         self.draw_frame(
             frame_num=frame,
             allow_static=False,
-            render_offscreen=False)
+            render_offscreen=False,
+            main_window_png=True)
-- 
GitLab


From 24922f7fbdf64c33bb1debe271ae920e69bb4f4a Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 15 Jun 2016 07:10:05 -0700
Subject: [PATCH 174/196] making @chaoshpere2112 happy, generating pngs only
 for saved or zoomed plots

---
 Packages/vcs/vcs/VTKAnimate.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/Packages/vcs/vcs/VTKAnimate.py b/Packages/vcs/vcs/VTKAnimate.py
index 610c96760..76079826f 100644
--- a/Packages/vcs/vcs/VTKAnimate.py
+++ b/Packages/vcs/vcs/VTKAnimate.py
@@ -134,7 +134,7 @@ class VTKAnimationPlayback(animate_helper.AnimationPlayback):
             self.controller.draw_frame(
                 allow_static=False,
                 render_offscreen=False,
-                main_window_png=True)
+                main_window_png=False)
 
             self.controller.frame_num += 1
             if self.controller.frame_num >= self.controller.number_of_frames():
@@ -317,7 +317,7 @@ class VTKAnimate(animate_helper.AnimationController):
 
             self.vcs_self.backend.renWin.Render()
 
-            if main_window_png:
+            if main_window_png or self.playback_params.zoom_factor != 1:
                 png_name = self.create_thread.get_frame_name(self.frame_num)
                 self.vcs_self.png(png_name)
                 self.animation_files = sorted(
@@ -346,4 +346,4 @@ class VTKAnimate(animate_helper.AnimationController):
             frame_num=frame,
             allow_static=False,
             render_offscreen=False,
-            main_window_png=True)
+            main_window_png=False)
-- 
GitLab


From 721147778d60e463609053613e9f04553b06b8db Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Fri, 10 Jun 2016 14:21:31 -0400
Subject: [PATCH 175/196] BUG #1947: isofill does not handle out of bounds
 levels correctly.

When smallest level is bigger than min scalar value or biggest level
is smaller than max scalar value isofill creates the wrong image.

Also, out of range (white color) was shown black.
---
 Packages/vcs/vcs/vcs2vtk.py                | 25 +++++++++++++---------
 Packages/vcs/vcs/vcsvtk/isofillpipeline.py |  8 ++++++-
 testing/vcs/CMakeLists.txt                 | 11 ++++++++++
 testing/vcs/test_vcs_isofill_levels.py     | 19 ++++++++++++++++
 4 files changed, 52 insertions(+), 11 deletions(-)
 create mode 100644 testing/vcs/test_vcs_isofill_levels.py

diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py
index d07ca44cc..1334e361d 100644
--- a/Packages/vcs/vcs/vcs2vtk.py
+++ b/Packages/vcs/vcs/vcs2vtk.py
@@ -94,15 +94,14 @@ def setArray(grid, array, arrayName, isCellData, isScalars):
 
 
 def putMaskOnVTKGrid(data, grid, actorColor=None, cellData=True, deep=True):
-    # Ok now looking
     msk = data.mask
     mapper = None
     if msk is not numpy.ma.nomask and not numpy.allclose(msk, False):
         if actorColor is not None:
-            flatIMask = msk.astype(numpy.int).flat
+            flatIMask = msk.astype(numpy.double).flat
             if grid.IsA("vtkStructuredGrid"):
                 grid2 = vtk.vtkStructuredGrid()
-                vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep)
+                vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep, array_type=vtk.VTK_DOUBLE)
                 attributes2 = grid2.GetCellData() if cellData else grid2.GetPointData()
             else:
                 grid2 = vtk.vtkUnstructuredGrid()
@@ -114,26 +113,32 @@ def putMaskOnVTKGrid(data, grid, actorColor=None, cellData=True, deep=True):
                     attributes = grid.GetPointData()
                 if (attributes.GetPedigreeIds()):
                     attributes2.SetPedigreeIds(attributes.GetPedigreeIds())
-                    vtkmask = vtk.vtkIntArray()
+                    pedigreeId = attributes2.GetPedigreeIds()
+                    vtkmask = vtk.vtkDoubleArray()
                     vtkmask.SetNumberOfTuples(attributes2.GetPedigreeIds().GetNumberOfTuples())
+                    for i in range(0, vtkmask.GetNumberOfTuples()):
+                        vtkmask.SetValue(i, flatIMask[pedigreeId.GetValue(i)])
                 else:
                     # the unstructured grid is not wrapped
-                    vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep)
+                    vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep, array_type=vtk.VTK_DOUBLE)
             vtkmask.SetName("scalar")
             attributes2.RemoveArray(vtk.vtkDataSetAttributes.GhostArrayName())
             attributes2.SetScalars(vtkmask)
             grid2.CopyStructure(grid)
-            setArray(grid2, flatIMask, "scalar", isCellData=cellData,
-                     isScalars=True)
             geoFilter = vtk.vtkDataSetSurfaceFilter()
             lut = vtk.vtkLookupTable()
             r, g, b, a = actorColor
-            lut.SetNumberOfTableValues(2)
             geoFilter.SetInputData(grid2)
             if not cellData:
-                lut.SetTableValue(0, r / 100., g / 100., b / 100., a / 100.)
-                lut.SetTableValue(1, r / 100., g / 100., b / 100., a / 100.)
+                pointToCell = vtk.vtkPointDataToCellData()
+                pointToCell.SetInputConnection(geoFilter.GetOutputPort())
+                geoFilter = pointToCell
+                lut.SetNumberOfTableValues(256)
+                lut.SetTableValue(0, 1., 1., 1., 1.)
+                for i in range(1, 256):
+                    lut.SetTableValue(i, r / 100., g / 100., b / 100., a / 100.)
             else:
+                lut.SetNumberOfTableValues(2)
                 lut.SetTableValue(0, r / 100., g / 100., b / 100., 0.)
                 lut.SetTableValue(1, r / 100., g / 100., b / 100., 1.)
             geoFilter.Update()
diff --git a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
index 887c6158c..273376c09 100644
--- a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
@@ -67,7 +67,13 @@ class IsofillPipeline(Pipeline2D):
                     lut.SetTableValue(j, 1., 1., 1., 0.)
             luts.append([lut, [0, len(l) - 1, True]])
             mapper.SetLookupTable(lut)
-            mapper.SetScalarRange(0, len(l) - 1)
+            minRange = 0
+            maxRange = len(l) - 1
+            if (i == 0 and self._scalarRange[0] < l[0]):
+                # band 0 is from self._scalarRange[0] to l[0]
+                # we don't show band 0
+                minRange += 1
+            mapper.SetScalarRange(minRange, maxRange)
             mapper.SetScalarModeToUseCellData()
             mappers.append(mapper)
 
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index 99244cd55..cc82bf047 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -876,6 +876,17 @@ cdat_add_test(test_vcs_settings_color_name_rgba
     --bigvalues
     "--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png"
     )
+
+   foreach(level 0 1 2)
+     cdat_add_test(test_vcs_isofill_level${level}
+       "${PYTHON_EXECUTABLE}"
+       ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_levels.py
+       "${BASELINE_DIR}/test_vcs_isofill_level${level}.png"
+       "${UVCDAT_GIT_TESTDATA_DIR}/data/HadSST1870to99.nc"
+       ${level}
+       )
+   endforeach()
+
   cdat_add_test(test_vcs_issue_960_labels
     "${PYTHON_EXECUTABLE}"
     ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py
diff --git a/testing/vcs/test_vcs_isofill_levels.py b/testing/vcs/test_vcs_isofill_levels.py
new file mode 100644
index 000000000..912178148
--- /dev/null
+++ b/testing/vcs/test_vcs_isofill_levels.py
@@ -0,0 +1,19 @@
+import cdms2
+import os
+import sys
+import testing.regression as regression
+import vcs
+
+data = sys.argv[2]
+level = sys.argv[3]
+levels = {'0': range(-5,36,5),
+          '1': [-1000, -15, 35],
+          '2': [-300, -15, 0, 15, 25]}
+
+x=regression.init(bg=1)
+f=cdms2.open(data)
+s=f("sst")
+iso=x.createisofill()
+iso.levels=levels[level]
+x.plot(s,iso)
+regression.run(x, "test_vcs_isofill_level%s.png"%level)
-- 
GitLab


From a2b2ddc881d9971ed41aaf35fe5d0bddfb085459 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 15 Jun 2016 16:21:21 -0700
Subject: [PATCH 176/196] also look for activate in case it is not in PATH but
 is in some default CMake path

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +-
 CMakeLists.txt                                           | 4 ++++
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index c587fed2d..ff3ccdc6e 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -2,7 +2,7 @@
 
 @CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls
 
-source activate @CONDA_ENVIRONMENT_NAME@
+source @ACTIVATE@ @CONDA_ENVIRONMENT_NAME@
 for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
     cd @cdat_SOURCE_DIR@/Packages/${pkg}
     rm -rf build
diff --git a/CMakeLists.txt b/CMakeLists.txt
index fb0d3b667..bd7d1fa59 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -25,6 +25,10 @@ if ( ${CONDA} STREQUAL "CONDA-NOTFOUND" )
     message(FATAL_ERROR "Could not locate conda, please make sure conda is installed and in your PATH")
 endif()
 
+find_program(ACTIVATE NAMES activate)
+if ( ${ACTIVATE} STREQUAL "ACTIVATE-NOTFOUND" ) 
+    message(FATAL_ERROR "Could not locate activate, please make sure conda is installed and in your PATH")
+endif()
 message("[INFO] Found conda at: ${CONDA}")
 
 if (DEFINED ENV{UVCDAT_ANONYMOUS_LOG})
-- 
GitLab


From d9b8f6883ff2dcd9c78619463609e7926491b754 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Thu, 16 Jun 2016 15:47:24 -0700
Subject: [PATCH 177/196] also do a copy into new env

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index ff3ccdc6e..29d18d4d5 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls
+@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy
 
 source @ACTIVATE@ @CONDA_ENVIRONMENT_NAME@
 for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
-- 
GitLab


From 4f07961bfc2879861ed6b667f45ec9cbd420a10e Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Fri, 17 Jun 2016 16:53:44 -0400
Subject: [PATCH 178/196] Update API for VTK update.

---
 Packages/vcs/vcs/vcs2vtk.py               | 10 +++++-----
 Packages/vcs/vcs/vcsvtk/vectorpipeline.py |  8 ++++----
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py
index 1334e361d..90a764d0b 100644
--- a/Packages/vcs/vcs/vcs2vtk.py
+++ b/Packages/vcs/vcs/vcs2vtk.py
@@ -647,9 +647,9 @@ def projectArray(w, projection, wc, geo=None):
 
     for i in range(0, w.GetNumberOfTuples()):
         tuple = [0, 0, 0]
-        w.GetTupleValue(i, tuple)
+        w.GetTypedTuple(i, tuple)
         geo.TransformPoint(tuple, tuple)
-        w.SetTupleValue(i, tuple)
+        w.SetTypedTuple(i, tuple)
 
 
 # Geo projection
@@ -1296,9 +1296,9 @@ def prepFillarea(renWin, farea, cmap=None):
             if opacity is not None:
                 color[-1] = opacity
             color = [int(C / 100. * 255) for C in color]
-            colors.SetTupleValue(cellId, color)
+            colors.SetTypedTuple(cellId, color)
         else:
-            color_arr.SetTupleValue(cellId, [255, 255, 255, 0])
+            color_arr.SetTypedTuple(cellId, [255, 255, 255, 0])
 
         if st != "solid":
             # Patterns/hatches support
@@ -1706,7 +1706,7 @@ def prepLine(renWin, line, cmap=None):
                     pts.InsertNextPoint(tmpx, tmpy, 0.)
                     n2 += 1
         for j in range(n2):
-            colors.InsertNextTupleValue(vtk_color)
+            colors.InsertNextTypedTuple(vtk_color)
             l = vtk.vtkLine()
             l.GetPointIds().SetId(0, j + point_offset)
             l.GetPointIds().SetId(1, j + point_offset + 1)
diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index c471a6fa9..8c09596ed 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -40,15 +40,15 @@ class VectorPipeline(Pipeline2D):
         if self._vtkGeoTransform is not None:
             newv = vtk.vtkDoubleArray()
             newv.SetNumberOfComponents(3)
-            newv.InsertTupleValue(0, [lon.min(), lat.min(), 0])
-            newv.InsertTupleValue(1, [lon.max(), lat.max(), 0])
+            newv.InsertTypedTuple(0, [lon.min(), lat.min(), 0])
+            newv.InsertTypedTuple(1, [lon.max(), lat.max(), 0])
 
             vcs2vtk.projectArray(newv, projection, self._vtkDataSetBounds)
             dimMin = [0, 0, 0]
             dimMax = [0, 0, 0]
 
-            newv.GetTupleValue(0, dimMin)
-            newv.GetTupleValue(1, dimMax)
+            newv.GetTypedTuple(0, dimMin)
+            newv.GetTypedTuple(1, dimMax)
 
             maxDimX = max(dimMin[0], dimMax[0])
             maxDimY = max(dimMin[1], dimMax[1])
-- 
GitLab


From 29356770801c635ea370edf69ecdee898fb2f620 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Mon, 20 Jun 2016 12:00:36 -0700
Subject: [PATCH 179/196] forcing proj4 4.9.2

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index 29d18d4d5..a00fce884 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy
+@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor proj4>=4.9.2 vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy
 
 source @ACTIVATE@ @CONDA_ENVIRONMENT_NAME@
 for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
-- 
GitLab


From 8e7d4fbc6593ed745b2a6e7733ea0b2183ff71a2 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Mon, 20 Jun 2016 15:18:41 -0700
Subject: [PATCH 180/196] needed to duplicate yr fully in case the year starts
 in October for example

---
 Packages/cdutil/Lib/times.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Packages/cdutil/Lib/times.py b/Packages/cdutil/Lib/times.py
index 4b4c9de7c..fdbc9ba8e 100644
--- a/Packages/cdutil/Lib/times.py
+++ b/Packages/cdutil/Lib/times.py
@@ -118,7 +118,7 @@ def getMonthIndex(my_str):
        # end of for mon in mon_list:
            
    yr = 'JFMAMJJASOND'
-   yrs = yr+yr[:6]
+   yrs = yr+yr
    #
    result = string.find(yrs, my_str)
    if result == -1: return []
-- 
GitLab


From b6308ccebc8896c63c950628fff7a12138f966da Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Wed, 22 Jun 2016 08:05:09 -0700
Subject: [PATCH 181/196] changed versioning on vtk; no longer any need to force
 the version

---
 CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
index a00fce884..eec135fcb 100755
--- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
+++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in
@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor proj4>=4.9.2 vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy
+@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@  hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor proj4>=4.9.2 vtk libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy
 
 source @ACTIVATE@ @CONDA_ENVIRONMENT_NAME@
 for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do
-- 
GitLab


From ab9e22a45e3746b54a573ee76a27608d004376ca Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Thu, 23 Jun 2016 09:54:21 -0700
Subject: [PATCH 182/196] missing parenthesis

---
 Packages/cdms2/Lib/axis.py     | 2 +-
 Packages/cdms2/Lib/cdmsNode.py | 5 ++---
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index f6f255f25..c0a55b664 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -648,7 +648,7 @@ class AbstractAxis(CdmsObj):
         self._doubledata_ = None
 
     def __str__(self):
-        return "\n".join(self.listall() + "\n"
+        return "\n".join(self.listall()) + "\n"
 
     __repr__=__str__
 
diff --git a/Packages/cdms2/Lib/cdmsNode.py b/Packages/cdms2/Lib/cdmsNode.py
index 3e34f34f7..bec8f6c01 100644
--- a/Packages/cdms2/Lib/cdmsNode.py
+++ b/Packages/cdms2/Lib/cdmsNode.py
@@ -1041,9 +1041,8 @@ class AttrNode(CdmsNode):
 
     def __init__(self, name, value=None):
         CdmsNode.__init__(self, "attr")
-        if not (isinstance(value, (int, numpy.integer, float, numpy.floating, basestring))
-                or value is None:
-            raise CDMSError, 'Invalid attribute type: ' + `value`
+        if not (isinstance(value, (int, numpy.integer, float, numpy.floating, basestring))) or (value is None):
+            raise CDMSError, 'Invalid attribute type: ' + value
         self.name=name
         self.value=value
         self.datatype=None            # CDMS datatype, use getDatatype to retrieve
-- 
GitLab


From 9b72434367caa937acf88b4991b57257ef2cf04a Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Thu, 23 Jun 2016 09:55:42 -0700
Subject: [PATCH 183/196] more missing paren

---
 Packages/cdms2/Lib/axis.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index c0a55b664..b44c8002c 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -2004,7 +2004,7 @@ class FileAxis(AbstractAxis):
         if (self._obj_ is not None) and (self.parent._mode_ != 'r') and not (hasattr(self.parent, 'format') and self.parent.format == "DRS"):
             # For negative strides, get the equivalent slice with positive stride,
             # then reverse the result.
-            if (isinstance(key, slice) and (key.step is not None) and key.step < 0:
+            if (isinstance(key, slice)) and (key.step is not None) and key.step < 0:
                 posslice=reverseSlice(key, len(self))
                 result=apply(self._obj_.getitem, (posslice,))
                 return result[::-1]
-- 
GitLab


From d1eacf4bb9f2bf28ddf7e51c2f3de5a8cb1e76b3 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Thu, 23 Jun 2016 09:56:26 -0700
Subject: [PATCH 184/196] indent error

---
 Packages/cdms2/Lib/axis.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index b44c8002c..3490d09a6 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -2012,7 +2012,7 @@ class FileAxis(AbstractAxis):
                 if isinstance(key, int) and key >= len(self):
                     raise IndexError, 'Index out of bounds: %d' % key
                     if isinstance(key, tuple):
-                    key=(key,)
+                        key=(key,)
                 return apply(self._obj_.getitem, key)
         if self._data_ is None:
             self._data_=self.getData()
-- 
GitLab


From e31e2fd861ba1de60e36f692affb3847deb54359 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Mon, 27 Jun 2016 14:54:48 -0700
Subject: [PATCH 185/196] string.maketrans still needed

---
 Packages/cdms2/Lib/cdxmllib.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/Packages/cdms2/Lib/cdxmllib.py b/Packages/cdms2/Lib/cdxmllib.py
index 70b91d9ba..bd7e2e45e 100644
--- a/Packages/cdms2/Lib/cdxmllib.py
+++ b/Packages/cdms2/Lib/cdxmllib.py
@@ -3,6 +3,7 @@
 # Author: Sjoerd Mullender.
 
 import re
+import string
 
 # import warnings
 # warnings.warn("The xmllib module is obsolete.  Use xml.sax instead.",
-- 
GitLab


From 1bd9a62aae3727a0a40b48f4081607941ed3a1e5 Mon Sep 17 00:00:00 2001
From: Dan Lipsa <dan.lipsa@kitware.com>
Date: Mon, 27 Jun 2016 23:20:26 -0400
Subject: [PATCH 186/196] Generated png should have the same name as the baseline

---
 testing/vcs/test_vcs_boxfill_robinson_wrap.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/testing/vcs/test_vcs_boxfill_robinson_wrap.py b/testing/vcs/test_vcs_boxfill_robinson_wrap.py
index de66e516c..da6cbb54a 100644
--- a/testing/vcs/test_vcs_boxfill_robinson_wrap.py
+++ b/testing/vcs/test_vcs_boxfill_robinson_wrap.py
@@ -11,4 +11,4 @@ kwargs = {}
 kwargs[ 'cdmsfile' ] = cdmsfile.id
 kwargs['bg'] = 1
 canvas.plot(clt3, gmBoxfill, **kwargs)
-regression.run(canvas, "test_robinson_wrap.png")
\ No newline at end of file
+regression.run(canvas, "test_vcs_robinson_wrap.png")
-- 
GitLab


From 43aa32a09b7a8406c3f9c7e822db7f149104eb8b Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 28 Jun 2016 11:13:34 -0700
Subject: [PATCH 187/196] isinstance was not spelt correctly

---
 Packages/cdms2/Lib/axis.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index 3490d09a6..074655488 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -1097,7 +1097,7 @@ class AbstractAxis(CdmsObj):
             #
             # mf 20010419 test if attribute is a string (non CF), set to 360.0
             #
-            if isnstance(cycle, basestring):
+            if isinstance(cycle, basestring):
                 cycle=360.0
         else:
             cycle=360.0
-- 
GitLab


From 880e2f2f2d83e6bbb6ee814259d3c34c14db2ac1 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 28 Jun 2016 11:58:37 -0700
Subject: [PATCH 188/196] isinstance was not spelt correctly

---
 Packages/cdms2/Lib/avariable.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Packages/cdms2/Lib/avariable.py b/Packages/cdms2/Lib/avariable.py
index f4c80aada..1a6b0d2f3 100644
--- a/Packages/cdms2/Lib/avariable.py
+++ b/Packages/cdms2/Lib/avariable.py
@@ -446,7 +446,7 @@ class AbstractVariable(CdmsObj, Slab):
             return
 
         selftype = self.typecode()
-        if isnstance(value, numpy.ndarray):
+        if isinstance(value, numpy.ndarray):
             value = value.astype(selftype).item()
         elif isinstance(value, (numpy.floating, numpy.integer)):
             value = numpy.array([value], selftype)
-- 
GitLab


From 2d5f700eff2bb89c2b8ae091fcf273156d74a054 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Tue, 28 Jun 2016 13:49:21 -0700
Subject: [PATCH 189/196] bad parenthesis

---
 Packages/cdms2/Lib/cdmsNode.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Packages/cdms2/Lib/cdmsNode.py b/Packages/cdms2/Lib/cdmsNode.py
index bec8f6c01..ff5bba3f2 100644
--- a/Packages/cdms2/Lib/cdmsNode.py
+++ b/Packages/cdms2/Lib/cdmsNode.py
@@ -1041,7 +1041,7 @@ class AttrNode(CdmsNode):
 
     def __init__(self, name, value=None):
         CdmsNode.__init__(self, "attr")
-        if not (isinstance(value, (int, numpy.integer, float, numpy.floating, basestring))) or (value is None):
+        if not (isinstance(value, (int, numpy.integer, float, numpy.floating, basestring)) or (value is None)):
             raise CDMSError, 'Invalid attribute type: ' + value
         self.name=name
         self.value=value
-- 
GitLab


From 97c33e115849b6f994b898aa1490900ca768bbe9 Mon Sep 17 00:00:00 2001
From: Denis Nadeau <dnadeau4@users.noreply.github.com>
Date: Wed, 29 Jun 2016 14:58:51 -0700
Subject: [PATCH 190/196] Fix not tuple instance condition for axis getitem
 call

---
 Packages/cdms2/Lib/axis.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index 074655488..9313441a4 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -2011,8 +2011,8 @@ class FileAxis(AbstractAxis):
             else:
                 if isinstance(key, int) and key >= len(self):
                     raise IndexError, 'Index out of bounds: %d' % key
-                    if isinstance(key, tuple):
-                        key=(key,)
+                if not isinstance(key, tuple):
+                    key=(key,)
                 return apply(self._obj_.getitem, key)
         if self._data_ is None:
             self._data_=self.getData()
-- 
GitLab


From 91141a6bc512e6b0baff37f980f99b7f43dfeef8 Mon Sep 17 00:00:00 2001
From: Charles Doutriaux <doutriaux1@llnl.gov>
Date: Thu, 30 Jun 2016 16:38:49 -0700
Subject: [PATCH 191/196] revert to last known working version

---
 Packages/cdms2/Lib/CDML.py             |  425 +++---
 Packages/cdms2/Lib/CDMLParser.py       |  189 ++-
 Packages/cdms2/Lib/MV2.py              |  524 +++----
 Packages/cdms2/Lib/__init__.py         |   62 +-
 Packages/cdms2/Lib/auxcoord.py         |   80 +-
 Packages/cdms2/Lib/avariable.py        | 1002 ++++++-------
 Packages/cdms2/Lib/axis.py             | 1821 ++++++++++++------------
 Packages/cdms2/Lib/bindex.py           |   41 +-
 Packages/cdms2/Lib/cache.py            |  167 +--
 Packages/cdms2/Lib/cdmsNode.py         |  794 +++++------
 Packages/cdms2/Lib/cdmsURLopener.py    |   17 +-
 Packages/cdms2/Lib/cdmsobj.py          |  481 +++----
 Packages/cdms2/Lib/cdurllib.py         |   72 +-
 Packages/cdms2/Lib/cdurlparse.py       |  339 +++--
 Packages/cdms2/Lib/cdxmllib.py         |  249 ++--
 Packages/cdms2/Lib/convention.py       |   85 +-
 Packages/cdms2/Lib/coord.py            |  211 ++-
 Packages/cdms2/Lib/cudsinterface.py    |  186 ++-
 Packages/cdms2/Lib/database.py         |  226 ++-
 Packages/cdms2/Lib/dataset.py          | 1264 +++++++---------
 Packages/cdms2/Lib/error.py            |    4 +-
 Packages/cdms2/Lib/forecast.py         |  226 ++-
 Packages/cdms2/Lib/fvariable.py        |  138 +-
 Packages/cdms2/Lib/gengrid.py          |  107 +-
 Packages/cdms2/Lib/grid.py             |  382 +++--
 Packages/cdms2/Lib/gsHost.py           |  212 ++-
 Packages/cdms2/Lib/gsMosaic.py         |  251 ++--
 Packages/cdms2/Lib/gsStaticVariable.py |  108 +-
 Packages/cdms2/Lib/gsTimeVariable.py   |  160 +--
 Packages/cdms2/Lib/gui.py              |   31 +-
 Packages/cdms2/Lib/hgrid.py            |  248 ++--
 Packages/cdms2/Lib/internattr.py       |  157 ++
 Packages/cdms2/Lib/mvBaseWriter.py     |   19 +-
 Packages/cdms2/Lib/mvCdmsRegrid.py     |   29 +-
 Packages/cdms2/Lib/mvSphereMesh.py     |  181 ++-
 Packages/cdms2/Lib/mvVTKSGWriter.py    |   29 +-
 Packages/cdms2/Lib/mvVTKUGWriter.py    |   47 +-
 Packages/cdms2/Lib/mvVsWriter.py       |   36 +-
 Packages/cdms2/Lib/restApi.py          |  899 ++++++------
 Packages/cdms2/Lib/selectors.py        |  292 ++--
 Packages/cdms2/Lib/slabinterface.py    |  137 +-
 Packages/cdms2/Lib/sliceut.py          |  214 ++-
 Packages/cdms2/Lib/tvariable.py        |  622 ++++----
 Packages/cdms2/Lib/typeconv.py         |   32 +-
 Packages/cdms2/Lib/variable.py         |  354 +++--
 Packages/cdms2/Lib/xlink.py            |    9 +
 46 files changed, 6109 insertions(+), 7050 deletions(-)
 create mode 100644 Packages/cdms2/Lib/internattr.py
 create mode 100644 Packages/cdms2/Lib/xlink.py

diff --git a/Packages/cdms2/Lib/CDML.py b/Packages/cdms2/Lib/CDML.py
index 963f70ac6..7a6c013f3 100644
--- a/Packages/cdms2/Lib/CDML.py
+++ b/Packages/cdms2/Lib/CDML.py
@@ -37,7 +37,6 @@ CdArray = "Array"
 # Note: at some point, this should be created dynamically
 # from the XML DTD file. For now, it is built statically.
 
-
 class CDML:
 
     cache = {}
@@ -48,254 +47,246 @@ class CDML:
         self.extra = self.extraCache.get(uri)
         if not self.dtd:
             self.dtd = self.buildDTD(uri)
-            self.cache[uri] = self.dtd
+            self.cache[uri]=self.dtd
             self.extra = self.buildExtra(uri)
-            self.extraCache[uri] = self.extra
+            self.extraCache[uri]=self.extra
 
-    def buildDTD(self, uri):
+    def buildDTD(self,uri):
         dtd = {}
         dtd['attr'] = {
-            'name': (Cdata, Required),
-            'datatype':
-                (("Char", "Byte", "Short", "Int", "Long",
-                 "Int64", "Float", "Double", "String"), Required),
-        }
+            'name': (Cdata,Required),
+            'datatype': (("Char","Byte","Short","Int","Long","Int64","Float","Double","String"),Required),
+            }
         dtd['axis'] = {
-            'id': (Id, Required),
-            'associate': (Idref, Implied),
-            'axis': (("X", "Y", "Z", "T"), Implied),
-            'bounds': (Idref, Implied),
-            'calendar': (Cdata, Implied),
-            'comment': (Cdata, Implied),
-            'component': (Cdata, Implied),
-            'compress': (Cdata, Implied),
-            'datatype':
-                (("Char", "Byte", "Short", "Int", "Long",
-                 "Int64", "Float", "Double", "String"), Required),
-            'expand': (Idref, Implied),
-            'interval': (Cdata, Implied),
-            'isvar': (("true", "false"), "true"),
-            'length': (Cdata, Required),
-            'long_name': (Cdata, Implied),
-            'modulo': (Cdata, Implied),
-            'name_in_file': (Cdata, Implied),
-            'partition': (Cdata, Implied),
-            'partition_length': (Cdata, Implied),
-            'positive': (("up", "down"), Implied),
-            'spacing': (("uniform", "variable", "disjoint"), Implied),
-            'topology': (("linear", "circular"), Implied),
-            'weights': (Idref, Implied),
-            'units': (Cdata, Required),
-        }
+            'id': (Id,Required),
+            'associate': (Idref,Implied),
+            'axis': (("X","Y","Z","T"),Implied),
+            'bounds': (Idref,Implied),
+            'calendar': (Cdata,Implied),
+            'comment': (Cdata,Implied),
+            'component': (Cdata,Implied),
+            'compress': (Cdata,Implied),
+            'datatype': (("Char","Byte","Short","Int","Long","Int64","Float","Double","String"),Required),
+            'expand': (Idref,Implied),
+            'interval': (Cdata,Implied),
+            'isvar': ( ("true","false"),"true"),
+            'length': (Cdata,Required),
+            'long_name': (Cdata,Implied),
+            'modulo': (Cdata,Implied),
+            'name_in_file': (Cdata,Implied),
+            'partition': (Cdata,Implied),
+            'partition_length': (Cdata,Implied),
+            'positive': (("up","down"),Implied),
+            'spacing': (("uniform","variable","disjoint"),Implied),
+            'topology': (("linear","circular"),Implied),
+            'weights': (Idref,Implied),
+            'units': (Cdata,Required),
+            }
         dtd['component'] = {
-            'name': (Idref, Required),
-        }
+            'name':(Idref,Required),
+            }
         dtd['dataset'] = {
-            'id': (Id, Required),
-            'Conventions': (Cdata, Required),
-            'appendices': (Cdata, Implied),
-            'calendar': (Cdata, Implied),
-            'cdms_filemap': (Cdata, Implied),
-            'comment': (Cdata, Implied),
-            'directory': (Cdata, Implied),
-            'frequency': (Cdata, Implied),
-            'history': (Cdata, Implied),
-            'institution': (Cdata, Implied),
-            'production': (Cdata, Implied),
-            'project': (Cdata, Implied),
-            'template': (Cdata, Implied),
-        }
+            'id': (Id,Required),
+            'Conventions': (Cdata,Required),
+            'appendices': (Cdata,Implied),
+            'calendar': (Cdata,Implied),
+            'cdms_filemap': (Cdata,Implied),
+            'comment': (Cdata,Implied),
+            'directory': (Cdata,Implied),
+            'frequency': (Cdata,Implied),
+            'history': (Cdata,Implied),
+            'institution': (Cdata,Implied),
+            'production': (Cdata,Implied),
+            'project': (Cdata,Implied),
+            'template': (Cdata,Implied),
+            }
         dtd['doclink'] = {
-            'id': (Id, Implied),
-            'xml:link': (Cdata, (Fixed, "simple")),
-            'href': (Cdata, Required),
-            'role': (Cdata, Implied),
-            'title': (Cdata, Implied),
-            'show': (("embed", "replace", "new"), "replace"),
-            'actuate': (("auto", "user"), Implied),
-            'behavior': (Cdata, Implied),
-            'content-role': (Cdata, Implied),
-            'content-title': (Cdata, Implied),
-            'inline': (("true", "false"), "true"),
-        }
+            'id': (Id,Implied),
+            'xml:link': (Cdata,(Fixed,"simple")),
+            'href': (Cdata,Required),
+            'role':	(Cdata,Implied),
+            'title': (Cdata,Implied),
+            'show': (("embed","replace","new"),"replace"),
+            'actuate': (("auto","user"),Implied),
+            'behavior':(Cdata,Implied),
+            'content-role': (Cdata,Implied),
+            'content-title': (Cdata,Implied),
+            'inline':(("true","false"),"true"),
+            }
         dtd['domain'] = {}
         dtd['domElem'] = {
-            'name': (Idref, Required),
-            'length': (Cdata, Implied),
-            'partition_length': (Cdata, Implied),
-            'start': (Cdata, Implied),
-        }
+            'name':(Idref,Required),
+            'length':(Cdata,Implied),
+            'partition_length':(Cdata,Implied),
+            'start':(Cdata,Implied),
+            }
         dtd['rectGrid'] = {
-            'id': (Id, Required),
-            'type':
-                (("gaussian", "uniform", "equalarea", "unknown"), "unknown"),
-            'latitude': (Idref, Required),
-            'longitude': (Idref, Required),
-            'mask': (Idref, Implied),
-            'order': (("xy", "yx"), "yx"),
-        }
+            'id': (Id,Required),
+            'type':(("gaussian","uniform","equalarea","unknown"),"unknown"),
+            'latitude':(Idref,Required),
+            'longitude':(Idref,Required),
+            'mask':(Idref,Implied),
+            'order':(("xy","yx"),"yx"),
+            }
         dtd['linear'] = {
-            'delta': (Cdata, Required),
-            'length': (Cdata, Required),
-            'start': (Cdata, Required),
-        }
+            'delta': (Cdata,Required),
+            'length': (Cdata,Required),
+            'start': (Cdata,Required),
+            }
         dtd['variable'] = {
-            'id': (Id, Required),
-            'add_offset': (Cdata, Implied),
-            'associate': (Cdata, Implied),
-            'axis': (Cdata, Implied),
-            'comments': (Cdata, Implied),
-            'datatype':
-                (("Char", "Byte", "Short", "Int", "Long",
-                 "Int64", "Float", "Double", "String"), Required),
-            'grid_name': (Cdata, Implied),
-            'grid_type': (Cdata, Implied),
-            'long_name': (Cdata, Implied),
+            'id': (Id,Required),
+            'add_offset': (Cdata,Implied),
+            'associate': (Cdata,Implied),
+            'axis': (Cdata,Implied),
+            'comments': (Cdata,Implied),
+            'datatype': (("Char","Byte","Short","Int","Long","Int64","Float","Double","String"),Required),
+            'grid_name': (Cdata,Implied),
+            'grid_type': (Cdata,Implied),
+            'long_name': (Cdata,Implied),
             'missing_value': (Cdata, Implied),
-            'name_in_file': (Cdata, Implied),
-            'scale_factor': (Cdata, Implied),
-            'subgrid': (Cdata, Implied),
-            'template': (Cdata, Implied),
-            'units': (Cdata, Implied),
-            'valid_max': (Cdata, Implied),
-            'valid_min': (Cdata, Implied),
-            'valid_range': (Cdata, Implied),
-        }
+            'name_in_file': (Cdata,Implied),
+            'scale_factor': (Cdata,Implied),
+            'subgrid': (Cdata,Implied),
+            'template': (Cdata,Implied),
+            'units': (Cdata,Implied),
+            'valid_max': (Cdata,Implied),
+            'valid_min': (Cdata,Implied),
+            'valid_range': (Cdata,Implied),
+            }
         dtd['xlink'] = {
-            'id': (Id, Implied),
-            'xml:link': (Cdata, (Fixed, "simple")),
-            'href': (Cdata, Required),
-            'role': (Cdata, Implied),
-            'title': (Cdata, Implied),
-            'show': (("embed", "replace", "new"), "embed"),
-            'actuate': (("auto", "user"), Implied),
-            'behavior': (Cdata, Implied),
-            'content-role':
-                (("dataset", "axis", "grid", "variable", "object"), "object"),
-            'content-title': (Cdata, Implied),
-            'inline': (("true", "false"), "true"),
-        }
+            'id': (Id,Implied),
+            'xml:link': (Cdata,(Fixed,"simple")),
+            'href': (Cdata,Required),
+            'role':	(Cdata,Implied),
+            'title': (Cdata,Implied),
+            'show': (("embed","replace","new"),"embed"),
+            'actuate': (("auto","user"),Implied),
+            'behavior':(Cdata,Implied),
+            'content-role': (("dataset","axis","grid","variable","object"),"object"),
+            'content-title': (Cdata,Implied),
+            'inline':(("true","false"),"true"),
+            }
         return dtd
 
     # Extra datatype information not included in the formal DTD.
-    def buildExtra(self, uri):
+    def buildExtra(self,uri):
         extra = {}
         extra['attr'] = {
-            'name': (CdScalar, CdString),
-            'datatype': (CdScalar, CdString),
-        }
+            'name': (CdScalar,CdString),
+            'datatype': (CdScalar,CdString),
+            }
         extra['axis'] = {
-            'id': (CdScalar, CdString),
-            'associate': (CdScalar, CdString),
-            'axis': (CdScalar, CdString),
-            'bounds': (CdArray, CdFromObject),
-            'calendar': (CdScalar, CdString),
-            'comment': (CdScalar, CdString),
-            'component': (CdScalar, CdString),
-            'compress': (CdScalar, CdString),
-            'datatype': (CdScalar, CdString),
-            'expand': (CdScalar, CdString),
-            'interval': (CdScalar, CdFromObject),
-            'isvar': (CdScalar, CdString),
-            'length': (CdScalar, CdInt),
-            'long_name': (CdScalar, CdString),
-            'modulo': (CdScalar, CdAny),
-            'name_in_file': (CdScalar, CdString),
-            'partition': (CdArray, CdInt),
-            'partition_length': (CdScalar, CdInt),
-            'positive': (CdScalar, CdString),
-            'spacing': (CdScalar, CdFromObject),
-            'topology': (CdScalar, CdString),
-            'weights': (CdArray, CdDouble),
-            'units': (CdScalar, CdString),
-        }
+            'id': (CdScalar,CdString),
+            'associate': (CdScalar,CdString),
+            'axis': (CdScalar,CdString),
+            'bounds': (CdArray,CdFromObject),
+            'calendar': (CdScalar,CdString),
+            'comment': (CdScalar,CdString),
+            'component': (CdScalar,CdString),
+            'compress': (CdScalar,CdString),
+            'datatype': (CdScalar,CdString),
+            'expand': (CdScalar,CdString),
+            'interval': (CdScalar,CdFromObject),
+            'isvar': (CdScalar,CdString),
+            'length': (CdScalar,CdInt),
+            'long_name': (CdScalar,CdString),
+            'modulo': (CdScalar,CdAny),
+            'name_in_file': (CdScalar,CdString),
+            'partition': (CdArray,CdInt),
+            'partition_length': (CdScalar,CdInt),
+            'positive': (CdScalar,CdString),
+            'spacing': (CdScalar,CdFromObject),
+            'topology': (CdScalar,CdString),
+            'weights': (CdArray,CdDouble),
+            'units': (CdScalar,CdString),
+            }
         extra['component'] = {
-            'name': (CdScalar, CdString),
-        }
+            'name': (CdScalar,CdString),
+            }
         extra['dataset'] = {
-            'id': (CdScalar, CdString),
-            'Conventions': (CdScalar, CdString),
-            'appendices': (CdScalar, CdString),
-            'calendar': (CdScalar, CdString),
-            'cdms_filemap': (CdScalar, CdString),
-            'comment': (CdScalar, CdString),
-            'directory': (CdScalar, CdString),
-            'frequency': (CdScalar, CdString),
-            'history': (CdScalar, CdString),
-            'institution': (CdScalar, CdString),
-            'production': (CdScalar, CdString),
-            'project': (CdScalar, CdString),
-            'template': (CdScalar, CdString),
-        }
+            'id': (CdScalar,CdString),
+            'Conventions': (CdScalar,CdString),
+            'appendices': (CdScalar,CdString),
+            'calendar': (CdScalar,CdString),
+            'cdms_filemap': (CdScalar,CdString),
+            'comment': (CdScalar,CdString),
+            'directory': (CdScalar,CdString),
+            'frequency': (CdScalar,CdString),
+            'history': (CdScalar,CdString),
+            'institution': (CdScalar,CdString),
+            'production': (CdScalar,CdString),
+            'project': (CdScalar,CdString),
+            'template': (CdScalar,CdString),
+            }
         extra['doclink'] = {
-            'id': (CdScalar, CdString),
-            'xml:link': (CdScalar, CdString),
-            'href': (CdScalar, CdString),
-            'role': (CdScalar, CdString),
-            'title': (CdScalar, CdString),
-            'show': (CdScalar, CdString),
-            'actuate': (CdScalar, CdString),
-            'behavior': (CdScalar, CdString),
-            'content-role': (CdScalar, CdString),
-            'content-title': (CdScalar, CdString),
-            'inline': (CdScalar, CdString),
-        }
+            'id': (CdScalar,CdString),
+            'xml:link': (CdScalar,CdString),
+            'href': (CdScalar,CdString),
+            'role': (CdScalar,CdString),
+            'title': (CdScalar,CdString),
+            'show': (CdScalar,CdString),
+            'actuate': (CdScalar,CdString),
+            'behavior': (CdScalar,CdString),
+            'content-role': (CdScalar,CdString),
+            'content-title': (CdScalar,CdString),
+            'inline': (CdScalar,CdString),
+            }
         extra['domain'] = {}
         extra['domElem'] = {
-            'name': (CdScalar, CdString),
-            'length': (CdScalar, CdInt),
-            'partition_length': (CdScalar, CdInt),
-            'start': (CdScalar, CdInt),
-        }
+            'name': (CdScalar,CdString),
+            'length': (CdScalar,CdInt),
+            'partition_length': (CdScalar,CdInt),
+            'start': (CdScalar,CdInt),
+            }
         extra['rectGrid'] = {
-            'id': (CdScalar, CdString),
-            'type': (CdScalar, CdString),
-            'latitude': (CdScalar, CdString),
-            'longitude': (CdScalar, CdString),
-            'mask': (CdScalar, CdString),
-            'order': (CdScalar, CdString),
-        }
+            'id': (CdScalar,CdString),
+            'type': (CdScalar,CdString),
+            'latitude': (CdScalar,CdString),
+            'longitude': (CdScalar,CdString),
+            'mask': (CdScalar,CdString),
+            'order': (CdScalar,CdString),
+            }
         extra['linear'] = {
-            'delta': (CdScalar, CdFromObject),
-            'length': (CdScalar, CdInt),
-            'start': (CdScalar, CdInt),
-        }
+            'delta': (CdScalar,CdFromObject),
+            'length': (CdScalar,CdInt),
+            'start': (CdScalar,CdInt),
+            }
         extra['variable'] = {
-            'id': (CdScalar, CdString),
-            'add_offset': (CdScalar, CdDouble),
-            'associate': (CdScalar, CdString),
-            'axis': (CdScalar, CdString),
-            'comments': (CdScalar, CdString),
-            'datatype': (CdScalar, CdString),
-            'grid_name': (CdScalar, CdString),
-            'grid_type': (CdScalar, CdString),
-            'long_name': (CdScalar, CdString),
-            'missing_value': (CdScalar, CdFromObject),
-            'name_in_file': (CdScalar, CdString),
-            'scale_factor': (CdScalar, CdDouble),
-            'subgrid': (CdScalar, CdString),
-            'template': (CdScalar, CdString),
-            'units': (CdScalar, CdString),
-            'valid_max': (CdScalar, CdFromObject),
-            'valid_min': (CdScalar, CdFromObject),
-            'valid_range': (CdArray, CdFromObject),
-        }
+            'id': (CdScalar,CdString),
+            'add_offset': (CdScalar,CdDouble),
+            'associate': (CdScalar,CdString),
+            'axis': (CdScalar,CdString),
+            'comments': (CdScalar,CdString),
+            'datatype': (CdScalar,CdString),
+            'grid_name': (CdScalar,CdString),
+            'grid_type': (CdScalar,CdString),
+            'long_name': (CdScalar,CdString),
+            'missing_value': (CdScalar,CdFromObject),
+            'name_in_file': (CdScalar,CdString),
+            'scale_factor': (CdScalar,CdDouble),
+            'subgrid': (CdScalar,CdString),
+            'template': (CdScalar,CdString),
+            'units': (CdScalar,CdString),
+            'valid_max': (CdScalar,CdFromObject),
+            'valid_min': (CdScalar,CdFromObject),
+            'valid_range': (CdArray,CdFromObject),
+            }
         extra['xlink'] = {
-            'id': (CdScalar, CdString),
-            'xml:link': (CdScalar, CdString),
-            'href': (CdScalar, CdString),
-            'role': (CdScalar, CdString),
-            'title': (CdScalar, CdString),
-            'show': (CdScalar, CdString),
-            'actuate': (CdScalar, CdString),
-            'behavior': (CdScalar, CdString),
-            'content-role': (CdScalar, CdString),
-            'content-title': (CdScalar, CdString),
-            'inline': (CdScalar, CdString),
-        }
+            'id': (CdScalar,CdString),
+            'xml:link': (CdScalar,CdString),
+            'href': (CdScalar,CdString),
+            'role': (CdScalar,CdString),
+            'title': (CdScalar,CdString),
+            'show': (CdScalar,CdString),
+            'actuate': (CdScalar,CdString),
+            'behavior': (CdScalar,CdString),
+            'content-role': (CdScalar,CdString),
+            'content-title': (CdScalar,CdString),
+            'inline': (CdScalar,CdString),
+            }
         return extra
 
-if __name__ == '__main__':
+if __name__=='__main__':
     cdml = CDML()
     print cdml.extra
     cdml2 = CDML()
diff --git a/Packages/cdms2/Lib/CDMLParser.py b/Packages/cdms2/Lib/CDMLParser.py
index 6a88eda9c..1ff9abc56 100644
--- a/Packages/cdms2/Lib/CDMLParser.py
+++ b/Packages/cdms2/Lib/CDMLParser.py
@@ -2,10 +2,11 @@
 Parse a CDML/XML file
 """
 
-from .cdxmllib import XMLParser
-from . import CDML
+from cdxmllib import XMLParser
+import CDML
 import re
-from . import cdmsNode
+import cdmsNode
+import string
 
 # Error constants
 InvalidAttribute = "Invalid attribute"
@@ -13,23 +14,21 @@ InvalidAttribute = "Invalid attribute"
 # Regular expressions
 _S = re.compile('[ \t\r\n]+$')
 _opS = '[ \t\r\n]*'
-_Integer = re.compile(_opS + '[0-9]+$' + _opS)
-
+_Integer = re.compile(_opS+'[0-9]+$'+_opS)
 
 class CDMLParser(XMLParser):
 
     def __init__(self, verbose=0):
-        XMLParser.__init__(self)
+	XMLParser.__init__(self)
         self.root = None
         self.currentPath = []         # Current path, a stack
         self.dtd = CDML.CDML().dtd
         self.verbose = verbose
 
     # Push current node on the stack
-    def pushCurrentNode(self, node):
+    def pushCurrentNode(self,node):
         self.currentPath.append(node)
-        if not self.root:
-            self.root = node
+        if not self.root: self.root = node
 
     # Pop the current node off the stack
     def popCurrentNode(self):
@@ -51,67 +50,62 @@ class CDMLParser(XMLParser):
     def handle_data(self, data):
         matchObj = _S.match(data)
         if not matchObj:
-            if self.verbose:
-                print 'data:', data
+            if self.verbose: print 'data:',data
             if self.root:
-                self.getCurrentNode().setContentFromString(data.strip())
+                self.getCurrentNode().setContentFromString(string.strip(data))
 
     def handle_cdata(self, data):
-        if self.verbose:
-            print 'cdata:', repr(data)
+        if self.verbose: print 'cdata:', `data`
 
     def handle_proc(self, name, data):
-        if self.verbose:
-            print 'processing:', name, repr(data)
+        if self.verbose: print 'processing:',name,`data`
 
     def handle_special(self, data):
-        if self.verbose:
-            print 'special:', repr(data)
+        if self.verbose: print 'special:',`data`
 
     def handle_starttag(self, tag, method, attrs):
 
-        if tag in self.dtd:
+        if self.dtd.has_key(tag):
 
             # Check that attributes are valid
-            validDict = self.dtd[tag]
+	    validDict = self.dtd[tag]
             validAttrs = validDict.keys()
             attrnames = attrs.keys()
             for attrname in attrnames:
                 if attrname not in validAttrs:
                     self.cdml_syntax_error(self.lineno,
-                                           'unknown attribute %s of element %s' %
-                                           (attrname, tag))
+                                      'unknown attribute %s of element %s' %
+                                      (attrname, tag))
                 else:
-                    (atttype, attdefault) = validDict[attrname]
-                    if isinstance(atttype, tuple):
+                    (atttype,attdefault)=validDict[attrname]
+                    if type(atttype)==type((0,)):
                         attrval = attrs[attrname]
                         if attrval not in atttype:
                             self.cdml_syntax_error(self.lineno,
-                                                   'invalid attribute value %s=%s of element %s, must be one of %s' %
-                                                   (attrname, attrval, tag, atttype))
+                                              'invalid attribute value %s=%s of element %s, must be one of %s' %
+                                              (attrname,attrval,tag,atttype))
 
             # Check that required attributes are present,
             # and add default values
             for attrname in validAttrs:
-                (atttype, attdefault) = validDict[attrname]
-                if attdefault == CDML.Required and attrname not in attrnames:
+                (atttype,attdefault)=validDict[attrname]
+                if attdefault==CDML.Required and attrname not in attrnames:
                     self.cdml_syntax_error(self.lineno,
-                                           'element %s requires an attribute %s' %
-                                           (tag, attrname))
-                if isinstance(attdefault, basestring) and attrname not in attrnames:
-                    attrs[attrname] = attdefault
-
-        method(attrs)
+                                      'element %s requires an attribute %s' %
+                                      (tag,attrname))
+                if type(attdefault)==type("") and attrname not in attrnames:
+                    attrs[attrname]=attdefault
+            
+	method(attrs)
 
     #------------------------------------------------------------------------
     # CDML tags
 
-    def start_attr(self, attrs):
-        if self.verbose:
-            print 'attr:', attrs
+    def start_attr(self,attrs):
+        if self.verbose: print 'attr:',attrs
         name = attrs['name']
         datatype = attrs['datatype']
-        attr = cdmsNode.AttrNode(name, None)
+        attr = cdmsNode.AttrNode(name,None)
         attr.datatype = datatype
         self.pushCurrentNode(attr)
 
@@ -119,35 +113,33 @@ class CDMLParser(XMLParser):
     def end_attr(self):
         attr = self.popCurrentNode()
         var = self.getCurrentNode()
-        attr.setValueFromString(attr.getContent(), attr.datatype)
+        attr.setValueFromString(attr.getContent(),attr.datatype)
         var.setExternalAttrFromAttr(attr)
 
     #------------------------------------------------------------------------
 
-    def start_axis(self, attrs):
-        if self.verbose:
-            print 'axis:', attrs
+    def start_axis(self,attrs):
+        if self.verbose: print 'axis:',attrs
         id = attrs['id']
         length_s = attrs['length']
         datatype = attrs.get('datatype')
         if _Integer.match(length_s) is None:
-            raise InvalidAttribute('length=' + length_s)
-        length = int(length_s)
-        axis = cdmsNode.AxisNode(id, length, datatype)
+            raise InvalidAttribute, 'length='+length_s
+        length = string.atoi(length_s)
+        axis = cdmsNode.AxisNode(id,length,datatype)
         partstring = attrs.get('partition')
         if partstring is not None:
             axis.setPartitionFromString(partstring)
         axis.setExternalDict(attrs)
-        self.getCurrentNode().addId(id, axis)
+        self.getCurrentNode().addId(id,axis)
         self.pushCurrentNode(axis)
-
+                
     def end_axis(self):
         self.popCurrentNode()
 
     #------------------------------------------------------------------------
     def start_cdml(self, attrs):
-        if self.verbose:
-            print 'cdml:', attrs
+        if self.verbose: print 'cdml:',attrs
 
     def end_cdml(self):
         pass
@@ -155,40 +147,36 @@ class CDMLParser(XMLParser):
     #------------------------------------------------------------------------
 
     def start_component(self, attrs):
-        if self.verbose:
-            print 'component:', attrs
+        if self.verbose: print 'component:',attrs
 
     def end_component(self):
         pass
 
     #------------------------------------------------------------------------
     def start_compoundAxis(self, attrs):
-        if self.verbose:
-            print 'compoundAxis:', attrs
+        if self.verbose: print 'compoundAxis:',attrs
 
     def end_compoundAxis(self):
         pass
 
     #------------------------------------------------------------------------
     def start_data(self, attrs):
-        if self.verbose:
-            print 'data:', attrs
+        if self.verbose: print 'data:',attrs
 
     def end_data(self):
         pass
 
     #------------------------------------------------------------------------
 
-    def start_dataset(self, attrs):
-        if self.verbose:
-            print 'dataset:', attrs
+    def start_dataset(self,attrs):
+        if self.verbose: print 'dataset:',attrs
         id = attrs['id']
         dataset = cdmsNode.DatasetNode(id)
         dataset.setExternalDict(attrs)
         if self.root:
-            self.getCurrentNode().addId(id, dataset)
+            self.getCurrentNode().addId(id,dataset)
         self.pushCurrentNode(dataset)
-
+                
     def end_dataset(self):
         dataset = self.popCurrentNode()
         dataset.validate()
@@ -196,8 +184,7 @@ class CDMLParser(XMLParser):
     #------------------------------------------------------------------------
 
     def start_doclink(self, attrs):
-        if self.verbose:
-            print 'docLink:', attrs
+        if self.verbose: print 'docLink:',attrs
         uri = attrs['href']
         doclink = cdmsNode.DocLinkNode(uri)
         doclink.setExternalDict(attrs)
@@ -210,20 +197,19 @@ class CDMLParser(XMLParser):
     #------------------------------------------------------------------------
 
     def start_domElem(self, attrs):
-        if self.verbose:
-            print 'domElem:', attrs
+        if self.verbose: print 'domElem:',attrs
         name = attrs['name']
         start_s = attrs.get('start')
         length_s = attrs.get('length')
         if start_s is not None:
-            start = int(start_s)
+            start = string.atoi(start_s)
         else:
             start = None
         if length_s is not None:
-            length = int(length_s)
+            length = string.atoi(length_s)
         else:
             length = None
-        domElem = cdmsNode.DomElemNode(name, start, length)
+        domElem = cdmsNode.DomElemNode(name,start,length)
         domElem.setExternalDict(attrs)
         self.getCurrentNode().add(domElem)
 
@@ -232,8 +218,7 @@ class CDMLParser(XMLParser):
 
     #------------------------------------------------------------------------
     def start_domain(self, attrs):
-        if self.verbose:
-            print 'domain:', attrs
+        if self.verbose: print 'domain:',attrs
         domain = cdmsNode.DomainNode()
         self.getCurrentNode().setDomain(domain)
         self.pushCurrentNode(domain)
@@ -243,42 +228,40 @@ class CDMLParser(XMLParser):
 
     #------------------------------------------------------------------------
 
-    def start_rectGrid(self, attrs):
-        if self.verbose:
-            print 'rectGrid:', attrs
+    def start_rectGrid(self,attrs):
+        if self.verbose: print 'rectGrid:',attrs
         id = attrs['id']
         gridtype = attrs['type']
         latitude = attrs['latitude']
         longitude = attrs['longitude']
-        grid = cdmsNode.RectGridNode(id, latitude, longitude, gridtype)
+        grid = cdmsNode.RectGridNode(id,latitude,longitude,gridtype)
         grid.setExternalDict(attrs)
-        self.getCurrentNode().addId(id, grid)
+        self.getCurrentNode().addId(id,grid)
         self.pushCurrentNode(grid)
-
+                
     def end_rectGrid(self):
         self.popCurrentNode()
 
     #------------------------------------------------------------------------
 
     def start_linear(self, attrs):
-        if self.verbose:
-            print 'linear:', attrs
+        if self.verbose: print 'linear:',attrs
         start_s = attrs['start']
         delta_s = attrs['delta']
         length_s = attrs['length']
         try:
-            start = float(start_s)
+            start=string.atof(start_s)
         except ValueError:
-            raise InvalidAttribute('start=' + start_s)
+            raise InvalidAttribute, 'start='+start_s
         try:
-            delta = float(delta_s)
+            delta=string.atof(delta_s)
         except ValueError:
-            raise InvalidAttribute('delta=' + delta_s)
+            raise InvalidAttribute, 'delta='+delta_s
         try:
-            length = int(length_s)
+            length=string.atoi(length_s)
         except ValueError:
-            raise InvalidAttribute('length=' + length_s)
-        linear = cdmsNode.LinearDataNode(start, delta, length)
+            raise InvalidAttribute, 'length='+length_s
+        linear = cdmsNode.LinearDataNode(start,delta,length)
         self.getCurrentNode().setLinearData(linear)
 
     def end_linear(self):
@@ -286,30 +269,28 @@ class CDMLParser(XMLParser):
 
     #------------------------------------------------------------------------
 
-    def start_variable(self, attrs):
-        if self.verbose:
-            print 'variable:', attrs
+    def start_variable(self,attrs):
+        if self.verbose: print 'variable:',attrs
         id = attrs['id']
         datatype = attrs['datatype']
-        variable = cdmsNode.VariableNode(id, datatype, None)
+        variable = cdmsNode.VariableNode(id,datatype,None)
         variable.setExternalDict(attrs)
-        self.getCurrentNode().addId(id, variable)
+        self.getCurrentNode().addId(id,variable)
         self.pushCurrentNode(variable)
-
+                
     def end_variable(self):
         self.popCurrentNode()
 
     #------------------------------------------------------------------------
 
     def start_xlink(self, attrs):
-        if self.verbose:
-            print 'xlink:', attrs
+        if self.verbose: print 'xlink:',attrs
         id = attrs['id']
         uri = attrs['href']
         contentRole = attrs['content-role']
-        xlink = cdmsNode.XLinkNode(id, uri, contentRole)
+        xlink = cdmsNode.XLinkNode(id,uri,contentRole)
         xlink.setExternalDict(attrs)
-        self.getCurrentNode().addId(id, xlink)
+        self.getCurrentNode().addId(id,xlink)
         self.pushCurrentNode(xlink)
 
     def end_xlink(self):
@@ -321,33 +302,30 @@ class CDMLParser(XMLParser):
         print 'error near line %d:' % lineno, message
 
     def unknown_starttag(self, tag, attrs):
-        if self.verbose:
-            print '**' + tag + '**:', attrs
+        if self.verbose: print '**'+tag+'**:',attrs
 
     def unknown_endtag(self, tag):
         pass
 
     def unknown_entityref(self, ref):
-        self.flush()
-        if self.verbose:
-            print '*** unknown entity ref: &' + ref + ';'
+	self.flush()
+        if self.verbose: print '*** unknown entity ref: &' + ref + ';'
 
     def unknown_charref(self, ref):
-        self.flush()
-        if self.verbose:
-            print '*** unknown char ref: &#' + ref + ';'
+	self.flush()
+        if self.verbose: print '*** unknown char ref: &#' + ref + ';'
 
     def close(self):
-        XMLParser.close(self)
+	XMLParser.close(self)
 
 if __name__ == '__main__':
     import sys
-
+    
     sampfile = open(sys.argv[1])
     text = sampfile.read()
     sampfile.close()
 
-    if len(sys.argv) == 2:
+    if len(sys.argv)==2:
         verbose = 0
     else:
         verbose = 1
@@ -355,3 +333,4 @@ if __name__ == '__main__':
     p.feed(text)
     p.close()
     p.root.dump()
+    
diff --git a/Packages/cdms2/Lib/MV2.py b/Packages/cdms2/Lib/MV2.py
index 297cd7362..b203bc6af 100644
--- a/Packages/cdms2/Lib/MV2.py
+++ b/Packages/cdms2/Lib/MV2.py
@@ -1,31 +1,31 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 "CDMS Variable objects, MaskedArray interface"
 import numpy
-from . import typeconv
+import typeconv
 from numpy import character, float, float32, float64, int, int8, int16, int32
 from numpy.ma import allclose, allequal, common_fill_value, compress, make_mask_none, dot, filled, \
-    getmask, getmaskarray, identity, indices, innerproduct, masked, put, putmask, rank, ravel, \
+     getmask, getmaskarray, identity, indices, innerproduct, masked, put, putmask, rank, ravel, \
      set_fill_value, shape, size, isMA, isMaskedArray, is_mask, isarray, \
      make_mask, make_mask_none, mask_or, nomask
 from numpy import sctype2char, get_printoptions, set_printoptions
-from .avariable import AbstractVariable, getNumericCompatibility
-from .tvariable import TransientVariable, asVariable
-from .grid import AbstractRectGrid
-from .error import CDMSError
-# from numpy.ma import *
-from .axis import allclose as axisAllclose, TransientAxis, concatenate as axisConcatenate, take as axisTake
+from avariable import AbstractVariable, getNumericCompatibility
+from tvariable import TransientVariable, asVariable
+from grid import AbstractRectGrid
+from error import CDMSError
+#from numpy.ma import *
+from axis import allclose as axisAllclose, TransientAxis, concatenate as axisConcatenate, take as axisTake
+
 
 
 create_mask = make_mask_none
 e = numpy.e
 pi = numpy.pi
-# NewAxis = numpy.oldnumeric.NewAxis
+#NewAxis = numpy.oldnumeric.NewAxis
 newaxis = numpy.newaxis
 counter = 0
 
-
 def fill_value(ar):
     return ar.fill_value
 
@@ -34,14 +34,13 @@ def _makeMaskedArg(x):
     """If x is a variable, turn it into a TransientVariable."""
     if isinstance(x, AbstractVariable) and not isinstance(x, TransientVariable):
         return x.subSlice()
-    elif isinstance(x, TransientVariable):
+    elif isinstance(x,TransientVariable):
         return x
     else:
         return array(x)
 
 
-def _extractMetadata(
-        a, axes=None, attributes=None, id=None, omit=None, omitall=False):
+def _extractMetadata(a, axes=None, attributes=None, id=None, omit=None, omitall=False):
     """Extract axes, attributes, id from 'a', if arg is None."""
     resultgrid = None
     if isinstance(a, AbstractVariable):
@@ -53,7 +52,7 @@ def _extractMetadata(
             attributes = a.attributes
         if id is None:
             id = "variable_%i" % TransientVariable.variable_count
-            TransientVariable.variable_count += 1
+            TransientVariable.variable_count+=1
 
         # If the grid is rectilinear, don't return an explicit grid: it's implicitly defined
         # by the axes.
@@ -61,16 +60,13 @@ def _extractMetadata(
         if (resultgrid is None) or (isinstance(resultgrid, AbstractRectGrid)) or (axes is None):
             resultgrid = None
 
-        # If the omitted axis was associated with the grid, the result will not
-        # be gridded.
+        # If the omitted axis was associated with the grid, the result will not be gridded.
         elif (omit is not None) and (resultgrid is not None) and (a.getAxis(omit) in resultgrid.getAxisList()):
             resultgrid = None
 
     return axes, attributes, id, resultgrid
 
-
 class var_unary_operation:
-
     def __init__(self, mafunc):
         """ var_unary_operation(mafunc)
         mafunc is an numpy.ma masked_unary_function.
@@ -78,31 +74,26 @@ class var_unary_operation:
         self.mafunc = mafunc
         self.__doc__ = mafunc.__doc__
 
-    def __call__(self, a):
+    def __call__ (self, a):
         axes, attributes, id, grid = _extractMetadata(a)
         maresult = self.mafunc(_makeMaskedArg(a))
         return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid)
 
-
 class var_unary_operation_with_axis:
-
     def __init__(self, mafunc):
         """ var_unary_operation(mafunc)
         mafunc is an numpy.ma masked_unary_function.
         """
         self.mafunc = mafunc
         self.__doc__ = mafunc.__doc__
-
-    def __call__(self, a, axis=0):
+    def __call__ (self, a, axis=0):
         axis = _conv_axis_arg(axis)
         ta = _makeMaskedArg(a)
         maresult = self.mafunc(ta, axis=axis)
-        axes, attributes, id, grid = _extractMetadata(
-            a, omit=axis, omitall=(axis is None))
+        axes, attributes, id, grid = _extractMetadata(a, omit=axis, omitall=(axis is None))
         return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid)
 
-
-def commonDomain(a, b, omit=None):
+def commonDomain(a,b,omit=None):
     """commonDomain(a,b) tests that the domains of variables/arrays a and b are equal,
     and returns the common domain if equal, or None if not equal. The domains may
     differ in that one domain may have leading axes not common
@@ -110,15 +101,14 @@ def commonDomain(a, b, omit=None):
     If <omit> is specified, as an integer i, skip comparison of the ith dimension
     and return None for the ith (common) dimension.
     """
-
+    
     if isinstance(b, AbstractVariable):
         bdom = b.getAxisList()
     else:
         bdom = None
-    return commonAxes(a, bdom, omit=omit)
+    return commonAxes(a,bdom,omit=omit)
 
-
-def commonAxes(a, bdom, omit=None):
+def commonAxes(a,bdom,omit=None):
     """Helper function for commonDomain. 'a' is a variable or array,
     'b' is an axislist or None.
     """
@@ -126,40 +116,40 @@ def commonAxes(a, bdom, omit=None):
         adom = a.getAxisList()
         arank = len(adom)
         brank = len(bdom)
-        if arank > brank:
+        if arank>brank:
             maxrank = arank
             minrank = brank
         else:
             maxrank = brank
             minrank = arank
-        diffrank = maxrank - minrank
-        if maxrank == arank:
+        diffrank = maxrank-minrank
+        if maxrank==arank:
             maxdom = adom
         else:
             maxdom = bdom
-        common = [None] * maxrank
+        common = [None]*maxrank
         if omit is None:
             iomit = None
         else:
-            iomit = omit - minrank
+            iomit = omit-minrank
 
         # Check shared dimensions, last to first
         for i in range(minrank):
-            j = -i - 1
-            if j == iomit:
+            j = -i-1
+            if j==iomit:
                 continue
             aj = adom[j]
             bj = bdom[j]
-            if len(aj) != len(bj):
+            if len(aj)!=len(bj):
                 return None
-            elif axisAllclose(aj, bj):
+            elif axisAllclose(aj,bj):
                 common[j] = aj
             else:
                 common[j] = TransientAxis(numpy.arange(len(aj)))
 
         # Copy leading (non-shared) axes
         for i in range(diffrank):
-            common[i] = maxdom[i]
+            common[i]=maxdom[i]
 
         return common
     elif isinstance(a, AbstractVariable):
@@ -172,7 +162,6 @@ def commonAxes(a, bdom, omit=None):
             bdom[omit] = None
         return bdom
 
-
 def commonGrid(a, b, axes):
     """commonGrid(a,b,axes) tests if the grids associated with variables a, b are equal,
     and consistent with the list of axes. If so, the common grid is returned, else None
@@ -193,7 +182,6 @@ def commonGrid(a, b, axes):
 
     return commonGrid1(a, gb, axes)
 
-
 def commonGrid1(a, gb, axes):
     """Helper function for commonGrid."""
     if isinstance(a, AbstractVariable):
@@ -222,9 +210,7 @@ def commonGrid1(a, gb, axes):
 
     return result
 
-
 class var_binary_operation:
-
     def __init__(self, mafunc):
         """ var_binary_operation(mafunc)
         mafunc is an numpy.ma masked_binary_function.
@@ -232,29 +218,29 @@ class var_binary_operation:
         self.mafunc = mafunc
         self.__doc__ = mafunc.__doc__
 
-    def __call__(self, a, b):
+    def __call__ (self, a, b):
         id = "variable_%i" % TransientVariable.variable_count
-        TransientVariable.variable_count += 1
-        axes = commonDomain(a, b)
-        grid = commonGrid(a, b, axes)
+        TransientVariable.variable_count+=1
+        axes = commonDomain(a,b)
+        grid = commonGrid(a,b,axes)
         ta = _makeMaskedArg(a)
         tb = _makeMaskedArg(b)
-        maresult = self.mafunc(ta, tb)
-        return TransientVariable(maresult, axes=axes, grid=grid, no_update_from=True, id=id)
+        maresult = self.mafunc(ta,tb)
+        return TransientVariable(maresult, axes=axes, grid=grid,no_update_from=True,id=id)
 
-    def reduce(self, target, axis=0):
+    def reduce (self, target, axis=0):
         ttarget = _makeMaskedArg(target)
         maresult = self.mafunc.reduce(ttarget, axis=axis)
         axes, attributes, id, grid = _extractMetadata(target, omit=axis)
         return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid)
 
-    def accumulate(self, target, axis=0):
+    def accumulate (self, target, axis=0):
         ttarget = _makeMaskedArg(target)
         maresult = self.mafunc.accumulate(ttarget, axis=axis)
         axes, attributes, id, grid = _extractMetadata(target, omit=axis)
         return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid)
-
-    def outer(self, a, b):
+        
+    def outer (self, a, b):
         """Return the function applied to the outer product of a and b"""
         a1 = _makeMaskedArg(a)
         b1 = _makeMaskedArg(b)
@@ -262,13 +248,11 @@ class var_binary_operation:
         return TransientVariable(maresult)
 
 
-def compress(a, b):
-    __doc__ = numpy.ma.__doc__
-    import warnings
-    warnings.warn(
-        "arguments order for compress function has changed\nit is now: MV2.copmress(array,condition), if your code seems to not react or act wrong to a call to compress, please check this",
-        Warning)
-    return TransientVariable(numpy.ma.compress(a, b), copy=1)
+def compress(a,b):
+   __doc__=numpy.ma.__doc__
+   import warnings
+   warnings.warn("arguments order for compress function has changed\nit is now: MV2.copmress(array,condition), if your code seems to not react or act wrong to a call to compress, please check this", Warning)
+   return TransientVariable(numpy.ma.compress(a,b),copy=1)
 
 
 sqrt = var_unary_operation(numpy.ma.sqrt)
@@ -285,31 +269,27 @@ less_equal = var_binary_operation(numpy.ma.less_equal)
 greater_equal = var_binary_operation(numpy.ma.greater_equal)
 less = var_binary_operation(numpy.ma.less)
 greater = var_binary_operation(numpy.ma.greater)
-
-
-def power(a, b, third=None):
+def power (a, b, third=None):
     "a**b"
     ta = _makeMaskedArg(a)
     tb = _makeMaskedArg(b)
-    maresult = numpy.ma.power(ta, tb, third)
+    maresult = numpy.ma.power(ta,tb,third)
     axes, attributes, id, grid = _extractMetadata(a)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id)
 
-
-def left_shift(a, n):
+def left_shift (a, n):
     "Left shift n bits"
     ta = _makeMaskedArg(a)
     tb = _makeMaskedArg(n)
-    maresult = numpy.ma.left_shift(ta, numpy.ma.filled(tb))
+    maresult = numpy.ma.left_shift(ta,numpy.ma.filled(tb))
     axes, attributes, id, grid = _extractMetadata(a)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id)
 
-
-def right_shift(a, n):
+def right_shift (a, n):
     "Right shift n bits"
     ta = _makeMaskedArg(a)
     tb = _makeMaskedArg(n)
-    maresult = numpy.ma.right_shift(ta, numpy.ma.filled(tb))
+    maresult = numpy.ma.right_shift(ta,numpy.ma.filled(tb))
     axes, attributes, id, grid = _extractMetadata(a)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id)
 
@@ -320,33 +300,27 @@ def _convdtype(dtype, typecode):
         dtype = typeconv.convtypecode2(typecode)
     return dtype
 
-
 def _conv_axis_arg(axis):
     "Handle backward compatibility with numpy for axis arg"
     if getNumericCompatibility() and axis is None:
-        axis = 0
+        axis=0
     return axis
 
-
 def is_masked(x):
     "Is x a 0-D masked value?"
-    return isMaskedArray(x) and x.size == 1 and x.ndim == 0 and x.mask.item()
-
+    return isMaskedArray(x) and x.size==1 and x.ndim==0 and x.mask.item()
 
 def is_floating(x):
     "Is x a scalar float, either python or numpy?"
     return (isinstance(x, numpy.floating) or isinstance(x, float))
 
-
 def is_integer(x):
     "Is x a scalar integer, either python or numpy?"
     return (isinstance(x, numpy.integer) or isinstance(x, int) or isinstance(x, long))
 
-
 def get_print_limit():
     return get_printoptions()['threshold']
 
-
 def set_print_limit(limit=numpy.inf):
     set_printoptions(threshold=limit)
 
@@ -395,113 +369,75 @@ bitwise_or = var_binary_operation(numpy.ma.bitwise_or)
 bitwise_xor = var_binary_operation(numpy.ma.bitwise_xor)
 
 
-def count(a, axis=None):
-    "Count of the non-masked elements in a, or along a certain axis."
+def count (a, axis = None):
+    "Count of the non-masked elements in a, or along a certain axis."   
     if axis is None:
-        return numpy.ma.count(a, axis)
+        return numpy.ma.count(a,axis)
     else:
         ta = _makeMaskedArg(a)
-        maresult = numpy.ma.count(ta, axis)
-        axes, attributes, id, grid = _extractMetadata(a, omit=axis)
-        F = getattr(a, "fill_value", 1.e20)
+        maresult = numpy.ma.count(ta,axis)
+        axes, attributes, id, grid = _extractMetadata(a,omit=axis)
+        F=getattr(a,"fill_value",1.e20)
         return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id, fill_value=F)
 
-
-def sum(a, axis=None, fill_value=0, dtype=None):
+def sum (a, axis = None, fill_value=0, dtype=None):
     "Sum of elements along a certain axis."
     axis = _conv_axis_arg(axis)
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.sum(ta, axis, dtype=dtype)
-    axes, attributes, id, grid = _extractMetadata(
-        a, omit=axis, omitall=(axis is None))
-    F = getattr(a, "fill_value", 1.e20)
+    axes, attributes, id, grid = _extractMetadata(a, omit=axis, omitall=(axis is None))
+    F=getattr(a,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id, fill_value=F)
 
-
-def product(a, axis=0, dtype=None):
+def product (a, axis = 0, dtype=None):
     "Product of elements along axis."
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.product(ta, axis, dtype=dtype)
     axes, attributes, id, grid = _extractMetadata(a, omit=axis)
-    F = getattr(a, "fill_value", 1.e20)
+    F=getattr(a,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id, fill_value=F)
 
-
-def average(a, axis=None, weights=None, returned=False):
+def average (a, axis=None, weights=None, returned=False):
     axis = _conv_axis_arg(axis)
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.average(ta, axis, weights, returned)
-    axes, attributes, id, grid = _extractMetadata(
-        a, omit=axis, omitall=(axis is None))
+    axes, attributes, id, grid = _extractMetadata(a, omit=axis, omitall=(axis is None))
     if returned:
-        if isinstance(maresult, tuple):
-            maresult, wresult = maresult
-        else:
-            # ok it's masked constant need to return both things by hand
-            wresult = numpy.ma.masked
-    F = getattr(a, "fill_value", 1.e20)
-    r1 = TransientVariable(
-        maresult,
-        axes=axes,
-     attributes=attributes,
-     grid=grid,
-     id=id,
-     no_update_from=True,
-     fill_value=F)
+      if isinstance(maresult,tuple):
+        maresult, wresult = maresult
+      else:
+        #ok it's masked constant need to return both things by hand
+        wresult = numpy.ma.masked
+    F=getattr(a,"fill_value",1.e20)
+    r1 = TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id,no_update_from=True, fill_value=F)
     if returned:
-        F = getattr(a, "fill_value", 1.e20)
-        w1 = TransientVariable(
-            wresult,
-            axes=axes,
-            grid=grid,
-            id=id,
-            no_update_from=True,
-            fill_value=F)
+        F=getattr(a,"fill_value",1.e20)
+        w1 = TransientVariable(wresult, axes=axes, grid=grid, id=id,no_update_from=True, fill_value=F)
         return r1, w1
     else:
         return r1
 average.__doc__ = numpy.ma.average.__doc__
 
-
-def max(a, axis=None):
+def max (a, axis=None):
     axis = _conv_axis_arg(axis)
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.max(ta, axis)
-    axes, attributes, id, grid = _extractMetadata(
-        a, omit=axis, omitall=(axis is None))
-    F = getattr(a, "fill_value", 1.e20)
-    r1 = TransientVariable(
-        maresult,
-        axes=axes,
-     attributes=attributes,
-     grid=grid,
-     id=id,
-     no_update_from=True,
-     fill_value=F)
+    axes, attributes, id, grid = _extractMetadata(a, omit=axis, omitall=(axis is None))
+    F=getattr(a,"fill_value",1.e20)
+    r1 = TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id,no_update_from=True, fill_value=F)
     return r1
 max.__doc__ = numpy.ma.max.__doc__
-
-
-def min(a, axis=None):
+def min (a, axis=None):
     axis = _conv_axis_arg(axis)
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.min(ta, axis)
-    axes, attributes, id, grid = _extractMetadata(
-        a, omit=axis, omitall=(axis is None))
-    F = getattr(a, "fill_value", 1.e20)
-    r1 = TransientVariable(
-        maresult,
-        axes=axes,
-     attributes=attributes,
-     grid=grid,
-     id=id,
-     no_update_from=True,
-     fill_value=F)
+    axes, attributes, id, grid = _extractMetadata(a, omit=axis, omitall=(axis is None))
+    F=getattr(a,"fill_value",1.e20)
+    r1 = TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id,no_update_from=True, fill_value=F)
     return r1
 min.__doc__ = numpy.ma.min.__doc__
 
-
-def sort(a, axis=-1):
+def sort (a, axis=-1):
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.sort(a.asma(), axis)
     axes, attributes, id, grid = _extractMetadata(a)
@@ -509,13 +445,11 @@ def sort(a, axis=-1):
     if (grid is not None) and (sortaxis in grid.getAxisList()):
         grid = None
     axes[axis] = TransientAxis(numpy.arange(len(sortaxis)))
-    F = getattr(a, "fill_value", 1.e20)
+    F=getattr(a,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id, fill_value=F)
-sort.__doc__ = numpy.ma.sort.__doc__ + \
-    "The sort axis is replaced with a dummy axis."
+sort.__doc__ = numpy.ma.sort.__doc__ + "The sort axis is replaced with a dummy axis."
 
-
-def choose(indices, t):
+def choose (indices, t):
     """Returns an array shaped like indices containing elements chosen
       from t.
       If an element of t is the special element masked, any element
@@ -524,38 +458,35 @@ def choose(indices, t):
       The result has only the default axes.
     """
     maresult = numpy.ma.choose(indices, map(_makeMaskedArg, t))
-    F = getattr(t, "fill_value", 1.e20)
+    F=getattr(t,"fill_value",1.e20)
     return TransientVariable(maresult, fill_value=F)
 
-
-def where(condition, x, y):
-    "where(condition, x, y) is x where condition is true, y otherwise"
-# axes = commonDomain(x,y)
-# grid = commonGrid(x,y,axes)
+def where (condition, x, y):
+    "where(condition, x, y) is x where condition is true, y otherwise" 
+##    axes = commonDomain(x,y)
+##    grid = commonGrid(x,y,axes)
     maresult = numpy.ma.where(condition, _makeMaskedArg(x), _makeMaskedArg(y))
     axes, attributes, id, grid = _extractMetadata(condition)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, grid=grid, id=id, fill_value=F)
 
-
 def masked_where(condition, x, copy=1):
-    """Return x as an array masked where condition is true.
+    """Return x as an array masked where condition is true. 
        Also masked where x or condition masked.
     """
     tx = _makeMaskedArg(x)
     tcondition = _makeMaskedArg(condition)
     maresult = numpy.ma.masked_where(tcondition, tx, copy)
     axes, attributes, id, grid = _extractMetadata(x)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
 def masked_greater(x, value):
     "masked_greater(x, value) = x masked where x > value"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_greater(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
 
@@ -564,37 +495,33 @@ def masked_greater_equal(x, value):
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_greater_equal(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
 def masked_less(x, value):
     "masked_less(x, value) = x masked where x < value"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_less(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
 def masked_less_equal(x, value):
     "masked_less_equal(x, value) = x masked where x <= value"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_less_equal(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
 def masked_not_equal(x, value):
     "masked_not_equal(x, value) = x masked where x != value"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_not_equal(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
 def masked_equal(x, value):
     """masked_equal(x, value) = x masked where x == value
        For floating point consider masked_values(x, value) instead.
@@ -602,43 +529,40 @@ def masked_equal(x, value):
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_equal(tx, value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
 def masked_outside(x, v1, v2):
     "x with mask of all values of x that are outside [v1,v2]"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_outside(tx, v1, v2)
     axes, attributes, id, grid = _extractMetadata(x)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
 def masked_inside(x, v1, v2):
     "x with mask of all values of x that are inside [v1,v2]"
     tx = _makeMaskedArg(x)
     maresult = numpy.ma.masked_inside(tx, v1, v2)
     axes, attributes, id, grid = _extractMetadata(x)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
-def concatenate(arrays, axis=0, axisid=None, axisattributes=None):
+def concatenate (arrays, axis=0, axisid=None, axisattributes=None):
     """Concatenate the arrays along the given axis. Give the extended axis the id and
     attributes provided - by default, those of the first array."""
 
     tarrays = [_makeMaskedArg(a) for a in arrays]
     maresult = numpy.ma.concatenate(arrays, axis=axis)
-    if len(arrays) > 1:
+    if len(arrays)>1:
         varattributes = None
         varid = None
-        axes = commonDomain(tarrays[0], tarrays[1], omit=axis)
+        axes = commonDomain(tarrays[0],tarrays[1],omit=axis)
         grid = commonGrid(tarrays[0], tarrays[1], axes)
-        for i in range(len(arrays) - 2):
+        for i in range(len(arrays)-2):
             if axes is None:
                 break
-            axes = commonAxes(tarrays[i + 2], axes, omit=axis)
+            axes = commonAxes(tarrays[i+2],axes,omit=axis)
             grid = commonGrid1(a, grid, axes)
     else:
         axes = tarrays[0].getAxisList()
@@ -651,37 +575,32 @@ def concatenate(arrays, axis=0, axisid=None, axisattributes=None):
     if axes is not None:
         if axisid is None:
             axisid = tarrays[0].getAxis(axis).id
-        allunitsequal = True
+        allunitsequal=True
         try:
-            allunits = tarrays[0].getAxis(axis).units
+            allunits=tarrays[0].getAxis(axis).units
         except:
-            allunits = None
+            allunits=None
         for t in tarrays[1:]:
             try:
-                tunits = t.getAxis(axis).units
+                tunits=t.getAxis(axis).units
             except:
-                tunits = None
-            if tunits != allunits:
-                allunitsequal = False
+                tunits=None
+            if tunits!=allunits:
+                allunitsequal=False
         if allunitsequal:
             if axisattributes is None:
                 axisattributes = tarrays[0].getAxis(axis).attributes
-            axes[axis] = axisConcatenate(
-                [t.getAxis(axis) for t in tarrays],
-                axisid,
-                axisattributes)
+            axes[axis] = axisConcatenate([t.getAxis(axis) for t in tarrays], axisid, axisattributes)
 
-    # If the grid doesn't match the axislist (e.g., catenation was on
-    # latitude) then omit it.
+    # If the grid doesn't match the axislist (e.g., catenation was on latitude) then omit it.
     if grid is not None:
         for item in grid.getAxisList():
             if item not in axes:
                 grid = None
-    F = getattr(arrays[0], "fill_value", 1.e20)
-    return TransientVariable(maresult, axes=axes, attributes=varattributes, id=varid, grid=grid, fill_value=F)
-
+    F=getattr(arrays[0],"fill_value",1.e20)
+    return TransientVariable(maresult, axes=axes, attributes=varattributes,id=varid,grid=grid, fill_value=F)
 
-def take(a, indices, axis=None):
+def take (a, indices, axis=None):
     "take(a, indices, axis=None) returns selection of items from a."
     axis = _conv_axis_arg(axis)
     ta = _makeMaskedArg(a)
@@ -689,16 +608,15 @@ def take(a, indices, axis=None):
     # ma compatibility interface has a bug
     maresult = numpy.ma.take(ta, indices, axis=axis)
     axes, attributes, id, grid = _extractMetadata(a, omitall=(axis is None))
-
+    
     # If the take is on a grid axis, omit the grid.
     if (grid is not None) and (axes[axis] in grid.getAxisList()):
         grid = None
     if axes is not None:
         axes[axis] = axisTake(axes[axis], indices)
-    F = getattr(a, "fill_value", 1.e20)
+    F=getattr(a,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
 def transpose(a, axes=None):
     "transpose(a, axes=None) reorder dimensions per tuple axes"
     ta = _makeMaskedArg(a)
@@ -710,53 +628,49 @@ def transpose(a, axes=None):
     newaxes = None
     if oldaxes is not None:
         newaxes = [oldaxes[i] for i in axes]
-    F = getattr(a, "fill_value", 1.e20)
+    F=getattr(a,"fill_value",1.e20)
     return TransientVariable(maresult, axes=newaxes, attributes=attributes, id=id, grid=grid, copy=1, fill_value=F)
 
-
 class _minimum_operation:
-
     "Object to calculate minima"
-
-    def __init__(self):
+    def __init__ (self):
         """minimum(a, b) or minimum(a)
            In one argument case returns the scalar minimum.
         """
         pass
 
-    def __call__(self, a, b=None):
+    def __call__ (self, a, b=None):
         "Execute the call behavior."
         a = _makeMaskedArg(a)
         if b is None:
             m = getmask(a)
-            if m is nomask:
+            if m is nomask: 
                 d = numpy.min(filled(a).ravel())
                 return d
-# ac = a.compressed()
-# if len(ac) == 0:
-# return masked
+##             ac = a.compressed()
+##             if len(ac) == 0:
+##                 return masked
             else:
                 return numpy.ma.min(a)
         else:
             return where(less(a, b), a, b)[...]
-
-    def reduce(self, target, axis=0):
+       
+    def reduce (self, target, axis=0):
         """Reduce target along the given axis."""
         a = _makeMaskedArg(target)
         axes, attributes, id, grid = _extractMetadata(a, omit=axis)
         m = getmask(a)
         if m is nomask:
             t = filled(a)
-            result = masked_array(numpy.minimum.reduce(t, axis))
+            result = masked_array (numpy.minimum.reduce (t, axis))
         else:
-            t = numpy.minimum.reduce(
-                filled(a, numpy.ma.minimum_fill_value(a)), axis)
+            t = numpy.minimum.reduce(filled(a, numpy.ma.minimum_fill_value(a)), axis)
             m = numpy.logical_and.reduce(m, axis)
             result = masked_array(t, m, fill_value(a))
         return TransientVariable(result, axes=axes, copy=0,
-                                 fill_value=fill_value(a), grid=grid, id=id)
+                     fill_value=fill_value(a), grid=grid, id=id)
 
-    def outer(self, a, b):
+    def outer (self, a, b):
         "Return the function applied to the outer product of a and b."
         a = _makeMaskedArg(a)
         b = _makeMaskedArg(b)
@@ -771,51 +685,47 @@ class _minimum_operation:
         d = numpy.minimum.outer(filled(a), filled(b))
         return TransientVariable(d, mask=m, copy=0)
 
-minimum = _minimum_operation()
-
-
+minimum = _minimum_operation () 
+    
 class _maximum_operation:
-
     "Object to calculate maxima"
-
-    def __init__(self):
+    def __init__ (self):
         """maximum(a, b) or maximum(a)
            In one argument case returns the scalar maximum.
         """
         pass
 
-    def __call__(self, a, b=None):
+    def __call__ (self, a, b=None):
         "Execute the call behavior."
         a = _makeMaskedArg(a)
         if b is None:
             m = getmask(a)
-            if m is nomask:
+            if m is nomask: 
                 d = numpy.max(filled(a).ravel())
                 return d
-# ac = a.compressed()
-# if len(ac) == 0:
-# return masked
+##             ac = a.compressed()
+##             if len(ac) == 0:
+##                 return masked
             else:
                 return numpy.ma.max(a)
         else:
             return where(greater(a, b), a, b)[...]
-
-    def reduce(self, target, axis=0):
+       
+    def reduce (self, target, axis=0):
         """Reduce target along the given axis."""
         axes, attributes, id, grid = _extractMetadata(target, omit=axis)
         a = _makeMaskedArg(target)
         m = getmask(a)
         if m is nomask:
             t = filled(a)
-            return masked_array(numpy.maximum.reduce(t, axis))
+            return masked_array (numpy.maximum.reduce (t, axis))
         else:
-            t = numpy.maximum.reduce(
-                filled(a, numpy.ma.maximum_fill_value(a)), axis)
+            t = numpy.maximum.reduce(filled(a, numpy.ma.maximum_fill_value(a)), axis)
             m = numpy.logical_and.reduce(m, axis)
             return TransientVariable(t, mask=m, fill_value=fill_value(a),
-                                     axes=axes, grid=grid, id=id)
+                        axes = axes, grid=grid, id=id)
 
-    def outer(self, a, b):
+    def outer (self, a, b):
         "Return the function applied to the outer product of a and b."
         a = _makeMaskedArg(a)
         b = _makeMaskedArg(b)
@@ -830,9 +740,8 @@ class _maximum_operation:
         d = numpy.maximum.outer(filled(a), filled(b))
         return TransientVariable(d, mask=m)
 
-maximum = _maximum_operation()
-
-
+maximum = _maximum_operation () 
+    
 def asarray(data, typecode=None, dtype=None):
     """asarray(data, typecode=None, dtype=None) is equivalent to array(data, dtype=None, copy=0)
        Returns data if dtype is None or data is a MaskedArray of the same dtype.
@@ -842,12 +751,10 @@ def asarray(data, typecode=None, dtype=None):
     if isinstance(data, AbstractVariable) and (dtype is None or sctype2char(dtype) == data.dtype.char):
         return data
     else:
-        F = getattr(data, "fill_value", 1.e20)
+        F=getattr(data,"fill_value",1.e20)
         return TransientVariable(data, dtype=dtype, copy=0, fill_value=F)
 
-
-def arrayrange(start, stop=None, step=1, typecode=None,
-               axis=None, attributes=None, id=None, dtype=None):
+def arrayrange(start, stop=None, step=1, typecode=None, axis=None, attributes=None, id=None, dtype=None):
     """Just like range() except it returns a variable whose type can be specfied
     by the keyword argument typecode. The axis of the result variable may be specified.
     """
@@ -860,19 +767,15 @@ def arrayrange(start, stop=None, step=1, typecode=None,
 
 arange = arrayrange
 
-
-def zeros(shape, typecode=float, savespace=0, axes=None,
-          attributes=None, id=None, grid=None, dtype=None):
-    """zeros(n, typecode=float, savespace=0, axes=None, attributes=None, id=None) =
+def zeros (shape, typecode=float, savespace=0, axes=None, attributes=None, id=None, grid=None, dtype=None):
+    """zeros(n, typecode=float, savespace=0, axes=None, attributes=None, id=None) = 
      an array of all zeros of the given length or shape."""
     dtype = _convdtype(dtype, typecode)
     maresult = numpy.ma.zeros(shape, dtype=dtype)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid)
-
-
-def ones(shape, typecode=float, savespace=0, axes=None,
-         attributes=None, id=None, grid=None, dtype=None):
-    """ones(n, typecode=float, savespace=0, axes=None, attributes=None, id=None) =
+    
+def ones (shape, typecode=float, savespace=0, axes=None, attributes=None, id=None, grid=None, dtype=None):
+    """ones(n, typecode=float, savespace=0, axes=None, attributes=None, id=None) = 
      an array of all ones of the given length or shape."""
     dtype = _convdtype(dtype, typecode)
     maresult = numpy.ma.ones(shape, dtype=dtype)
@@ -880,31 +783,28 @@ def ones(shape, typecode=float, savespace=0, axes=None,
 
 as_masked = numpy.ma.array
 
-
 def outerproduct(a, b):
     """outerproduct(a,b) = {a[i]*b[j]}, has shape (len(a),len(b))"""
-    ta = asVariable(a, writeable=1)
-    tb = asVariable(b, writeable=1)
-    maresult = numpy.ma.outerproduct(ta, tb)
-    axes = (ta.getAxis(0), tb.getAxis(0))
-    F = getattr(a, "fill_value", 1.e20)
+    ta = asVariable(a,writeable=1)
+    tb = asVariable(b,writeable=1)
+    maresult = numpy.ma.outerproduct(ta,tb)
+    axes = (ta.getAxis(0),tb.getAxis(0))
+    F=getattr(a,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, fill_value=F)
 
-
-def argsort(x, axis=-1, fill_value=None):
+def argsort (x, axis = -1, fill_value=None):
     """Treating masked values as if they have the value fill_value,
        return sort indices for sorting along given axis.
        if fill_value is None, use fill_value(x)
-    """
+    """        
     tx = _makeMaskedArg(x)
-    maresult = numpy.ma.argsort(tx, axis=axis, fill_value=fill_value)
+    maresult = numpy.ma.argsort(tx,axis=axis,fill_value=fill_value)
     axes, attributes, id, grid = _extractMetadata(x)
-    F = getattr(x, "fill_value", 1.e20)
+    F=getattr(x,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
 array = TransientVariable
 
-
 def repeat(a, repeats, axis=None):
     """repeat elements of a repeats times along axis
        repeats is a sequence of length a.shape[axis]
@@ -918,85 +818,68 @@ def repeat(a, repeats, axis=None):
         grid = None
     if axes is not None:
         axes[axis] = None
-    F = getattr(a, "fill_value", 1.e20)
+    F=getattr(a,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, no_update_from=True, fill_value=F)
 
-
-def reshape(a, newshape, axes=None, attributes=None, id=None, grid=None):
+def reshape (a, newshape, axes=None, attributes=None, id=None, grid=None):
     ignore, attributes, id, ignore = _extractMetadata(a, axes, attributes, id)
     if axes is not None:
         axesshape = [len(item) for item in axes]
-        if axesshape != list(newshape):
-            raise CDMSError('axes must be shaped %s' % repr(newshape))
+        if axesshape!=list(newshape):
+            raise CDMSError, 'axes must be shaped %s'%`newshape`
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.reshape(ta, newshape)
-    F = getattr(a, "fill_value", 1.e20)
+    F=getattr(a,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, no_update_from=True, fill_value=F)
-reshape.__doc__ = "numpy doc: %s\naxes/attributes/grid are applied onto the new variable" % numpy.reshape.__doc__
-
+reshape.__doc__="numpy doc: %s\naxes/attributes/grid are applied onto the new variable" % numpy.reshape.__doc__
 
-def resize(a, new_shape, axes=None, attributes=None, id=None, grid=None):
+def resize (a, new_shape, axes=None, attributes=None, id=None, grid=None):
     """resize(a, new_shape) returns a new array with the specified shape.
     The original array's total size can be any size."""
     ignore, attributes, id, ignore = _extractMetadata(a, axes, attributes, id)
     if axes is not None:
         axesshape = [len(item) for item in axes]
-        if axesshape != list(new_shape):
-            raise CDMSError('axes must be shaped %s' % repr(newshape))
+        if axesshape!=list(new_shape):
+            raise CDMSError, 'axes must be shaped %s'%`newshape`
     ta = _makeMaskedArg(a)
     maresult = numpy.ma.resize(ta, new_shape)
-    F = getattr(a, "fill_value", 1.e20)
+    F=getattr(a,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
-def masked_array(a, mask=None, fill_value=None,
-                 axes=None, attributes=None, id=None):
-    """masked_array(a, mask=None) =
+def masked_array (a, mask=None, fill_value=None, axes=None, attributes=None, id=None):
+    """masked_array(a, mask=None) = 
        array(a, mask=mask, copy=0, fill_value=fill_value)
        Use fill_value(a) if None.
     """
-    maresult = numpy.ma.masked_array(
-        _makeMaskedArg(a),
-        mask=mask,
-     fill_value=fill_value)
+    maresult = numpy.ma.masked_array(_makeMaskedArg(a), mask=mask, fill_value=fill_value)
     axes, attributes, id, grid = _extractMetadata(a, axes, attributes, id)
-    F = getattr(a, "fill_value", 1.e20)
+    F=getattr(a,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
-def masked_values(data, value, rtol=1.e-5, atol=1.e-8, copy=1,
-                  savespace=0, axes=None, attributes=None, id=None):
+def masked_values (data, value, rtol=1.e-5, atol=1.e-8, copy=1,
+    savespace=0, axes=None, attributes=None, id=None): 
     """
        masked_values(data, value, rtol=1.e-5, atol=1.e-8)
        Create a masked array; mask is None if possible.
        May share data values with original array, but not recommended.
        Masked where abs(data-value)<= atol + rtol * abs(value)
     """
-    maresult = numpy.ma.masked_values(
-        _makeMaskedArg(data),
-        value,
-     rtol=rtol,
-     atol=atol,
-     copy=copy)
+    maresult = numpy.ma.masked_values(_makeMaskedArg(data), value, rtol=rtol, atol=atol, copy=copy)
     axes, attributes, id, grid = _extractMetadata(data, axes, attributes, id)
-    F = getattr(data, "fill_value", 1.e20)
+    F=getattr(data,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
 
-
-def masked_object(data, value, copy=1,
-                  savespace=0, axes=None, attributes=None, id=None):
+def masked_object (data, value, copy=1, savespace=0, axes=None, attributes=None, id=None):
     "Create array masked where exactly data equal to value"
     maresult = numpy.ma.masked_object(_makeMaskedArg(data), value, copy=copy)
     axes, attributes, id, grid = _extractMetadata(data, axes, attributes, id)
-    F = getattr(data, "fill_value", 1.e20)
+    F=getattr(data,"fill_value",1.e20)
     return TransientVariable(maresult, axes=axes, attributes=attributes, id=id, grid=grid, fill_value=F)
-
-
-def isMaskedVariable(x):
+    
+def isMaskedVariable (x):
     "Is x a masked variable, that is, an instance of AbstractVariable?"
     return isinstance(x, AbstractVariable)
 
-
 def set_default_fill_value(value_type, value):
     """Set the default fill value for value_type to value.
     value_type is a string: 'real','complex','character','integer',or 'object'.
@@ -1013,23 +896,24 @@ def set_default_fill_value(value_type, value):
     elif value_type == 'object':
         numpy.ma.default_object_fill_value = value
 
-
-def fromfunction(f, dimensions):
+def fromfunction (f, dimensions):
     """Apply f to s to create an array as in numpy."""
     return TransientVariable(numpy.ma.fromfunction(f, dimensions))
 
-
-def diagonal(a, offset=0, axis1=0, axis2=1):
-    """diagonal(a, offset=0, axis1=0, axis2 = 1) returns the given
+def diagonal (a, offset = 0, axis1=0, axis2 = 1):
+    """diagonal(a, offset=0, axis1=0, axis2 = 1) returns the given 
        diagonals defined by the two dimensions of the array.
     """
-    F = getattr(a, "fill_value", 1.e20)
-    return TransientVariable(numpy.ma.diagonal(_makeMaskedArg(a),
-                                               offset, axis1, axis2), fill_value=F)
+    F=getattr(a,"fill_value",1.e20)
+    return TransientVariable(numpy.ma.diagonal(_makeMaskedArg(a), 
+            offset, axis1, axis2), fill_value=F)
 
-
-def fromstring(s, t):
+def fromstring (s, t):
     """Construct a masked array from a string. Result will have no mask.
        t is a typecode.
     """
-    return TransientArray(numpy.ma.fromstring(s, t))
+    return TransientArray(numpy.ma.fromstring(s,t))
+
+
+
+
diff --git a/Packages/cdms2/Lib/__init__.py b/Packages/cdms2/Lib/__init__.py
index e3e53ff09..874601632 100644
--- a/Packages/cdms2/Lib/__init__.py
+++ b/Packages/cdms2/Lib/__init__.py
@@ -2,35 +2,35 @@
 CDMS module-level API
 """
 import cdat_info
-cdat_info.pingPCMDIdb("cdat", "cdms2")
+cdat_info.pingPCMDIdb("cdat","cdms2")
 
-__all__ = ["cdmsobj", "axis", "coord", "grid", "hgrid", "avariable",
-           "sliceut", "error", "variable", "fvariable", "tvariable", "dataset",
-           "database", "cache", "selectors", "MV2", "convention", "bindex",
-           "auxcoord", "gengrid", "gsHost", "gsStaticVariable", "gsTimeVariable",
-           "mvBaseWriter", "mvSphereMesh", "mvVsWriter", "mvCdmsRegrid"]
+__all__ = ["cdmsobj", "axis", "coord", "grid", "hgrid", "avariable", \
+"sliceut", "error", "variable", "fvariable", "tvariable", "dataset", \
+"database", "cache", "selectors", "MV2", "convention", "bindex", \
+"auxcoord", "gengrid", "gsHost", "gsStaticVariable", "gsTimeVariable", \
+"mvBaseWriter", "mvSphereMesh", "mvVsWriter", "mvCdmsRegrid"]
 
 # Errors
-from .error import CDMSError
+from error import CDMSError
 
 # CDMS datatypes
-from .cdmsobj import CdArray, CdChar, CdByte, CdDouble, CdFloat, CdFromObject, CdInt, CdLong, CdScalar, CdShort, CdString
+from cdmsobj import CdArray, CdChar, CdByte, CdDouble, CdFloat, CdFromObject, CdInt, CdLong, CdScalar, CdShort, CdString
 
 # Functions which operate on all objects or groups of objects
-from .cdmsobj import Unlimited, getPathFromTemplate, matchPattern, matchingFiles, searchPattern, searchPredicate, setDebugMode
+from cdmsobj import Unlimited, getPathFromTemplate, matchPattern, matchingFiles, searchPattern, searchPredicate, setDebugMode
 
 # Axis functions and classes
-from .axis import AbstractAxis, axisMatches, axisMatchAxis, axisMatchIndex
-from .axis import createAxis, createEqualAreaAxis, createGaussianAxis, createUniformLatitudeAxis, createUniformLongitudeAxis, setAutoBounds, getAutoBounds
+from axis import AbstractAxis, axisMatches, axisMatchAxis, axisMatchIndex
+from axis import createAxis, createEqualAreaAxis, createGaussianAxis, createUniformLatitudeAxis, createUniformLongitudeAxis, setAutoBounds, getAutoBounds
 
 # Grid functions
-from .grid import createGenericGrid, createGlobalMeanGrid, createRectGrid, createUniformGrid, createZonalGrid, setClassifyGrids, createGaussianGrid, writeScripGrid, isGrid
+from grid import createGenericGrid, createGlobalMeanGrid, createRectGrid, createUniformGrid, createZonalGrid, setClassifyGrids, createGaussianGrid, writeScripGrid, isGrid
 
 # Dataset functions
-from .dataset import createDataset, openDataset, useNetcdf3, \
-    getNetcdfClassicFlag, getNetcdfShuffleFlag, getNetcdfDeflateFlag, getNetcdfDeflateLevelFlag,\
+from dataset import createDataset, openDataset, useNetcdf3, \
+        getNetcdfClassicFlag, getNetcdfShuffleFlag, getNetcdfDeflateFlag, getNetcdfDeflateLevelFlag,\
         setNetcdfClassicFlag, setNetcdfShuffleFlag, setNetcdfDeflateFlag, setNetcdfDeflateLevelFlag,\
-        setNetcdfUseNCSwitchModeFlag, getNetcdfUseNCSwitchModeFlag,\
+        setNetcdfUseNCSwitchModeFlag,getNetcdfUseNCSwitchModeFlag,\
         setCompressionWarnings,\
         setNetcdf4Flag, getNetcdf4Flag,\
         setNetcdfUseParallelFlag, getNetcdfUseParallelFlag, \
@@ -39,31 +39,31 @@ from .dataset import createDataset, openDataset, useNetcdf3, \
 open = openDataset
 
 # Database functions
-from .database import connect, Base, Onelevel, Subtree
+from database import connect, Base, Onelevel, Subtree
 
-# Selectors
-from . import selectors
-from .selectors import longitude, latitude, time, level, required, \
-    longitudeslice, latitudeslice, levelslice, timeslice
+#Selectors
+import selectors
+from selectors import longitude, latitude, time, level, required, \
+                      longitudeslice, latitudeslice, levelslice, timeslice
 
-from .avariable import order2index, orderparse, setNumericCompatibility, getNumericCompatibility
+from avariable import order2index, orderparse, setNumericCompatibility, getNumericCompatibility
 # TV
-from .tvariable import asVariable, createVariable, isVariable
+from tvariable import asVariable, createVariable, isVariable
 
-from .mvSphereMesh import SphereMesh
-from .mvBaseWriter import BaseWriter
-from .mvVsWriter import VsWriter
-from .mvVTKSGWriter import VTKSGWriter
-from .mvVTKUGWriter import VTKUGWriter
-from .mvCdmsRegrid import CdmsRegrid
+from mvSphereMesh import SphereMesh
+from mvBaseWriter import BaseWriter
+from mvVsWriter import VsWriter
+from mvVTKSGWriter import VTKSGWriter
+from mvVTKUGWriter import VTKUGWriter
+from mvCdmsRegrid import CdmsRegrid
 
 # Gridspec is not installed by default so just pass on if it isn't installed
 try:
-    from .gsStaticVariable import StaticFileVariable
-    from .gsTimeVariable import TimeFileVariable
+    from gsStaticVariable import StaticFileVariable
+    from gsTimeVariable import TimeFileVariable
 except:
     pass
 
-from .restApi import esgfConnection, esgfDataset, FacetConnection
+from restApi import esgfConnection,esgfDataset,FacetConnection
 
 MV = MV2
diff --git a/Packages/cdms2/Lib/auxcoord.py b/Packages/cdms2/Lib/auxcoord.py
index 334831dce..24a97e8c3 100644
--- a/Packages/cdms2/Lib/auxcoord.py
+++ b/Packages/cdms2/Lib/auxcoord.py
@@ -4,66 +4,50 @@ CDMS 1-D auxiliary coordinates.
 Note: In contrast to Axis objects (concrete classes subclassed from AbstractAxis), auxiliary coordinate variables are not monotonic in value, and do not share a name with the dimension.
 """
 
-# import internattr
-from .error import CDMSError
-from .coord import AbstractCoordinateAxis
-from .fvariable import FileVariable
-from .variable import DatasetVariable
-from .tvariable import TransientVariable
-from .avariable import AbstractVariable
-
+## import internattr
+from error import CDMSError
+from coord import AbstractCoordinateAxis
+from fvariable import FileVariable
+from variable import DatasetVariable
+from tvariable import TransientVariable
+from avariable import AbstractVariable
 
 class AbstractAuxAxis1D(AbstractCoordinateAxis):
 
-    def __init__(self, parent=None, variableNode=None, bounds=None):
-        AbstractCoordinateAxis.__init__(
-            self,
-            parent,
-            variableNode,
-            bounds=bounds)
+    def __init__ (self, parent=None, variableNode=None, bounds=None):
+        AbstractCoordinateAxis.__init__(self, parent, variableNode, bounds=bounds)
 
-    def clone(self, copyData=1):
+    def clone (self, copyData=1):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
-        result = TransientAuxAxis1D(
-            self[:],
-            copy=copyData,
-            axes=self.getAxisList(),
-            attributes=self.attributes,
-            bounds=self.getBounds())
+        result = TransientAuxAxis1D(self[:], copy=copyData, axes=self.getAxisList(), attributes=self.attributes, bounds=self.getBounds())
         return result
 
     def setBounds(self, bounds):
         if bounds is not None:
-            if len(bounds.shape) != 2:
-                raise CDMSError('Bounds must have rank=2')
-            if bounds.shape[0:1] != self.shape:
-                raise CDMSError(
-                    'Bounds shape %s is inconsistent with axis shape %s' %
-                    (repr(bounds.shape), repr(self.shape)))
+            if len(bounds.shape)!=2:
+                raise CDMSError, 'Bounds must have rank=2'
+            if bounds.shape[0:1]!=self.shape:
+                raise CDMSError, 'Bounds shape %s is inconsistent with axis shape %s'%(`bounds.shape`,`self.shape`)
         AbstractCoordinateAxis.setBounds(self, bounds)
 
-    def subSlice(self, *specs, **keys):
+    def subSlice (self, *specs, **keys):
         # Take a subslice, returning a TransientAuxAxis1D
         avar = AbstractVariable.subSlice(self, *specs, **keys)
         bounds = self.getBounds()
         if bounds is None:
             newbounds = None
         else:
-            newbounds = bounds[
-                specs]   # bounds can be a numarray or DatasetVariable
+            newbounds = bounds[specs]   # bounds can be a numarray or DatasetVariable
 
-        # Note: disable axis copy to preserve identity of grid and variable
-        # domains
-        result = TransientAuxAxis1D(avar, bounds=newbounds, copyaxes=0)
+        # Note: disable axis copy to preserve identity of grid and variable domains
+        result = TransientAuxAxis1D(avar, bounds=newbounds, copyaxes=0)    
         return result
 
-
 class DatasetAuxAxis1D(AbstractAuxAxis1D, DatasetVariable):
 
     # Note: node is a VariableNode
-
     def __init__(self, parent, id=None, variableNode=None, bounds=None):
         AbstractAuxAxis1D.__init__(self, parent, variableNode, bounds=bounds)
         DatasetVariable.__init__(self, parent, id, variableNode)
@@ -71,13 +55,11 @@ class DatasetAuxAxis1D(AbstractAuxAxis1D, DatasetVariable):
 
     def __repr__(self):
         if self.parent is not None:
-            return "<DatasetAuxAxis1D: %s, file: %s, shape: %s>" % (self.id, self.parent.id, repr(self.shape))
+            return "<DatasetAuxAxis1D: %s, file: %s, shape: %s>"%(self.id, self.parent.id, `self.shape`)
         else:
-            return "<DatasetAuxAxis1D: %s, file: **CLOSED**>" % self.id
-
-# internattr.initialize_internal_attributes(DatasetAuxAxis1D) # Copy
-# internal attrs from parents
+            return "<DatasetAuxAxis1D: %s, file: **CLOSED**>"%self.id
 
+## internattr.initialize_internal_attributes(DatasetAuxAxis1D) # Copy internal attrs from parents
 
 class FileAuxAxis1D(AbstractAuxAxis1D, FileVariable):
 
@@ -88,18 +70,15 @@ class FileAuxAxis1D(AbstractAuxAxis1D, FileVariable):
 
     def __repr__(self):
         if self.parent is not None:
-            return "<FileAuxAxis1D: %s, file: %s, shape: %s>" % (self.id, self.parent.id, repr(self.shape))
+            return "<FileAuxAxis1D: %s, file: %s, shape: %s>"%(self.id, self.parent.id, `self.shape`)
         else:
-            return "<FileAuxAxis1D: %s, file: **CLOSED**>" % self.id
-
-# internattr.initialize_internal_attributes(FileAuxAxis1D) # Copy internal
-# attrs from parents
+            return "<FileAuxAxis1D: %s, file: **CLOSED**>"%self.id
 
+## internattr.initialize_internal_attributes(FileAuxAxis1D) # Copy internal attrs from parents
 
 class TransientAuxAxis1D(AbstractAuxAxis1D, TransientVariable):
 
-    def __init__(
-        self, data, typecode=None, copy=0, savespace=0, mask=None, fill_value=None,
+    def __init__(self, data, typecode=None, copy=0, savespace=0, mask=None, fill_value=None,
                  axes=None, attributes=None, id=None, copyaxes=1, bounds=None):
         """Create a transient, auxiliary 1-D axis.
         All arguments are as for TransientVariable.
@@ -107,12 +86,11 @@ class TransientAuxAxis1D(AbstractAuxAxis1D, TransientVariable):
           nvert is the max number of vertices per cell.
         """
         AbstractAuxAxis1D.__init__(self, None, None, bounds=bounds)
-        TransientVariable.__init__(
-            self, data, typecode=typecode, copy=copy, savespace=savespace,
+        TransientVariable.__init__(self, data, typecode=typecode, copy=copy, savespace=savespace,
                                    mask=mask, fill_value=fill_value, axes=axes, attributes=attributes,
                                    id=id, copyaxes=copyaxes)
         if axes is not None:
             self.setBounds(bounds)
 
-# internattr.initialize_internal_attributes(TransientAuxAxis1D) # Copy
-# internal attrs from parents
+## internattr.initialize_internal_attributes(TransientAuxAxis1D) # Copy internal attrs from parents
+
diff --git a/Packages/cdms2/Lib/avariable.py b/Packages/cdms2/Lib/avariable.py
index 1a6b0d2f3..f15d7a069 100644
--- a/Packages/cdms2/Lib/avariable.py
+++ b/Packages/cdms2/Lib/avariable.py
@@ -1,26 +1,28 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+## Further modified to be pure new numpy June 24th 2008
 
 "CDMS Variable objects, abstract interface"
 import numpy
+import types
+import string
 import re
 import warnings
-from . import cdmsNode
-from .cdmsobj import CdmsObj
+import cdmsNode
+from cdmsobj import CdmsObj
 import cdms2
-from .slabinterface import Slab
-from .sliceut import *
-from .error import CDMSError
-from .axis import axisMatchIndex, axisMatchAxis, axisMatches, unspecified, CdtimeTypes, AbstractAxis
-from . import selectors
+from slabinterface import Slab
+from sliceut import *
+from error import CDMSError
+from axis import axisMatchIndex, axisMatchAxis, axisMatches, unspecified, CdtimeTypes, AbstractAxis
+import selectors
 import copy
 # from regrid2 import Regridder, PressureRegridder, CrossSectionRegridder
-from .mvCdmsRegrid import CdmsRegrid
+from mvCdmsRegrid import CdmsRegrid
 from regrid2.mvGenericRegrid import guessPeriodicity
-# import PropertiedClasses
-from .convention import CF1
-from .grid import AbstractRectGrid
-# import internattr
+#import PropertiedClasses
+from convention import CF1
+from grid import AbstractRectGrid
+#import internattr
 
 InvalidRegion = "Invalid region: "
 OutOfRange = "Coordinate interval is out of range or intersection has no data: "
@@ -32,10 +34,9 @@ _numeric_compatibility = False          # Backward compatibility with numpy beha
                                         # True:  return 0-D arrays
                                         #        MV axis=1 by default
 
-
 def getMinHorizontalMask(var):
     """
-    Get the minimum mask associated with 'x' and 'y' (i.e. with the
+    Get the minimum mask associated with 'x' and 'y' (i.e. with the 
     min number of ones) across all axes
     @param var CDMS variable with a mask
     @return mask array or None if order 'x' and 'y' were not found
@@ -47,7 +48,7 @@ def getMinHorizontalMask(var):
 
     shp = var.shape
     ndims = len(shp)
-    order = var.getOrder()  # e.g. 'zxty-', ndims = 5
+    order = var.getOrder() # e.g. 'zxty-', ndims = 5
 
     # run a few checks
     numX = order.count('x')
@@ -58,43 +59,43 @@ def getMinHorizontalMask(var):
         msg = """
 Not able to locate the horizontal (y, x) axes for order = %s in getMinHorizontalMask
         """ % str(order)
-        raise CDMSError(msg)
-
-    ps = []  # index position of x/y, e.g. [1,3]
-    es = []  # end indices, sizes of x/y axes
+        raise CDMSError, msg
+        
+    
+    ps = [] # index position of x/y, e.g. [1,3]
+    es = [] # end indices, sizes of x/y axes
     nonHorizShape = []
     found = False
-    for i in range(ndims - 1, -1, -1):
-        # iterate backwards because the horizontal
+    for i in range(ndims-1, -1, -1):
+        # iterate backwards because the horizontal 
         # axes are more likely to be last
         o = order[i]
         # curvilinear coordinates have '-' in place of
-        # x or y, also check for '-' but exit if we think
+        # x or y, also check for '-' but exit if we think 
         # we found the x and y coords
         if not found and (o in 'xy') or (not hasXY and o == '-'):
-            ps = [i, ] + ps
-            es = [shp[i], ] + es
+            ps = [i,] + ps
+            es = [shp[i],] + es
             if len(ps) == 2:
                 found = True
         else:
-            nonHorizShape = [shp[i], ] + nonHorizShape
+            nonHorizShape = [shp[i],] + nonHorizShape
 
     if len(ps) == 2:
-        # found all the horizontal axes, start with mask
+        # found all the horizontal axes, start with mask 
         # set to invalid everywhere
         mask = numpy.ones(es, numpy.bool8)
         # iterate over all non-horizontal axes, there can be as
         # many as you want...
         for it in MultiArrayIter(nonHorizShape):
-            inds = it.getIndices()  # (i0, i1, i2)
+            inds = it.getIndices() # (i0, i1, i2)
             # build the slice operator, there are three parts
-            # (head, middle, and tail), some parts may be
+            # (head, middle, and tail), some parts may be 
             # missing
             # slce = 'i0,' + ':,'   +   'i1,'  +   ':,' + 'i2,'
-            slce = ('%d,' * ps[0]) % tuple(inds[:ps[0]]) + ':,'            \
-                + ('%d,' * (ps[1] - ps[0] - 1)) % tuple(inds[ps[0]:ps[1] - 1])   \
-                + ':,' + \
-                ('%d,' * (ndims - ps[1] - 1)) % tuple(inds[ps[1] - 1:])
+            slce = ('%d,'*ps[0]) % tuple(inds[:ps[0]]) + ':,'            \
+                + ('%d,'*(ps[1]-ps[0]-1)) % tuple(inds[ps[0]:ps[1]-1])   \
+                + ':,' + ('%d,'*(ndims-ps[1]-1)) % tuple(inds[ps[1]-1:])
             # evaluate the slice for this time, level....
             mask &= eval('var.mask[' + slce + ']')
         return mask
@@ -102,49 +103,45 @@ Not able to locate the horizontal (y, x) axes for order = %s in getMinHorizontal
         msg = """
 Could not find all the horizontal axes for order = %s in getMinHorizontalMask
         """ % str(order)
-        raise CDMSError(msg)
-    return None
-
+        raise CDMSError, msg
+    return None  
 
 def setNumericCompatibility(mode):
     global _numeric_compatibility
-    if mode or mode == 'on':
+    if mode==True or mode=='on':
         _numeric_compatibility = True
-    elif mode == False or mode == 'off':
+    elif mode==False or mode=='off':
         _numeric_compatibility = False
 
-
 def getNumericCompatibility():
     return _numeric_compatibility
 
-
 class AbstractVariable(CdmsObj, Slab):
-
-    def __init__(self, parent=None, variableNode=None):
+    def __init__ (self, parent=None, variableNode=None):
         """Not to be called by users.
            variableNode is the variable tree node, if any.
            parent is the containing dataset instance.
         """
-        if variableNode is not None and variableNode.tag != 'variable':
-            raise CDMSError('Node is not a variable node')
+        if variableNode is not None and variableNode.tag !='variable':
+            raise CDMSError, 'Node is not a variable node'
         CdmsObj.__init__(self, variableNode)
-        val = self.__cdms_internals__ + ['id', 'domain']
-        self.___cdms_internals__ = val
+        val = self.__cdms_internals__ + ['id','domain']
+        self.___cdms_internals__ = val 
         Slab.__init__(self)
         self.id = None                  # Transient variables key on this to create a default ID
         self.parent = parent
         self._grid_ = None      # Variable grid, if any
-        if not hasattr(self, 'missing_value'):
+        if not hasattr(self,'missing_value'):
             self.missing_value = None
         elif numpy.isnan(self.missing_value):
-            self.missing_value = None
+          self.missing_value = None
 
         # Reminder: children to define self.shape and set self.id
 
-    def __array__(self, t=None, context=None):  # Numeric, ufuncs call this
+    def __array__ (self, t=None, context=None):  #Numeric, ufuncs call this
         return numpy.ma.filled(self.getValue(squeeze=0))
 
-    def __call__(self, *args, **kwargs):
+    def __call__ (self, *args,  **kwargs):
         "Selection of a subregion using selectors"
         # separate options from selector specs
         d = kwargs.copy()
@@ -157,13 +154,13 @@ class AbstractVariable(CdmsObj, Slab):
         s = selectors.Selector(*args, **d)
         # get the selection
         return s.unmodified_select(self, raw=raw,
-                                   squeeze=squeeze,
-                                   order=order,
-                                   grid=grid)
+                                         squeeze=squeeze, 
+                                         order=order, 
+                                         grid=grid)
 
     select = __call__
 
-    def rank(self):
+    def rank (self):
         return len(self.shape)
 
     def _returnArray(self, ar, squeeze, singles=None):
@@ -171,90 +168,85 @@ class AbstractVariable(CdmsObj, Slab):
         # job is to make sure we return an numpy.ma or a scalar.
         # If singles is not None, squeeze dimension indices in singles
         inf = 1.8e308
-        if isinstance(ar, cdms2.tvariable.TransientVariable):
-            result = numpy.ma.array(ar._data, mask=ar.mask)
-        elif numpy.ma.isMaskedArray(ar):  # already numpy.ma, only need squeeze.
+        if isinstance(ar,cdms2.tvariable.TransientVariable):
+            result = numpy.ma.array(ar._data,mask=ar.mask)
+        elif numpy.ma.isMaskedArray(ar):   #already numpy.ma, only need squeeze.
             result = ar
         elif isinstance(ar, numpy.ndarray):
             missing = self.getMissing()
             if missing is None:
                 result = numpy.ma.masked_array(ar)
-            elif missing == inf or missing != missing:  # (x!=x) ==> x is NaN
+            elif missing==inf or missing!=missing: # (x!=x) ==> x is NaN
                 result = numpy.ma.masked_object(ar, missing, copy=0)
-            elif ar.dtype.char == 'c':
+            elif ar.dtype.char=='c':
                 # umath.equal is not implemented
-                resultmask = (ar == missing)
+                resultmask = (ar==missing)
                 if not resultmask.any():
                     resultmask = numpy.ma.nomask
-                result = numpy.ma.masked_array(
-                    ar,
-                    mask=resultmask,
-                    fill_value=missing)
+                result = numpy.ma.masked_array(ar, mask=resultmask, fill_value=missing)
             else:
                 result = numpy.ma.masked_values(ar, missing, copy=0)
         elif ar is numpy.ma.masked:
-            return ar
-        else:  # scalar, but it might be the missing value
+            return ar  
+        else: # scalar, but it might be the missing value
             missing = self.getMissing()
             if missing is None:
-                return ar  # scalar
+                return ar #scalar
             else:
                 result = numpy.ma.masked_values(ar, missing, copy=0)
 
         squoze = 0
         if squeeze:
             n = 1
-            newshape = []
+            newshape=[]
             for s in result.shape:
-                if s == 1:
-                    squoze = 1
-                    continue
-                else:
-                    n = n * s
-                    newshape.append(s)
+               if s == 1: 
+                   squoze = 1
+                   continue
+               else:
+                   n = n * s
+                   newshape.append(s)
         elif singles is not None:
             n = 1
-            newshape = []
+            newshape=[]
             oldshape = result.shape
             for i in range(len(oldshape)):
-                if i in singles:
-                    squoze = 1
-                    continue
-                else:
-                    s = oldshape[i]
-                    n = n * s
-                    newshape.append(s)
-
+               if i in singles: 
+                   squoze = 1
+                   continue
+               else:
+                   s = oldshape[i]
+                   n = n * s
+                   newshape.append(s)
+            
         else:
             n = numpy.ma.size(result)
         if n == 1 and squeeze:
-            return numpy.ma.ravel(result)[0]  # scalar or masked
+            return numpy.ma.ravel(result)[0] # scalar or masked
         if squoze:
             result.shape = newshape
         return result
 
     def generateGridkey(self, convention, vardict):
-        """ generateGridkey(): Determine if the variable is gridded,
+        """ generateGridkey(): Determine if the variable is gridded, 
             and generate ((latname, lonname, order, maskname, class), lat, lon) if gridded,
             or (None, None, None) if not gridded. vardict is the variable dictionary of the parent"""
 
         lat, nlat = convention.getVarLatId(self, vardict)
         lon, nlon = convention.getVarLonId(self, vardict)
         if (lat is not None) and (lat is lon):
-            raise CDMSError(
-                "Axis %s is both a latitude and longitude axis! Check standard_name and/or axis attributes." %
-                lat.id)
+            raise CDMSError, "Axis %s is both a latitude and longitude axis! Check standard_name and/or axis attributes."%lat.id
 
         # Check for 2D grid
         if (lat is None) or (lon is None):
             return None, lat, lon
 
         # Check for a rectilinear grid
-        if isinstance(lat, AbstractAxis) and isinstance(lon, AbstractAxis) and (lat.rank() == lon.rank() == 1):
+        if isinstance(lat, AbstractAxis) and isinstance(lon, AbstractAxis) and (lat.rank()==lon.rank()==1):
             return self.generateRectGridkey(lat, lon), lat, lon
 
         # Check for a curvilinear grid:
-        if lat.rank() == lon.rank() == 2:
+        if lat.rank()==lon.rank()==2:
 
             # check that they are defined on the same indices as self
             vardomain = self.getAxisIds()
@@ -268,7 +260,7 @@ class AbstractVariable(CdmsObj, Slab):
                     if axisid not in vardomain:
                         allok = 0
                         break
-
+            
             # It's a curvilinear grid
             if allok:
                 if hasattr(lat, 'maskid'):
@@ -278,7 +270,7 @@ class AbstractVariable(CdmsObj, Slab):
                 return (lat.id, lon.id, 'yx', maskid, 'curveGrid'), lat, lon
 
         # Check for a generic grid:
-        if lat.rank() == lon.rank() == 1:
+        if lat.rank()==lon.rank()==1:
 
             # check that they are defined on the same indices as self
             vardomain = self.getAxisIds()
@@ -292,7 +284,7 @@ class AbstractVariable(CdmsObj, Slab):
                     if axisid not in vardomain:
                         allok = 0
                         break
-
+            
             # It's a generic grid
             if allok:
                 if hasattr(lat, 'maskid'):
@@ -317,14 +309,12 @@ class AbstractVariable(CdmsObj, Slab):
                 ilat = k
             k += 1
 
-        if ilat == -1:
-            raise CDMSError(
-                "Cannot find latitude axis; check standard_name and/or axis attributes")
-        if ilon == -1:
-            raise CDMSError(
-                "Cannot find longitude axis; check standard_name and/or axis attributes")
+        if ilat==-1:
+            raise CDMSError, "Cannot find latitude axis; check standard_name and/or axis attributes"
+        if ilon==-1:
+            raise CDMSError, "Cannot find longitude axis; check standard_name and/or axis attributes"
 
-        if ilat < ilon:
+        if ilat<ilon:
             order = "yx"
         else:
             order = "xy"
@@ -343,14 +333,12 @@ class AbstractVariable(CdmsObj, Slab):
             alist = [d[0] for d in self.getDomain()]
             gridok = grid.checkAxes(alist)
         if not gridok:
-            raise CDMSError(
-                "grid does not match axes for variable %s" %
-                self.id)
+            raise CDMSError, "grid does not match axes for variable %s"%self.id
         self._grid_ = grid
 
-    def getDomain(self):
+    def getDomain (self):
         "Get the list of axes"
-        raise CDMSError("getDomain not overriden in child")
+        raise CDMSError, "getDomain not overriden in child"
 
     def getConvention(self):
         "Get the metadata convention associated with this object."
@@ -359,15 +347,14 @@ class AbstractVariable(CdmsObj, Slab):
         else:
             result = CF1
         return result
-
+            
 # A child class may want to override this
     def getAxis(self, n):
         "Get the n-th axis"
-        if n < 0:
-            n = n + self.rank()
+        if n < 0: n = n + self.rank()
         return self.getDomain()[n][0]
 
-    def getAxisIndex(self, axis_spec):
+    def getAxisIndex (self, axis_spec):
         """Return the index of the axis specificed by axis_spec.
          Argument axis_spec and be as for axisMatches
          Return -1 if no match.
@@ -387,7 +374,7 @@ class AbstractVariable(CdmsObj, Slab):
                 return True
         return False
 
-    def getAxisListIndex(self, axes=None, omit=None, order=None):
+    def getAxisListIndex (self, axes=None, omit=None, order=None):
         """Return a list of indices of axis objects;
            If axes is not None, include only certain axes.
            less the ones specified in omit. If axes is None,
@@ -396,15 +383,15 @@ class AbstractVariable(CdmsObj, Slab):
         """
         return axisMatchIndex(self.getAxisList(), axes, omit, order)
 
-    def getAxisList(self, axes=None, omit=None, order=None):
-        """Get the list of axis objects;
+    def getAxisList(self, axes = None, omit=None, order=None):
+        """Get the list of axis objects; 
            If axes is not None, include only certain axes.
            If omit is not None, omit those specified by omit.
            Arguments omit or axes  may be as specified in axisMatchAxis
            order is an optional string determining the output order
         """
         alist = [d[0] for d in self.getDomain()]
-        return axisMatchAxis(alist, axes, omit, order)
+        return axisMatchAxis (alist, axes, omit, order)
 
     def getAxisIds(self):
         "Get a list of axis identifiers"
@@ -422,12 +409,12 @@ class AbstractVariable(CdmsObj, Slab):
         except:
             mv = self.missing_value
 
-        if mv is None and hasattr(self, '_FillValue'):
+        if mv is None and hasattr(self,'_FillValue'):
             mv = self._FillValue
-
-        if asarray == 0 and isinstance(mv, numpy.ndarray):
+            
+        if asarray==0 and isinstance(mv, numpy.ndarray):
             mv = mv[0]
-        if isinstance(mv, basestring) and self.dtype.char not in ['?', 'c', 'O', 'S']:
+        if type(mv) is types.StringType and self.dtype.char not in ['?','c','O','S']:
             mv = float(mv)
         return mv
 
@@ -444,26 +431,26 @@ class AbstractVariable(CdmsObj, Slab):
         if value is None:
             self._basic_set('missing_value', value)
             return
-
+            
         selftype = self.typecode()
-        if isinstance(value, numpy.ndarray):
+        valuetype = type(value)
+        if valuetype is numpy.ndarray:
             value = value.astype(selftype).item()
-        elif isinstance(value, (numpy.floating, numpy.integer)):
+        elif isinstance(value, numpy.floating) or isinstance(value, numpy.integer):
             value = numpy.array([value], selftype)
-        elif isinstance(value, (float, int, long, complex)):
+        elif valuetype in [types.FloatType, types.IntType, types.LongType, types.ComplexType]:
             try:
                 value = numpy.array([value], selftype)
             except:                     # Set fill value when ar[i:j] returns a masked value
-                value = numpy.array(
-                    [numpy.ma.default_fill_value(self)],
-                    selftype)
-        elif isinstance(value, (basestring, numpy.string_, numpy.str, numpy.string0, numpy.str_)) and selftype in ['?', 'c', 'O', 'S']:  # '?' for Boolean and object
+                value = numpy.array([numpy.ma.default_fill_value(self)], selftype)
+        elif isinstance(value,(str,numpy.string_,numpy.str,numpy.string0,numpy.str_)) and selftype in ['?','c','O','S']: # '?' for Boolean and object
             pass
         else:
-            raise CDMSError('Invalid missing value %s' % repr(value))
-
+            raise CDMSError, 'Invalid missing value %s'%`value`
+        
         self.missing_value = value
 
+
     def getTime(self):
         "Get the first time dimension, or None if not found"
         for k in range(self.rank()):
@@ -483,12 +470,11 @@ class AbstractVariable(CdmsObj, Slab):
                 break
         else:
             return None
-
     def getForecast(self):
         return self.getForecastTime()
 
     def getLevel(self):
-        """Get the first vertical level dimension in the domain,
+        """Get the first vertical level dimension in the domain, 
            or None if not found.
         """
         for k in range(self.rank()):
@@ -506,7 +492,7 @@ class AbstractVariable(CdmsObj, Slab):
             result = grid.getLatitude()
         else:
             result = None
-
+            
         if result is None:
             for k in range(self.rank()):
                 result = self.getAxis(k)
@@ -514,7 +500,7 @@ class AbstractVariable(CdmsObj, Slab):
                     break
             else:
                 result = None
-
+                
         return result
 
     def getLongitude(self):
@@ -524,7 +510,7 @@ class AbstractVariable(CdmsObj, Slab):
             result = grid.getLongitude()
         else:
             result = None
-
+            
         if result is None:
             for k in range(self.rank()):
                 result = self.getAxis(k)
@@ -532,20 +518,21 @@ class AbstractVariable(CdmsObj, Slab):
                     break
             else:
                 result = None
-
+                
         return result
+
     # Get an order string, such as "tzyx"
     def getOrder(self, ids=0):
         """getOrder(ids=0) returns the order string, such as tzyx.
 
          if ids == 0 (the default) for an axis that is not t,z,x,y
          the order string will contain a '-' in that location.
-         The result string will be of the same length as the number
+         The result string will be of the same length as the number 
          of axes. This makes it easy to loop over the dimensions.
 
          if ids == 1 those axes will be represented in the order
          string as (id) where id is that axis' id. The result will
-         be suitable for passing to order2index to get the
+         be suitable for passing to order2index to get the 
          corresponding axes, and to orderparse for dividing up into
          components.
         """
@@ -553,56 +540,50 @@ class AbstractVariable(CdmsObj, Slab):
         for k in range(self.rank()):
             axis = self.getAxis(k)
             if axis.isLatitude():
-                order = order + "y"
+                order = order+"y"
             elif axis.isLongitude():
-                order = order + "x"
+                order = order+"x"
             elif axis.isLevel():
-                order = order + "z"
+                order = order+"z"
             elif axis.isTime():
-                order = order + "t"
+                order = order+"t"
             elif ids:
                 order = order + '(' + axis.id + ')'
             else:
                 order = order + "-"
         return order
 
-    def subSlice(self, *specs, **keys):
-        speclist = self._process_specs(specs, keys)
-        numericSqueeze = keys.get('numericSqueeze', 0)
+    def subSlice (self, *specs, **keys):
+        speclist = self._process_specs (specs, keys)
+        numericSqueeze = keys.get('numericSqueeze',0)
 
         # Get a list of single-index specs
         if numericSqueeze:
             singles = self._single_specs(specs)
         else:
             singles = None
-        slicelist = self.specs2slices(speclist, force=1)
-        d = self.expertSlice(slicelist)
-        squeeze = keys.get('squeeze', 0)
-        raw = keys.get('raw', 0)
+        slicelist = self.specs2slices(speclist,force=1)
+        d = self.expertSlice (slicelist)
+        squeeze = keys.get ('squeeze', 0)
+        raw = keys.get('raw',0)
         order = keys.get('order', None)
         grid = keys.get('grid', None)
-        forceaxes = keys.get(
-            'forceaxes',
-            None)  # Force result to have these axes
-        raweasy = raw == 1 and order is None and grid is None
+        forceaxes = keys.get('forceaxes', None) # Force result to have these axes
+        raweasy = raw==1 and order is None and grid is None
         if not raweasy:
             if forceaxes is None:
                 axes = []
-                allaxes = [None] * self.rank()
+                allaxes = [None]*self.rank()
                 for i in range(self.rank()):
-                    slice = slicelist[i]
-                    if squeeze and numpy.ma.size(d, i) == 1:
-                        continue
-                    elif numericSqueeze and i in singles:
-                        continue
-                    # Don't wrap square-bracket slices
-                    axis = self.getAxis(
-                        i).subaxis(slice.start,
-                                   slice.stop,
-                                   slice.step,
-                                   wrap=(numericSqueeze == 0))
-                    axes.append(axis)
-                    allaxes[i] = axis
+                   slice = slicelist[i]
+                   if squeeze and numpy.ma.size(d, i) == 1:
+                       continue
+                   elif numericSqueeze and i in singles:
+                       continue
+                   # Don't wrap square-bracket slices
+                   axis = self.getAxis(i).subaxis(slice.start, slice.stop, slice.step, wrap=(numericSqueeze==0))
+                   axes.append(axis)
+                   allaxes[i] = axis
             else:
                 axes = forceaxes
 
@@ -613,46 +594,42 @@ class AbstractVariable(CdmsObj, Slab):
                 resultgrid = None
             else:
                 alist = [item[0] for item in self.getDomain()]
-                gridslices, newaxes = selfgrid.getGridSlices(
-                    alist, allaxes, slicelist)
+                gridslices, newaxes = selfgrid.getGridSlices(alist, allaxes, slicelist)
 
                 # If one of the grid axes was squeezed, the result grid is None
                 if None in newaxes:
                     resultgrid = None
                 else:
-                    resultgrid = selfgrid.subSlice(
-                        *gridslices,
-                        **{'forceaxes': newaxes})
+                    resultgrid = apply(selfgrid.subSlice, gridslices, {'forceaxes': newaxes})
 
         resultArray = self._returnArray(d, squeeze, singles=singles)
         if self.isEncoded():
-            resultArray = self.decode(resultArray)
+            resultArray  = self.decode(resultArray)
             newmissing = resultArray.fill_value
         else:
             newmissing = self.getMissing()
 
         if raweasy:
             return resultArray
-        elif len(axes) > 0:
+        elif len(axes)>0:
 
             # If forcing use of input axes, make sure they are not copied.
-            # Same if the grid is not rectilinear - this is when forceaxes is
-            # set.
+            # Same if the grid is not rectilinear - this is when forceaxes is set.
             copyaxes = (forceaxes is None) and (resultgrid is None)
-            result = TransientVariable(resultArray,
-                                       copy=0,
-                                       fill_value=newmissing,
-                                       axes=axes,
-                                       copyaxes=copyaxes,
-                                       grid=resultgrid,
-                                       attributes=self.attributes,
-                                       id=self.id)
+            result = TransientVariable(resultArray, 
+                                     copy=0,
+                                     fill_value = newmissing,
+                                     axes=axes,
+                                     copyaxes = copyaxes,
+                                     grid = resultgrid,
+                                     attributes=self.attributes,
+                                     id = self.id)
             if grid is not None:
                 order2 = grid.getOrder()
                 if order is None:
                     order = order2
                 elif order != order2:
-                    raise CDMSError('grid, order options not compatible.')
+                    raise CDMSError, 'grid, order options not compatible.'
             result = result.reorder(order).regrid(grid)
             if raw == 0:
                 return result
@@ -662,7 +639,7 @@ class AbstractVariable(CdmsObj, Slab):
         else:               # Return numpy.ma for zero rank, so that __cmp__ works.
             return resultArray
 
-    def getSlice(self, *specs, **keys):
+    def getSlice (self, *specs, **keys):
         """x.getSlice takes arguments of the following forms and produces
            a return array. The keyword argument squeeze determines whether
            or not the shape of the returned array contains dimensions whose
@@ -687,21 +664,21 @@ class AbstractVariable(CdmsObj, Slab):
            of that dimension, as in normal Python indexing.
         """
         # Turn on squeeze and raw options by default.
-        keys['numericSqueeze'] = keys.get('numericSqueeze', 0)
-        keys['squeeze'] = keys.get('squeeze', 1 - keys['numericSqueeze'])
-        keys['raw'] = keys.get('raw', 1)
+        keys['numericSqueeze'] = keys.get('numericSqueeze',0)
+        keys['squeeze'] = keys.get('squeeze',1-keys['numericSqueeze'])
+        keys['raw'] = keys.get('raw',1)
         keys['order'] = keys.get('order', None)
         keys['grid'] = keys.get('grid', None)
         isitem = keys.get('isitem', 0)
         result = self.subSlice(*specs, **keys)
 
         # return a scalar for 0-D slices
-        if isitem and result.size == 1 and (not _numeric_compatibility) and not result.mask.item():
+        if isitem and result.size==1 and (not _numeric_compatibility) and not result.mask.item():
             result = result.item()
         return result
 
     def expertSlice(self, slicelist):
-        raise CDMSError(NotImplemented + 'expertSlice')
+        raise CDMSError, NotImplemented + 'expertSlice'
 
     def getRegion(self, *specs, **keys):
         """getRegion
@@ -737,67 +714,65 @@ class AbstractVariable(CdmsObj, Slab):
         """
 
         # By default, squeeze and raw options are on
-        keys['squeeze'] = keys.get('squeeze', 1)
-        keys['raw'] = keys.get('raw', 1)
+        keys['squeeze'] = keys.get ('squeeze', 1)
+        keys['raw'] = keys.get('raw',1)
         keys['order'] = keys.get('order', None)
         keys['grid'] = keys.get('grid', None)
         return self.subRegion(*specs, **keys)
 
-    def subRegion(self, *specs, **keys):
+    def subRegion (self, *specs, **keys):
 
-        speclist = self._process_specs(specs, keys)
-        slicelist = self.reg_specs2slices(speclist)
+        speclist = self._process_specs (specs, keys)
+        slicelist = self.reg_specs2slices (speclist)
 
-        squeeze = keys.get('squeeze', 0)
-        raw = keys.get('raw', 0)
+        squeeze = keys.get ('squeeze', 0)
+        raw = keys.get('raw',0)
         order = keys.get('order', None)
         grid = keys.get('grid', None)
-        raweasy = raw == 1 and order is None and grid is None
+        raweasy = raw==1 and order is None and grid is None
         if grid is not None and order is None:
             order = grid.getOrder()
 
+
         # Check if any slice wraps around.
 
         wrapdim = -1
-
+        
         axes = []
 
         circulardim = None
-
+        
         for idim in range(len(slicelist)):
             item = slicelist[idim]
             axis = self.getAxis(idim)
             axislen = len(axis)
 
-            if(axis.isCircular()):
-                circulardim = idim
+            if(axis.isCircular()): circulardim=idim
 
-            wraptest1 = (axis.isCircular() and speclist[idim] != unspecified)
+            wraptest1 = ( axis.isCircular() and speclist[idim] != unspecified)
             start, stop = item.start, item.stop
-            wraptest2 = not ((start is None or (0 <= start < axislen)) and (
-                stop is None or (0 <= stop <= axislen)))
+            wraptest2 = not ((start is None or (0<=start<axislen)) and (stop is None or (0<=stop<=axislen)))
 
-            if (wraptest1 and wraptest2):
+            if ( wraptest1 and wraptest2):
                 if wrapdim >= 0:
-                    raise CDMSError("Too many dimensions wrap around.")
+                    raise CDMSError, "Too many dimensions wrap around."
                 wrapdim = idim
                 break
-
+                    
         else:
 
             # No wraparound, just read the data
 
-            # redo the speclist -> slice if passed circular test but not
-            # wrapped test
+            # redo the speclist -> slice if passed circular test but not wrapped test
 
             if(circulardim is not None):
-                slicelist = self.reg_specs2slices(speclist, force=circulardim)
-
-            d = {'raw': raw,
-                 'squeeze': squeeze,
-                 'order': order,
-                 'grid': grid,
-                 }
+                slicelist = self.reg_specs2slices (speclist,force=circulardim)
+                
+            d = {'raw':raw, 
+                 'squeeze':squeeze,
+                 'order':order,
+                 'grid':grid,
+                }
             return self.subSlice(*slicelist, **d)
 
         #
@@ -812,11 +787,11 @@ class AbstractVariable(CdmsObj, Slab):
         # shift the wrap slice to the positive side and calc number of cycles shifted
         #
 
-        wb = wrapslice.start
-        we = wrapslice.stop
-        ws = wrapslice.step
-        size = length
-        cycle = self.getAxis(wrapdim).getModulo()
+        wb=wrapslice.start
+        we=wrapslice.stop
+        ws=wrapslice.step
+        size=length
+        cycle=self.getAxis(wrapdim).getModulo()
 
         #
         # ncycle:
@@ -826,93 +801,92 @@ class AbstractVariable(CdmsObj, Slab):
         # ncyclesrev:
         #    resetting the world coordinate for reversed direction
         #
-
-        ncycles = 0
-        ncyclesrev = 0
-
-        if(ws > 0):
-
-            if(wb > 0):
-                ncycles = 1
-                while(wb >= 0):
-                    wb = wb - size
-                    we = we - size
-                    ncycles = ncycles - 1
+        
+        ncycles=0
+        ncyclesrev=0
+
+        if(ws>0):
+
+            if(wb>0):
+                ncycles=1
+                while(wb>=0):
+                    wb=wb-size
+                    we=we-size
+                    ncycles=ncycles-1
             else:
-                ncycles = 0
-                while(wb < 0):
-                    wb = wb + size
-                    we = we + size
-                    ncycles = ncycles + 1
-
+                ncycles=0
+                while(wb<0):
+                    wb=wb+size
+                    we=we+size
+                    ncycles=ncycles+1
+                    
             if(wb < 0):
-                wb = wb + size
-                we = we + size
-
+                wb=wb+size
+                we=we+size
+                
         #  reversed direction
-
+        
         else:
 
             # do the ncycles for resetting world coordinate
-            wbrev = wb
-            werev = we
-            werevNoneTest = 0
+            wbrev=wb
+            werev=we
+            werevNoneTest=0
             if(werev is None):
-                werev = 0
-                werevNoneTest = 1
+                werev=0
+                werevNoneTest=1
 
-            ncycleRevStart = 1
+            ncycleRevStart=1
             if(wbrev > 0):
-                ncyclesrev = ncycleRevStart
-                while(wbrev >= 0):
-                    wbrev = wbrev - size
-                    werev = werev - size
-                    ncyclesrev = ncyclesrev - 1
+                ncyclesrev=ncycleRevStart
+                while(wbrev>=0):
+                    wbrev=wbrev-size
+                    werev=werev-size
+                    ncyclesrev=ncyclesrev-1
             else:
-                ncyclesrev = 0
-                while(wbrev < 0):
-                    wbrev = wbrev + size
-                    werev = werev + size
-                    ncyclesrev = ncyclesrev + 1
+                ncyclesrev=0
+                while(wbrev<0):
+                    wbrev=wbrev+size
+                    werev=werev+size
+                    ncyclesrev=ncyclesrev+1
 
             while(werev < 0):
-                wbrev = wbrev + size
-                werev = werev + size
+                wbrev=wbrev+size
+                werev=werev+size
 
             # number of cycles to make the slice positive
-            while(we < 0 and we is not None):
-                wb = wb + size
-                we = we + size
-                ncycles = ncycles + 1
-
-            wb = wbrev
-            we = werev
-            if(werevNoneTest):
-                we = None
-
-        wrapslice = slice(wb, we, ws)
-
+            while( we<0 and we != None ):
+                wb=wb+size
+                we=we+size
+                ncycles=ncycles+1
+
+            wb=wbrev
+            we=werev
+            if(werevNoneTest): we=None
+            
+        wrapslice=slice(wb,we,ws)
+        
         #
         #  calc the actual positive slices and create data array
         #
 
-        donew = 1
+        donew=1
 
         if(donew):
 
             wraps = splitSliceExt(wrapslice, length)
 
-            for kk in range(0, len(wraps)):
-                sl = wraps[kk]
-
+            for kk in range(0,len(wraps)):
+                sl=wraps[kk]
+                
                 slicelist[wrapdim] = sl
 
                 if(kk == 0):
                     ar1 = self.getSlice(squeeze=0, *slicelist)
-                    result = ar1
+                    result=ar1
                 else:
                     ar2 = self.getSlice(squeeze=0, *slicelist)
-                    result = numpy.ma.concatenate((result, ar2), axis=wrapdim)
+                    result = numpy.ma.concatenate((result,ar2),axis=wrapdim)
 
         else:
 
@@ -921,7 +895,8 @@ class AbstractVariable(CdmsObj, Slab):
             ar1 = self.getSlice(squeeze=0, *slicelist)
             slicelist[wrapdim] = wrap2
             ar2 = self.getSlice(squeeze=0, *slicelist)
-            result = numpy.ma.concatenate((ar1, ar2), axis=wrapdim)
+            result = numpy.ma.concatenate((ar1,ar2),axis=wrapdim)
+
 
         if raweasy:
             return self._returnArray(result, squeeze)
@@ -932,23 +907,22 @@ class AbstractVariable(CdmsObj, Slab):
         #
         #----------------------------------------------------------------------
 
-        wrapspec = speclist[wrapdim]
-
+        wrapspec=speclist[wrapdim]
+        
         axes = []
         for i in range(self.rank()):
-            if squeeze and numpy.ma.size(result, i) == 1:
-                continue
+            if squeeze and numpy.ma.size(result, i) == 1: continue
 
             sl = slicelist[i]
 
             if i == wrapdim:
 
                 axis = self.getAxis(i).subAxis(wb, we, ws)
-
+                
                 if(ws > 0):
-                    delta_beg_wrap_dimvalue = ncycles * cycle
+                    delta_beg_wrap_dimvalue = ncycles*cycle
                 else:
-                    delta_beg_wrap_dimvalue = ncyclesrev * cycle
+                    delta_beg_wrap_dimvalue = ncyclesrev*cycle
 
                 isGeneric = [False]
                 b = axis.getBounds(isGeneric) - delta_beg_wrap_dimvalue
@@ -962,18 +936,18 @@ class AbstractVariable(CdmsObj, Slab):
 
         result = self._returnArray(result, squeeze)
         result = TransientVariable(result,
-                                   copy=0,
-                                   fill_value=self.missing_value,
-                                   axes=axes,
-                                   attributes=self.attributes,
-                                   id=self.id)
+                                 copy=0, 
+                                 fill_value = self.missing_value,
+                                 axes=axes,
+                                 attributes=self.attributes,
+                                 id = self.id)
         if grid is not None:
             order2 = grid.getOrder()
             if order is None:
                 order = order2
             elif order != order2:
-                raise CDMSError('grid, order options not compatible.')
-
+                raise CDMSError, 'grid, order options not compatible.'
+            
         result = result.reorder(order).regrid(grid)
         if raw == 0:
             return result
@@ -983,22 +957,22 @@ class AbstractVariable(CdmsObj, Slab):
     def getValue(self, squeeze=1):
         """Return the entire set of values."""
         return self.getSlice(Ellipsis, squeeze=squeeze)
+    
+    def assignValue(self,data):
+        raise CDMSError, NotImplemented + 'assignValue'
 
-    def assignValue(self, data):
-        raise CDMSError(NotImplemented + 'assignValue')
-
-    def reorder(self, order):
+    def reorder (self, order):
         """return self reordered per the specification order"""
-        if order is None:
+        if order is None: 
             return self
         axes = self.getAxisList()
         permutation = order2index(axes, order)
         if permutation == range(len(axes)):
             return self
-        return MV.transpose(self, permutation)
+        return MV.transpose (self, permutation)
 
-    def regrid(self, togrid, missing=None, order=None, mask=None, **keywords):
-        """return self regridded to the new grid.
+    def regrid (self, togrid, missing=None, order=None, mask=None, **keywords):
+        """return self regridded to the new grid.  
         One can use the regrid2.Regridder optional arguments as well.
 
         Example:
@@ -1013,49 +987,48 @@ class AbstractVariable(CdmsObj, Slab):
         @param keywords optional keyword arguments dependent on regridTool
         @return regridded variable
         """
-        # there is a circular dependency between cdms2 and regrid2. In
+        # there is a circular dependency between cdms2 and regrid2. In 
         # principle, cdms2 files should not import regrid2, we're bending
         # rules here...
         import regrid2
         from regrid2 import Horizontal
 
-        if togrid is None:
+        if togrid is None: 
             return self
         else:
 
-            fromgrid = self.getGrid()  # this returns the horizontal grid only
+            fromgrid = self.getGrid() # this returns the horizontal grid only
 
             # default w/o bounds
-            regridTool = 'libcf'
+            regridTool = 'libcf'   
             regridMethod = 'linear'
 
-            if 'topology' in self.getAxis(-1).attributes:
+            if self.getAxis(-1).attributes.has_key('topology'):
                 if self.getAxis(-1).attributes['topology'] == 'circular':
                     # for the ESMF regridders
-                    keywords['periodicity'] = guessPeriodicity(
-                        self.getAxis(-1).getBounds())
+                    keywords['periodicity'] = guessPeriodicity(self.getAxis(-1).getBounds())
                     keywords['mkCyclic'] = 1    # for LibCF regridder
 
             # check if there are bounds and we have esmf
-            if fromgrid.getBounds() is not None and hasattr(regrid2, "ESMFRegrid"):
+            if fromgrid.getBounds() is not None and hasattr(regrid2,"ESMFRegrid"):
                 regridTool = 'esmf'
                 regridMethod = 'linear'
                 # Hum ok if only 1 longitude regrid fails, let's check
-                if len(togrid.getLongitude()) == 1:
-                    # esmf can't deal with this
-                    regridTool = "regrid2"
+                if len(togrid.getLongitude())==1:
+                  # esmf can't deal with this
+                  regridTool   = "regrid2"
 
             # let user override
             userSpecifiesMethod = False
             for rm in 'rm', 'method', 'regridmethod', 'regrid_method', 'regridMethod':
-                if rm in keywords:
+                if keywords.has_key(rm):
                     regridMethod = keywords[rm]
                     del keywords[rm]
                     userSpecifiesMethod = True
 
             userSpecifiesTool = False
             for rt in 'rt', 'tool', 'regridtool', 'regrid_tool', 'regridTool':
-                if rt in keywords:
+                if keywords.has_key(rt):
                     regridTool = keywords[rt]
                     del keywords[rt]
                     userSpecifiesTool = True
@@ -1068,8 +1041,8 @@ class AbstractVariable(CdmsObj, Slab):
 
             # make sure the tool can do it
             if re.search('^regrid', regridTool, re.I) is not None and \
-                    (len(fromgrid.getLatitude().shape) > 1 or
-                     len(togrid.getLatitude().shape) > 1):
+                    (  len(fromgrid.getLatitude().shape) > 1 or \
+                         len(togrid.getLatitude().shape) > 1  ):
                 message = """
 avariable.regrid: regrid2 cannot do curvilinear, will switch to esmf..."
                 """
@@ -1079,7 +1052,7 @@ avariable.regrid: regrid2 cannot do curvilinear, will switch to esmf..."
             if re.search('esmf', regridTool, re.I):
                 # make sure source grids have bounds
                 haveBounds = True
-                for g in fromgrid, :
+                for g in fromgrid,:
                     for c in g.getLatitude(), g.getLongitude():
                         haveBounds &= (c.getBounds() is not None)
                 if not haveBounds:
@@ -1089,18 +1062,19 @@ avariable.regrid: regridTool = 'esmf' requires bounds for source grid, will swit
                     warnings.warn(message, Warning)
                     regridTool = 'libcf'
                     regridMethod = 'linear'
-                if not hasattr(regrid2, "ESMFRegrid"):
-                    message = """
+                if not hasattr(regrid2,"ESMFRegrid"):
+                  message = """
 avariable.regrid: regridTool = 'esmf' but your version does not seems to be built with esmf, will switch to regridTool = 'libcf'
                   """
-                    warnings.warn(message, Warning)
-                    regridTool = 'libcf'
-                    regridMethod = 'linear'
+                  warnings.warn(message, Warning)
+                  regridTool = 'libcf'
+                  regridMethod = 'linear'
+
 
             if re.search('conserv', regridMethod, re.I):
                 # make sure destination grid has bounds
                 haveBounds = True
-                for g in togrid, :
+                for g in togrid,:
                     for c in g.getLatitude(), g.getLongitude():
                         haveBounds &= (c.getBounds() is not None)
                 if not haveBounds:
@@ -1114,48 +1088,47 @@ avariable.regrid: regridMethod = 'conserve' requires bounds for destination grid
                 message = """
 avariable.regrid: We chose regridTool = %s for you among the following choices:
    Tools ->    'regrid2' (old behavior)
-               'esmf' (conserve, patch, linear) or
+               'esmf' (conserve, patch, linear) or 
                'libcf' (linear)""" % regridTool
                 warnings.warn(message, Warning)
 
             if not userSpecifiesMethod and re.search('^regrid', regridTool, re.I) is None:
                 message = """
-avariable.regrid: We chose regridMethod = %s for you among the following choices:
+avariable.regrid: We chose regridMethod = %s for you among the following choices: 
     'conserve' or 'linear' or 'patch'""" % regridMethod
                 warnings.warn(message, Warning)
 
             if re.search('^regrid', regridTool, re.I):
-                if 'diag' in keywords and \
-                        isinstance(keywords['diag'], dict):
+                if keywords.has_key('diag') and \
+                        type(keywords['diag']) == types.DictType:
                     keywords['diag']['regridTool'] = 'regrid'
 
                 # the original cdms2 regridder
                 regridf = Horizontal(fromgrid, togrid)
-                return regridf(self, missing=missing, order=order,
+                return regridf(self, missing=missing, order=order, 
                                mask=mask, **keywords)
 
             # emsf or libcf...
 
             srcGridMask = None
             # set the source mask if a mask is defined with the source data
-            if numpy.any(self.mask):
+            if numpy.any(self.mask == True):
                 srcGridMask = getMinHorizontalMask(self)
 
             # compute the interpolation weights
-            ro = CdmsRegrid(fromgrid, togrid,
-                            dtype=self.dtype,
-                            regridMethod=regridMethod,
-                            regridTool=regridTool,
-                            srcGridMask=srcGridMask,
-                            srcGridAreas=None,
-                            dstGridMask=None,
-                            dstGridAreas=None,
+            ro = CdmsRegrid(fromgrid, togrid, 
+                            dtype = self.dtype,
+                            regridMethod = regridMethod,
+                            regridTool = regridTool,
+                            srcGridMask = srcGridMask, 
+                            srcGridAreas = None,
+                            dstGridMask = None,
+                            dstGridAreas = None,
                             **keywords)
             # now interpolate
             return ro(self, **keywords)
 
-    def pressureRegrid(
-            self, newLevel, missing=None, order=None, method="log"):
+    def pressureRegrid (self, newLevel, missing=None, order=None, method="log"):
         """Return the variable regridded to new pressure levels.
         The variable should be a function of lat, lon, pressure, and (optionally) time.
         <newLevel> is an axis of the result pressure levels.
@@ -1167,13 +1140,12 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
 
         fromlevel = self.getLevel()
         if fromlevel is None:
-            raise CDMSError('No pressure level')
+            raise CDMSError, 'No pressure level'
         pregridf = PressureRegridder(fromlevel, newLevel)
         result = pregridf(self, missing=missing, order=order, method=method)
         return result
 
-    def crossSectionRegrid(
-            self, newLevel, newLatitude, missing=None, order=None, method="log"):
+    def crossSectionRegrid(self, newLevel, newLatitude, missing=None, order=None, method="log"):
         """Return the variable regridded to new pressure levels and latitudes.
         The variable should be a function of lat, level, and (optionally) time.
         <newLevel> is an axis of the result pressure levels.
@@ -1187,18 +1159,14 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
         fromlevel = self.getLevel()
         fromlat = self.getLatitude()
         if fromlevel is None:
-            raise CDMSError('No pressure level')
+            raise CDMSError, 'No pressure level'
         if fromlat is None:
-            raise CDMSError('No latitude level')
-        xregridf = CrossSectionRegridder(
-            fromlat,
-            newLatitude,
-            fromlevel,
-            newLevel)
+            raise CDMSError, 'No latitude level'
+        xregridf = CrossSectionRegridder(fromlat, newLatitude, fromlevel, newLevel)
         result = xregridf(self, missing=missing, order=order, method=method)
         return result
 
-    def _process_specs(self, specs, keys):
+    def _process_specs (self, specs, keys):
         """Process the arguments for a getSlice, getRegion, etc.
            Returns an array of specifications for all dimensions.
            Any Ellipsis has been eliminated.
@@ -1209,39 +1177,36 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
         if Ellipsis in specs:
             nellipses = 1
         elif numpy.newaxis in specs:
-            raise CDMSError(
-                'Sorry, you cannot use NewAxis in this context ' + str(specs))
+            raise CDMSError, 'Sorry, you cannot use NewAxis in this context ' + str(specs)
         else:
             nellipses = 0
-        if nsupplied - nellipses > myrank:
-            raise CDMSError(InvalidRegion +
-                            "too many dimensions: %d, for variable %s" % (len(specs), self.id))
+        if nsupplied-nellipses > myrank:
+            raise CDMSError, InvalidRegion + \
+              "too many dimensions: %d, for variable %s"%(len(specs),self.id)
 
-        speclist = [unspecified] * myrank
+        speclist = [unspecified]*myrank
         i = 0
         j = 0
         while i < nsupplied:
             if specs[i] is Ellipsis:
-                j = myrank - (nsupplied - (i + 1))
+               j = myrank  - (nsupplied - (i+1)) 
             else:
-                speclist[j] = specs[i]
-                j = j + 1
+               speclist[j] = specs[i]
+               j = j + 1
             i = i + 1
 
         for k, v in keys.items():
-            if k in ['squeeze', 'raw', 'grid', 'order']:
+            if k in ['squeeze','raw','grid','order']: 
                 continue
             i = self.getAxisIndex(k)
             if i >= 0:
                 if speclist[i] is not unspecified:
-                    raise CDMSError(
-                        'Conflict between specifier %s and %s' %
-                        (repr(speclist[i]), repr(keys)))
+                    raise CDMSError, 'Conflict between specifier %s and %s'%(`speclist[i]`,`keys`)
                 speclist[i] = v
 
         return speclist
 
-    def _single_specs(self, specs):
+    def _single_specs (self, specs):
         """Return a list of dimension indices where the spec is an index."""
         myrank = self.rank()
         nsupplied = len(specs)
@@ -1250,15 +1215,15 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
         singles = []
         while i < nsupplied:
             if specs[i] is Ellipsis:
-                j = myrank - (nsupplied - (i + 1))
+                j = myrank  - (nsupplied - (i+1)) 
             else:
-                if isinstance(specs[i], int):
+                if isinstance(specs[i], types.IntType):
                     singles.append(j)
                 j = j + 1
             i = i + 1
         return singles
 
-    def specs2slices(self, speclist, force=None):
+    def specs2slices (self, speclist, force=None):
         """Create an equivalent list of slices from an index specification
            An index specification is a list of acceptable items, which are
            -- an integer
@@ -1269,22 +1234,22 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
            The size of the speclist must be self.rank()
         """
         if len(speclist) != self.rank():
-            raise CDMSError("Incorrect length of speclist in specs2slices.")
+            raise CDMSError, "Incorrect length of speclist in specs2slices."
         slicelist = []
         for i in range(self.rank()):
             key = speclist[i]
-            if isinstance(key, int):  # x[i]
-                slicelist.append(slice(key, key + 1))
-            elif isinstance(key, slice):  # x[i:j:k]
+            if isinstance(key, types.IntType):  # x[i]
+                slicelist.append (slice(key,key+1))
+            elif type(key) is types.SliceType: # x[i:j:k]
                 slicelist.append(key)
             elif key is unspecified or key is None or key == ':':
-                slicelist.append(slice(0, len(self.getAxis(i))))
+                slicelist.append (slice(0, len(self.getAxis(i))))
             elif key is Ellipsis:
-                raise CDMSError("Misuse of ellipsis in specification.")
-            elif isinstance(key, tuple):
+                raise CDMSError, "Misuse of ellipsis in specification."
+            elif type(key) is types.TupleType:
                 slicelist.append(slice(*key))
             else:
-                raise CDMSError('invalid index: %s' % str(key))
+                raise CDMSError, 'invalid index: %s'% str(key)
         # Change default or negative start, stop to positive
         for i in range(self.rank()):
             axis = self.getAxis(i)
@@ -1295,106 +1260,92 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
             #
             # allow negative indices in a wrapped (isCircular() = 1) axis
             #
-            circular = (axis.isCircular() and force is None)
+            circular=(axis.isCircular() and force is None)
 
             altered = 0
-            if step is None:
+            if step is None: 
                 altered = 1
-                step = 1
+                step=1
 
-            if ((start is None or stop is None or start < 0 or stop < 0) and (circular == 0)):
+            if ( ( start is None or stop is None or start<0 or stop<0 ) and ( circular == 0 ) ):
                 altered = 1
                 adjustit = 1
-                if step > 0:
-                    if start is None:
-                        start = 0
-                    if stop is None:
-                        stop = length
-                    if start == -1 and stop == 0:
-                        stop = length
+                if step>0:
+                    if start is None: 
+                        start=0
+                    if stop is None: 
+                        stop=length
+                    if start==-1 and stop==0:
+                        stop=length
                 else:
-                    if start is None:
-                        start = length - 1
+                    if start is None: 
+                        start=length-1
                     if stop is None:
                         # stop=-1
                         adjustit = 0
-                if start < 0:
-                    start = start % length
-                if stop < 0 and adjustit:
-                    stop = stop % length
-            if altered:
+                if start<0: 
+                    start=start%length
+                if stop<0 and adjustit: 
+                    stop=stop%length
+            if altered: 
                 slicelist[i] = slice(start, stop, step)
         return slicelist
 
-    def reg_specs2slices(self, initspeclist, force=None):
+    def reg_specs2slices(self, initspeclist,force=None):
 
         # Don't use input to store return value
-        speclist = copy.copy(initspeclist)
+        speclist=copy.copy(initspeclist)
 
         for i in range(self.rank()):
             item = speclist[i]
-            if isinstance(item, slice):
+            if isinstance(item, types.SliceType):
                 newitem = item
-            elif item == ':' or item is None or item is unspecified:
+            elif item==':' or item is None or item is unspecified:
                 axis = self.getAxis(i)
-                newitem = slice(0, len(axis))
-            elif isinstance(item, list) or \
-                    isinstance(item, tuple):
+                newitem = slice(0,len(axis))
+            elif isinstance(item, types.ListType) or \
+                 isinstance(item, types.TupleType):
                 axis = self.getAxis(i)
-                if len(item) == 2:        # (start,end)
+                if len(item)==2:        # (start,end)
                     indexInterval = axis.mapIntervalExt(item)
-                elif len(item) == 3:      # (start,end,'xxx')
-                    coordInterval = (item[0], item[1])
-                    indexInterval = axis.mapIntervalExt(coordInterval, item[2])
-                elif len(item) == 4:
-                    coordInterval = (item[0], item[1])
-                    indexInterval = axis.mapIntervalExt(
-                        coordInterval, item[2], item[3])
-                elif len(item) == 5:
-                    coordInterval = (item[0], item[1])
-                    indexInterval = axis.mapIntervalExt(
-                        coordInterval,
-                        item[2],
-                        item[3],
-                        item[4])
-                elif len(item) == 6:
-                    coordInterval = (item[0], item[1])
-                    indexInterval = axis.mapIntervalExt(
-                        coordInterval,
-                        item[2],
-                        item[3],
-                        item[4],
-                        item[5])
+                elif len(item)==3:      # (start,end,'xxx')
+                    coordInterval = (item[0],item[1])
+                    indexInterval = axis.mapIntervalExt(coordInterval,item[2])
+                elif len(item)==4:
+                    coordInterval = (item[0],item[1])
+                    indexInterval = axis.mapIntervalExt(coordInterval,item[2],item[3])
+                elif len(item)==5:
+                    coordInterval = (item[0],item[1])
+                    indexInterval = axis.mapIntervalExt(coordInterval,item[2],item[3],item[4])
+                elif len(item)==6:
+                    coordInterval = (item[0],item[1])
+                    indexInterval = axis.mapIntervalExt(coordInterval,item[2],item[3],item[4],item[5])
                 else:
-                    raise CDMSError(
-                        InvalidRegion + "invalid format for coordinate interval: %s" %
-                        str(item))
+                    raise CDMSError, InvalidRegion + "invalid format for coordinate interval: %s"%str(item)
                 if indexInterval is None:
-                    raise CDMSError(OutOfRange + str(item))
-                newitem = slice(
-                    indexInterval[0],
-                    indexInterval[1],
-                    indexInterval[2])
-            elif isinstance(item, (numpy.floating, float, numpy.integer, int, long, basestring)) or type(item) in CdtimeTypes:
+                    raise CDMSError, OutOfRange + str(item)
+                newitem = slice(indexInterval[0],indexInterval[1],indexInterval[2])
+            elif isinstance(item, numpy.floating) or \
+                 isinstance(item, types.FloatType) or \
+                 isinstance(item, numpy.integer) or \
+                 isinstance(item, types.IntType) or \
+                 isinstance(item, types.LongType) or \
+                 isinstance(item, types.StringType) or \
+                 type(item) in CdtimeTypes:
                 axis = self.getAxis(i)
                 #
                 # default is 'ccn' in axis.mapIntervalExt
                 #
-                indexInterval = axis.mapIntervalExt((item, item))
+                indexInterval = axis.mapIntervalExt((item,item))
                 if indexInterval is None:
-                    raise CDMSError(OutOfRange + str(item))
-                newitem = slice(
-                    indexInterval[0],
-                    indexInterval[1],
-                    indexInterval[2])
+                    raise CDMSError, OutOfRange + str(item)
+                newitem = slice(indexInterval[0],indexInterval[1],indexInterval[2])
             else:
-                raise CDMSError(
-                    InvalidRegion + "invalid format for coordinate interval: %s" %
-                    str(item))
+                raise CDMSError, InvalidRegion + "invalid format for coordinate interval: %s"%str(item)
 
             speclist[i] = newitem
 
-        slicelist = self.specs2slices(speclist, force)
+        slicelist = self.specs2slices(speclist,force)
         return slicelist
 
     def _decodedType(self):
@@ -1409,7 +1360,7 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
 
     def isEncoded(self):
         "True iff self is represented as packed data."
-        return (hasattr(self, "scale_factor") or hasattr(self, "add_offset"))
+        return (hasattr(self,"scale_factor") or hasattr(self,"add_offset"))
 
     def decode(self, ar):
         "Decode compressed data. ar is a masked array, scalar, or numpy.ma.masked"
@@ -1417,20 +1368,18 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
         if hasattr(self, 'scale_factor'):
             scale_factor = self.scale_factor
         else:
-            scale_factor = numpy.array([1.0], resulttype)
-
+            scale_factor = numpy.array([1.0],resulttype)
+            
         if hasattr(self, 'add_offset'):
             add_offset = self.add_offset
         else:
-            add_offset = numpy.array([0.0], resulttype)
-
+            add_offset = numpy.array([0.0],resulttype)
+            
         if ar is not numpy.ma.masked:
-            result = scale_factor * ar + add_offset
-            if isinstance(result, numpy.ma.MaskedArray):
+            result = scale_factor*ar + add_offset
+            if isinstance(result,numpy.ma.MaskedArray):
                 result = result.astype(resulttype)
-                numpy.ma.set_fill_value(
-                    result,
-                    numpy.ma.default_fill_value(0.))
+                numpy.ma.set_fill_value(result, numpy.ma.default_fill_value(0.))
             else:
                 tmp = numpy.array(result)
                 result = tmp.astype(resulttype)[0]
@@ -1452,121 +1401,116 @@ avariable.regrid: We chose regridMethod = %s for you among the following choices
                         result.append(j)
                         break
                 else:
-                    raise CDMSError(
-                        'Variable and grid do not share common dimensions: %s' %
-                        self.id)
+                    raise CDMSError, 'Variable and grid do not share common dimensions: %s'%self.id
 
         return tuple(result)
 
     # numpy.ma overrides
 
     def __getitem__(self, key):
-        if isinstance(key, tuple):
+        if type(key) is types.TupleType:
             speclist = self._process_specs(key, {})
         else:
-            if isinstance(key, int) and key >= len(self):
-                raise IndexError("Index too large: %d" % key)
+            if isinstance(key, types.IntType) and key>=len(self):
+                raise IndexError, "Index too large: %d"%key
             speclist = self._process_specs([key], {})
 
         # Note: raw=0 ensures that a TransientVariable is returned
         return self.getSlice(numericSqueeze=1, raw=0, isitem=1, *speclist)
-
+        
     def __getslice__(self, low, high):
 
         # Note: raw=0 ensures that a TransientVariable is returned
-        return self.getSlice(slice(low, high), numericSqueeze=1, raw=0)
+        return self.getSlice (slice(low, high), numericSqueeze = 1, raw=0)
 
     def typecode(self):
-        raise CDMSError(NotImplemented + 'typecode')
+        raise CDMSError, NotImplemented + 'typecode'
 
-    def __abs__(self):
+    def __abs__(self): 
         return MV.absolute(self)
 
-    def __neg__(self):
+    def __neg__(self): 
         return MV.negative(self)
 
     def __add__(self, other):
         return MV.add(self, other)
-
+                        
     __radd__ = __add__
 
-    def __lshift__(self, n):
+    def __lshift__ (self, n):
         return MV.left_shift(self, n)
 
-    def __rshift__(self, n):
+    def __rshift__ (self, n):
         return MV.right_shift(self, n)
-
-    def __sub__(self, other):
+                        
+    def __sub__(self, other): 
         return MV.subtract(self, other)
 
-    def __rsub__(self, other):
+    def __rsub__(self, other): 
         return MV.subtract(other, self)
 
     def __mul__(self, other):
         return MV.multiply(self, other)
-
+    
     __rmul__ = __mul__
 
-    def __div__(self, other):
+    def __div__(self, other): 
         return MV.divide(self, other)
 
-    def __rdiv__(self, other):
+    def __rdiv__(self, other): 
         return MV.divide(other, self)
 
-    def __pow__(self, other, third=None):
+    def __pow__(self,other, third=None): 
         return MV.power(self, other, third)
 
-    def __iadd__(self, other):
+    def __iadd__(self, other): 
         "Add other to self in place."
         return MV.add(self, other)   # See if we can improve these later.
 
-    def __isub__(self, other):
+    def __isub__(self, other): 
         "Subtract other from self in place."
         return MV.subtract(self, other)   # See if we can improve these later.
 
-    def __imul__(self, other):
+    def __imul__(self, other): 
         "Multiply self by other in place."
         return MV.multiply(self, other)   # See if we can improve these later.
 
-    def __idiv__(self, other):
+    def __idiv__(self, other): 
         "Divide self by other in place."
         return MV.divide(self, other)   # See if we can improve these later.
 
-    def __eq__(self, other):
-        return MV.equal(self, other)
+    def __eq__(self,other): 
+        return MV.equal(self,other)
 
-    def __ne__(self, other):
-        return MV.not_equal(self, other)
+    def __ne__(self,other): 
+        return MV.not_equal(self,other)
 
-    def __lt__(self, other):
-        return MV.less(self, other)
+    def __lt__(self,other): 
+        return MV.less(self,other)
 
-    def __le__(self, other):
-        return MV.less_equal(self, other)
+    def __le__(self,other): 
+        return MV.less_equal(self,other)
 
-    def __gt__(self, other):
-        return MV.greater(self, other)
+    def __gt__(self,other): 
+        return MV.greater(self,other)
 
-    def __ge__(self, other):
-        return MV.greater_equal(self, other)
+    def __ge__(self,other): 
+        return MV.greater_equal(self,other)
 
-    def __sqrt__(self):
+    def __sqrt__(self): 
         return MV.sqrt(self)
 
-    def astype(self, tc):
+    def astype (self, tc):
         "return self as array of given type."
         return self.subSlice().astype(tc)
+    
 
-
-# internattr.add_internal_attribute(AbstractVariable, 'id', 'parent')
-# PropertiedClasses.set_property(AbstractVariable, 'missing_value',
-# acts=AbstractVariable._setmissing, nodelete=1)
+## internattr.add_internal_attribute(AbstractVariable, 'id', 'parent')   
+#PropertiedClasses.set_property(AbstractVariable, 'missing_value', acts=AbstractVariable._setmissing, nodelete=1)
 
 __rp = r'\s*([-txyz0-9]{1,1}|\(\s*\w+\s*\)|[.]{3,3})\s*'
 __crp = re.compile(__rp)
-
-
-def orderparse(order):
+def orderparse (order):
     """Parse an order string. Returns a list of axes specifiers.
        Order elements can be:
           Letters t, x, y, z meaning time, longitude, latitude, level
@@ -1576,33 +1520,31 @@ def orderparse(order):
             remaining axes.
           (name) meaning an axis whose id is name
     """
-    if not isinstance(order, basestring):
-        raise CDMSError('order arguments must be strings.')
+    if not isinstance(order, types.StringType):
+        raise CDMSError, 'order arguments must be strings.'
     pos = 0
-    result = []
+    result=[]
     order = order.strip()
     while pos < len(order):
         m = __crp.match(order, pos)
-        if m is None:
-            break
+        if m is None: break
         r = m.group(1)
         if r[0] == '(':
             pass
         elif r == '...':
             r = Ellipsis
         elif len(r) == 1:
-            if r.isdigit():
-                r = int(r)
+            if r in string.digits:
+                r = string.atoi(r)
         result.append(r)
         pos = m.end(0)
 
     if pos != len(order):
-        raise CDMSError('Order string "' + order +
-                        '" malformed, index ' + str(pos))
+        raise CDMSError, 'Order string "' + order + \
+                          '" malformed, index '+str(pos)
     return result
 
-
-def order2index(axes, order):
+def order2index (axes, order):
     """Find the index permutation of axes to match order.
        The argument order is a string.
        Order elements can be:
@@ -1613,28 +1555,27 @@ def order2index(axes, order):
             remaining axes.
           (name) meaning an axis whose id is name
     """
-    if isinstance(order, basestring):
+    if isinstance(order, types.StringType):
         result = orderparse(order)
-    elif isinstance(order, list):
+    elif isinstance(order, types.ListType):
         result = order
     else:
-        raise CDMSError(
-            'order2index, order specified of bad type:' + str(type(order)))
+        raise CDMSError, 'order2index, order specified of bad type:' + str(type(order))
     n = len(axes)
     ie = n
-    permutation = [None] * n
+    permutation = [None]*n
     j = 0
     pos = 0
     while j < len(result):
         item = result[j]
-        if isinstance(item, basestring):
-            if item == 't':
+        if isinstance(item, types.StringType):
+            if item == 't': 
                 spec = 'time'
-            elif item == 'x':
+            elif item == 'x': 
                 spec = 'longitude'
-            elif item == 'y':
+            elif item == 'y': 
                 spec = 'latitude'
-            elif item == 'z':
+            elif item == 'z': 
                 spec = 'level'
             elif item == '-':
                 pos += 1
@@ -1645,26 +1586,25 @@ def order2index(axes, order):
             for k in range(n):
                 if axisMatches(axes[k], spec):
                     if k in permutation:
-                        raise CDMSError('Duplicate item in order %s' % order)
+                        raise CDMSError, 'Duplicate item in order %s' % order
                     permutation[pos] = k
                     pos += 1
                     break
             else:
-                raise CDMSError('No axis matching order spec %s' % str(item))
-        elif isinstance(item, int):
+                raise CDMSError, 'No axis matching order spec %s' %str(item)
+        elif isinstance(item, types.IntType):
             if item in permutation:
-                raise CDMSError('Duplicate item in order %s' % order)
+                raise CDMSError, 'Duplicate item in order %s' % order
             if item >= n:
-                raise CDMSError('Index %d out of range in order %s' %
-                                (item, order))
+                raise CDMSError, 'Index %d out of range in order %s' %\
+                                 (item,order)
             permutation[pos] = item
             pos += 1
         elif item is Ellipsis:
             nleft = len(result) - j - 1
             pos = n - nleft
         else:
-            raise CDMSError(
-                'List specified for order contains bad item: ' + repr(item))
+            raise CDMSError, 'List specified for order contains bad item: ' + repr(item)
         j += 1
 
     for i in range(n):
@@ -1673,7 +1613,7 @@ def order2index(axes, order):
                 if permutation[j] is None:
                     permutation[j] = i
                     break
-    return permutation
+    return permutation    
 
-from .tvariable import TransientVariable
-from . import MV2 as MV
+from tvariable import TransientVariable
+import MV2 as MV
diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index 9313441a4..209fbda42 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -1,13 +1,12 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 """
 CDMS Axis objects
 """
 _debug = 0
 std_axis_attributes = ['name', 'units', 'length', 'values', 'bounds']
-import sys
-import copy
+import string, sys, types, copy
 import numpy
 # import regrid2._regrid
 import cdmsNode
@@ -17,26 +16,19 @@ from cdmsobj import CdmsObj, Max32int
 from sliceut import reverseSlice, splitSlice, splitSliceExt
 from error import CDMSError
 import forecast
-# import internattr
+#import internattr
 from UserList import UserList
-
-
 class AliasList (UserList):
-
     def __init__(self, alist):
-        UserList.__init__(self, alist)
-
-    def __setitem__(self, i, value):
-        self.data[i] = value.lower()
-
+        UserList.__init__(self,alist)
+    def __setitem__ (self, i, value):
+        self.data[i] = string.lower(value)
     def __setslice(self, i, j, values):
-        self.data[i:j] = map(lambda x: x.lower(), values)
-
+        self.data[i:j] = map(lambda x: string.lower(x), values)
     def append(self, value):
-        self.data.append(value.lower())
-
+        self.data.append(string.lower(value))
     def extend(self, values):
-        self.data.extend(map("".lower, values))
+        self.data.extend(map(string.lower, values))
 
 level_aliases = AliasList(['plev'])
 longitude_aliases = AliasList([])
@@ -54,35 +46,35 @@ ReadOnlyAxis = "Axis is read-only: "
 InvalidNCycles = "Invalid number of cycles requested for wrapped dimension: "
 
 ComptimeType = type(cdtime.comptime(0))
-ReltimeType = type(cdtime.reltime(0, "days"))
+ReltimeType = type(cdtime.reltime(0,"days"))
 CdtimeTypes = (ComptimeType, ReltimeType)
 
 # Map between cdtime calendar and CF tags
 calendarToTag = {
-    cdtime.MixedCalendar: 'gregorian',
-    cdtime.NoLeapCalendar: 'noleap',
-    cdtime.GregorianCalendar: 'proleptic_gregorian',
-    cdtime.JulianCalendar: 'julian',
-    cdtime.Calendar360: '360_day',
-    cdtime.ClimCalendar: 'clim_noncf',
-    cdtime.ClimLeapCalendar: 'climleap_noncf',
-    cdtime.DefaultCalendar: 'gregorian',
-    cdtime.StandardCalendar: 'proleptic_gregorian',
+    cdtime.MixedCalendar : 'gregorian',
+    cdtime.NoLeapCalendar : 'noleap',
+    cdtime.GregorianCalendar : 'proleptic_gregorian',
+    cdtime.JulianCalendar : 'julian',
+    cdtime.Calendar360 : '360_day',
+    cdtime.ClimCalendar : 'clim_noncf',
+    cdtime.ClimLeapCalendar : 'climleap_noncf',
+    cdtime.DefaultCalendar : 'gregorian',
+    cdtime.StandardCalendar : 'proleptic_gregorian',
     }
 
 tagToCalendar = {
-    'gregorian': cdtime.MixedCalendar,
-    'standard': cdtime.GregorianCalendar,
-    'noleap': cdtime.NoLeapCalendar,
-    'julian': cdtime.JulianCalendar,
-    'proleptic_gregorian': cdtime.GregorianCalendar,
-    '360_day': cdtime.Calendar360,
-    '360': cdtime.Calendar360,
-    '365_day': cdtime.NoLeapCalendar,
-    'clim': cdtime.ClimCalendar,
-    'clim_noncf': cdtime.ClimCalendar,
-    'climleap_noncf': cdtime.ClimLeapCalendar,
-    'climleap': cdtime.ClimLeapCalendar,
+    'gregorian' : cdtime.MixedCalendar,
+    'standard' : cdtime.GregorianCalendar,
+    'noleap' : cdtime.NoLeapCalendar,
+    'julian' : cdtime.JulianCalendar,
+    'proleptic_gregorian' : cdtime.GregorianCalendar,
+    '360_day' : cdtime.Calendar360,
+    '360' : cdtime.Calendar360,
+    '365_day' : cdtime.NoLeapCalendar,
+    'clim' : cdtime.ClimCalendar,
+    'clim_noncf' : cdtime.ClimCalendar,
+    'climleap_noncf' : cdtime.ClimLeapCalendar,
+    'climleap' : cdtime.ClimLeapCalendar,
     }
 
 # This is not an error message, it is used to detect which things have
@@ -94,24 +86,20 @@ _autobounds = 2                         # Automatically generate axis and grid b
                                         # Modes:
                                         # 0 : off (not bounds generation)
                                         # 1 : on  (generate bounds)
-                                        # 2 : grid (generate bounds for lat/lon
-                                        # grids only)
+                                        # 2 : grid (generate bounds for lat/lon grids only)
 
 # Set autobounds mode to 'on' or 'off'. If on, getBounds will automatically
 # generate boundary information for an axis or grid, if not explicitly defined.
 # If 'off', and no boundary data is explicitly defined, the bounds will NOT
 # be generated; getBounds will return None for the boundaries.
-
-
 def setAutoBounds(mode):
     global _autobounds
-    if mode == 'on' or mode == 1:
-        _autobounds = 1
-    elif mode == 'off' or mode == 0:
-        _autobounds = 0
-    elif mode == 'grid' or mode == 2:
-        _autobounds = 2
-
+    if mode=='on' or mode==1:
+        _autobounds=1
+    elif mode=='off' or mode==0:
+        _autobounds=0
+    elif mode=='grid' or mode==2:
+        _autobounds=2
 
 def getAutoBounds():
     return _autobounds
@@ -121,48 +109,41 @@ def createAxis(data, bounds=None, id=None, copy=0, genericBounds=False):
     return TransientAxis(data, bounds=bounds, id=id, copy=copy, genericBounds=genericBounds)
 
 # Generate a Gaussian latitude axis, north-to-south
-
-
 def createGaussianAxis(nlat):
     import regrid2._regrid
 
-    lats, wts, bnds = regrid2._regrid.gridattr(nlat, 'gaussian')
-
-    # For odd number of latitudes, gridattr returns 0 in the second half of
-    # lats
-    if nlat % 2:
-        mid = nlat / 2
-        lats[mid + 1:] = -lats[:mid][::-1]
+    lats,wts,bnds = regrid2._regrid.gridattr(nlat,'gaussian')
 
-    latBounds = numpy.zeros((nlat, 2), numpy.float)
-    latBounds[:, 0] = bnds[:-1]
-    latBounds[:, 1] = bnds[1:]
-    lat = createAxis(lats, latBounds, id="latitude")
+    # For odd number of latitudes, gridattr returns 0 in the second half of lats
+    if nlat%2:
+        mid = nlat/2
+        lats[mid+1:] = -lats[:mid][::-1]
+        
+    latBounds = numpy.zeros((nlat,2),numpy.float)
+    latBounds[:,0] = bnds[:-1]
+    latBounds[:,1] = bnds[1:]
+    lat = createAxis(lats,latBounds,id="latitude")
     lat.designateLatitude()
     lat.units = "degrees_north"
     return lat
 
 # Generate an equal-area latitude axis, north-to-south
-
-
 def createEqualAreaAxis(nlat):
     import regrid2._regrid
 
-    lats, wts, bnds = regrid2._regrid.gridattr(nlat, 'equalarea')
-    latBounds = numpy.zeros((nlat, 2), numpy.float)
-    latBounds[:, 0] = bnds[:-1]
-    latBounds[:, 1] = bnds[1:]
-    lat = createAxis(lats, latBounds, id="latitude")
+    lats,wts,bnds = regrid2._regrid.gridattr(nlat,'equalarea')
+    latBounds = numpy.zeros((nlat,2),numpy.float)
+    latBounds[:,0] = bnds[:-1]
+    latBounds[:,1] = bnds[1:]
+    lat = createAxis(lats,latBounds,id="latitude")
     lat.designateLatitude()
     lat.units = "degrees_north"
     return lat
 
 # Generate a uniform latitude axis
-
-
 def createUniformLatitudeAxis(startLat, nlat, deltaLat):
-    latArray = startLat + deltaLat * numpy.arange(nlat)
-    lat = createAxis(latArray, id="latitude")
+    latArray = startLat + deltaLat*numpy.arange(nlat)
+    lat = createAxis(latArray,id="latitude")
     lat.designateLatitude()
     lat.units = "degrees_north"
     latBounds = lat.genGenericBounds(width=deltaLat)
@@ -170,21 +151,18 @@ def createUniformLatitudeAxis(startLat, nlat, deltaLat):
     return lat
 
 # Generate a uniform longitude axis
-
-
 def createUniformLongitudeAxis(startLon, nlon, deltaLon):
-    lonArray = startLon + deltaLon * numpy.arange(nlon)
-    lon = createAxis(lonArray, id="longitude")
+    lonArray = startLon + deltaLon*numpy.arange(nlon)
+    lon = createAxis(lonArray,id="longitude")
     lon.designateLongitude()
     lon.units = "degrees_east"
     lonBounds = lon.genGenericBounds(width=deltaLon)
     lon.setBounds(lonBounds)
     return lon
 
-
-def mapLinearIntersection(xind, yind, iind,
-                          aMinusEps, aPlusEps, bPlusEps, bMinusEps,
-                          boundLeft, nodeSubI, boundRight):
+def mapLinearIntersection(xind,yind,iind,
+                          aMinusEps,aPlusEps,bPlusEps,bMinusEps,
+                          boundLeft,nodeSubI,boundRight):
     """
 
     Return true iff the coordinate interval (a,b) intersects the node
@@ -204,35 +182,34 @@ def mapLinearIntersection(xind, yind, iind,
     """
 
     if(iind == 'n' or iind == 'e'):
-        testC_ = (aMinusEps <= nodeSubI)
-        test_C = (nodeSubI <= bPlusEps)
-        testO_ = (aPlusEps < nodeSubI)
-        test_O = (nodeSubI < bMinusEps)
+        testC_ = ( aMinusEps  <= nodeSubI   )
+        test_C = (  nodeSubI  <= bPlusEps   )
+        testO_ = (  aPlusEps  <  nodeSubI   )
+        test_O = (  nodeSubI  <  bMinusEps  )
     elif(iind == 'b'):
-        testC_ = (aMinusEps <= boundRight)
-        test_C = (boundLeft <= bPlusEps)
-        testO_ = (aPlusEps < boundRight)
-        test_O = (boundLeft < bMinusEps)
+        testC_ = ( aMinusEps  <= boundRight )
+        test_C = ( boundLeft  <= bPlusEps   )
+        testO_ = ( aPlusEps   <  boundRight )
+        test_O = ( boundLeft  <  bMinusEps  )
     elif(iind == 's'):
-        testC_ = (aMinusEps <= boundLeft)
-        test_C = (boundRight <= bPlusEps)
-        testO_ = (aPlusEps < boundLeft)
-        test_O = (boundRight < bMinusEps)
+        testC_ = ( aMinusEps  <= boundLeft  )
+        test_C = ( boundRight <= bPlusEps   )
+        testO_ = ( aPlusEps   <  boundLeft  )
+        test_O = ( boundRight <  bMinusEps  )
 
     if(xind == 'c' and yind == 'c'):
-        test = (testC_ and test_C)
+        test=(testC_ and test_C)
     elif(xind == 'c' and yind == 'o'):
-        test = (testC_ and test_O)
+        test=(testC_ and test_O)
     elif(xind == 'o' and yind == 'c'):
-        test = (testO_ and test_C)
+        test=(testO_ and test_C)
     elif(xind == 'o' and yind == 'o'):
-        test = (testO_ and test_O)
+        test=(testO_ and test_O)
 
     return(test)
 
+def mapLinearExt(axis, bounds, interval, indicator ='ccn', epsilon=None, stride=1, wrapped=0):
 
-def mapLinearExt(axis, bounds, interval,
-                 indicator='ccn', epsilon=None, stride=1, wrapped=0):
     """Map coordinate interval to index interval, without
     wraparound. interval has the form (x,y) where x and y are the
     endpoints in coordinate space. indicator is a three-character
@@ -245,139 +222,137 @@ def mapLinearExt(axis, bounds, interval,
     'b' - the interval intersects the cell bounds
     's' - the cell bounds are a subset of the interval
     'e' - same as 'n', plus an extra node on either side.
-
+    
     Returns the corresponding index interval (i,j), where i<j,
     indicating the half-open index interval [i,j), or None if the
     intersection is empty.
     """
-
-    indicator = indicator.lower()
+    
+    indicator = string.lower(indicator)
     length = len(axis)
 
     # Make the interval and search array non-decreasing
-    x, y = interval
-
-    iind = indicator[2]
-
-    if x > y:
-        x, y = y, x
-        xind = indicator[1]
-        yind = indicator[0]
-
+    x,y = interval
+
+    iind  = indicator[2]
+    
+    if x>y:
+        x,y = y,x
+        xind  = indicator[1]
+        yind  = indicator[0]
+        
     else:
         xind = indicator[0]
         yind = indicator[1]
 
-    if axis[0] > axis[-1]:
+    if axis[0]>axis[-1]:
         ar = axis[::-1]
-        if bounds[0, 0] < bounds[0, 1]:
+        if bounds[0,0]<bounds[0,1]:
             bd = bounds[::-1]
         else:
-            bd = bounds[::-1, ::-1]
+            bd = bounds[::-1,::-1]
         direc = 'dec'
     else:
         ar = axis
-        if bounds[0, 0] < bounds[0, 1]:
+        if bounds[0,0]<bounds[0,1]:
             bd = bounds
         else:
-            bd = bounds[:, ::-1]
+            bd = bounds[:,::-1]
         direc = 'inc'
 
     if(epsilon is None):
-        eps = 1.0e-5
-        if len(ar) > 1:
-            epsilon = eps * min(abs(ar[1] - ar[0]), abs(ar[-1] - ar[-2]))
+        eps=1.0e-5
+        if len(ar)>1:
+            epsilon = eps * min(abs(ar[1]-ar[0]), abs(ar[-1]-ar[-2]))
         else:
-            epsilon = eps
+            epsilon=eps
 
     #
     #  interval bound +/- epsilon
     #
 
-    aMinusEps = (x - epsilon)
-    aPlusEps = (x + epsilon)
-    bMinusEps = (y - epsilon)
-    bPlusEps = (y + epsilon)
+    aMinusEps=(x-epsilon)
+    aPlusEps=(x+epsilon)
+    bMinusEps=(y-epsilon)
+    bPlusEps=(y+epsilon)
+
 
-    # oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
+    #oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
     #
     # out-of-bounds requests
     #
-    # oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
+    #oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo
 
-    if iind in ['n', 'e']:
+    if iind in ['n','e']:
         mina = ar[0]
         maxa = ar[-1]
     else:
-        mina = bd[0, 0]
-        maxa = bd[-1, 1]
-
+        mina = bd[0,0]
+        maxa = bd[-1,1]
+        
     if(bPlusEps < mina or aMinusEps > maxa):
         return None
 
-    # nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn
+    #nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn
     #
     # empty node check
     #
-    # nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn
+    #nnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnnn
 
     # Handle empty intersections
     if (
-        (((aPlusEps) > ar[-1]) and (iind == 'n') and (xind == 'o')) or
-        (((aMinusEps) >= ar[-1]) and (iind == 'n') and (xind == 'c')) or
-        (((bMinusEps) < ar[0]) and (iind == 'n') and (yind == 'o')) or
-        (((bPlusEps) <= ar[0]) and (iind == 'n') and (yind == 'c'))
+        ( ((aPlusEps)  >  ar[-1]) and (iind == 'n') and (xind == 'o') ) or
+        ( ((aMinusEps) >= ar[-1]) and (iind == 'n') and (xind == 'c') ) or
+        ( ((bMinusEps) <  ar[0] ) and (iind == 'n') and (yind == 'o') ) or
+        ( ((bPlusEps)  <= ar[0] ) and (iind == 'n') and (yind == 'c') ) 
         ):
         return None
 
-    bdMaxRight = max(bd[-1][0], bd[-1][1])
-    bdMinLeft = min(bd[0][0], bd[0][1])
+
+    bdMaxRight=max(bd[-1][0],bd[-1][1])
+    bdMinLeft=min(bd[0][0],bd[0][1])
     if (
-        (((aMinusEps) > bdMaxRight) and (iind != 'n') and (xind == 'o')) or
-        (((aMinusEps) >= bdMaxRight) and (iind != 'n') and (xind == 'c')) or
-        (((bPlusEps) < bdMinLeft) and (iind != 'n') and (yind == 'o')) or
-        (((bPlusEps) <= bdMinLeft) and (iind != 'n') and (yind == 'c'))
-        ):
+        ( ( (aMinusEps) >  bdMaxRight ) and (iind != 'n') and (xind == 'o') ) or
+        ( ( (aMinusEps) >= bdMaxRight ) and (iind != 'n') and (xind == 'c') ) or
+        ( ( (bPlusEps)  <  bdMinLeft  ) and (iind != 'n') and (yind == 'o') ) or
+        ( ( (bPlusEps)  <= bdMinLeft  ) and (iind != 'n') and (yind == 'c') ) 
+        ): 
         return None
+    
+    # The intersection is nonempty; use searchsorted to get left/right limits for testing
 
-    # The intersection is nonempty; use searchsorted to get left/right limits
-    # for testing
-
-    ii, jj = numpy.searchsorted(ar, (x, y))
+    ii,jj = numpy.searchsorted(ar,(x,y))
 
     #
     #  find index range for left (iStart,iEnd) and right (jStart,jEnd)
     #
-
+    
     # iEnd + 2 because last point in loop not done
-    iStart = ii - 1
-    iEnd = ii + 2
-    if(iStart < 0):
-        iStart = 0
-    if(iEnd >= length):
-        iEnd = length - 1
-
-    jStart = jj - 1
-    jEnd = jj + 2
-    if(jStart < 0):
-        jStart = 0
-    if(jEnd >= length):
-        jEnd = length - 1
+    iStart=ii-1
+    iEnd=ii+2
+    if(iStart < 0): iStart=0
+    if( iEnd >= length ): iEnd = length - 1
+
+    jStart=jj-1
+    jEnd=jj+2
+    if( jStart < 0 ): jStart=0
+    if( jEnd >= length ): jEnd = length - 1
 
     #
     #  initialise the index to -1 (does not exist)
     #
 
-    iInterval = -1
-    jInterval = -1
-    iIntervalB = -1
-    jIntervalB = -1
+    iInterval=-1
+    jInterval=-1
+    iIntervalB=-1
+    jIntervalB=-1
 
-    # pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp
+    #pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp
     #
     #  preliminary checks
     #
-    # pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp
+    #pppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppppp
+
 
     if(iStart == jStart == iEnd == jEnd):
         iInterval = jInterval = iStart
@@ -387,173 +362,172 @@ def mapLinearExt(axis, bounds, interval,
 
     else:
 
-        # llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllll
+        #llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllll
         #
         #  left interval check
         #
-        # llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllll
+        #llllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllllll
 
         # - user check
+        
+        for i in range(iStart,iEnd+1):
 
-        for i in range(iStart, iEnd + 1):
-
-            nodeSubI = ar[i]
-            boundLeft = bd[i][0]
-            boundRight = bd[i][1]
+            nodeSubI=ar[i]
+            boundLeft=bd[i][0]
+            boundRight=bd[i][1]
 
-            test = mapLinearIntersection(xind, yind, iind,
-                                       aMinusEps, aPlusEps, bPlusEps, bMinusEps,
-                                       boundLeft, nodeSubI, boundRight)
+            test=mapLinearIntersection(xind,yind,iind,
+                                       aMinusEps,aPlusEps,bPlusEps,bMinusEps,
+                                       boundLeft,nodeSubI,boundRight)
 
-            if(iInterval == -1 and test):
+            if( iInterval == -1 and test ):
                 iInterval = i
                 break
 
         # - "B" check for extension
+        
+        for i in range(iStart,iEnd+1):
 
-        for i in range(iStart, iEnd + 1):
-
-            nodeSubI = ar[i]
-            boundLeft = bd[i][0]
-            boundRight = bd[i][1]
+            nodeSubI=ar[i]
+            boundLeft=bd[i][0]
+            boundRight=bd[i][1]
 
-            testB = mapLinearIntersection(xind, yind, 'b',
-                                       aMinusEps, aPlusEps, bPlusEps, bMinusEps,
-                                       boundLeft, nodeSubI, boundRight)
+            testB=mapLinearIntersection(xind,yind,'b',
+                                       aMinusEps,aPlusEps,bPlusEps,bMinusEps,
+                                       boundLeft,nodeSubI,boundRight)
 
-            if(iIntervalB == -1 and testB):
+            if( iIntervalB == -1 and testB ):
                 iIntervalB = i
                 break
 
-        # rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
+        #rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
         #
         #  right interval check
         #
-        # rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
+        #rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr
 
-        for j in range(jStart, jEnd + 1):
+        for j in range(jStart,jEnd+1):
 
-            nodeSubI = ar[j]
-            boundLeft = bd[j][0]
-            boundRight = bd[j][1]
+            nodeSubI=ar[j]
+            boundLeft=bd[j][0]
+            boundRight=bd[j][1]
 
             #
             #  user test
             #
 
-            test = mapLinearIntersection(xind, yind, iind,
-                                       aMinusEps, aPlusEps, bPlusEps, bMinusEps,
-                                       boundLeft, nodeSubI, boundRight)
+            test=mapLinearIntersection(xind,yind,iind,
+                                       aMinusEps,aPlusEps,bPlusEps,bMinusEps,
+                                       boundLeft,nodeSubI,boundRight)
 
-            if((jInterval == -1 and iInterval != -1 and test == 0 and j <= jEnd)):
-                jInterval = j - 1
+            if( ( jInterval == -1 and iInterval != -1 and test == 0  and j <= jEnd ) ):
+                jInterval = j-1
 
-            if((j == length - 1 and test == 1)):
+            if( (j == length-1 and test == 1) ):
                 jInterval = j
-
+                
                 # no break here...
 
         #
-        #  B test on right
+        #  B test on right 
         #
 
-        for j in range(jStart, jEnd + 1):
+        for j in range(jStart,jEnd+1):
 
-            nodeSubI = ar[j]
-            boundLeft = bd[j][0]
-            boundRight = bd[j][1]
+            nodeSubI=ar[j]
+            boundLeft=bd[j][0]
+            boundRight=bd[j][1]
 
-            testB = mapLinearIntersection(xind, yind, 'b',
-                                       aMinusEps, aPlusEps, bPlusEps, bMinusEps,
-                                       boundLeft, nodeSubI, boundRight)
+            testB=mapLinearIntersection(xind,yind,'b',
+                                       aMinusEps,aPlusEps,bPlusEps,bMinusEps,
+                                       boundLeft,nodeSubI,boundRight)
 
-            if((jIntervalB == -1 and iIntervalB != -1 and testB == 0 and j <= jEnd)):
-                jIntervalB = j - 1
+            if( ( jIntervalB == -1 and iIntervalB != -1 and testB == 0  and j <= jEnd ) ):
+                jIntervalB = j-1
 
-            if((j == length - 1 and testB == 1)):
+            if( ( j == length-1 and testB == 1) ):
                 jIntervalB = j
 
-    # eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
+
+    #eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
     #
     #  extension check
     #
-    # eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
+    #eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
 
     if(iind == 'e'):
 
         # if B index does not exist return
-        if(iIntervalB < 0 or jIntervalB < 0):
+        if(iIntervalB < 0 or jIntervalB <0):
             return None
 
         # if user index exists:
-        elif ((iInterval > -1 and jInterval > -1)):
+        elif ( ( iInterval > -1 and jInterval > -1 ) ):
 
             if(jInterval < iInterval):
-
-                npoints = iInterval - jInterval
+                
+                npoints=iInterval-jInterval
                 if(npoints > 0):
-                    (iInterval, jInterval) = (jInterval + 1, iInterval + 1)
-
+                    (iInterval,jInterval)=(jInterval+1,iInterval+1)
+                    
                 else:
-                    jInterval = iInterval
-                    iInterval = jInterval + 1
-
+                    jInterval=iInterval
+                    iInterval=jInterval+1
+                    
             else:
 
-                iInterval = iInterval - 1
-                jInterval = jInterval + 1
-
+                iInterval = iInterval-1
+                jInterval = jInterval+1
+                
         # else set index interval to B index interval
         else:
-
-            iInterval = iIntervalB
-            jInterval = jIntervalB
+            
+            iInterval=iIntervalB
+            jInterval=jIntervalB
 
         if(iInterval == jInterval):
-            if(x < ar[iInterval] and iInterval > 0):
-                iInterval = jInterval - 1
-            elif(jIntervalB < length - 1):
-                jInterval = iInterval + 1
+            if( x < ar[iInterval] and iInterval > 0 ):
+                iInterval=jInterval-1
+            elif( jIntervalB < length-1 ): 
+                jInterval=iInterval+1
 
         if(jInterval < iInterval):
-            npoints = jInterval - iInterval
+            npoints=jInterval-iInterval
             if(npoints > 2):
-                jInterval = iIntervalB
-                iInterval = jIntervalB
+                jInterval=iIntervalB
+                iInterval=jIntervalB
             else:
-                jInterval = iIntervalB
-                iInterval = jIntervalB + 1
-
-        # Since the lookup is linear, ensure that the result is in range
-        # [0..length)
-        iInterval = max(iInterval, 0)
-        jInterval = min(jInterval, length - 1)
+                jInterval=iIntervalB
+                iInterval=jIntervalB+1
 
-    # ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
+        # Since the lookup is linear, ensure that the result is in range [0..length)
+        iInterval = max(iInterval,0)
+        jInterval = min(jInterval,length-1)
+            
+    #ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
     #
     # final checks
     #
-    # ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
+    #ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
 
     # if jInteval < iInterval have a single point; set to iInterval
 
     if(jInterval < iInterval):
-        jInterval = iInterval
-
+        jInterval=iInterval
+        
     elif(jInterval < 0 and iInterval < 0):
         return None
-
+    
     # Reverse back if necessary
-    if direc == 'dec':
-        iInterval, jInterval = length - jInterval - 1, length - iInterval - 1
-
-    iReturn = iInterval
-    jReturn = jInterval + 1
+    if direc=='dec':
+        iInterval,jInterval = length-jInterval-1,length-iInterval-1
+    
+    iReturn=iInterval
+    jReturn=jInterval+1
 
-    return (iReturn, jReturn)
+    return (iReturn,jReturn)
 
-
-def lookupArray(ar, value):
+def lookupArray(ar,value):
     """Lookup value in array ar. Return index such that:
     (a) ar is monotonically increasing:
     value <= ar[index], index==0..len(ar)-1
@@ -563,46 +537,42 @@ def lookupArray(ar, value):
     value < ar[index], index==len(ar)
     """
     ar = numpy.ma.filled(ar)
-    ascending = (ar[0] < ar[-1]) or len(ar) == 1
+    ascending = (ar[0]<ar[-1]) or len(ar)==1
     if ascending:
-        index = numpy.searchsorted(ar, value)
+        index = numpy.searchsorted(ar,value)
     else:
         revar = ar[::-1]
-        index = numpy.searchsorted(revar, value)
-        if index < len(revar) and value == revar[index]:
-            index = len(ar) - index - 1
+        index = numpy.searchsorted(revar,value)
+        if index<len(revar) and value==revar[index]:
+            index = len(ar)-index-1
         else:
-            index = len(ar) - index
+            index = len(ar)-index
     return index
 
-# Lookup a value in a monotonic 1-D array. value is a scalar
-# Always returns a valid index for ar
-# def lookupArray(ar,value):
-# ascending = (ar[0]<ar[-1])
-# if ascending:
-# index = numpy.searchsorted(ar,value)
-# else:
-# index = numpy.searchsorted(ar[::-1],value)
-# index = len(ar)-index-1
-# index = max(index,0)
-# index = min(index,len(ar))
-# return index
+## # Lookup a value in a monotonic 1-D array. value is a scalar
+## # Always returns a valid index for ar
+## def lookupArray(ar,value):
+##     ascending = (ar[0]<ar[-1])
+##     if ascending:
+##         index = numpy.searchsorted(ar,value)
+##     else:
+##         index = numpy.searchsorted(ar[::-1],value)
+##         index = len(ar)-index-1
+##     index = max(index,0)
+##     index = min(index,len(ar))
+##     return index
 
 # Return true if vector vec1 is a subset of vec2, within tolerance tol.
 # Return second arg of index, if it is a subset
-
-
 def isSubsetVector(vec1, vec2, tol):
-    index = lookupArray(vec2, vec1[0])
-    if index > (len(vec2) - len(vec1)):
-        return (0, -1)                   # vec1 is too large, cannot be a subset
-    issubset = numpy.alltrue(
-    numpy.less(numpy.absolute(vec1 - vec2[index:index + len(vec1)]), tol))
+    index = lookupArray(vec2,vec1[0])
+    if index>(len(vec2)-len(vec1)):
+        return (0,-1)                   # vec1 is too large, cannot be a subset
+    issubset = numpy.alltrue(numpy.less(numpy.absolute(vec1-vec2[index:index+len(vec1)]),tol))
     if issubset:
-        return (issubset, index)
+        return (issubset,index)
     else:
-        return (0, -1)
-
+        return (0,-1)
 
 def isOverlapVector(vec1, vec2, atol=1.e-8):
     """Returns (isoverlap, index) where:
@@ -610,35 +580,32 @@ def isOverlapVector(vec1, vec2, atol=1.e-8):
     index is the index such that vec1[0]<=vec2[index]. If index==len(vec2),
     then vec1[0]>vec2[len(vec2)-1]
     """
-    index = lookupArray(vec2, vec1[0])
-    if index == 0 and abs(vec1[0] - vec2[0]):
-        return (0, index)
-    elif index == len(vec2):
-        return (1, index)
+    index = lookupArray(vec2,vec1[0])
+    if index==0 and abs(vec1[0]-vec2[0]):
+        return (0,index)
+    elif index==len(vec2):
+        return (1,index)
     else:
-        ar2 = vec2[index:index + len(vec1)]
+        ar2 = vec2[index:index+len(vec1)]
         ar1 = vec1[:len(ar2)]
         isoverlap = numpy.ma.allclose(ar1, ar2, atol=atol)
     if isoverlap:
-        return (isoverlap, index)
+        return (isoverlap,index)
     else:
-        return (0, index)
-
+        return (0,index)
 
 def allclose(ax1, ax2, rtol=1.e-5, atol=1.e-8):
     """True if all elements of axes ax1 and ax2 are close,
     in the sense of numpy.ma.allclose."""
-    return ((ax1 is ax2) or numpy.ma.allclose(ax1[:], ax2[:], rtol=rtol, atol=atol))
+    return ((ax1 is ax2) or numpy.ma.allclose(ax1[:],ax2[:],rtol=rtol,atol=atol))
 
-# AbstractAxis defines the common axis interface.
+# AbstractAxis defines the common axis interface. 
 # Concrete axis classes are derived from this class.
 
-
 class AbstractAxis(CdmsObj):
-
-    def __init__(self, parent, node):
-        CdmsObj.__init__(self, node)
-        val = self.__cdms_internals__ + ['id', ]
+    def __init__ (self, parent, node):
+        CdmsObj.__init__ (self, node)
+        val = self.__cdms_internals__ + ['id',]
         self.___cdms_internals__ = val
         self.parent = parent
         self.id = id
@@ -646,11 +613,11 @@ class AbstractAxis(CdmsObj):
         self._data_ = None
         # Cached wraparound values for circular axes
         self._doubledata_ = None
+        
+    def __str__ (self):
+        return string.join(self.listall(), "\n") + "\n"
 
-    def __str__(self):
-        return "\n".join(self.listall()) + "\n"
-
-    __repr__=__str__
+    __repr__ = __str__
 
     def __len__(self):
         raise CDMSError, MethodNotImplemented
@@ -659,7 +626,7 @@ class AbstractAxis(CdmsObj):
         return (len(self),)
 
     def _getdtype(self, name):
-        tc=self.typecode()
+        tc = self.typecode()
         return numpy.dtype(tc)
 
     def __getitem__(self, key):
@@ -681,17 +648,17 @@ class AbstractAxis(CdmsObj):
     # If persistent is true, write metadata to the container.
     def designateLatitude(self, persistent=0):
         if persistent:
-            self.axis="Y"
+            self.axis = "Y"
         else:
-            self.__dict__['axis']="Y"
+            self.__dict__['axis'] = "Y"
             self.attributes['axis']="Y"
 
     # Return true iff the axis is a latitude axis
     def isLatitude(self):
-        id=self.id.strip().lower()
-        if (hasattr(self, 'axis') and self.axis == 'Y'): return 1
-        units=getattr(self, "units", "").strip().lower()
-        if units in ["degrees_north", "degree_north", "degree_n", "degrees_n", "degreen", "degreesn"]:
+        id = self.id.strip().lower()
+        if (hasattr(self,'axis') and self.axis=='Y'): return 1
+        units = getattr(self,"units","").strip().lower()
+        if units in ["degrees_north","degree_north","degree_n","degrees_n","degreen","degreesn"]:
           return 1
         return (id[0:3] == 'lat') or (id in latitude_aliases)
 
@@ -699,25 +666,25 @@ class AbstractAxis(CdmsObj):
     # If persistent is true, write metadata to the container.
     def designateLevel(self, persistent=0):
         if persistent:
-            self.axis="Z"
+            self.axis = "Z"
         else:
-            self.__dict__['axis']="Z"
+            self.__dict__['axis'] = "Z"
             self.attributes['axis']="Z"
 
     # Return true iff the axis is a level axis
     def isLevel(self):
-        id=self.id.strip().lower()
-        if (hasattr(self, 'axis') and self.axis == 'Z'): return 1
-        if getattr(self, "positive", "").strip().lower() in ["up", "down"]:
+        id = self.id.strip().lower()
+        if (hasattr(self,'axis') and self.axis=='Z'): return 1
+        if getattr(self,"positive","").strip().lower() in ["up","down"]:
           return 1
         try:
-          # Ok let's see if this thing as pressure units
+          # Ok, let's see if this thing has pressure units
           import genutil
-          p=genutil.udunits(1, "Pa")
-          units=getattr(self, 'units', "").strip()
+          p=genutil.udunits(1,"Pa")
+          units=getattr(self,'units',"").strip()
           p2=p.to(units)
           return 1
-        except Exception, err:
+        except Exception,err:
           pass
         return ((id[0:3] == 'lev') or (id[0:5] == 'depth') or (id in level_aliases))
 
@@ -726,30 +693,30 @@ class AbstractAxis(CdmsObj):
     # If modulo is defined, set as circular
     def designateLongitude(self, persistent=0, modulo=360.0):
         if persistent:
-            self.axis="X"
+            self.axis = "X"
             if modulo is None:
-                self.topology='linear'
+                self.topology = 'linear'
             else:
-                self.modulo=modulo
-                self.topology='circular'
+                self.modulo = modulo
+                self.topology = 'circular'
         else:
-            self.__dict__['axis']="X"
+            self.__dict__['axis'] = "X"
             self.attributes['axis']="X"
             if modulo is None:
-                self.__dict__['topology']='linear'
-                self.attributes['topology']='linear'
+                self.__dict__['topology'] = 'linear'
+                self.attributes['topology'] = 'linear'
             else:
-                self.__dict__['modulo']=modulo
-                self.__dict__['topology']='circular'
-                self.attributes['modulo']=modulo
-                self.attributes['topology']='circular'
+                self.__dict__['modulo'] = modulo
+                self.__dict__['topology'] = 'circular'
+                self.attributes['modulo'] = modulo
+                self.attributes['topology'] = 'circular'
 
     # Return true iff the axis is a longitude axis
     def isLongitude(self):
-        id=self.id.strip().lower()
-        if (hasattr(self, 'axis') and self.axis == 'X'): return 1
-        units=getattr(self, "units", "").strip().lower()
-        if units in ["degrees_east", "degree_east", "degree_e", "degrees_e", "degreee", "degreese"]:
+        id = self.id.strip().lower()
+        if (hasattr(self,'axis') and self.axis=='X'): return 1
+        units = getattr(self,"units","").strip().lower()
+        if units in ["degrees_east","degree_east","degree_e","degrees_e","degreee","degreese"]:
           return 1
         return (id[0:3] == 'lon') or (id in longitude_aliases)
 
@@ -757,65 +724,63 @@ class AbstractAxis(CdmsObj):
     # If persistent is true, write metadata to the container.
     def designateTime(self, persistent=0, calendar=None):
         if calendar is None:
-            calendar=cdtime.DefaultCalendar
+            calendar = cdtime.DefaultCalendar
         if persistent:
-            self.axis="T"
+            self.axis = "T"
             if calendar is not None:
                 self.setCalendar(calendar, persistent)
         else:
-            self.__dict__['axis']="T"
-            self.attributes['axis']="T"
+            self.__dict__['axis'] = "T"
+            self.attributes['axis'] = "T"
             if calendar is not None:
                 self.setCalendar(calendar, persistent)
 
     # For isTime(), keep track of whether each id is for a time axis or not, for better performance.
-    # This dictionary is a class variable (not a member of any particular
-    # instance).
-    idtaxis={}  # id:type where type is 'T' for time, 'O' for other
+    # This dictionary is a class variable (not a member of any particular instance).
+    idtaxis = {}  # id:type where type is 'T' for time, 'O' for other
 
     # Return true iff the axis is a time axis
     def isTime(self):
-        id=self.id.strip().lower()
-        if hasattr(self, 'axis'):
-            if self.axis == 'T': return 1
+        id = self.id.strip().lower()
+        if hasattr(self,'axis'):
+            if self.axis=='T': return 1
             elif self.axis is not None: return 0
         # Have we saved the id-to-axis type information already?
         if id in self.idtaxis:
-            if self.idtaxis[id] == 'T':
+            if self.idtaxis[id]=='T':
                 return 1
             else:
                 return 0
-        # Try to figure it out from units
+        ## Try to figure it out from units
         try:
           import genutil
-          units=getattr(self, "units", "").lower()
-          sp=units.split("since")
-          if len(sp) > 1:
-            t=genutil.udunits(1, "day")
-            s=sp[0].strip()
-            if s in t.available_units() and t.known_units()[s] == "TIME":
-              self.idtaxis[id]='T'
+          units=getattr(self,"units","").lower()
+          sp = units.split("since")
+          if len(sp)>1:
+            t=genutil.udunits(1,"day")
+            s = sp[0].strip()
+            if s in t.available_units() and t.known_units()[s]=="TIME":
+              self.idtaxis[id] = 'T'
               return 1
-            # try the plural version since udunits only as singular (day noy
-            # days)
-            s=s + "s"
-            if s in t.available_units() and t.known_units()[s] == "TIME":
-              self.idtaxis[id]='T'
+            # Try the plural version since udunits only has singular forms (day, not days)
+            s=s+"s"
+            if s in t.available_units() and t.known_units()[s]=="TIME":
+              self.idtaxis[id] = 'T'
               return 1
         except:
           pass
-        # return (id[0:4] == 'time') or (id in time_aliases)
+        #return (id[0:4] == 'time') or (id in time_aliases)
         if (id[0:4] == 'time') or (id in time_aliases):
             self.idtaxis[id]='T'
             return 1
         else:
-            self.idtaxis[id]='O'
+            self.idtaxis[id] = 'O'
             return 0
 
     # Return true iff the axis is a forecast axis
     def isForecast(self):
-        id=self.id.strip().lower()
-        if (hasattr(self, 'axis') and self.axis == 'F'): return 1
+        id = self.id.strip().lower()
+        if (hasattr(self,'axis') and self.axis=='F'): return 1
         return (id[0:6] == 'fctau0') or (id in forecast_aliases)
     def isForecastTime(self):
         return self.isForecast()
@@ -825,15 +790,15 @@ class AbstractAxis(CdmsObj):
         if not hasattr(self, 'units'):
             raise CDMSError, "No time units defined"
         if calendar is None:
-            calendar=self.getCalendar()
+            calendar = self.getCalendar()
         if self.isForecast():
-            result=[forecast.comptime(t) for t in self[:]]
+            result = [ forecast.comptime(t) for t in self[:] ]
         else:
-            result=[]
+            result = []
             for val in self[:]:
                 result.append(cdtime.reltime(val, self.units).tocomp(calendar))
         return result
-
+    
     #
     #  mf 20010418 -- output DTGs (YYYYMMDDHH)
     #
@@ -841,22 +806,22 @@ class AbstractAxis(CdmsObj):
         "Array version of cdtime tocomp. Returns a list of component times in DTG format."
         if not hasattr(self, 'units'):
             raise CDMSError, "No time units defined"
-        result=[]
+        result = []
         if calendar is None:
-            calendar=self.getCalendar()
+            calendar = self.getCalendar()
         for val in self[:]:
             comptime=cdtime.reltime(val, self.units).tocomp(calendar)
             s=repr(comptime)
-            tt=s.split(' ')
-
-            ttt=tt[0].split('-')
+            tt=string.split(s,' ')
+        
+            ttt=string.split(tt[0],'-')
             yr=int(ttt[0])
             mo=int(ttt[1])
             da=int(ttt[2])
-
-            ttt=tt[1].split(':')
+        
+            ttt=string.split(tt[1],':')
             hr=int(ttt[0])
-            dtg="%04d%02d%02d%02d" % (yr, mo, da, hr)
+            dtg="%04d%02d%02d%02d"%(yr,mo,da,hr)
             result.append(dtg)
 
         return result
@@ -866,35 +831,27 @@ class AbstractAxis(CdmsObj):
         import datetime
         if not hasattr(self, 'units'):
             raise CDMSError, "No time units defined"
-        result=[]
+        result = []
         if calendar is None:
-            calendar=self.getCalendar()
+            calendar = self.getCalendar()
         for val in self[:]:
             c=cdtime.reltime(val, self.units).tocomp(calendar)
-            dtg=datetime.datetime(
-    c.year,
-     c.month,
-     c.day,
-     c.hour,
-     c.minute,
-     int(c.second),
-     int((c.second - int(c.second)) * 1000))
+            dtg = datetime.datetime(c.year,c.month,c.day,c.hour,c.minute,int(c.second),int((c.second-int(c.second))*1000))
             result.append(dtg)
         return result
 
-    def asRelativeTime(self, units=None):
+    def asRelativeTime( self, units=None ):
         "Array version of cdtime torel. Returns a list of relative times."
-        sunits=getattr(self, 'units', None)
-        if sunits is None or sunits == 'None':
+        sunits = getattr(self,'units',None)
+        if sunits==None or sunits=='None':
             raise CDMSError, "No time units defined"
-        if units is None or units == 'None':
+        if units==None or units=='None':
             units=sunits
         if self.isForecast():
-            result=[forecast.comptime(t).torel(units) for t in self[:]]
+            result = [ forecast.comptime(t).torel(units) for t in self[:] ]
         else:
-            cal=self.getCalendar()
-            result=[cdtime.reltime(t, sunits).torel(units, cal)
-                                     for t in self[:]]
+            cal = self.getCalendar()
+            result = [ cdtime.reltime(t,sunits).torel(units,cal) for t in self[:] ]
         return result
 
     def toRelativeTime(self, units, calendar=None):
@@ -903,35 +860,31 @@ class AbstractAxis(CdmsObj):
             raise CDMSError, "No time units defined"
         n=len(self[:])
         b=self.getBounds()
-        scal=self.getCalendar()
+        scal = self.getCalendar()
         if calendar is None:
-            calendar=scal
+            calendar = scal
         else:
             self.setCalendar(calendar)
         for i in range(n):
             tmp=cdtime.reltime(self[i], self.units).tocomp(scal)
-            tmp2=numpy.array(float(tmp.torel(units, calendar).value)).astype(
-                self[:].dtype.char)
-            # if i==1 : print
-            # self[:].dtype.char,'tmp2:',tmp2,tmp2.astype('f'),self[i],self[i].astype('f')
+            tmp2 = numpy.array(float(tmp.torel(units, calendar).value)).astype(self[:].dtype.char)
+            ## if i==1 : print self[:].dtype.char,'tmp2:',tmp2,tmp2.astype('f'),self[i],self[i].astype('f')
             self[i]=tmp2
             if b is not None:
-                tmp=cdtime.reltime(b[i, 0], self.units).tocomp(scal)
-                b[i, 0]=numpy.array(
-                    float(tmp.torel(units, calendar).value)).astype(b.dtype.char)
-                tmp=cdtime.reltime(b[i, 1], self.units).tocomp(scal)
-                b[i, 1]=numpy.array(
-                    float(tmp.torel(units, calendar).value)).astype(b.dtype.char)
+                tmp=cdtime.reltime(b[i,0], self.units).tocomp(scal)
+                b[i,0]=numpy.array(float(tmp.torel(units, calendar).value)).astype(b.dtype.char)
+                tmp=cdtime.reltime(b[i,1], self.units).tocomp(scal)
+                b[i,1]=numpy.array(float(tmp.torel(units, calendar).value)).astype(b.dtype.char)
         if b is not None:
             self.setBounds(b)
         self.units=units
         return
 
-# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 #
 # mf 20010412 -- test if an Axis is intrinsically circular
 #
-# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 
 
     # Return true iff the axis wraps around
@@ -939,22 +892,22 @@ class AbstractAxis(CdmsObj):
     # (1) self.topology=='circular', or
     # (2) self.topology is undefined, and the axis is a longitude
     def isCircularAxis(self):
-
-        if hasattr(self, 'topology'):
-            iscircle=(self.topology == 'circular')
+        
+        if hasattr(self,'topology'):
+            iscircle = (self.topology=='circular')
         elif self.isLongitude():
-            iscircle=1
+            iscircle = 1
         else:
-            iscircle=0
+            iscircle = 0
 
         return iscircle
 
 
-# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 #
 # mf 20010405 -- test if an transient Axis is REALLY circular
 #
-# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 
 
     # Return true iff the axis wraps around
@@ -963,45 +916,45 @@ class AbstractAxis(CdmsObj):
     # (2) self.topology is undefined, and the axis is a longitude
     def isCircular(self):
 
-        if hasattr(self, 'realtopology'):
-            if self.realtopology == 'circular': return 1
-            elif self.realtopology == 'linear': return 0
+        if hasattr(self,'realtopology'):
+            if self.realtopology=='circular': return 1
+            elif self.realtopology=='linear': return 0
         if(len(self) < 2):
             return 0
-
-        baxis=self[0]
-        eaxis=self[-1]
-        deltaend=self[-1] - self[-2]
-        eaxistest=eaxis + deltaend - baxis
+        
+        baxis = self[0]
+        eaxis = self[-1]
+        deltaend = self[-1] - self[-2]
+        eaxistest = eaxis + deltaend - baxis
 
         cycle=self.getModuloCycle()
 
-        tol=0.01 * deltaend
+        tol=0.01*deltaend
 
         test=0
         if(abs(eaxistest - cycle) < tol): test=1
-
-        if hasattr(self, 'topology') and test == 1:
-            iscircle=(self.topology == 'circular')
+        
+        if hasattr(self,'topology') and test == 1:
+            iscircle = (self.topology=='circular')
         elif (self.isLongitude() and test == 1):
-            iscircle=1
+            iscircle = 1
         else:
-            iscircle=0
+            iscircle = 0
 
         # save realtopology attribute in __dict__, don't write it to the file
-        if iscircle == 1: self.__dict__['realtopology']='circular'
-        elif iscircle == 0: self.__dict__['realtopology']='linear'
+        if iscircle==1:  self.__dict__['realtopology'] = 'circular'
+        elif iscircle==0: self.__dict__['realtopology'] = 'linear'
         return iscircle
 
     def designateCircular(self, modulo, persistent=0):
         if persistent:
-            self.topology='circular'
-            self.modulo=modulo
+            self.topology = 'circular'
+            self.modulo = modulo
         else:
-            self.__dict__['topology']='circular'
-            self.__dict__['modulo']=modulo
-            self.attributes['modulo']=modulo
-            self.attributes['topology']='linear'
+            self.__dict__['topology'] = 'circular'
+            self.__dict__['modulo'] = modulo
+            self.attributes['modulo'] = modulo
+            self.attributes['topology'] = 'linear'
 
     def isLinear(self):
         raise CDMSError, MethodNotImplemented
@@ -1048,62 +1001,62 @@ class AbstractAxis(CdmsObj):
     # or None. If the axis does not have a calendar attribute, return the global
     # calendar.
     def getCalendar(self):
-        if hasattr(self, 'calendar'):
-            calendar=self.calendar.lower()
+        if hasattr(self,'calendar'):
+            calendar = string.lower(self.calendar)
         else:
-            calendar=None
+            calendar = None
 
-        cdcal=tagToCalendar.get(calendar, cdtime.DefaultCalendar)
+        cdcal = tagToCalendar.get(calendar, cdtime.DefaultCalendar)
         return cdcal
 
     # Set the calendar
     def setCalendar(self, calendar, persistent=1):
         if persistent:
-            self.calendar=calendarToTag.get(calendar, None)
+            self.calendar = calendarToTag.get(calendar, None)
             self.attributes['calendar']=self.calendar
             if self.calendar is None:
                 raise CDMSError, InvalidCalendar % calendar
         else:
-            self.__dict__['calendar']=calendarToTag.get(calendar, None)
+            self.__dict__['calendar'] = calendarToTag.get(calendar, None)
             self.attributes['calendar']=self.calendar
             if self.__dict__['calendar'] is None:
                 raise CDMSError, InvalidCalendar % calendar
 
     def getData(self):
         raise CDMSError, MethodNotImplemented
-
+ 
     # Return the entire array
     def getValue(self):
         return self.__getitem__(slice(None))
 
-    def assignValue(self, data):
-        self.__setitem__(slice(None), data)
+    def assignValue(self,data):
+        self.__setitem__(slice(None),data)
 
     def _time2value(self, value):
         """ Map value of type comptime, reltime, or string of form "yyyy-mm-dd hh:mi:ss" to value"""
         if self.isTime():
             if type(value) in CdtimeTypes:
-                value=value.torel(self.units, self.getCalendar()).value
-            elif isinstance(value, basestring) and value not in [':', unspecified]:
-                cal=self.getCalendar()
-                value=cdtime.s2c(value, cal).torel(self.units, cal).value
+                value = value.torel(self.units, self.getCalendar()).value
+            elif type(value) is types.StringType and value not in [':',unspecified]:
+                cal = self.getCalendar()
+                value = cdtime.s2c(value, cal).torel(self.units, cal).value
         return value
 
 
     def getModuloCycle(self):
 
-        if hasattr(self, 'modulo'):
-            cycle=self.modulo
+        if hasattr(self,'modulo'):
+            cycle = self.modulo
             #
             # mf 20010419 test if attribute is a string (non CF), set to 360.0
             #
-            if isinstance(cycle, basestring):
-                cycle=360.0
+            if(type(cycle) == types.StringType):
+                cycle = 360.0
         else:
-            cycle=360.0
+            cycle = 360.0
 
         if isinstance(cycle, numpy.ndarray):
-            cycle=cycle[0]
+            cycle = cycle[0]
 
         return(cycle)
 
@@ -1115,7 +1068,7 @@ class AbstractAxis(CdmsObj):
 
         return(self.getModuloCycle())
 
-    def mapInterval(self, interval, indicator='ccn', cycle=None):
+    def mapInterval(self,interval,indicator='ccn',cycle=None):
         """
         Map coordinate interval to index interval. interval has one of the forms:
 
@@ -1146,216 +1099,210 @@ class AbstractAxis(CdmsObj):
         Note: if the interval is interior to the axis, but does not span any
         axis element, a singleton (i,i+1) indicating an adjacent index is returned.
         """
-        i, j, k=self.mapIntervalExt(interval, indicator, cycle)
-        j=min(j, i + len(self))
-        # i=i-1
-        return (i, j)
+        i,j,k = self.mapIntervalExt(interval,indicator,cycle)
+        j = min(j, i+len(self))
+        #i=i-1
+        return (i,j)
 
 
-# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 #
 # mf 20010308 - 20010412 -- general handing of wrapping
 #
-# mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
+#mfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmfmf
 
-    def mapIntervalExt(
-        self, interval, indicator='ccn', cycle=None, epsilon=None):
+    def mapIntervalExt(self,interval,indicator='ccn',cycle=None,epsilon=None):
         """Like mapInterval, but returns (i,j,k) where k is stride,
         and (i,j) is not restricted to one cycle."""
 
         # nCycleMax : max number of cycles a user a specify in wrapping
 
         nCycleMax=6
-
+        
         # interval is None returns the full interval
-        if interval is None or interval == ':':
+        if interval is None or interval==':':
             return (0, len(self), 1)
 
         # Allow intervals of the same form as getRegion.
-        if len(interval) == 3:
-            x, y, indicator=interval
-            interval=(x, y)
-        elif len(interval) == 4:
-            x, y, indicator, cycle=interval
-            interval=(x, y)
+        if len(interval)==3:
+            x,y,indicator = interval
+            interval = (x,y)
+        elif len(interval)==4:
+            x,y,indicator,cycle = interval
+            interval = (x,y)
 
         # check length of indicator if overridden by user
         #
 
-        indicator=indicator.lower()
-        if len(indicator) == 2: indicator += 'n'
+        indicator = string.lower(indicator)
+        if len(indicator)==2: indicator += 'n'
 
-        if((len(indicator) != 3) or
-               ((indicator[0] != 'c' and indicator[0] != 'o') or
+        if( ( len(indicator) != 3 ) or
+               ( (indicator[0] != 'c' and indicator[0] != 'o') or
                  (indicator[1] != 'c' and indicator[1] != 'o') or
                  (indicator[2] != 'n' and indicator[2] != 'b' and indicator[2] != 's' and
                   indicator[2] != 'e')
                  )
             ):
             raise CDMSError, "EEE: 3-character interval/intersection indicator incomplete or incorrect = "\
-                 + indicator
-
+                 +indicator
+       
         if self._data_ is None:
-            self._data_=self.getData()
+            self._data_ = self.getData()
 
-        # ttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt
+        #ttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttttt
         # Handle time types
-        interval=(
-    self._time2value(interval[0]),
-     self._time2value(interval[1]))
+        interval = (self._time2value(interval[0]), self._time2value(interval[1]))
 
         # If the interval is reversed wrt self, reverse the interval and
         # set the stride to -1
-        if (interval[0] <= interval[1]) == (self[0] <= self[-1]):
+        if (interval[0]<=interval[1])==(self[0]<=self[-1]):
             stride=1
         else:
             stride=-1
-            interval=(interval[1], interval[0])
-            indicator=indicator[1] + indicator[0] + indicator[2]
+            interval = (interval[1],interval[0])
+            indicator = indicator[1]+indicator[0]+indicator[2]
 
-        # mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm
+        #mmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm
         #
         # basic test for wrapping - is axis REALLY circular?
         #
-        # ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
+        #ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
 
-        xind=indicator[0]
-        yind=indicator[1]
-        iind=indicator[2]
+        xind = indicator[0]
+        yind = indicator[1]
+        iind = indicator[2]
 
-        xi, yi=interval
+        xi,yi = interval
 
-        length=len(self)
-        ar=self[:]
-        ar0=ar[0]
-        arn=ar[-1]
-        armin=min(ar0, arn)
-        armax=max(ar0, arn)
+        length = len(self)
+        ar = self[:]
+        ar0 = ar[0]
+        arn = ar[-1]
+        armin = min(ar0,arn)
+        armax = max(ar0,arn)
 
         # Wrapped if circular and at least one value is outside the axis range.
-        wraptest1=self.isCircular()
-        wraptest2=not ((armin <= xi <= armax) and (armin <= yi <= armax))
-
+        wraptest1 = self.isCircular()
+        wraptest2 = not ((armin <= xi <= armax) and (armin <= yi <= armax))
+        
         if (wraptest1 and wraptest2):
 
             #
-            # find cycle and calc # of cycles in the interval
+            #  find cycle and calc # of cycles in the interval
             #
-
+            
             cycle=self.getModulo()
+            
+            intervalLength=yi-xi
+            intervalCycles=intervalLength/cycle
+            
+            bd = self.getBounds()
+            
+            nPointsCycle = len(ar)
 
-            intervalLength=yi - xi
-            intervalCycles=intervalLength / cycle
-
-            bd=self.getBounds()
-
-            nPointsCycle=len(ar)
-
-            ar0=ar[0]
-            ar1=ar[-1]
+            ar0 = ar[0]
+            ar1 = ar[-1]
 
             #
             # test for reversed coordinates
             #
+            
+            if ar0>ar1:
+                cycle = -1 * abs(cycle)
 
-            if ar0 > ar1:
-                cycle=-1 * abs(cycle)
-
-            # eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
+            #eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
             #
             #  make sure xi<yi and shift to positive axis indices
             #
-            # eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
-
+            #eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee
+            
             # Ensure that xi<yi
-
-            if cycle > 0 and yi < xi: xi, yi=yi, xi
-            if cycle < 0 and yi > xi: xi, yi=yi, xi
+            
+            if cycle>0 and yi<xi: xi,yi = yi,xi
+            if cycle<0 and yi>xi: xi,yi = yi,xi
 
             # calculate the number of cycles to shift to positive side
+            
+            nCycleShift=numpy.floor((xi-ar0)/cycle)
+            xp = xi - cycle * nCycleShift
+            yp = xp + intervalLength
 
-            nCycleShift=numpy.floor((xi - ar0) / cycle)
-            xp=xi - cycle * nCycleShift
-            yp=xp + intervalLength
-
-            # Extend the data vector with wraparound number of cycles in
-            # interval and shifts
-
-            nCycle=int(intervalCycles + 1.0 + 0.5) + abs(nCycleShift)
-
+            # Extend the data vector with wraparound number of cycles in interval and shifts
 
+            nCycle = int(intervalCycles + 1.0 + 0.5) + abs(nCycleShift)
+            
+                
             #
             # check if nCycle is > nCycleMax
             #
             if(nCycle >= nCycleMax):
                 raise CDMSError, InvalidNCycles + repr(nCycle)
 
-            self._doubledata_=numpy.concatenate((ar, ar + cycle))
+            self._doubledata_ = numpy.concatenate(( ar, ar + cycle ))
             k=2
-            while(k < nCycle):
-                self._doubledata_=numpy.concatenate(
-                    (self._doubledata_, ar + k * cycle))
-                k=k + 1
+            while(k<nCycle):
+                self._doubledata_ = numpy.concatenate(( self._doubledata_, ar + k*cycle ) )
+                k=k+1
 
             # Map the canonical coordinate interval (xp,yp) in the 'extended' data array
             # create axis to get the bounds array
 
-            bigar=self._doubledata_
+            bigar = self._doubledata_
             bigarAxis=createAxis(bigar)
-            bd=bigarAxis.getBounds()
+            bd = bigarAxis.getBounds()
             if bd is None:              # In case autobounds is off
-                bd=bigarAxis.genGenericBounds()
+                bd = bigarAxis.genGenericBounds()
 
             # run the more general mapLinearExt to get the indices
-
-            indexInterval=mapLinearExt(
-                bigar, bd, (xp, yp), indicator, wrapped=1)
+            
+            indexInterval= mapLinearExt(bigar,bd,(xp,yp),indicator,wrapped=1)
 
             #
             # check to make sure we got an interval
             #
-
+            
             if(indexInterval is None):
                 return None
 
-            i, j=indexInterval
+            i,j=indexInterval
 
             #
             #  now shift i back
             #
 
-            i=i + int(nCycleShift * float(nPointsCycle))
+            i = i + int(nCycleShift*float(nPointsCycle))
 
-            #
+            #   
             #  adjust the length of the output interval by the indicator
             #  mapLinear does the calc correctly, we have to modify because we
             #  are overriding with the (float)(number of cycles) in the interval
             #
-
-            j=j + int(nCycleShift * float(nPointsCycle))
-            retval=(i, j)
-
+            
+            j = j + int(nCycleShift*float(nPointsCycle))
+            retval = (i,j)
+            
         else:
-            bd=self.getBounds()
+            bd = self.getBounds()
             if bd is None:              # In case autobounds is off
-                bd=self.genGenericBounds()
-            retval=mapLinearExt(ar, bd, interval, indicator)
+                bd = self.genGenericBounds()
+            retval = mapLinearExt(ar, bd, interval, indicator)
 
         if retval is not None:
-            i, j=retval
-            if stride == -1:
+            i,j = retval
+            if stride==-1:
                 if(j == length):
-                    i, j=j - 1, i - 1
+                    i,j=j-1,i-1
                 else:
-                    i, j=j - 1, i - 1
-                if j == -1:
+                    i,j=j-1,i-1
+                if j==-1:
                     j=None
 
-            retval=(i, j, stride)
+            retval = (i,j,stride)
 
         return retval
 
-    def subaxis(self, i, j, k=1, wrap=True):
+    def subaxis(self,i,j,k=1, wrap=True):
         """Create a transient axis for the index slice [i:j:k]
         The stride k can be positive or negative. Wraparound is
         supported for longitude dimensions or those with a modulus attribute.
@@ -1364,80 +1311,80 @@ class AbstractAxis(CdmsObj):
         fullBounds = self.getBounds(isGeneric)
         _debug=0
         _debugprefix="SS__XX subaxis "
-
+        
 
         # Handle wraparound
-        modulo=None
-        size=len(self)
-
+        modulo = None
+        size = len(self)
+        
         #----------------------------------------------------------------------
-        # mf 20010328 negative stride i >= vice i >
+        # mf 20010328 negative stride i >= vice i > 
         #----------------------------------------------------------------------
-
-        if wrap and ((k > 0 and j > size) or (k < 0 and i >= size)) and self.isCircular():
-            modulo=self.getModuloCycle()
+        
+        if wrap and ((k>0 and j>size) or (k<0 and i >= size)) and self.isCircular():
+            modulo = self.getModuloCycle()
 
         if modulo is not None:
             # If self is decreasing and stride is positive,
             # or self is increasing and stride is negative, subtract the modulus,
             # otherwise add it.
-            if (self[0] > self[-1]) == (k > 0):
-                modulo=-modulo
+            if (self[0]>self[-1])==(k>0):
+                modulo = -modulo
 
             #----------------------------------------------------------------------
             #
             #  mf 20010329 -- N vice two slice scheme (more general)
             #
-            #------------------------------------------------------------------
+            #----------------------------------------------------------------------
 
             donew=1
 
             if(donew):
 
-                sn=splitSliceExt(slice(i, j, k), size)
-                if(_debug): print "SSSS1-------------------- ", sn, len(sn)
+                sn = splitSliceExt(slice(i,j,k),size)
+                if(_debug): print "SSSS1-------------------- ",sn,len(sn)
 
-                for kk in range(0, len(sn)):
+                for kk in range(0,len(sn)):
                     sl=sn[kk]
-                    if(_debug): print "SSSSSSSS kk = ", kk, sl
-                    part=self[sl] + kk * modulo
-                    if(_debug): print "SSSSSSSSSSSSSSS modulo", part[0], part[-1], modulo
+                    if(_debug): print "SSSSSSSS kk = ",kk,sl
+                    part = self[sl] + kk*modulo
+                    if(_debug): print "SSSSSSSSSSSSSSS modulo",part[0],part[-1],modulo
                     if(kk == 0):
-                        data=part
+                        data = part
                     else:
-                        data=numpy.concatenate((data, part))
+                        data = numpy.concatenate((data,part))
 
                     if fullBounds is not None:
-                        bound=fullBounds[sl] + kk * modulo
+                        bound = fullBounds[sl] + kk*modulo
                         if (kk == 0):
-                            bounds=bound
+                            bounds = bound
                         else:
-                            bounds=numpy.concatenate((bounds, bound))
+                            bounds = numpy.concatenate((bounds,bound))
                     else:
-                        bounds=None
-
-
+                        bounds = None
+                        
+            
             else:
-
-                s1, s2=splitSlice(slice(i, j, k), size)
-                if(_debug): print "SSSS0: original ", s1, s2
-
-                part1=self[s1]
-                part2=self[s2] + modulo
-                if(_debug): print "SSSSSSSSSSSSSSS modulo", self[0], self[-1], modulo
-                data=numpy.concatenate((part1, part2))
+                
+                s1, s2 = splitSlice(slice(i,j,k),size)
+                if(_debug): print "SSSS0: original ",s1,s2
+                
+                part1 = self[s1]
+                part2 = self[s2]+modulo
+                if(_debug): print "SSSSSSSSSSSSSSS modulo",self[0],self[-1],modulo
+                data = numpy.concatenate((part1,part2))
                 if fullBounds is not None:
-                    bounds1=fullBounds[s1]
-                    bounds2=fullBounds[s2] + modulo
-                    bounds=numpy.concatenate((bounds1, bounds2))
+                    bounds1 = fullBounds[s1]
+                    bounds2 = fullBounds[s2]+modulo
+                    bounds = numpy.concatenate((bounds1,bounds2))
                 else:
-                    bounds=None
-
+                    bounds = None
+            
 
         else:                           # no wraparound
-            data=self[i:j:k]
+            data = self[i:j:k]
             if fullBounds is not None:
-                bounds=fullBounds[i:j:k]
+                bounds = fullBounds[i:j:k]
             else:
                 bounds = None
         
@@ -1449,85 +1396,76 @@ class AbstractAxis(CdmsObj):
         if self.isTime(): newaxis.designateTime()
 
         for attname in self.attributes.keys():
-            if attname not in ["datatype", "length", "isvar", "name_in_file", "partition", "partition_length"]:
+            if attname not in ["datatype", "length","isvar","name_in_file","partition","partition_length"]:
                 setattr(newaxis, attname, getattr(self, attname))
-                newaxis.attributes[attname]=getattr(self, attname)
+                newaxis.attributes[attname]=getattr(self,attname)
 
         # Change circular topology to linear if a strict subset was copied
-        if hasattr(self, "topology") and self.topology == "circular" and len(newaxis) < len(self):
+        if hasattr(self,"topology") and self.topology=="circular" and len(newaxis)<len(self):
             newaxis.topology="linear"
 
         return newaxis
-
+    
 #----------------------------------------------------------------------
 # mf 2001 set calls to subAxis as subaxis
 #----------------------------------------------------------------------
 
-    subAxis=subaxis
+    subAxis = subaxis
 
     def typecode(self):
         raise CDMSError, MethodNotImplemented
 
-    # Check that a boundary array is valid, raise exception if not. bounds is
-    # an array of shape (n,2)
-    def validateBounds(self, bounds):
-        requiredShape=(len(self), 2)
-        requiredShape2=(len(self) + 1,)
-        if bounds.shape != requiredShape and bounds.shape != requiredShape2:
+    # Check that a boundary array is valid, raise exception if not. bounds is an array of shape (n,2)
+    def validateBounds(self,bounds):
+        requiredShape = (len(self),2)
+        requiredShape2 = (len(self)+1,)
+        if bounds.shape!=requiredShape and bounds.shape!=requiredShape2:
             raise CDMSError, InvalidBoundsArray + \
-                 'shape is %s, should be %s or %s' % (
-    `bounds.shape`,
-     `requiredShape`,
-     `requiredShape2`)
-        if bounds.shape == requiredShape2:  # case of "n+1" bounds
+                 'shape is %s, should be %s or %s'%(`bounds.shape`,`requiredShape`,`requiredShape2`)
+        if bounds.shape==requiredShape2: # case of "n+1" bounds
             bounds2=numpy.zeros(requiredShape)
-            bounds2[:, 0]=bounds[:-1]
-            bounds2[:, 1]=bounds[1::]
+            bounds2[:,0]=bounds[:-1]
+            bounds2[:,1]=bounds[1::]
             bounds=bounds2
-        mono=(bounds[0, 0] <= bounds[0, 1])
+        mono = (bounds[0,0]<=bounds[0,1])
         if mono:
             for i in range(bounds.shape[0]):
-                if not bounds[i, 0] <= self[i] <= bounds[i, 1]:
+                if not bounds[i,0]<=self[i]<=bounds[i,1]:
                     raise CDMSError, InvalidBoundsArray + \
-'bounds[%i]=%f is not in the range [%f,%f]' % (
-    i, self[i], bounds[i, 0], bounds[i, 1])
+'bounds[%i]=%f is not in the range [%f,%f]'%(i,self[i],bounds[i,0],bounds[i,1])
         else:
             for i in range(bounds.shape[0]):
-                if not bounds[i, 0] >= self[i] >= bounds[i, 1]:
+                if not bounds[i,0]>=self[i]>=bounds[i,1]:
                     raise CDMSError, InvalidBoundsArray + \
-'bounds[%i]=%f is not in the range [%f,%f]' % (
-    i, self[i], bounds[i, 1], bounds[i, 0])
+'bounds[%i]=%f is not in the range [%f,%f]'%(i,self[i],bounds[i,1],bounds[i,0])
         return bounds
 
-    # Generate bounds from midpoints. width is the width of the zone if the
-    # axis has one value.
+    # Generate bounds from midpoints. width is the width of the zone if the axis has one value.
     def genGenericBounds(self, width=1.0):
         if self._data_ is None:
-            self._data_=self.getData()
-        ar=self._data_
-        if len(self) > 1:
-            leftPoint=numpy.array([1.5 * ar[0] - 0.5 * ar[1]])
-            midArray=(ar[0:-1] + ar[1:]) / 2.0
-            rightPoint=numpy.array([1.5 * ar[-1] - 0.5 * ar[-2]])
-            bnds=numpy.concatenate((leftPoint, midArray, rightPoint))
+            self._data_ = self.getData()
+        ar = self._data_
+        if len(self)>1:
+            leftPoint = numpy.array([1.5*ar[0]-0.5*ar[1]])
+            midArray = (ar[0:-1]+ar[1:])/2.0
+            rightPoint = numpy.array([1.5*ar[-1]-0.5*ar[-2]])
+            bnds = numpy.concatenate((leftPoint,midArray,rightPoint))
         else:
-            delta=width / 2.0
-            bnds=numpy.array([self[0] - delta, self[0] + delta])
+            delta = width/2.0
+            bnds = numpy.array([self[0]-delta,self[0]+delta])
 
         # Transform to (n,2) array
-        retbnds=numpy.zeros((len(ar), 2), numpy.float)
-        retbnds[:, 0]=bnds[:-1]
-        retbnds[:, 1]=bnds[1:]
+        retbnds = numpy.zeros((len(ar),2),numpy.float)
+        retbnds[:,0] = bnds[:-1]
+        retbnds[:,1] = bnds[1:]
 
         if self.isLatitude():
-            retbnds[0, :]=numpy.maximum(
-    -90.0, numpy.minimum(90.0, retbnds[0, :]))
-            retbnds[-1, :]=numpy.maximum(
-    -90.0, numpy.minimum(90.0, retbnds[-1, :]))
+            retbnds[0,:] = numpy.maximum(-90.0, numpy.minimum(90.0,retbnds[0,:]))
+            retbnds[-1,:] = numpy.maximum(-90.0, numpy.minimum(90.0,retbnds[-1,:]))
 
         return retbnds
 
-    def clone(self, copyData=1):
+    def clone (self, copyData=1):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
@@ -1536,8 +1474,8 @@ class AbstractAxis(CdmsObj):
         if copyData==1:
             mycopy = createAxis(copy.copy(self[:]))
         else:
-            mycopy=createAxis(self[:])
-        mycopy.id=self.id
+            mycopy = createAxis(self[:])
+        mycopy.id = self.id
         try:
             mycopy.setBounds(b, isGeneric=isGeneric[0])
         except CDMSError:
@@ -1547,104 +1485,102 @@ class AbstractAxis(CdmsObj):
            setattr(mycopy, k, v)
         return mycopy
 
-    def listall(self, all=None):
+    def listall (self, all=None):
         "Get list of info about this axis."
-        aname=self.id
-        result=[]
+        aname = self.id
+        result = []
         result.append('   id: ' + aname)
         if self.isLatitude(): result.append('   Designated a latitude axis.')
         if self.isLongitude(): result.append('   Designated a longitude axis.')
         if self.isTime(): result.append('   Designated a time axis.')
         if self.isLevel(): result.append('   Designated a level axis.')
         try:
-            units=self.units
+            units = self.units
             result.append('   units:  ' + units)
         except:
             pass
-        d=self.getValue()
+        d = self.getValue()
         result.append('   Length: ' + str(len(d)))
         result.append('   First:  ' + str(d[0]))
         result.append('   Last:   ' + str(d[-1]))
-        flag=1
+        flag = 1
         for k in self.attributes.keys():
             if k in std_axis_attributes: continue
             if flag:
                 result.append('   Other axis attributes:')
-                flag=0
-            result.append('      ' + k + ': ' + str(self.attributes[k]))
+                flag = 0
+            result.append('      '+k+': '+str(self.attributes[k]))
         result.append('   Python id:  %s' % hex(id(self)))
 
         if all:
             result.append("   Values:")
             result.append(str(d))
-            b=self.getBounds()
+            b = self.getBounds()
             result.append("   Bounds:")
             result.append(str(b))
         return result
 
     def info(self, flag=None, device=None):
         "Write info about axis; include dimension values and weights if flag"
-        if device is None: device=sys.stdout
+        if device is None: device = sys.stdout
         device.write(str(self))
 
     def isVirtual(self):
         "Return true iff coordinate values are implicitly defined."
         return 0
 
-    shape=property(_getshape, None)
-    dtype=_getdtype
+    shape = property(_getshape,None)
+    dtype = _getdtype
 
-# PropertiedClasses.set_property(AbstractAxis, 'shape',
-# AbstractAxis._getshape, nowrite=1, nodelete=1)
-# PropertiedClasses.set_property(AbstractAxis, 'dtype',
-# AbstractAxis._getdtype, nowrite=1, nodelete=1)
-# internattr.add_internal_attribute (AbstractAxis, 'id', 'parent')
+## PropertiedClasses.set_property(AbstractAxis, 'shape', 
+##                         AbstractAxis._getshape, nowrite=1, nodelete=1)
+## PropertiedClasses.set_property(AbstractAxis, 'dtype', 
+##                         AbstractAxis._getdtype, nowrite=1, nodelete=1)
+## internattr.add_internal_attribute (AbstractAxis, 'id', 'parent')
 
 # One-dimensional coordinate axis in a dataset
 class Axis(AbstractAxis):
-    def __init__(self, parent, axisNode=None):
+    def __init__(self,parent,axisNode=None):
         if axisNode is not None and axisNode.tag != 'axis':
                raise CDMSError, 'Creating axis, node is not an axis node.'
         AbstractAxis.__init__(self, parent, axisNode)
         if axisNode is not None:
             if axisNode.partition is not None:
-                flatpart=axisNode.partition
-                self.__dict__['partition']=numpy.reshape(
-    flatpart, (len(flatpart) / 2, 2))
+                flatpart = axisNode.partition
+                self.__dict__['partition']=numpy.reshape(flatpart,(len(flatpart)/2,2))
                 self.attributes['partition']=self.partition
-        self.id=axisNode.id
-
+        self.id = axisNode.id
+    
     def typecode(self):
         return cdmsNode.CdToNumericType.get(self._node_.datatype)
 
     # Handle slices of the form x[i], x[i:j:k], x[(slice(i,j,k),)], and x[...]
     def __getitem__(self, key):
-        node=self._node_
-        length=len(node)
+        node = self._node_
+        length = len(node)
 
         # Allow key of form (slice(i,j),) etc.
-        if isinstance(key, tuple) and len(key) == 1:
-            key=key[0]
+        if type(key) is types.TupleType and len(key)==1:
+            key = key[0]
 
-        if isinstance(key, (int, numpy.int, numpy.int32)):  # x[i]
-            if key >= length:
+        if isinstance(key, (types.IntType, numpy.int,numpy.int32)):  # x[i]
+            if key>=length:
                 raise IndexError, 'index out of bounds'
             else:
-                # Don't generate the entire array (if linear) just for one
-                # value
-                return node.data[key % length]
-        elif isinstance(key, slice):  # x[i:j:k]
+                # Don't generate the entire array (if linear) just for one value
+                return node.data[key%length]
+        elif type(key) is types.SliceType: # x[i:j:k]
             if self._data_ is None:
-                self._data_=node.getData()
+                self._data_ = node.getData()
             return self._data_[key.start:key.stop:key.step]
-        elif isinstance(key, Ellipsis.__class__):  # x[...]
+        elif type(key) is types.EllipsisType: # x[...]
             if self._data_ is None:
-                self._data_=node.getData()
+                self._data_ = node.getData()
             return self._data_
-        elif isinstance(key, tuple):
-            raise IndexError, 'axis is one-dimensional'
+        elif type(key) is types.TupleType:
+            raise IndexError,'axis is one-dimensional'
         else:
-            raise IndexError, 'index must be an integer: %s' % `key`
+            raise IndexError,'index must be an integer: %s'%`key`
 
     # Get axis data
     def getData(self):
@@ -1653,7 +1589,7 @@ class Axis(AbstractAxis):
     # Handle slices of the form x[i:j]
     def __getslice__(self, low, high):
         if self._data_ is None:
-            self._data_=self.getData()
+            self._data_ = self.getData()
         return self._data_[low:high]
 
     def __len__(self):
@@ -1661,7 +1597,7 @@ class Axis(AbstractAxis):
 
     # Return true iff the axis representation is linear
     def isLinear(self):
-        return self._node_.dataRepresent == cdmsNode.CdLinear
+        return self._node_.dataRepresent==cdmsNode.CdLinear
 
     # Return the bounds array, or generate a default if autoBounds mode is on
     def getBounds(self, isGeneric=None):
@@ -1686,26 +1622,26 @@ class Axis(AbstractAxis):
 
     # Return the bounds array, or None
     def getExplicitBounds(self):
-        boundsArray=None
-        if hasattr(self, 'bounds'):
-            boundsName=self.bounds
+        boundsArray = None
+        if hasattr(self,'bounds'):
+            boundsName = self.bounds
             try:
-                boundsVar=self.parent.variables[boundsName]
-                boundsArray=numpy.ma.filled(boundsVar.getSlice())
+                boundsVar = self.parent.variables[boundsName]
+                boundsArray = numpy.ma.filled(boundsVar.getSlice())
             except KeyError:
-                boundsArray=None
+                boundsArray = None
 
         return boundsArray
 
     def getCalendar(self):
-        if hasattr(self, 'calendar'):
-            calendar=self.calendar.lower()
+        if hasattr(self,'calendar'):
+            calendar = string.lower(self.calendar)
         elif self.parent is not None and hasattr(self.parent, 'calendar'):
-            calendar=self.parent.calendar.lower()
+            calendar = string.lower(self.parent.calendar)
         else:
-            calendar=None
+            calendar = None
 
-        cdcal=tagToCalendar.get(calendar, cdtime.DefaultCalendar)
+        cdcal = tagToCalendar.get(calendar, cdtime.DefaultCalendar)
         return cdcal
 
 # In-memory coordinate axis
@@ -1717,38 +1653,38 @@ class TransientAxis(AbstractAxis):
         '''
         AbstractAxis.__init__(self, None, None)
         if id is None:
-            TransientAxis.axis_count=TransientAxis.axis_count + 1
-            id='axis_' + str(TransientAxis.axis_count)
+            TransientAxis.axis_count = TransientAxis.axis_count + 1
+            id = 'axis_' + str(TransientAxis.axis_count)
         if attributes is None:
-            if hasattr(data, 'attributes'): attributes=data.attributes
+            if hasattr(data, 'attributes'): attributes = data.attributes
         if attributes is not None:
             for name, value in attributes.items():
                 if name not in ['missing_value', 'name']:
                     setattr(self, name, value)
-        self.id=id
+        self.id = id
         if isinstance(data, AbstractAxis):
             if copy == 0:
-                self._data_=data[:]
+                self._data_ = data[:]
             else:
-                self._data_=numpy.array(data[:])
+                self._data_ = numpy.array(data[:])
         elif isinstance(data, numpy.ndarray):
             if copy == 0:
-                self._data_=data
+                self._data_ = data
             else:
-                self._data_=numpy.array(data)
+                self._data_ = numpy.array(data)
         elif isinstance(data, numpy.ma.MaskedArray):
             if numpy.ma.getmask(data) is not numpy.ma.nomask:
                 raise CDMSError, \
                       'Cannot construct an axis with a missing value.'
-            data=data.data
+            data = data.data
             if copy == 0:
-                self._data_=data
+                self._data_ = data
             else:
-                self._data_=numpy.array(data)
+                self._data_ = numpy.array(data)
         elif data is None:
-            self._data_=None
+            self._data_ = None
         else:
-            self._data_=numpy.array(data)
+            self._data_ = numpy.array(data)
 
         self._doubledata_ = None
         self._genericBounds_ = genericBounds
@@ -1761,10 +1697,10 @@ class TransientAxis(AbstractAxis):
         return self._data_[low:high]
 
     def __setitem__(self, index, value):
-        self._data_[index]=numpy.ma.filled(value)
+        self._data_[index] = numpy.ma.filled(value)
 
     def __setslice__(self, low, high, value):
-        self._data_[low:high]=numpy.ma.filled(value)
+        self._data_[low:high] = numpy.ma.filled(value)
 
     def __len__(self):
         return len(self._data_)
@@ -1799,22 +1735,19 @@ class TransientAxis(AbstractAxis):
     def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None, isGeneric=False):
         if bounds is not None:
             if isinstance(bounds, numpy.ma.MaskedArray):
-                bounds=numpy.ma.filled(bounds)
+                bounds = numpy.ma.filled(bounds)
             if validate:
-                bounds=self.validateBounds(bounds)
+                bounds = self.validateBounds(bounds)
             else:                       # Just do the absolute minimum validation
-                requiredShape=(len(self), 2)
-                requiredShape2=(len(self) + 1,)
-                if bounds.shape != requiredShape and bounds.shape != requiredShape2:
+                requiredShape = (len(self),2)
+                requiredShape2 = (len(self)+1,)
+                if bounds.shape!=requiredShape and bounds.shape!=requiredShape2:
                     raise CDMSError, InvalidBoundsArray + \
-                          'shape is %s, should be %s or %s' % (
-    `bounds.shape`,
-     `requiredShape`,
-     `requiredShape2`)
-                if bounds.shape == requiredShape2:  # case of "n+1" bounds
+                          'shape is %s, should be %s or %s'%(`bounds.shape`,`requiredShape`,`requiredShape2`)
+                if bounds.shape==requiredShape2: # case of "n+1" bounds
                     bounds2=numpy.zeros(requiredShape)
-                    bounds2[:, 0]=bounds[:-1]
-                    bounds2[:, 1]=bounds[1::]
+                    bounds2[:,0]=bounds[:-1]
+                    bounds2[:,1]=bounds[1::]
                     bounds=bounds2
             self._bounds_ = copy.copy(bounds)
             self._genericBounds_ = isGeneric
@@ -1823,7 +1756,7 @@ class TransientAxis(AbstractAxis):
                 self._bounds_ = self.genGenericBounds()
                 self._genericBounds_ = True
             else:
-                self._bounds_=None
+                self._bounds_ = None
 
     def isLinear(self):
         return 0
@@ -1838,17 +1771,17 @@ class TransientVirtualAxis(TransientAxis):
 
     def __init__(self, axisname, axislen):
         TransientAxis.__init__(self, None, id=axisname)
-        self._virtualLength=axislen  # length of the axis
+        self._virtualLength = axislen # length of the axis
 
     def __len__(self):
         return self._virtualLength
 
-    def __str__(self):
-        return "<TransientVirtualAxis %s(%d)>" % (self.id, self._virtualLength)
+    def __str__ (self):
+        return "<TransientVirtualAxis %s(%d)>"%(self.id, self._virtualLength)
 
-    __repr__=__str__
+    __repr__ = __str__
 
-    def clone(self, copyData=1):
+    def clone (self, copyData=1):
         """clone (self, copyData=1)
         Return a copy of self as a transient virtual axis.
         If copyData is 1, make a separate copy of the data."""
@@ -1866,7 +1799,7 @@ class TransientVirtualAxis(TransientAxis):
 
     def setBounds(self, bounds, isGeneric=False):
         "No boundaries on virtual axes"
-        self._bounds_=None
+        self._bounds_ = None
 
     def __getitem__(self, key):
         return self.getData()[key]
@@ -1874,84 +1807,79 @@ class TransientVirtualAxis(TransientAxis):
     def __getslice__(self, low, high):
         return self.getData()[low:high]
 
-# PropertiedClasses.initialize_property_class (TransientVirtualAxis)
+## PropertiedClasses.initialize_property_class (TransientVirtualAxis)
 
 # One-dimensional coordinate axis in a CdmsFile.
 class FileAxis(AbstractAxis):
-
+    
     def __init__(self, parent, axisname, obj=None):
-        AbstractAxis.__init__(self, parent, None)
-        val=self.__cdms_internals__ + ['name_in_file', ]
-        self.___cdms_internals__=val
-        self.id=axisname
-        self._obj_=obj
+        AbstractAxis.__init__ (self, parent, None)
+        val = self.__cdms_internals__ +['name_in_file',]
+        self.___cdms_internals__ = val
+        self.id = axisname
+        self._obj_ = obj
         # Overshadows file boundary data, if not None
-        self._boundsArray_=None
-        (units, typecode, name_in_file, parent_varname,
-         dimtype, ncid)=parent._file_.dimensioninfo[axisname]
-        self.__dict__['_units']=units
-        att=self.attributes
+        self._boundsArray_ = None
+        (units,typecode,name_in_file,parent_varname,dimtype,ncid) = \
+                   parent._file_.dimensioninfo[axisname]
+        self.__dict__['_units'] = units
+        att = self.attributes
         att['units']=units
-        self.attributes=att
-        self.name_in_file=self.id
+        self.attributes = att
+        self.name_in_file = self.id
         if name_in_file:
-            self.name_in_file=name_in_file
+            self.name_in_file = name_in_file
         # Combine the attributes of the variable object, if any
         if obj is not None:
             for attname in self._obj_.__dict__.keys():
-                attval=getattr(self._obj_, attname)
-                if not callable(attval):
-                    self.__dict__[attname]=attval
-                    att=self.attributes
+                attval = getattr(self._obj_,attname)
+                if type(attval)!=types.BuiltinFunctionType:
+                    self.__dict__[attname]  = attval
+                    att = self.attributes
                     att[attname]=attval
-                    self.attributes=att
-
+                    self.attributes= att
+        
     def getData(self):
-        if cdmsobj._debug == 1:
-            print 'Getting array for axis', self.id
+        if cdmsobj._debug==1:
+            print 'Getting array for axis',self.id
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
         try:
-            result=self.parent._file_.readDimension(self.id)
+            result = self.parent._file_.readDimension(self.id)
         except:
             try:
-                result=apply(self._obj_.getitem, (slice(None, None),))
+                result = apply(self._obj_.getitem, (slice(None,None),))
             except:
-                raise CDMSError, 'Data for dimension %s not found' % self.id
+                raise CDMSError,'Data for dimension %s not found'%self.id
         return result
 
     def typecode(self):
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
-        (units, typecode, name_in_file, parent_varname,
-         dimtype, ncid)=self.parent._file_.dimensioninfo[self.id]
+        (units,typecode,name_in_file,parent_varname,dimtype,ncid) = \
+                             self.parent._file_.dimensioninfo[self.id]
         return typecode
-
+    
     def _setunits(self, value):
-        self._units=value
+        self._units = value
         self.attributes['units']=value
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
         setattr(self._obj_, 'units', value)
-        (units, typecode, name_in_file, parent_varname,
-         dimtype, ncid)=self.parent._file_.dimensioninfo[self.id]
-        self.parent._file_.dimensioninfo[self.id]=(
-    value,
-     typecode,
-     name_in_file,
-     parent_varname,
-     dimtype,
-     ncid)
+        (units,typecode,name_in_file,parent_varname,dimtype,ncid) = \
+            self.parent._file_.dimensioninfo[self.id]
+        self.parent._file_.dimensioninfo[self.id] = \
+                  (value,typecode,name_in_file,parent_varname,dimtype,ncid)
     def _getunits(self):
         return self._units
 
     def _delunits(self):
         del(self._units)
         del(self.attributes['units'])
-        delattr(self._obj_, 'units')
+        delattr(self._obj_,'units')
 
 
-    def __getattr__(self, name):
+    def __getattr__(self,name):
         if name == 'units':
             return self._units
         try:
@@ -1959,27 +1887,27 @@ class FileAxis(AbstractAxis):
         except:
             raise AttributeError
     # setattr writes external attributes to the file
-    def __setattr__(self, name, value):
+    def __setattr__ (self, name, value):
         if name == 'units':
             self._setunits(value)
             return
         if hasattr(self, 'parent') and self.parent is None:
             raise CDMSError, FileWasClosed + self.id
-# s = self.get_property_s (name)
-# if s is not None:
-# s(self, name, value)
-# return
-        if not name in self.__cdms_internals__ and name[0] != '_':
+##         s = self.get_property_s (name)
+##         if s is not None:
+##             s(self, name, value)
+##             return
+        if not name in self.__cdms_internals__ and name[0]!='_':
             setattr(self._obj_, name, value)
             self.attributes[name]=value
-        self.__dict__[name]=value
+        self.__dict__[name]  = value
 
     # delattr deletes external global attributes in the file
     def __delattr__(self, name):
-# d = self.get_property_d(name)
-# if d is not None:
-# d(self, name)
-# return
+##         d = self.get_property_d(name)
+##         if d is not None:
+##             d(self, name)
+##             return
         if name == "units":
             self._delunits()
             return
@@ -1994,84 +1922,83 @@ class FileAxis(AbstractAxis):
 
     # Read data
     # If the axis has a related Cdunif variable object, just read that variable
-    # otherwise, cache the Cdunif (read-only) data values in self._data_. in this case,
-    # the axis is not extensible, so it is not necessary to reread it each
-    # time.
+    # otherwise, cache the Cdunif (read-only) data values in self._data_. in this case, 
+    # the axis is not extensible, so it is not necessary to reread it each time.
     def __getitem__(self, key):
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
         # See __getslice__ comment below.
-        if (self._obj_ is not None) and (self.parent._mode_ != 'r') and not (hasattr(self.parent, 'format') and self.parent.format == "DRS"):
+        if (self._obj_ is not None) and (self.parent._mode_!='r') and not (hasattr(self.parent,'format') and self.parent.format=="DRS"):
             # For negative strides, get the equivalent slice with positive stride,
             # then reverse the result.
-            if (isinstance(key, slice)) and (key.step is not None) and key.step < 0:
-                posslice=reverseSlice(key, len(self))
-                result=apply(self._obj_.getitem, (posslice,))
+            if (type(key) is types.SliceType) and (key.step is not None) and key.step<0:
+                posslice = reverseSlice(key,len(self))
+                result = apply(self._obj_.getitem, (posslice,))
                 return result[::-1]
             else:
-                if isinstance(key, int) and key >= len(self):
-                    raise IndexError, 'Index out of bounds: %d' % key
-                if not isinstance(key, tuple):
-                    key=(key,)
+                if isinstance(key, types.IntType) and key>=len(self):
+                    raise IndexError, 'Index out of bounds: %d'%key
+                if type(key) is not types.TupleType:
+                    key = (key,)
                 return apply(self._obj_.getitem, key)
         if self._data_ is None:
-            self._data_=self.getData()
-        length=len(self._data_)
-        if isinstance(key, int):  # x[i]
-            if key >= length:
+            self._data_ = self.getData()
+        length = len(self._data_)
+        if isinstance(key, types.IntType):  # x[i]
+            if key>=length:
                 raise IndexError, 'index out of bounds'
             else:
-                return self._data_[key % length]
-        elif isinstance(key, slice):  # x[i:j:k]
+                return self._data_[key%length]
+        elif type(key) is types.SliceType: # x[i:j:k]
             return self._data_[key.start:key.stop:key.step]
-        elif isinstance(key, Ellipsis.__class__):  # x[...]
+        elif type(key) is types.EllipsisType: # x[...]
             return self._data_
-        elif isinstance(key, tuple):
-            raise IndexError, 'axis is one-dimensional'
+        elif type(key) is types.TupleType:
+            raise IndexError,'axis is one-dimensional'
         else:
-            raise IndexError, 'index must be an integer or slice: %s' % `key`
+            raise IndexError,'index must be an integer or slice: %s'%`key`
 
     def __getslice__(self, low, high):
         # Hack to prevent netCDF overflow error on 64-bit architectures
-        high=min(Max32int, high)
-
+        high = min(Max32int, high)
+        
         # Hack to fix a DRS bug: force use of readDimension for DRS axes.
         # Since DRS is read-only here, it is OK just to cache all dimensions.
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
-        if (self._obj_ is not None) and (self.parent._mode_ != 'r') and not (hasattr(self.parent, 'format') and self.parent.format == "DRS"):
-            return apply(self._obj_.getslice, (low, high))
+        if (self._obj_ is not None) and (self.parent._mode_!='r') and not (hasattr(self.parent,'format') and self.parent.format=="DRS"):
+            return apply(self._obj_.getslice,(low,high))
         else:
             if self._data_ is None:
-                self._data_=self.getData()
+                self._data_ = self.getData()
             return self._data_[low:high]
 
     def __setitem__(self, index, value):
         if self._obj_ is None:
             raise CDMSError, ReadOnlyAxis + self.id
         if self.parent is None:
-            raise CDMSError, FileWasClosed + self.id
-        return apply(self._obj_.setitem, (index, numpy.ma.filled(value)))
+            raise CDMSError, FileWasClosed+self.id
+        return apply(self._obj_.setitem,(index,numpy.ma.filled(value)))
 
     def __setslice__(self, low, high, value):
         # Hack to prevent netCDF overflow error on 64-bit architectures
-        high=min(Max32int, high)
+        high = min(Max32int, high)
         if self._obj_ is None:
             raise CDMSError, ReadOnlyAxis + self.id
         if self.parent is None:
-            raise CDMSError, FileWasClosed + self.id
-        return apply(self._obj_.setslice, (low, high, numpy.ma.filled(value)))
+            raise CDMSError, FileWasClosed+self.id
+        return apply(self._obj_.setslice,(low,high,numpy.ma.filled(value)))
 
     def __len__(self):
         if self.parent is None:
             raise CDMSError, FileWasClosed + self.id
         if self._obj_ is not None:
-            length=len(self._obj_)
+            length = len(self._obj_)
         elif self._data_ is None:
-            self._data_=self.getData()
-            length=len(self._data_)
+            self._data_ = self.getData()
+            length = len(self._data_)
         else:
-            length=len(self._data_)
+            length = len(self._data_)
         return length
 
     def isLinear(self):
@@ -2098,18 +2025,18 @@ class FileAxis(AbstractAxis):
     # Return the bounds array, or None
     def getExplicitBounds(self):
         if self._boundsArray_ is None:
-            boundsArray=None
-            if hasattr(self, 'bounds'):
-                boundsName=self.bounds
+            boundsArray = None
+            if hasattr(self,'bounds'):
+                boundsName = self.bounds
                 try:
-                    boundsVar=self.parent[boundsName]
-                    boundsArray=numpy.ma.filled(boundsVar)
-                    self._boundsArray_=boundsArray  # for climatology performance
-                except KeyError, err:
+                    boundsVar = self.parent[boundsName]
+                    boundsArray = numpy.ma.filled(boundsVar)
+                    self._boundsArray_ = boundsArray  # for climatology performance
+                except KeyError,err:
                     print err
-                    boundsArray=None
+                    boundsArray = None
         else:
-            boundsArray=self._boundsArray_
+            boundsArray = self._boundsArray_
 
         return boundsArray
 
@@ -2125,58 +2052,54 @@ class FileAxis(AbstractAxis):
         if persistent:
             if index is None:
                 if validate:
-                    bounds=self.validateBounds(bounds)
-                index=0
+                    bounds = self.validateBounds(bounds)
+                index = 0
 
             # Create the bound axis, if necessary
-            file=self.parent
+            file = self.parent
             if file._boundAxis_ is None:
 
                 # First look for 'bound' of length two
-                if file.axes.has_key("bound") and len(file.axes["bound"]) == 2:
-                    file._boundAxis_=file.axes["bound"]
+                if file.axes.has_key("bound") and len(file.axes["bound"])==2:
+                    file._boundAxis_ = file.axes["bound"]
                 else:
-                    file._boundAxis_=file.createVirtualAxis("bound", 2)
+                    file._boundAxis_ = file.createVirtualAxis("bound",2)
 
             # Create the boundary variable if necessary
-            if hasattr(self, 'bounds'):
-                boundName=self.bounds
-                boundVar=file.variables[boundName]
+            if hasattr(self,'bounds'):
+                boundName = self.bounds
+                boundVar = file.variables[boundName]
             else:
                 if boundsid is None:
-                    boundName="bounds_" + self.id
+                    boundName = "bounds_"+self.id
                 else:
-                    boundName=boundsid
-                boundVar=file.createVariable(
-    boundName,
-     cdmsNode.NumericToCdType.get(bounds.dtype.char),
-     (self,
-     file._boundAxis_))
+                    boundName = boundsid
+                boundVar = file.createVariable(boundName, cdmsNode.NumericToCdType.get(bounds.dtype.char), (self,file._boundAxis_))
                 # And link to self
-                self.bounds=boundName
-                self._boundsArray_=None
+                self.bounds = boundName
+                self._boundsArray_ = None
 
-            boundVar[index:index + len(bounds)]=bounds
+            boundVar[index:index+len(bounds)] = bounds
 
         else:
-            self._boundsArray_=copy.copy(bounds)
+            self._boundsArray_ = copy.copy(bounds)
 
     def getCalendar(self):
-        if hasattr(self, 'calendar'):
-            calendar=self.calendar.lower()
+        if hasattr(self,'calendar'):
+            calendar = string.lower(self.calendar)
         elif self.parent is not None and hasattr(self.parent, 'calendar'):
-            calendar=self.parent.calendar.lower()
+            calendar = string.lower(self.parent.calendar)
         else:
-            calendar=None
+            calendar = None
 
-        cdcal=tagToCalendar.get(calendar, cdtime.DefaultCalendar)
+        cdcal = tagToCalendar.get(calendar, cdtime.DefaultCalendar)
         return cdcal
 
     def isVirtual(self):
         "Return true iff coordinate values are implicitly defined."
 
         # No virtual axes in GrADS files
-        if self.parent is not None and hasattr(self.parent, 'format') and self.parent.format == 'GRADS':
+        if self.parent is not None and hasattr(self.parent, 'format') and self.parent.format=='GRADS':
             return 0
         return (self._obj_ is None)
 
@@ -2186,11 +2109,11 @@ class FileAxis(AbstractAxis):
             return (self.parent._file_.dimensions[self.id] is None)
         else:
             return False
-# PropertiedClasses.set_property (FileAxis, 'units',
-# acts=FileAxis._setunits,
-# nodelete=1
-# )
-# internattr.add_internal_attribute(FileAxis, 'name_in_file')
+## PropertiedClasses.set_property (FileAxis, 'units', 
+##                                 acts=FileAxis._setunits,
+##                                 nodelete=1
+##                                )
+## internattr.add_internal_attribute(FileAxis, 'name_in_file')
 
 class FileVirtualAxis(FileAxis):
     """An axis with no explicit representation of data values in the file.
@@ -2202,8 +2125,8 @@ class FileVirtualAxis(FileAxis):
 
     def __init__(self, parent, axisname, axislen):
         FileAxis.__init__(self, parent, axisname)
-        self._virtualLength=axislen  # length of the axis
-
+        self._virtualLength = axislen # length of the axis
+        
     def __len__(self):
         return self._virtualLength
 
@@ -2214,100 +2137,100 @@ class FileVirtualAxis(FileAxis):
         "Return true iff coordinate values are implicitly defined."
         return 1
 
-# PropertiedClasses.initialize_property_class (FileVirtualAxis)
+## PropertiedClasses.initialize_property_class (FileVirtualAxis)
 
-# Functions for selecting axes
-def axisMatchAxis(axes, specifications=None, omit=None, order=None):
-    """Given a list of axes and a specification or list of
+######## Functions for selecting axes
+def axisMatchAxis (axes, specifications=None, omit=None, order=None):
+    """Given a list of axes and a specification or list of 
      specificatons, and a specification or list of specifications
-     of those axes to omit, return a list of
-     those axes in the list that match the specification but
-     do not include in the list any axes that matches an omit
+     of those axes to omit, return a list of 
+     those axes in the list that match the specification but 
+     do not include in the list any axes that matches an omit 
      specification.
 
      If specifications is None, include all axes less the omitted ones.
 
-     Individual specifications must be integer indices into axes or
+     Individual specifications must be integer indices into axes or 
      matching criteria as detailed in axisMatches.
 
      Axes are returned in the order they occur in the axes argument unless
-     order is given.
+     order is given. 
 
-     order can be a string containing the symbols t,x,y,z, or -.
-     If a - is given, any elements of the result not chosen otherwise are
+     order can be a string containing the symbols t,x,y,z, or -. 
+     If a - is given, any elements of the result not chosen otherwise are 
      filled in from left to right with remaining candidates.
     """
     return [axes[i] for i in \
             axisMatchIndex(axes, specifications, omit, order)]
 
-def axisMatchIndex(axes, specifications=None, omit=None, order=None):
-    """Given a list of axes and a specification or list of
+def axisMatchIndex (axes, specifications=None, omit=None, order=None):
+    """Given a list of axes and a specification or list of 
      specificatons, and a specification or list of specifications
-     of those axes to omit, return a list of the indices of
-     those axes in the list that match the specification but
-     do not include in the list any axes that matches an omit
+     of those axes to omit, return a list of the indices of 
+     those axes in the list that match the specification but 
+     do not include in the list any axes that matches an omit 
      specification.
 
      If specifications is None, include all axes less the omitted ones.
 
-     Individual specifications must be integer indices into axes or
+     Individual specifications must be integer indices into axes or 
      matching criteria as detailed in axisMatches.
 
      The indices of axes are returned in the order the axes
      occur in the axes argument, unless order is given.
 
-     order can be a string containing the symbols t,x,y,z, or -.
-     If a - is given, any elements of the result not chosen otherwise are
+     order can be a string containing the symbols t,x,y,z, or -. 
+     If a - is given, any elements of the result not chosen otherwise are 
      filled in from left to right with remaining candidates.
     """
     if specifications is None:
-        speclist=axes
-    elif isinstance(specifications, basestring):
-        speclist=[specifications]
-    elif isinstance(specifications, list):
-        speclist=specifications
-    elif isinstance(specifications, tuple):
+        speclist = axes
+    elif isinstance(specifications, types.StringType):
+        speclist = [specifications]
+    elif isinstance(specifications, types.ListType):
+        speclist = specifications
+    elif isinstance(specifications, types.TupleType):
         speclist=list(specifications)
-    elif isinstance(specifications, int):
-        speclist=[specifications]
-    elif callable(specifications):
-        speclist=[specifications]
-    else:  # to allow arange, etc.
-        speclist=list(numpy.ma.filled(specifications))
-
-    candidates=[]
+    elif isinstance(specifications, types.IntType):
+        speclist = [specifications]
+    elif isinstance(specifications, types.FunctionType):
+        speclist = [specifications]
+    else: # to allow arange, etc.
+        speclist = list(numpy.ma.filled(specifications))
+
+    candidates = []
     for i in range(len(axes)):
         for s in speclist:
-            if isinstance(s, int):
-                r=(s == i)
+            if isinstance(s, types.IntType):
+                r = (s == i)
             else:
-                r=axisMatches(axes[i], s)
+                r = axisMatches(axes[i], s)
             if r:
                 candidates.append(i)
                 break
 
     if not candidates:
-        return candidates  # list empty
+        return candidates   #list empty
 
     if omit is None:
-        omitlist=[]
-    elif isinstance(omit, basestring):
-        omitlist=[omit]
-    elif isinstance(omit, list):
-        omitlist=omit
-    elif isinstance(omit, tuple):
+        omitlist = []
+    elif isinstance(omit, types.StringType):
+        omitlist = [omit]
+    elif isinstance(omit, types.ListType):
+        omitlist = omit
+    elif isinstance(omit, types.TupleType):
         omitlist=list(omit)
-    elif isinstance(omit, int):
-        omitlist=[omit]
-    elif callable(omit):
-        omitlist=[omit]
+    elif isinstance(omit, types.IntType):
+        omitlist = [omit]
+    elif isinstance(omit, types.FunctionType):
+        omitlist = [omit]
     elif isinstance(omit, AbstractAxis):
-        omitlist=[omit]
+        omitlist = [omit]
     else:
         raise CDMSError, 'Unknown type of omit specifier.'
 
     for s in omitlist:
-        if isinstance(s, int):
+        if isinstance(s, types.IntType):
             for i in range(len(candidates)):
                 if axes[candidates[i]] is axes[s]:
                     del candidates[i]
@@ -2315,11 +2238,11 @@ def axisMatchIndex(axes, specifications=None, omit=None, order=None):
         elif isinstance(s, AbstractAxis):
             for i in range(len(candidates)):
                 if s is axes[candidates[i]]:
-                    del candidates[i]
+                    del candidates[i] 
                     break
         else:
             for i in range(len(candidates)):
-                r=axisMatches(axes[candidates[i]], s)
+                r = axisMatches(axes[candidates[i]], s)
                 if r:
                     del candidates[i]
                     break
@@ -2327,31 +2250,31 @@ def axisMatchIndex(axes, specifications=None, omit=None, order=None):
     if order is None:
         return candidates
 
-    n=len(candidates)
-    m=len(order)
-    result=[None] * n
+    n = len(candidates)
+    m = len(order)
+    result = [None]*n
 # this loop is done this way for future escapes where elements of order
 # are not single chars.
-    j=0
-    io=0
+    j = 0
+    io = 0
     while j < n:
         if j >= m or order[io] == '-':
-            result[j]=candidates[0]
+            result[j] = candidates[0]
             del candidates[0]
             j += 1
             io += 1
             continue
         elif order[j] == 't':
-            oj='time'
+            oj = 'time'
             io += 1
         elif order[j] == 'x':
-            oj='longitude'
+            oj = 'longitude'
             io += 1
         elif order[j] == 'y':
-            oj='latitude'
+            oj = 'latitude'
             io += 1
         elif order[j] == 'z':
-            oj='level'
+            oj = 'level'
             io += 1
         else:
             # later could put in escaped ids or indices
@@ -2359,21 +2282,21 @@ def axisMatchIndex(axes, specifications=None, omit=None, order=None):
 
         for i in range(n):
             if axisMatches(axes[candidates[i]], oj):
-                result[j]=candidates[i]
+                result[j] = candidates[i]
                 del candidates[i]
                 break
         else:
             raise CDMSError, "Axis requested in order specification not there"
-        j += 1
+        j += 1    
     return result
-
+            
 
 def axisMatches(axis, specification):
     """Return 1 or 0 depending on whether axis matches the specification.
        Specification must be one of:
-       1. a string representing an axis id or one of
-          the keywords time, fctau0, latitude or lat, longitude or lon, or
-          lev or level.
+       1. a string representing an axis id or one of 
+          the keywords time, fctau0, latitude or lat, longitude or lon, or 
+          lev or level. 
 
           axis may be surrounded with parentheses or spaces.
 
@@ -2385,20 +2308,20 @@ def axisMatches(axis, specification):
           if the value returned is true, the axis matches.
 
        3. an axis object; will match if it is the same object as axis.
-    """
+    """   
     if isinstance(specification, basestring):
-        s=specification.lower()
-        s=s.strip()
+        s = string.lower(specification)
+        s = s.strip()
         while s[0] == '(':
             if s[-1] != ')':
                 raise CDMSError, 'Malformed axis spec, ' + specification
-            s=s[1:-1].strip()
-        if axis.id.lower() == s:
+            s = s[1:-1].strip()
+        if string.lower(axis.id) == s:
             return 1
         elif (s == 'time') or (s in time_aliases):
-            return axis.isTime()
+            return axis.isTime() 
         elif (s == 'fctau0') or (s in forecast_aliases):
-            return axis.isForecast()
+            return axis.isForecast() 
         elif (s[0:3] == 'lat') or (s in latitude_aliases):
             return axis.isLatitude()
         elif (s[0:3] == 'lon') or (s in longitude_aliases):
@@ -2408,11 +2331,11 @@ def axisMatches(axis, specification):
         else:
             return 0
 
-    elif callable(specification):
-        r=specification(axis)
-        if r:
+    elif isinstance(specification, types.FunctionType):
+        r = specification(axis)
+        if r: 
             return 1
-        else:
+        else: 
             return 0
 
     elif isinstance(specification, AbstractAxis):
@@ -2420,26 +2343,26 @@ def axisMatches(axis, specification):
 
     raise CDMSError, "Specification not acceptable: "\
           + str(type(specification)) + ', ' + str(specification)
-
+    
 def concatenate(axes, id=None, attributes=None):
     """Concatenate the axes, return a transient axis."""
-
-    data=numpy.ma.concatenate([ax[:] for ax in axes])
-    boundsArray=[ax.getBounds() for ax in axes]
+    
+    data = numpy.ma.concatenate([ax[:] for ax in axes])
+    boundsArray = [ax.getBounds() for ax in axes]
     if None in boundsArray:
-        bounds=None
+        bounds = None
     else:
-        bounds=numpy.ma.concatenate(boundsArray)
+        bounds = numpy.ma.concatenate(boundsArray)
     return TransientAxis(data, bounds=bounds, id=id, attributes=attributes)
 
 def take(ax, indices):
     """Take values indicated by indices list, return a transient axis."""
 
     # Bug in ma compatibility module
-    data=numpy.ma.take(ax[:], indices)
-    abounds=ax.getBounds()
+    data = numpy.ma.take(ax[:], indices)
+    abounds = ax.getBounds()
     if abounds is not None:
-        bounds=numpy.ma.take(abounds, indices, axis=0)
+        bounds = numpy.ma.take(abounds, indices, axis=0)
     else:
-        bounds=None
+        bounds = None
     return TransientAxis(data, bounds=bounds, id=ax.id, attributes=ax.attributes)
diff --git a/Packages/cdms2/Lib/bindex.py b/Packages/cdms2/Lib/bindex.py
index 913d6fdbd..9156c9709 100644
--- a/Packages/cdms2/Lib/bindex.py
+++ b/Packages/cdms2/Lib/bindex.py
@@ -1,11 +1,9 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 """Bin index for non-rectilinear grids"""
 
-import _bindex
-import numpy
-
+import _bindex, numpy 
 
 def bindexHorizontalGrid(latlin, lonlin):
     """Create a bin index for a horizontal grid.
@@ -14,16 +12,14 @@ def bindexHorizontalGrid(latlin, lonlin):
 
     Returns the index.
     """
-    lonlin = numpy.mod(lonlin, 360)
-    NI, NJ = _bindex.getLens()
-    head = numpy.zeros(NI * NJ, dtype='l')
-                       # This should match NBINI, NBINJ in bindex.c
-    next = numpy.zeros(len(latlin), dtype='l')
+    lonlin = numpy.mod(lonlin,360)
+    NI,NJ = _bindex.getLens()
+    head = numpy.zeros(NI*NJ,dtype='l')       # This should match NBINI, NBINJ in bindex.c
+    next = numpy.zeros(len(latlin),dtype='l')
     _bindex.bindex(latlin, lonlin, head, next)
-
+    
     return (head, next)
 
-
 def intersectHorizontalGrid(latspecs, lonspecs, latlin, lonlin, index):
     """Intersect a horizontal grid with a lat-lon region.
 
@@ -35,7 +31,7 @@ def intersectHorizontalGrid(latspecs, lonspecs, latlin, lonlin, index):
     Returns an array of indices, in latlin/lonlin, of the points in
     the intersection.
     """
-    points = numpy.zeros(len(latlin), dtype='l')
+    points = numpy.zeros(len(latlin),dtype='l')
     if latspecs is None:
         slat = -90.0
         elat = 90.0
@@ -45,14 +41,14 @@ def intersectHorizontalGrid(latspecs, lonspecs, latlin, lonlin, index):
         elat = latspecs[1]
         latopt = latspecs[2]
 
-    if slat > elat:
+    if slat>elat:
         tmp = slat
         slat = elat
         elat = tmp
 
     # If the longitude range is >=360.0, just intersect with the full range.
     # Otherwise, the points array could overflow and generate a seg fault.
-    if lonspecs is None or abs(lonspecs[1] - lonspecs[0]) >= 360.0:
+    if lonspecs is None or abs(lonspecs[1]-lonspecs[0])>=360.0:
         slon = 0.0
         elon = 360.0
         lonopt = 'co'
@@ -61,22 +57,11 @@ def intersectHorizontalGrid(latspecs, lonspecs, latlin, lonlin, index):
         elon = lonspecs[1]
         lonopt = lonspecs[2]
 
-    if slon > elon:
+    if slon>elon:
         tmp = slon
         slon = elon
         elon = tmp
 
-    npoints = _bindex.intersect(
-        slat,
-        slon,
-     elat,
-     elon,
-     latlin,
-     lonlin,
-     index[0],
-     index[1],
-     points,
-     latopt,
-     lonopt)
+    npoints = _bindex.intersect(slat, slon, elat, elon, latlin, lonlin, index[0], index[1], points, latopt, lonopt)
 
     return points[:npoints]
diff --git a/Packages/cdms2/Lib/cache.py b/Packages/cdms2/Lib/cache.py
index 12163fe94..c7456b503 100644
--- a/Packages/cdms2/Lib/cache.py
+++ b/Packages/cdms2/Lib/cache.py
@@ -1,16 +1,8 @@
 """
 CDMS cache management and file movement objects
 """
-import cdurllib
-import urlparse
-import tempfile
-import os
-import time
-import cdmsobj
-import sys
-import errno
-import shelve
-from .error import CDMSError
+import cdurllib, urlparse, tempfile, os, time, cdmsobj, sys, errno, shelve
+from error import CDMSError
 MethodNotImplemented = "Method not yet implemented"
 SchemeNotSupported = "Scheme not supported: "
 LockError = "Lock error:"
@@ -22,7 +14,6 @@ _lock_max_tries = 10                    # Number of tries for a lock
 _lock_naptime = 1                       # Seconds between lock tries
 _cache_tempdir = None                   # Default temporary directory
 
-
 def lock(filename):
     """
     Acquire a file-based lock with the given name.
@@ -41,50 +32,48 @@ def lock(filename):
     while (not success) and (tries < _lock_max_tries):
         try:
             if cdmsobj._debug:
-                print 'Process %d: Trying to acquire lock %s' % (os.getpid(), path)
-            fd = os.open(path, os.O_CREAT | os.O_WRONLY | os.O_EXCL, 0o666)
+                print 'Process %d: Trying to acquire lock %s'%(os.getpid(),path)
+            fd = os.open(path, os.O_CREAT | os.O_WRONLY | os.O_EXCL, 0666)
 
         # If the open failed because the file already exists, keep trying, otherwise
         # reraise the error
         except OSError:
-            if sys.exc_value.errno != errno.EEXIST:
+            if sys.exc_value.errno!=errno.EEXIST:
                 raise
             tries = tries + 1
         else:
             if cdmsobj._debug:
-                print 'Process %d: Acquired lock %s after %d tries' % (os.getpid(), path, tries)
+                print 'Process %d: Acquired lock %s after %d tries'%(os.getpid(),path,tries)
             success = 1
             break
 
         # Sleep until next retry
         if cdmsobj._debug:
-            print 'Process %d: Failed to acquire lock %s, sleeping' % (os.getpid(), path)
+            print 'Process %d: Failed to acquire lock %s, sleeping'%(os.getpid(),path)
         time.sleep(_lock_naptime)
 
     # Error if the lock could not be acquired
     if not success:
-        raise CDMSError(LockError + 'Could not acquire a lock on %s' % path)
+        raise CDMSError, LockError + 'Could not acquire a lock on %s'%path
 
     # The lock succeeded, so just close the file - we don't need to write
     # anything here
     else:
         os.close(fd)
 
-
 def unlock(filename):
     """
     Delete a file-based lock with the given name.
-    Usage:unlock(filename)
+    Usage:unlock(filename) 
     If the function returns, the lock was successfully deleted.
     Note: This function is UNIX-specific.
     """
 
     path = lockpath(filename)
     if cdmsobj._debug:
-        print 'Process %d: Unlocking %s' % (os.getpid(), path)
+        print 'Process %d: Unlocking %s'%(os.getpid(),path)
     os.unlink(path)
 
-
 def lockpath(filename):
     """
     Generate the pathname of a lock. Creates the directory containing the lock
@@ -95,12 +84,12 @@ def lockpath(filename):
 
     if not _cache_tempdir:
         tempfile.mktemp()
-        _cache_tempdir = os.path.join(tempfile.tempdir, 'cdms')
+        _cache_tempdir = os.path.join(tempfile.tempdir,'cdms')
         if not os.path.isdir(_cache_tempdir):
             if cdmsobj._debug:
-                print 'Process %d: Creating cache directory %s' % (os.getpid(), _cache_tempdir)
-            os.mkdir(_cache_tempdir, 0o777)
-    return os.path.join(_cache_tempdir, filename)
+                print 'Process %d: Creating cache directory %s'%(os.getpid(),_cache_tempdir)
+            os.mkdir(_cache_tempdir,0777)
+    return os.path.join(_cache_tempdir,filename)
 
 _useWindow = 0                          # If true, use a progress dialog
 _pythonTransfer = 0
@@ -108,7 +97,6 @@ _globusTransfer = 1
 _requestManagerTransfer = 2
 _transferMethod = _pythonTransfer       # Method of transferring files
 
-
 def useWindow():
     """
     Specify that dialog windows should be used if possible. Do not call this directly, use
@@ -117,7 +105,6 @@ def useWindow():
     global _useWindow
     _useWindow = 1
 
-
 def useTTY():
     """
     Informational messages such as FTP status should be sent to the terminal. See useWindow.
@@ -125,7 +112,6 @@ def useTTY():
     global _useWindow
     _useWindow = 0
 
-
 def useGlobusTransfer():
     """
     Specify that file transfers should use the Globus storage API (SC-API). See usePythonTransfer.
@@ -133,7 +119,6 @@ def useGlobusTransfer():
     global _transferMethod
     _transferMethod = _globusTransfer
 
-
 def usePythonTransfer():
     """
     Specify that file transfers should use the Python libraries urllib, ftplib. See useGlobusTransfer.
@@ -141,18 +126,15 @@ def usePythonTransfer():
     global _transferMethod
     _transferMethod = _pythonTransfer
 
-
 def useRequestManagerTransfer():
     try:
         import reqm
     except ImportError:
-        raise CDMSError(RequestManagerNotSupported)
+        raise CDMSError, RequestManagerNotSupported
     global _transferMethod
     _transferMethod = _requestManagerTransfer
 
-
-def copyFile(fromURL, toURL, callback=None,
-             lcpath=None, userid=None, useReplica=1):
+def copyFile(fromURL, toURL, callback=None, lcpath=None, userid=None, useReplica=1):
     """
     Copy file <fromURL> to local file <toURL>. For FTP transfers, if cache._useWindow is true,
     display a progress dialog, otherwise just print progress messages.
@@ -163,16 +145,15 @@ def copyFile(fromURL, toURL, callback=None,
     """
     if callback is None:
         if _useWindow:
-            from . import gui
+            import gui
             dialogParent = gui.getProgressParent()
             dialog = gui.CdProgressDialog(dialogParent, fromURL)
             callback = gui.updateProgressGui
         else:
             callback = cdurllib.sampleReportHook
-    (scheme, netloc, path, parameters, query,
-     fragment) = urlparse.urlparse(fromURL)
-    if scheme == 'ftp':
-        if _transferMethod == _pythonTransfer:
+    (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(fromURL)
+    if scheme=='ftp':
+        if _transferMethod==_pythonTransfer:
             urlopener = cdurllib.CDURLopener()
 
             # In window environment, attach the dialog to the opener. This will
@@ -186,57 +167,51 @@ def copyFile(fromURL, toURL, callback=None,
             except:
                 if _useWindow:
                     dialog.Destroy()
-                raise
-        elif _transferMethod == _globusTransfer:  # Transfer via Globus SC-API
+                raise 
+        elif _transferMethod==_globusTransfer: # Transfer via Globus SC-API
             try:
                 import globus.storage
             except ImportError:
-                raise CDMSError(GlobusNotSupported)
+                raise CDMSError, GlobusNotSupported
 
-            globus.storage.transfer(fromURL, "file:" + toURL)
+            globus.storage.transfer(fromURL, "file:"+toURL)
         else:
-            raise CDMSError(SchemeNotSupported + scheme)
+            raise CDMSError, SchemeNotSupported + scheme
         return
-    elif _transferMethod == _requestManagerTransfer:  # Request manager gransfer
-        import reqm
-        import signal
+    elif _transferMethod==_requestManagerTransfer: # Request manager gransfer
+        import reqm, signal
 
         # Define an alarm handler, to poll the request manager
         def handler(signum, frame):
             pass
 
-        # Obtain server reference from environment variable ESG_REQM_REF if
-        # present
+        # Obtain server reference from environment variable ESG_REQM_REF if present
         serverRef = os.environ.get('ESG_REQM_REF', '/tmp/esg_rqm.ref')
         server = reqm.RequestManager(iorFile=serverRef)
-        result, token = server.requestFile(
-            userid, lcpath, path, toURL, useReplica)
+        result, token = server.requestFile(userid, lcpath, path, toURL, useReplica)
         server.execute(token)
 
         # Poll the request manager for completion, signalled by estim<=0.0
-        while True:
+        while 1:
             signal.signal(signal.SIGALRM, handler)
             estim = server.estimate(token)
-            print 'Estimate: ', estim
-            if estim <= 0.0:
-                break
+            print 'Estimate: ',estim
+            if estim<=0.0: break
             signal.alarm(3)             # Number of seconds between polls
             signal.pause()
 
         #!!!! Remove this when gsincftp uses the right target name !!!
-
-# oldpath = os.path.join(os.path.dirname(toURL),path)
-# os.rename(oldpath,toURL)
+            
+##         oldpath = os.path.join(os.path.dirname(toURL),path)
+##         os.rename(oldpath,toURL)
 
         #!!!!
-
+        
         return
     else:
-        raise CDMSError(SchemeNotSupported + scheme)
+        raise CDMSError, SchemeNotSupported + scheme
 
 # A simple data cache
-
-
 class Cache:
 
     indexpath = None                    # Path of data cache index
@@ -252,10 +227,9 @@ class Cache:
             except:
                 pass
             lock("index_lock")
-            self.index = shelve.open(self.indexpath)  # Persistent cache index
+            self.index = shelve.open(self.indexpath) # Persistent cache index
             try:
-                os.chmod(self.indexpath, 0o666)
-                         # Make index file world writeable
+                os.chmod(self.indexpath,0666) # Make index file world writeable
             except:
                 pass
             self.index.close()
@@ -263,7 +237,7 @@ class Cache:
             # Clean up pending read notifications in the cache. This will also
             # mess up tranfers in progress...
             self.clean()
-            self.direc = os.path.dirname(self.indexpath)  # Cache directory
+            self.direc = os.path.dirname(self.indexpath) # Cache directory
 
     def get(self, filekey):
         """
@@ -295,7 +269,7 @@ class Cache:
         lock("index_lock")
         try:
             if cdmsobj._debug:
-                print 'Process %d: Adding cache file %s,\n   key %s' % (os.getpid(), path, filekey)
+                print 'Process %d: Adding cache file %s,\n   key %s'%(os.getpid(),path,filekey)
             self.index = shelve.open(self.indexpath)
             self.index[filekey] = path
         except:
@@ -320,8 +294,7 @@ class Cache:
             pass
         unlock("index_lock")
 
-    def copyFile(self, fromURL, filekey,
-                 lcpath=None, userid=None, useReplica=None):
+    def copyFile(self, fromURL, filekey, lcpath=None, userid=None, useReplica=None):
         """
         Copy the file <fromURL> into the cache. Return the result path.
 
@@ -329,9 +302,9 @@ class Cache:
         <userid> is the string user ID, <useReplica> is true iff the request manager should
         search the replica catalog for the actual file to transfer.
         """
-
+        
         # Put a notification into the cache, that this file is being read.
-        self.put(filekey, "__READ_PENDING__")
+        self.put(filekey,"__READ_PENDING__")
 
         # Get a temporary file in the cache
         tempdir = tempfile.tempdir
@@ -341,29 +314,21 @@ class Cache:
 
         # Copy to the temporary file
         try:
-            copyFile(
-                fromURL,
-                toPath,
-                lcpath=lcpath,
-                userid=userid,
-                useReplica=useReplica)
-            os.chmod(toPath, 0o666)
-                     # Make cache files world writeable
+            copyFile(fromURL, toPath, lcpath=lcpath, userid=userid, useReplica=useReplica)
+            os.chmod(toPath,0666)           # Make cache files world writeable
         except:
-            # Remove the notification on error, and the temp file, then
-            # re-raise
+            # Remove the notification on error, and the temp file, then re-raise
             self.deleteEntry(filekey)
             if os.path.isfile(toPath):
                 os.unlink(toPath)
             raise
 
         # Add to the cache index
-        self.put(filekey, toPath)
+        self.put(filekey,toPath)
 
         return toPath
 
-    def getFile(self, fromURL, filekey, naptime=5,
-                maxtries=60, lcpath=None, userid=None, useReplica=None):
+    def getFile(self, fromURL, filekey, naptime=5, maxtries=60, lcpath=None, userid=None, useReplica=None):
         """
         Get the file with <fileURL>. If the file is in the cache, read it.
         If another process is transferring it into the cache, wait for the
@@ -386,34 +351,23 @@ class Cache:
         """
         # If the file is being read into the cache, just wait for it
         tempname = self.get(filekey)
-        # Note: This is not bulletproof: another process could set the cache at
-        # this point
+        # Note: This is not bulletproof: another process could set the cache at this point
         if tempname is None:
-            fpath = self.copyFile(
-                fromURL,
-                filekey,
-                lcpath=lcpath,
-                userid=userid,
-                useReplica=useReplica)
-        elif tempname == "__READ_PENDING__":
+            fpath = self.copyFile(fromURL,filekey,lcpath=lcpath,userid=userid,useReplica=useReplica)
+        elif tempname=="__READ_PENDING__":
             success = 0
             for i in range(maxtries):
                 if cdmsobj._debug:
-                    print 'Process %d: Waiting for read completion, %s' % (os.getpid(), repr(filekey))
+                    print 'Process %d: Waiting for read completion, %s'%(os.getpid(),`filekey`)
                 time.sleep(naptime)
                 tempname = self.get(filekey)
 
                 # The read failed, or the entry was deleted.
                 if tempname is None:
-                    fpath = self.copyFile(
-                        fromURL,
-                        filekey,
-                        lcpath=lcpath,
-                        userid=userid,
-                        useReplica=useReplica)
+                    fpath = self.copyFile(fromURL,filekey,lcpath=lcpath,userid=userid,useReplica=useReplica)
 
                 # The read is not yet complete
-                elif tempname == "__READ_PENDING__":
+                elif tempname=="__READ_PENDING__":
                     continue
 
                 # The read is finished.
@@ -422,13 +376,13 @@ class Cache:
                     success = 1
                     break
             if not success:
-                raise CDMSError(TimeOutError + repr(filekey))
+                raise CDMSError, TimeOutError +`filekey`
 
         else:
             fpath = tempname
 
         if cdmsobj._debug:
-            print 'Process %d: Got file %s from cache %s' % (os.getpid(), fromURL, fpath)
+            print 'Process %d: Got file %s from cache %s'%(os.getpid(),fromURL,fpath)
         return fpath
 
     def delete(self):
@@ -440,11 +394,10 @@ class Cache:
             self.index = shelve.open(self.indexpath)
             for key in self.index.keys():
                 path = self.index[key]
-                if path == "__READ_PENDING__":
-                    continue  # Don't remove read-pending notifications
+                if path=="__READ_PENDING__": continue # Don't remove read-pending notifications
                 try:
                     if cdmsobj._debug:
-                        print 'Process %d: Deleting cache file %s' % (os.getpid(), path)
+                        print 'Process %d: Deleting cache file %s'%(os.getpid(),path)
                     os.unlink(path)
                 except:
                     pass
@@ -462,7 +415,7 @@ class Cache:
         self.index = shelve.open(self.indexpath)
         for key in self.index.keys():
             path = self.index[key]
-            if path == "__READ_PENDING__":
+            if path=="__READ_PENDING__":
                 del self.index[key]
         self.index.close()
         unlock("index_lock")
diff --git a/Packages/cdms2/Lib/cdmsNode.py b/Packages/cdms2/Lib/cdmsNode.py
index ff5bba3f2..794e4ed3b 100644
--- a/Packages/cdms2/Lib/cdmsNode.py
+++ b/Packages/cdms2/Lib/cdmsNode.py
@@ -1,5 +1,5 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 """
 CDMS node classes
@@ -9,16 +9,16 @@ from numpy import get_printoptions, set_printoptions, inf
 import CDML
 import cdtime
 import re
+import string
 import sys
+from types import *
 from error import CDMSError
 
 # Regular expressions
-_Name = re.compile('[a-zA-Z0-9_:][-a-zA-Z0-9._:]*$')
-                   # Note: allows digit as first character
+_Name = re.compile('[a-zA-Z0-9_:][-a-zA-Z0-9._:]*$') # Note: allows digit as first character
 _Integer = re.compile('[0-9]+$')
 _ArraySep = re.compile('[\[\],\s]+')
-_Illegal = re.compile('([<>&\"\'])|([^\t\r\n -\176\240-\377])')
-                      #" illegal chars in content
+_Illegal = re.compile('([<>&\"\'])|([^\t\r\n -\176\240-\377])')   #" illegal chars in content
 
 # Data types
 
@@ -33,51 +33,42 @@ CdDouble = CDML.CdDouble
 CdString = CDML.CdString
 CdFromObject = CDML.CdFromObject
 CdAny = CDML.CdAny
-CdDatatypes = [
-    CdChar,
-     CdByte,
-     CdShort,
-     CdInt,
-     CdLong,
-     CdInt64,
-     CdFloat,
-     CdDouble,
-     CdString]
+CdDatatypes = [CdChar,CdByte,CdShort,CdInt,CdLong,CdInt64,CdFloat,CdDouble,CdString]
 
 CdScalar = CDML.CdScalar
 CdArray = CDML.CdArray
 
-NumericToCdType = {numpy.sctype2char(numpy.float32): CdFloat,
-                   numpy.sctype2char(numpy.float): CdDouble,
-                   numpy.sctype2char(numpy.int16): CdShort,
-                   numpy.sctype2char(numpy.int32): CdInt,
-                   numpy.sctype2char(numpy.int): CdLong,
-                   numpy.sctype2char(numpy.int64): CdInt64,
-                   numpy.sctype2char(numpy.intc): CdLong,
-                   numpy.sctype2char(numpy.int8): CdByte,
-                   'c': CdChar,
-                   'B': 'B',
-                   'H': 'H',
-                   'L': 'L',
-                   'q': CdInt64,
-                   'Q': 'Q',
-                   'S': 'S'
+NumericToCdType = {numpy.sctype2char(numpy.float32):CdFloat,
+                   numpy.sctype2char(numpy.float):CdDouble,
+                   numpy.sctype2char(numpy.int16):CdShort,
+                   numpy.sctype2char(numpy.int32):CdInt,
+                   numpy.sctype2char(numpy.int):CdLong,
+                   numpy.sctype2char(numpy.int64):CdInt64,
+                   numpy.sctype2char(numpy.intc):CdLong,
+                   numpy.sctype2char(numpy.int8):CdByte,
+                   'c':CdChar,
+                   'B':'B',
+                   'H':'H',
+                   'L':'L',
+                   'q':CdInt64,
+                   'Q':'Q',
+                   'S':'S'
                    }
 
-CdToNumericType = {CdChar: 'c',
-                   CdByte: numpy.int8,
-                   CdShort: numpy.int16,
-                   CdInt: numpy.int32,
-                   CdLong: numpy.int,
-                   CdInt64: numpy.int64,
-                   CdFloat: numpy.float32,
-                   CdDouble: numpy.float}
+CdToNumericType = {CdChar:'c',
+                   CdByte:numpy.int8,
+                   CdShort:numpy.int16,
+                   CdInt:numpy.int32,
+                   CdLong:numpy.int,
+                   CdInt64:numpy.int64,
+                   CdFloat:numpy.float32,
+                   CdDouble:numpy.float}
 
 # Grid types
 UnknownGridType = "unknown"
 GaussianGridType = "gaussian"
 UniformGridType = "uniform"
-CdGridtypes = [UnknownGridType, GaussianGridType, UniformGridType]
+CdGridtypes = [UnknownGridType,GaussianGridType,UniformGridType]
 
 DuplicateIdError = "Duplicate identifier: "
 InvalidArgumentError = "Invalid argument: "
@@ -85,8 +76,6 @@ InvalidDatatype = "Invalid datatype: "
 InvalidGridtype = "Invalid grid type: "
 InvalidIdError = "Invalid identifier: "
 NotMonotonic = "Result array is not monotonic "
-
-
 class NotMonotonicError(CDMSError):
     pass
 
@@ -107,8 +96,6 @@ CdSingleton = 2
 # '"' --> &quot;
 # "'" --> &apos;
 # all other illegal characters are removed #"
-
-
 def mapIllegalToEntity(matchobj):
     s = matchobj.group(0)
     if s == '<':
@@ -117,55 +104,49 @@ def mapIllegalToEntity(matchobj):
         return '&gt;'
     elif s == '&':
         return '&amp;'
-    elif s == '"':  # "
+    elif s == '"':   #"
         return '&quot;'
-    elif s == "'":
+    elif s=="'":
         return '&apos;'
     else:
         return ""
 
 # Named node
-
-
 class CdmsNode:
 
     def __init__(self, tag, id=None, parent=None):
         if id and _Name.match(id) is None:
             raise CDMSError, InvalidIdError + id
-        self.attribute = {}
-            # External attributes, attribute[name]=(value,cdDatatype)
+        self.attribute = {}             # External attributes, attribute[name]=(value,cdDatatype)
         self.child = []                 # Children
         self.id = id                    # Identifier string
         self.parent = parent            # Parent node in a tree, None for root
         self.tag = tag                  # XML tag string
         self.content = None             # XML content string
-        self.dtd = CDML.CDML().dtd.get(self.tag)
-                             # CDML Document Type Definition for this tag
-        self.extra = CDML.CDML().extra.get(
-            self.tag)  # Extra datatype constraints
+        self.dtd = CDML.CDML().dtd.get(self.tag)  # CDML Document Type Definition for this tag
+        self.extra = CDML.CDML().extra.get(self.tag) # Extra datatype constraints
         CdmsNode.mapToExternal(self)    # Don't call subclass mapToExternal!
 
     # Map to external attributes
     def mapToExternal(self):
         if self.id is not None and _Name.match(self.id) is None:
             raise CDMSError, InvalidIdError + self.id
-        if self.id is not None:
-            self.setExternalAttr('id', self.id)
+        if self.id is not None: self.setExternalAttr('id',self.id)
 
     # Set content from a string. The interpretation
     # of content is class-dependent
-    def setContentFromString(self, content):
-        self.content = content
+    def setContentFromString(self,content):
+        self.content=content
 
     # Get content
     def getContent(self):
         return self.content
 
     # Add a child node
-    def add(self, child):
+    def add(self,child):
         if child is not None:
             self.child.append(child)
-            child.parent = self
+            child.parent=self
         return child
 
     # Return a list of child nodes
@@ -173,13 +154,13 @@ class CdmsNode:
         return self.child
 
     # Get the child node at index k
-    def getChildAt(self, index):
+    def getChildAt(self,index):
         return self.child[index]
 
     # Remove and return the child at index k
-    def removeChildAt(self, index):
+    def removeChildAt(self,index):
         child = self.child[index]
-        self.child = self.child[:index] + self.child[index + 1:]
+        self.child = self.child[:index]+self.child[index+1:]
         return child
 
     # Get the number of children
@@ -187,7 +168,7 @@ class CdmsNode:
         return len(self.child)
 
     # Get the index of a node
-    def getIndex(self, node):
+    def getIndex(self,node):
         index = -1
         for i in range(len(self.child)):
             if node is self.child[i]:
@@ -201,21 +182,20 @@ class CdmsNode:
 
     # True iff node is a leaf node
     def isLeaf(self):
-        return self.child == []
+        return self.child==[]
 
     # Set an external attribute
     # 'attr' is an Attr object
     def setExternalAttrFromAttr(self, attr):
-        if attr.value is None:
-            return
-        self.attribute[attr.name] = (attr.value, attr.getDatatype())
+        if attr.value is None: return
+        self.attribute[attr.name]=(attr.value,attr.getDatatype())
 
     # Get an external attribute, as an Attr instance
     def getExternalAttrAsAttr(self, name):
         attrPair = self.attribute.get(name)
         if attrPair:
-            (value, datatype) = attrPair
-            attr = AttrNode(name, value)
+            (value,datatype) = attrPair
+            attr = AttrNode(name,value)
             attr.datatype = datatype
             return attr
         else:
@@ -223,7 +203,7 @@ class CdmsNode:
 
     # Set an external attribute
     def setExternalAttr(self, name, value, datatype=None):
-        attr = AttrNode(name, value)
+        attr = AttrNode(name,value)
         attr.datatype = datatype
         self.setExternalAttrFromAttr(attr)
 
@@ -231,7 +211,7 @@ class CdmsNode:
     def getExternalAttr(self, name):
         attrPair = self.attribute.get(name)
         if attrPair:
-            (value, datatype) = attrPair
+            (value,datatype) = attrPair
             return value
         else:
             return None
@@ -242,88 +222,71 @@ class CdmsNode:
 
     # Set the external attribute dictionary. The input dictionary
     # is of the form {name:value,...} where value is a string.
-    def setExternalDict(self, dict):
+    def setExternalDict(self,dict):
         for key in dict.keys():
-            self.attribute[key] = (dict[key], CdString)
+            self.attribute[key]=(dict[key],CdString)
 
     # Write to a file, with formatting.
     # tablevel is the start number of tabs
-    def write(self, fd=None, tablevel=0, format=1):
-        if fd is None:
-            fd = sys.stdout
+    def write(self,fd=None,tablevel=0,format=1):
+        if fd is None: fd = sys.stdout
         printLimit = get_printoptions()['threshold']
-        set_printoptions(threshold=inf)
-                         # Ensure that all Numeric array values will be printed
+        set_printoptions(threshold=inf)            # Ensure that all Numeric array values will be printed
         if self.dtd:
             validAttrs = self.dtd.keys()
         else:
             validAttrs = None
 
-        if format:
-            fd.write(tablevel * '\t')
-        fd.write('<' + self.tag)
-        if format:
-            fd.write('\n')
+        if format: fd.write(tablevel*'\t')
+        fd.write('<'+self.tag)
+        if format: fd.write('\n')
 
         # Write valid attributes
         for attname in self.attribute.keys():
             if (validAttrs and (attname in validAttrs)) or (not validAttrs):
-                if format:
-                    fd.write((tablevel + 1) * '\t')
-                (attval, datatype) = self.attribute[attname]
+                if format: fd.write((tablevel+1)*'\t')
+                (attval,datatype)=self.attribute[attname]
                 # attvalstr = string.replace(str(attval),'"',"'") # Map " to '
-                attvalstr = _Illegal.sub(
-    mapIllegalToEntity,
-     str(attval))  # Map illegal chars to entities
+                attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  # Map illegal chars to entities
                 if format:
-                    fd.write(attname + '\t="' + attvalstr + '"')
+                    fd.write(attname+'\t="'+attvalstr+'"')
                 else:
-                    fd.write(' ' + attname + '="' + attvalstr + '"')
-                if format:
-                    fd.write('\n')
-        if format:
-            fd.write((tablevel + 1) * '\t')
+                    fd.write(' '+attname+'="'+attvalstr+'"')
+                if format: fd.write('\n')
+        if format: fd.write((tablevel+1)*'\t')
         fd.write('>')
-        if format:
-            fd.write('\n')
+        if format: fd.write('\n')
 
         # Write extra attributes
         for attname in self.attribute.keys():
             if validAttrs and (attname not in validAttrs):
-                (attval, datatype) = self.attribute[attname]
-                attr = AttrNode(attname, attval)
-                attr.datatype = datatype
+                (attval,datatype)=self.attribute[attname]
+                attr = AttrNode(attname,attval)
+                attr.datatype=datatype
                 attr.mapToExternal()
-                attr.write(fd, tablevel + 1, format)
+                attr.write(fd,tablevel+1,format)
 
         # Write content
         content = self.getContent()
         if content is not None:
-            content = _Illegal.sub(
-    mapIllegalToEntity,
-     content)  # Map illegal chars to entities
-            if format:
-                fd.write((tablevel + 1) * '\t')
+            content = _Illegal.sub(mapIllegalToEntity,content)  # Map illegal chars to entities
+            if format: fd.write((tablevel+1)*'\t')
             fd.write(content)
-            if format:
-                fd.write('\n')
+            if format: fd.write('\n')
 
         # Write children
         for node in self.child:
-            node.write(fd, tablevel + 1, format)
+            node.write(fd,tablevel+1,format)
 
-        if format:
-            fd.write((tablevel + 1) * '\t')
-        fd.write('</' + self.tag + '>')
-        if format:
-            fd.write('\n')
+        if format: fd.write((tablevel+1)*'\t')
+        fd.write('</'+self.tag+'>')
+        if format: fd.write('\n')
         set_printoptions(threshold=printLimit)  # Restore original
 
-    # Write to a file without formatting.
-    def write_raw(self, fd=None):
-        if fd is None:
-            fd = sys.stdout
-        self.write(fd, 0, 0)
+    # Write to a file without formatting. 
+    def write_raw(self,fd=None):
+        if fd is None: fd = sys.stdout
+        self.write(fd,0,0)
 
     # Write an LDIF (LDAP interchange format) entry
     # parentdn is the parent LDAP distinguished name
@@ -331,116 +294,104 @@ class CdmsNode:
     # A trailing newline is added iff format==1
     # Note: unlike write, this does not write children as well
     def write_ldif(self, parentdn, userAttrs=[], fd=None, format=1):
-        if fd is None:
-            fd = sys.stdout
+        if fd is None: fd = sys.stdout
         if self.dtd:
             validAttrs = self.dtd.keys()
         else:
             validAttrs = None
 
         # Write distinguished name
-        newdn = "%s=%s,%s" % (self.tag, self.id, parentdn)
-        fd.write("dn: %s\n" % newdn)
+        newdn = "%s=%s,%s"%(self.tag,self.id,parentdn)
+        fd.write("dn: %s\n"%newdn)
 
         # Write valid attributes
         for attname in self.attribute.keys():
             if (validAttrs and (attname in validAttrs)) or (not validAttrs):
-                (attval, datatype) = self.attribute[attname]
-                # attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  #
-                # Map illegal chars to entities
-                if not isinstance(attval, basestring):
+                (attval,datatype)=self.attribute[attname]
+                # attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  # Map illegal chars to entities
+                if type(attval)!=StringType:
                     attval = `attval`
-                attvalstr = attval.strip()
-                attvalstr = re.sub(
-    '\n',
-     '\n ',
-     attvalstr)  # Make sure continuation lines are preceded with a space
-                if attvalstr == '':
-                    attvalstr = "none"
-                fd.write("%s: %s\n" % (attname, attvalstr))
-
+                attvalstr = string.strip(attval)
+                attvalstr = re.sub('\n','\n ',attvalstr) # Make sure continuation lines are preceded with a space
+                if attvalstr=='': attvalstr = "none"
+                fd.write("%s: %s\n"%(attname,attvalstr))
+        
         # Write extra attributes
         for attname in self.attribute.keys():
             if validAttrs and (attname not in validAttrs):
-                (attval, datatype) = self.attribute[attname]
-                if not isinstance(attval, basestring):
+                (attval,datatype)=self.attribute[attname]
+                if type(attval)!=StringType:
                     attval = `attval`
-                attval = re.sub(
-    '\n',
-     '\n ',
-     attval)  # Make sure continuation lines are preceded with a space
-                fd.write("attr: %s=%s\n" % (attname, attval))
+                attval = re.sub('\n','\n ',attval) # Make sure continuation lines are preceded with a space
+                fd.write("attr: %s=%s\n"%(attname,attval))
 
         # Write content
         # content = self.getContent()
         # if content is not None:
-        # content = _Illegal.sub(mapIllegalToEntity,content)  # Map illegal chars to entities
+        #     content = _Illegal.sub(mapIllegalToEntity,content)  # Map illegal chars to entities
         #     fd.write("value: %s"%(content,))
 
         # Write user attributes
-        if isinstance(userAttrs, basestring):
+        if type(userAttrs)==StringType:
             newAttrs = [userAttrs]
         else:
             newAttrs = userAttrs
         for entry in list(newAttrs):
-            fd.write("%s\n" % entry)
+            fd.write("%s\n"%entry)
 
         # Write classes
         fd.write("objectclass: top\n")
-        fd.write("objectclass: %s\n" % (self.tag))
+        fd.write("objectclass: %s\n"%(self.tag))
 
-        if format == 1:
+        if format==1:
             fd.write('\n')
 
         return newdn
 
     # Validate attributes
-    def validate(self, idtable=None):
+    def validate(self,idtable=None):
 
         # Check validity of enumerated values and references
         validKeys = self.dtd.keys()
         for attname in self.attribute.keys():
             if attname in validKeys:
-                (atttype, default) = self.dtd[attname]
-                if isinstance(atttype, tuple):
-                    attval = self.getExternalAttr(attname)
-                    assert attval in atttype, 'Invalid attribute %s=%s must be in %s' % (
-                        attname, attval, `atttype`)
-                elif atttype == CDML.Idref:
-                    attval = self.getExternalAttr(attname)
+                (atttype,default)=self.dtd[attname]
+                if type(atttype) is TupleType:
+                    attval=self.getExternalAttr(attname)
+                    assert attval in atttype, 'Invalid attribute %s=%s must be in %s'%(attname,attval,`atttype`)
+                elif atttype==CDML.Idref:
+                    attval=self.getExternalAttr(attname)
                     if idtable:
                         if not idtable.has_key(attval):
-                            print 'Warning: ID reference not found: %s=%s' % (attname, attval)
-
+                            print 'Warning: ID reference not found: %s=%s'%(attname,attval)
+            
         # Validate children
         for node in self.children():
             node.validate(idtable)
-
+    
 # Container object for other CDMS objects
-
-
 class DatasetNode(CdmsNode):
 
     def __init__(self, id):
-        CdmsNode.__init__(self, "dataset", id)
+        CdmsNode.__init__(self,"dataset",id )
         self.idtable = {}
 
     # Validate the dataset and all child nodes
-    def validate(self, idtable=None):
+    def validate(self,idtable=None):
         if not idtable:
-            idtable = self.idtable
-        CdmsNode.validate(self, idtable)
+            idtable=self.idtable
+        CdmsNode.validate(self,idtable)
 
     # Add a child node with an ID
-    def addId(self, id, child):
-        if self.idtable.has_key(id):
-            raise CDMSError, DuplicateIdError + id
-        CdmsNode.add(self, child)
-        self.idtable[id] = child
+    def addId(self,id,child):
+        if self.idtable.has_key(id): 
+            raise CDMSError, DuplicateIdError +id
+        CdmsNode.add(self,child)
+        self.idtable[id]=child
         return child
 
     # Get a child node from its ID
-    def getChildNamed(self, id):
+    def getChildNamed(self,id):
         return self.idtable.get(id)
 
     # Get the ID table
@@ -450,80 +401,66 @@ class DatasetNode(CdmsNode):
     # Dump to a CDML file.
     # path is the file to dump to, or None for standard output.
     # if format is true, write with tab, newline formatting
-    def dump(self, path=None, format=1):
+    def dump(self,path=None,format=1):
         if path:
             try:
-                fd = open(path, 'w')
+                fd = open(path,'w')
             except IOError:
-                raise IOError, '%s: %s' % (sys.exc_value, path)
+                raise IOError,'%s: %s'%(sys.exc_value,path)
         else:
             fd = sys.stdout
         fd.write('<?xml version="1.0"?>')
-        if format:
-            fd.write('\n')
-        fd.write(
-            '<!DOCTYPE dataset SYSTEM "http://www-pcmdi.llnl.gov/software/cdms/cdml.dtd">')
-        if format:
-            fd.write('\n')
-        self.write(fd, 0, format)
-        if fd != sys.stdout:
-            fd.close()
+        if format: fd.write('\n')
+        fd.write('<!DOCTYPE dataset SYSTEM "http://www-pcmdi.llnl.gov/software/cdms/cdml.dtd">')
+        if format: fd.write('\n')
+        self.write(fd,0,format)
+        if fd!=sys.stdout: fd.close()
 
 # Spatio-temporal variable
 # Two ways to create a variable:
 # (1) var = VariableNode(id,datatype,domain)
 # (2) var = VariableNode(id,datatype)
 #     var.setDomain(domain)
-
-
 class VariableNode(CdmsNode):
 
     # Create a variable.
     # If validate is true, validate immediately
-
     def __init__(self, id, datatype, domain):
-        assert isinstance(
-    datatype, basestring), 'Invalid datatype: ' + `datatype`
-        assert datatype in CdDatatypes, 'Invalid datatype: ' + `datatype`
-        CdmsNode.__init__(self, "variable", id)
+        assert type(datatype) is StringType, 'Invalid datatype: '+`datatype`
+        assert datatype in CdDatatypes, 'Invalid datatype: '+`datatype`
+        CdmsNode.__init__(self,"variable",id)
         self.datatype = datatype
         self.setDomain(domain)
         VariableNode.mapToExternal(self)
 
     # Set the domain
-    def setDomain(self, domain):
+    def setDomain(self,domain):
         if not self.isLeaf():
             self.removeChildAt(0)
         self.add(domain)
 
     # Get the domain
     def getDomain(self):
-        if self.getChildCount() > 0:
+        if self.getChildCount()>0:
             return self.getChildAt(0)
         else:
             return None
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr('datatype', self.datatype)
-
+        self.setExternalAttr('datatype',self.datatype)
+        
 # Coordinate axis
-
-
 class AxisNode(CdmsNode):
 
     # If datatype is None, assume values [0,1,..,length-1]
     # data is a numpy array, if specified
-
-    def __init__(self, id, length, datatype=CdLong, data=None):
-        assert isinstance(length, int), 'Invalid length: ' + `length`
-        assert isinstance(
-    datatype, basestring), 'Invalid datatype: ' + `datatype`
-        assert datatype in CdDatatypes, 'Invalid datatype: ' + `datatype`
-        if data is not None:
-            assert isinstance(
-    data, numpy.ndarray), 'data must be a 1-D Numeric array'
-        CdmsNode.__init__(self, "axis", id)
+    def __init__(self, id, length, datatype=CdLong,data=None):
+        assert isinstance(length, IntType), 'Invalid length: '+`length`
+        assert type(datatype) is StringType, 'Invalid datatype: '+`datatype`
+        assert datatype in CdDatatypes, 'Invalid datatype: '+`datatype`
+        if data is not None: assert isinstance(data, numpy.ndarray), 'data must be a 1-D Numeric array'
+        CdmsNode.__init__(self,"axis",id)
         self.datatype = datatype
         self.data = data
         # data representation is CdLinear or CdVector
@@ -542,51 +479,48 @@ class AxisNode(CdmsNode):
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr('datatype', self.datatype)
-        self.setExternalAttr('length', self.length)
+        self.setExternalAttr('datatype',self.datatype)
+        self.setExternalAttr('length',self.length)
 
     # Set data from content string
     # The content of an axis is the data array.
-    def setContentFromString(self, datastring):
+    def setContentFromString(self,datastring):
         datatype = self.datatype
         numericType = CdToNumericType.get(datatype)
-        if numericType is None:
-            raise CDMSError, InvalidDatatype + datatype
+        if numericType is None: raise CDMSError, InvalidDatatype + datatype
         stringlist = _ArraySep.split(datastring)
         numlist = []
         for numstring in stringlist:
-            if numstring == '':
-                continue
-            numlist.append(float(numstring))
-        if len(numlist) > 0:
+            if numstring=='': continue
+            numlist.append(string.atof(numstring))
+        if len(numlist)>0:
             # NB! len(zero-length array) causes IndexError on Linux!
-            dataArray = numpy.array(numlist, numericType)
+            dataArray = numpy.array(numlist,numericType)
             self.data = dataArray
             self.length = len(self.data)
 
     # Set the partition from a string. This does not
     # set the external string representation
-    def setPartitionFromString(self, partstring):
+    def setPartitionFromString(self,partstring):
         stringlist = _ArraySep.split(partstring)
         numlist = []
         for numstring in stringlist:
-            if numstring == '':
-                continue
-            numlist.append(int(numstring))
-        dataArray = numpy.array(numlist, numpy.int)
-        if len(dataArray) > 0:
+            if numstring=='': continue
+            numlist.append(string.atoi(numstring))
+        dataArray = numpy.array(numlist,numpy.int)
+        if len(dataArray)>0:
             self.partition = dataArray
 
     # Get the content string: the data values if the representation
     # is as a vector, or ane empty string otherwise
     def getContent(self):
-        if self.data is None or self.dataRepresent == CdLinear:
+        if self.data is None or self.dataRepresent==CdLinear:
             return ''
         else:
             return str(self.data)
 
     # Set the data as an array, check for monotonicity
-    def setData(self, data):
+    def setData(self,data):
 
         # If this axis is currently linear, remove the linear node
         if self.dataRepresent == CdLinear:
@@ -595,8 +529,8 @@ class AxisNode(CdmsNode):
         self.data = data
         self.dataRepresent = CdVector
         self.length = len(data)
-        self.setExternalAttr('length', self.length)
-        if self.monotonicity() == CdNotMonotonic:
+        self.setExternalAttr('length',self.length)
+        if self.monotonicity()==CdNotMonotonic:
             raise NotMonotonicError, NotMonotonic
 
     # Get the data as an array
@@ -608,9 +542,9 @@ class AxisNode(CdmsNode):
 
     # Set the data as a linear vector
     # If the partition is set, derive the vector length from it
-    def setLinearData(self, linearNode, partition=None):
+    def setLinearData(self,linearNode, partition=None):
         self.data = linearNode
-        if self.getChildCount() > 0:
+        if self.getChildCount()>0:
             self.removeChildAt(0)           # Remove the previous linear node
         self.add(linearNode)
         self.dataRepresent = CdLinear
@@ -621,23 +555,23 @@ class AxisNode(CdmsNode):
             self.partition = partition
             self.length = partition[-1]
             linearNode.length = self.length
-            self.setExternalAttr('partition', str(self.partition))
-        self.setExternalAttr('length', self.length)
+            self.setExternalAttr('partition',str(self.partition))
+        self.setExternalAttr('length',self.length)
 
     # Test if axis data vectors are equal
-    def equal(self, axis):
+    def equal(self,axis):
         # Require that partitions (if any) are equal
         if self.partition is not None and axis.partition is not None:
-            if len(self.partition) != len(axis.partition):
+            if len(self.partition)!=len(axis.partition):
                 return 0
-            if not numpy.alltrue(numpy.equal(self.partition, axis.partition)):
+            if not numpy.alltrue(numpy.equal(self.partition,axis.partition)):
                 return 0
         elif self.partition is not None or axis.partition is not None:
             return 0
-
+        
         if self.dataRepresent == axis.dataRepresent == CdVector:
             try:
-                return numpy.alltrue(numpy.equal(self.data, axis.data))
+                return numpy.alltrue(numpy.equal(self.data,axis.data))
             except ValueError:
                 return 0
         elif self.dataRepresent == axis.dataRepresent == CdLinear:
@@ -649,34 +583,34 @@ class AxisNode(CdmsNode):
 
     # Test if axis data vectors are element-wise close
     # True iff for each respective element a and b, abs((b-a)/b)<=eps
-    def isClose(self, axis, eps):
-        if eps == 0:
+    def isClose(self,axis,eps):
+        if eps==0:
             return self.equal(axis)
         if self.dataRepresent == axis.dataRepresent == CdVector:
             try:
-                return numpy.alltrue(numpy.less_equal(numpy.absolute(self.data - axis.data), numpy.absolute(eps * self.data)))
+                return numpy.alltrue(numpy.less_equal(numpy.absolute(self.data-axis.data),numpy.absolute(eps*self.data)))
             except ValueError:
                 return 0
         elif self.dataRepresent == axis.dataRepresent == CdLinear:
-            return self.data.isClose(axis.data, eps)
+            return self.data.isClose(axis.data,eps)
         elif self.dataRepresent == CdVector:
-            return axis.data.isCloseVector(self.data, eps)
+            return axis.data.isCloseVector(self.data,eps)
         else:
-            return self.data.isCloseVector(axis.data, eps)
+            return self.data.isCloseVector(axis.data,eps)
 
     # Test for strict monotonicity.
     # Returns CdNotMonotonic, CdIncreasing, CdDecreasing, or CdSingleton
     def monotonicity(self):
         if self.dataRepresent == CdLinear:
             return self.data.monotonicity()
-        elif self.length == 1:
+        elif self.length==1:
             return CdSingleton
         else:
             first = self.data[:-1]
             second = self.data[1:]
-            if numpy.alltrue(numpy.less(first, second)):
+            if numpy.alltrue(numpy.less(first,second)):
                 return CdIncreasing
-            elif numpy.alltrue(numpy.greater(first, second)):
+            elif numpy.alltrue(numpy.greater(first,second)):
                 return CdDecreasing
             else:
                 return CdNotMonotonic
@@ -684,24 +618,24 @@ class AxisNode(CdmsNode):
     # Extend axes. 'isreltime' is true iff
     # the axes are relative time axes
     # If allowgaps is true, allow gaps when extending linear vectors
-    def extend(self, axis, isreltime=0, allowgaps=0):
+    def extend(self,axis,isreltime=0,allowgaps=0):
         # Set trylin true if should try to catenate linear vectors
-        if self.dataRepresent == CdLinear:
+        if self.dataRepresent==CdLinear:
             anode = self.data
-            if axis.dataRepresent == CdLinear:
+            if axis.dataRepresent==CdLinear:
                 bnode = axis.data
                 trylin = 1
-            elif axis.length == 1:
+            elif axis.length==1:
                 bnode = LinearDataNode(axis.data[0], 0.0, 1)
                 trylin = 1
             else:
                 trylin = 0
-        elif self.length == 1:
+        elif self.length==1: 
             anode = LinearDataNode(self.data[0], 0.0, 1)
-            if axis.dataRepresent == CdLinear:
+            if axis.dataRepresent==CdLinear:
                 bnode = axis.data
                 trylin = 1
-            elif axis.length == 1:
+            elif axis.length==1:
                 bnode = LinearDataNode(axis.data[0], 0.0, 1)
                 trylin = 1
             else:
@@ -709,25 +643,25 @@ class AxisNode(CdmsNode):
         else:
             trylin = 0
 
-        if isreltime == 1:
+        if isreltime==1:
             units1 = self.getExternalAttr('units')
             units2 = axis.getExternalAttr('units')
         else:
             units1 = units2 = None
 
-        if trylin == 1:
+        if trylin==1:
             try:
                 aindex = 0
                 alength = anode.length
                 bindex = alength
                 blength = bnode.length
-                if isreltime == 1 and units1 and units2 and units1 != units2:
-                    rtime = cdtime.reltime(bnode.start, units2)
+                if isreltime==1 and units1 and units2 and units1!=units2:
+                    rtime = cdtime.reltime(bnode.start,units2)
                     offset = rtime.torel(units1).value
-                    bnode.start = bnode.start + offset
+                    bnode.start = bnode.start+offset
                 else:
                     offset = None
-                linNode = anode.concatenate(bnode, allowgaps)
+                linNode = anode.concatenate(bnode,allowgaps)
             except NotMonotonicError:
                 # The dimensions cannot be extended as linear arrays,
                 # so try to extend them as vectors
@@ -735,20 +669,15 @@ class AxisNode(CdmsNode):
             else:
                 # Extend the partition attribute
                 if offset is not None:
-                    bindex = int(offset / linNode.delta + 0.5)
-                if self.partition is None:
-                    partition = numpy.array(
-    [aindex,
-     aindex + alength,
-     bindex,
-     bindex + blength])
-                    self.partition_length = alength + blength
+                    bindex = int(offset/linNode.delta+0.5)
+                if self.partition  is None:
+                    partition = numpy.array([aindex,aindex+alength,bindex,bindex+blength])
+                    self.partition_length = alength+blength
                 else:
-                    partition = numpy.concatenate(
-    (self.partition, [bindex, bindex + blength]))
-                    self.partition_length = self.partition_length + blength
-                self.setLinearData(linNode, partition)
-                self.setExternalAttr('partition_length', self.partition_length)
+                    partition = numpy.concatenate((self.partition,[bindex,bindex+blength]))
+                    self.partition_length = self.partition_length+blength
+                self.setLinearData(linNode,partition)
+                self.setExternalAttr('partition_length',self.partition_length)
                 return self
 
         # Else get both axis vectors, concatenate
@@ -761,31 +690,29 @@ class AxisNode(CdmsNode):
         blength = len(ar2)
 
         # Adjust array2 if relative time and units differ
-        if isreltime == 1:
+        if isreltime==1:
             if units1 and units2 and units1 != units2:
-                rtime = cdtime.reltime(0.0, units2)
+                rtime = cdtime.reltime(0.0,units2)
                 delta = rtime.torel(units1).value
-                ar2 = ar2 + delta
+                ar2 = ar2+delta
 
-        ar = numpy.concatenate((ar1, ar2))
+        ar = numpy.concatenate((ar1,ar2))
         try:
             self.setData(ar)
         except NotMonotonicError:
             # Restore original array and resignal
             self.setData(ar1)
-            raise NotMonotonicError, NotMonotonic + `ar`
+            raise NotMonotonicError, NotMonotonic+`ar`
 
         # Extend the partition attribute
-        if self.partition is None:
-            self.partition = numpy.array(
-    [aindex, aindex + alength, bindex, bindex + blength])
-            self.partition_length = alength + blength
+        if self.partition  is None:
+            self.partition = numpy.array([aindex,aindex+alength,bindex,bindex+blength])
+            self.partition_length = alength+blength
         else:
-            self.partition = numpy.concatenate(
-                (self.partition, [bindex, bindex + blength]))
-            self.partition_length = self.partition_length + blength
-        self.setExternalAttr('partition', str(self.partition))
-        self.setExternalAttr('partition_length', self.partition_length)
+            self.partition = numpy.concatenate((self.partition,[bindex,bindex+blength]))
+            self.partition_length = self.partition_length+blength
+        self.setExternalAttr('partition',str(self.partition))
+        self.setExternalAttr('partition_length',self.partition_length)
 
         return self
 
@@ -793,21 +720,16 @@ class AxisNode(CdmsNode):
         return len(self.data)
 
 # Linear data element
-
-
 class LinearDataNode(CdmsNode):
 
-    validStartTypes = [
-        int, float, type(cdtime.comptime(0)), type(cdtime.reltime(0, "hours"))]
-    validDeltaTypes = [int, float, list]
+    validStartTypes = [IntType,FloatType,type(cdtime.comptime(0)),type(cdtime.reltime(0,"hours"))]
+    validDeltaTypes = [IntType,FloatType,ListType]
 
     def __init__(self, start, delta, length):
-        assert isinstance(start, numpy.floating) or isinstance(start, numpy.integer) or (
-            type(start) in self.validStartTypes), 'Invalid start argument: ' + `start`
-        assert isinstance(start, numpy.floating) or isinstance(start, numpy.integer) or (
-            type(delta) in self.validDeltaTypes), 'Invalid delta argument: ' + `delta`
-        assert isinstance(length, int), 'Invalid length argument: ' + `length`
-        CdmsNode.__init__(self, "linear")
+        assert isinstance(start, numpy.floating) or isinstance(start, numpy.integer) or (type(start) in self.validStartTypes), 'Invalid start argument: '+`start`
+        assert isinstance(start, numpy.floating) or isinstance(start, numpy.integer) or (type(delta) in self.validDeltaTypes), 'Invalid delta argument: '+`delta`
+        assert isinstance(length, IntType), 'Invalid length argument: '+`length`
+        CdmsNode.__init__(self,"linear")
         self.delta = delta
         self.length = length
         self.start = start
@@ -815,57 +737,53 @@ class LinearDataNode(CdmsNode):
 
     # Get an indexed value
     def __getitem__(self, index):
-        return self.start + index * self.delta
+        return self.start + index*self.delta
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr("start", self.start)
-        self.setExternalAttr("delta", self.delta)
-        self.setExternalAttr("length", self.length)
+        self.setExternalAttr("start",self.start)
+        self.setExternalAttr("delta",self.delta)
+        self.setExternalAttr("length",self.length)
 
     # Equality of linear vectors
-    def equal(self, axis):
-        return self.delta == axis.delta and self.length == axis.length and self.start == axis.start
+    def equal(self,axis):
+        return self.delta==axis.delta and self.length==axis.length and self.start==axis.start
 
     # Closeness of linear vectors
-    def isClose(self, axis, eps):
-        if eps == 0:
+    def isClose(self,axis,eps):
+        if eps==0:
             return self.equal(axis)
         else:
-            return self.delta == axis.delta and self.length == axis.length and abs(self.start - axis.start) <= abs(eps * self.start)
+            return self.delta==axis.delta and self.length==axis.length and abs(self.start-axis.start)<=abs(eps*self.start)
 
     # Equality of linear vector and array
-    def equalVector(self, ar):
-        diff = ar[1:] - ar[:-1]
+    def equalVector(self,ar):
+        diff = ar[1:]-ar[:-1]
         try:
-            comp = numpy.alltrue(
-    numpy.equal((self.delta) * numpy.ones(self.length - 1), diff))
+            comp = numpy.alltrue(numpy.equal((self.delta)*numpy.ones(self.length-1),diff))
         except ValueError:
             return 0
         return comp
 
     # Closeness of linear vector and array
-    def isCloseVector(self, ar, eps):
-        if eps == 0:
+    def isCloseVector(self,ar,eps):
+        if eps==0:
             return self.equalVector(ar)
-        diff = ar[1:] - ar[:-1]
-        diff2 = self.delta * numpy.ones(self.length - 1)
+        diff = ar[1:]-ar[:-1]
+        diff2 = self.delta*numpy.ones(self.length-1)
         try:
-            comp = numpy.alltrue(
-    numpy.less_equal(numpy.absolute(diff2 - diff),
-     numpy.absolute(eps * diff2)))
+            comp = numpy.alltrue(numpy.less_equal(numpy.absolute(diff2-diff),numpy.absolute(eps*diff2)))
         except ValueError:
             return 0
         return comp
 
-    # Return monotonicity: CdNotMonotonic, CdIncreasing, CdDecreasing, or
-    # CdSingleton
+    # Return monotonicity: CdNotMonotonic, CdIncreasing, CdDecreasing, or CdSingleton
     def monotonicity(self):
-        if self.length == 1:
+        if self.length==1:
             return CdSingleton
-        elif self.delta > 0.0:
+        elif self.delta>0.0:
             return CdIncreasing
-        elif self.delta < 0.0:
+        elif self.delta<0.0:
             return CdDecreasing
         else:
             return CdNotMonotonic
@@ -873,39 +791,34 @@ class LinearDataNode(CdmsNode):
     # Return a vector representation, given a CDMS datatype
     def toVector(self, datatype):
         numericType = CdToNumericType.get(datatype)
-        if numericType is None:
-            raise CDMSError, InvalidDatatype + datatype
+        if numericType is None: raise CDMSError, InvalidDatatype + datatype
         start = self.start
         delta = self.delta
         length = self.length
-        if length > 1:
-            stop = start + (length - 0.99) * delta
-            if delta == 0.0:
-                delta = 1.0
-            ar = numpy.arange(start, stop, delta, numericType)
+        if length>1:
+            stop = start + (length-0.99)*delta
+            if delta==0.0: delta=1.0
+            ar = numpy.arange(start,stop,delta,numericType)
         else:
-            ar = numpy.array([start], numericType)
+            ar = numpy.array([start],numericType)
         return ar
 
     # Concatenate linear arrays, preserving linearity
     # If allowgaps is set, don't require that the linear arrays be contiguous
     # Return a new linear node
-    def concatenate(self, linearNode, allowgaps=0):
-        if self.length > 1 and linearNode.length > 1 and self.delta != linearNode.delta:
-            raise NotMonotonicError, NotMonotonic + \
-                'linear vector deltas do not match: %s,%s' % (
-                    `self.delta`, `linearNode.delta`)
+    def concatenate(self,linearNode,allowgaps=0):
+        if self.length>1 and linearNode.length>1 and self.delta != linearNode.delta:
+            raise NotMonotonicError, NotMonotonic + 'linear vector deltas do not match: %s,%s'%(`self.delta`,`linearNode.delta`)
 
-        if self.length > 1:
+        if self.length>1:
             delta = self.delta
-        elif linearNode.length > 1:
+        elif linearNode.length>1:
             delta = linearNode.delta
         else:
             delta = linearNode.start - self.start
-        if allowgaps == 0:
-            if linearNode.start - self.start != self.length * delta:
-                raise NotMonotonicError, NotMonotonic + \
-                    'linear vectors are not contiguous'
+        if allowgaps==0:
+            if linearNode.start-self.start != self.length*delta:
+                raise NotMonotonicError, NotMonotonic + 'linear vectors are not contiguous'
         length = self.length + linearNode.length
         return LinearDataNode(self.start, delta, length)
 
@@ -913,16 +826,12 @@ class LinearDataNode(CdmsNode):
         return self.length
 
 # Rectilinear lat-lon grid
-
-
 class RectGridNode(CdmsNode):
 
     # Create a grid
     # All arguments are strings
-
-    def __init__(self, id, latitude, longitude,
-                 gridtype=UnknownGridType, order="yx", mask=None):
-        CdmsNode.__init__(self, "rectGrid", id)
+    def __init__(self, id, latitude, longitude, gridtype=UnknownGridType, order="yx", mask=None):
+        CdmsNode.__init__(self,"rectGrid",id)
         self.latitude = latitude
         self.longitude = longitude
         self.gridtype = gridtype
@@ -932,20 +841,17 @@ class RectGridNode(CdmsNode):
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr('type', self.gridtype)
+        self.setExternalAttr('type',self.gridtype)
         self.setExternalAttr('latitude', self.latitude)
-        self.setExternalAttr('longitude', self.longitude)
-        self.setExternalAttr('order', self.order)
-        if self.mask is not None:
-            self.setExternalAttr('mask', self.mask)
+        self.setExternalAttr('longitude',self.longitude)
+        self.setExternalAttr('order',self.order)
+        if self.mask is not None: self.setExternalAttr('mask',self.mask)
 
 # Link to an external element
-
-
 class XLinkNode(CdmsNode):
 
     def __init__(self, id, uri, contentRole, content=''):
-        CdmsNode.__init__(self, "xlink", id)
+        CdmsNode.__init__(self,"xlink",id)
         self.uri = uri
         self.contentRole = contentRole
         self.content = content
@@ -953,39 +859,33 @@ class XLinkNode(CdmsNode):
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr("href", self.uri, CdString)
-        self.setExternalAttr("content-role", self.contentRole, CdString)
+        self.setExternalAttr("href",self.uri,CdString)
+        self.setExternalAttr("content-role",self.contentRole,CdString)
 
 # Link to a document
-
-
 class DocLinkNode(CdmsNode):
 
     def __init__(self, uri, content=''):
-        CdmsNode.__init__(self, "doclink")
+        CdmsNode.__init__(self,"doclink")
         self.uri = uri
         self.content = content
         DocLinkNode.mapToExternal(self)
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr("href", self.uri, CdString)
+        self.setExternalAttr("href",self.uri,CdString)
 
 # Domain
-
-
 class DomainNode(CdmsNode):
 
     def __init__(self):
-        CdmsNode.__init__(self, "domain")
+        CdmsNode.__init__(self,"domain")
 
 # Domain element
-
-
 class DomElemNode(CdmsNode):
 
     def __init__(self, name, start=None, length=None):
-        CdmsNode.__init__(self, "domElem")
+        CdmsNode.__init__(self,"domElem")
         self.name = name
         self.start = start
         self.length = length
@@ -993,16 +893,14 @@ class DomElemNode(CdmsNode):
 
     # Map to external attributes
     def mapToExternal(self):
-        self.setExternalAttr('name', self.name)
-        if self.start is not None:
-            self.setExternalAttr('start', self.start)
-        if self.length is not None:
-            self.setExternalAttr('length', self.length)
+        self.setExternalAttr('name',self.name)
+        if self.start is not None: self.setExternalAttr('start',self.start)
+        if self.length is not None: self.setExternalAttr('length',self.length)
 
     # Set the name
-    def setName(self, name):
+    def setName(self,name):
         self.name = name
-        self.setExternalAttr('name', self.name)
+        self.setExternalAttr('name',self.name)
 
     # Get the name
     def getName(self):
@@ -1010,84 +908,82 @@ class DomElemNode(CdmsNode):
 
     # Write to a file, with formatting.
     # tablevel is the start number of tabs
-    def write(self, fd=None, tablevel=0, format=1):
-        if fd is None:
-            fd = sys.stdout
-        if format:
-            fd.write(tablevel * '\t')
-        fd.write('<' + self.tag)
+    def write(self,fd=None,tablevel=0,format=1):
+        if fd is None: fd = sys.stdout
+        if format: fd.write(tablevel*'\t')
+        fd.write('<'+self.tag)
         for attname in self.attribute.keys():
-            (attval, datatype) = self.attribute[attname]
+            (attval,datatype)=self.attribute[attname]
             # attvalstr = string.replace(str(attval),'"',"'") # Map " to '
-            attvalstr = _Illegal.sub(
-    mapIllegalToEntity,
-     str(attval))  # Map illegal chars to entities
-            fd.write(' ' + attname + '="' + attvalstr + '"')
+            attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  # Map illegal chars to entities
+            fd.write(' '+attname+'="'+attvalstr+'"')
         fd.write('/>')
-        if format:
-            fd.write('\n')
+        if format: fd.write('\n')
 
 # Attribute node - only used as a placeholder during parse and write
 #   Attr nodes are not placed on the tree
 #
 # Two ways to create an Attr object:
 # (1) attr = AttrNode(name,value)
-# datatype = sometype # optionally, to override intrinsic type
+#     datatype = sometype # optionally, to override intrinsic type
 # (2) attr = AttrNode(name,None)
 #     attr.setValueFromString(somestring,sometype)
-
-
 class AttrNode(CdmsNode):
 
     def __init__(self, name, value=None):
-        CdmsNode.__init__(self, "attr")
-        if not (isinstance(value, (int, numpy.integer, float, numpy.floating, basestring)) or (value is None)):
-            raise CDMSError, 'Invalid attribute type: ' + value
-        self.name=name
-        self.value=value
-        self.datatype=None            # CDMS datatype, use getDatatype to retrieve
-        self.content=''             # string content
+        CdmsNode.__init__(self,"attr")
+        if not (isinstance(value,IntType)
+                or isinstance(value,numpy.integer)
+                or isinstance(value,FloatType)
+                or isinstance(value,numpy.floating)
+                or isinstance(value,StringType)
+                or isinstance(value,NoneType)):
+            raise CDMSError, 'Invalid attribute type: '+`value`
+        self.name = name
+        self.value = value
+        self.datatype = None            # CDMS datatype, use getDatatype to retrieve
+        self.content = ''             # string content
 
     # Note: mapToExternal is not called at init time, must be called explicitly
     #   if needed
     def mapToExternal(self):
-        self.attribute['name']=(self.name, CdString)
-        self.attribute['datatype']=(self.getDatatype(), CdString)
-        self.content=self.getValueAsString()
+        self.attribute['name']=(self.name,CdString)
+        self.attribute['datatype']=(self.getDatatype(),CdString)
+        self.content = self.getValueAsString()
 
     def getDatatype(self):
         if self.datatype:
             return self.datatype
-        elif isinstance(self.value, basestring):
+        elif type(self.value) is StringType:
             return CdString
-        elif isinstance(self.value, (float, numpy.floating)):
+        elif isinstance(self.value, FloatType) or isinstance(self.value,numpy.floating):
             return CdDouble
-        elif isinstance(self.value, (int, numpy.integer)):
+        elif isinstance(self.value, IntType) or isinstance(self.value,numpy.integer):
             return CdLong
         else:
-            raise CDMSError, 'Invalid attribute type: ' + `self.value`
+            raise CDMSError, 'Invalid attribute type: '+`self.value`
 
     def getLength(self):
         return 1
 
     # Map a string of a given datatype to a value
     #   Returns ValueError if the conversion fails
-    def setValueFromString(self, valString, datatype):
-        val=None
-        if not isinstance(valString, basestring):
+    def setValueFromString(self,valString,datatype):
+        val = None
+        if type(valString) is not StringType:
             raise CDMSError, 'input value is not a string'
         if datatype == CdString:
             val=valString
-        elif datatype in (CdShort, CdInt, CdLong):
+        elif datatype in (CdShort,CdInt,CdLong):
             try:
-                val=int(valString)
+                val=string.atoi(valString)
             except ValueError:
-                raise CDMSError, 'value is not an integer: ' + valString
-        elif datatype in (CdFloat, CdDouble):
+                raise CDMSError, 'value is not an integer: '+valString
+        elif datatype in (CdFloat,CdDouble):
             try:
-                val=float(valString)
+                val=string.atof(valString)
             except ValueError:
-                raise CDMSError, 'value is not floating-point: ' + valString
+                raise CDMSError, 'value is not floating-point: '+valString
         self.value=val
         self.datatype=datatype
         return val
@@ -1097,40 +993,36 @@ class AttrNode(CdmsNode):
 
     # Set content
     # This may be called multiple times, so append
-    def setContentFromString(self, content):
-        self.content=self.content + content
+    def setContentFromString(self,content):
+        self.content = self.content+content
 
     # Write to a file, with formatting.
     # tablevel is the start number of tabs
-    def write(self, fd=None, tablevel=0, format=1):
-        if fd is None: fd=sys.stdout
+    def write(self,fd=None,tablevel=0,format=1):
+        if fd is None: fd = sys.stdout
         if self.dtd:
-            validAttrs=self.dtd.keys()
+            validAttrs = self.dtd.keys()
         else:
-            validAttrs=None
+            validAttrs = None
 
-        if format: fd.write(tablevel * '\t')
-        fd.write('<' + self.tag)
+        if format: fd.write(tablevel*'\t')
+        fd.write('<'+self.tag)
 
         # Write valid attributes
         for attname in self.attribute.keys():
             if (validAttrs and (attname in validAttrs)) or (not validAttrs):
-                (attval, datatype)=self.attribute[attname]
+                (attval,datatype)=self.attribute[attname]
                 # attvalstr = string.replace(str(attval),'"',"'") # Map " to '
-                attvalstr=_Illegal.sub(
-    mapIllegalToEntity,
-     str(attval))  # Map illegal chars to entities
-                fd.write(' ' + attname + '="' + attvalstr + '"')
+                attvalstr = _Illegal.sub(mapIllegalToEntity,str(attval))  # Map illegal chars to entities
+                fd.write(' '+attname+'="'+attvalstr+'"')
         fd.write('>')
 
         # Write content
         if self.content is not None:
-            content=_Illegal.sub(
-    mapIllegalToEntity,
-     self.content)  # Map illegal chars to entities
+            content = _Illegal.sub(mapIllegalToEntity,self.content)  # Map illegal chars to entities
             fd.write(content)
 
-        fd.write('</' + self.tag + '>')
+        fd.write('</'+self.tag+'>')
         if format: fd.write('\n')
 
 if __name__ == '__main__':
@@ -1163,7 +1055,7 @@ if __name__ == '__main__':
     # hAxis = AxisNode('h',len(h),CdDouble,h)
     # jAxis = AxisNode('j',len(j),CdDouble); jAxis.setLinearData(j)
     # kAxis = AxisNode('k',len(k),CdDouble,k)
-
+    
     # print aAxis.monotonicity()
     # print hAxis.monotonicity()
     # print kAxis.monotonicity()
@@ -1171,29 +1063,29 @@ if __name__ == '__main__':
     # print dAxis.monotonicity()
     # print jAxis.monotonicity()
 
-    m=LinearDataNode(1, 2, 3)
-    n=LinearDataNode(11, 2, 3)
-    p=LinearDataNode(15, -4, 3)
-    q=numpy.array([4., 2., 1.])
-    r=numpy.array([11., 9., 8.])
-    s=numpy.array([7.])
-    t=numpy.array([9.])
-    v=numpy.array([5.])
-
-    mAxis=AxisNode('m', len(m), CdLong); mAxis.setLinearData(m)
-    nAxis=AxisNode('n', len(n), CdLong); nAxis.setLinearData(n)
-    pAxis=AxisNode('p', len(p), CdLong); pAxis.setLinearData(p)
-    qAxis=AxisNode('q', len(q), CdDouble, q)
-    rAxis=AxisNode('r', len(r), CdDouble, r)
-    sAxis=AxisNode('s', len(s), CdDouble, s)
-    tAxis=AxisNode('t', len(t), CdDouble, t)
-    vAxis=AxisNode('v', len(v), CdDouble, v)
+    m = LinearDataNode(1,2,3)
+    n = LinearDataNode(11,2,3)
+    p = LinearDataNode(15,-4,3)
+    q = numpy.array([4.,2.,1.])
+    r = numpy.array([11.,9.,8.])
+    s = numpy.array([7.])
+    t = numpy.array([9.])
+    v = numpy.array([5.])
+
+    mAxis = AxisNode('m',len(m),CdLong); mAxis.setLinearData(m)
+    nAxis = AxisNode('n',len(n),CdLong); nAxis.setLinearData(n)
+    pAxis = AxisNode('p',len(p),CdLong); pAxis.setLinearData(p)
+    qAxis = AxisNode('q',len(q),CdDouble,q)
+    rAxis = AxisNode('r',len(r),CdDouble,r)
+    sAxis = AxisNode('s',len(s),CdDouble,s)
+    tAxis = AxisNode('t',len(t),CdDouble,t)
+    vAxis = AxisNode('v',len(v),CdDouble,v)
 
     def printType(axis):
-        if axis.dataRepresent == CdLinear: print 'linear'
+        if axis.dataRepresent==CdLinear: print 'linear'
         else: print 'vector'
-
-    def testit(a, b):
+        
+    def testit(a,b):
         import copy
         x=copy.copy(a)
         print x.extend(b).getData()
@@ -1209,6 +1101,6 @@ if __name__ == '__main__':
     # testit(mAxis,rAxis)
     # testit(vAxis,nAxis)
     # testit(sAxis,rAxis)
-
+    
     # Errors:
     # testit(mAxis,nAxis)
diff --git a/Packages/cdms2/Lib/cdmsURLopener.py b/Packages/cdms2/Lib/cdmsURLopener.py
index e0b710131..f401bf8d5 100644
--- a/Packages/cdms2/Lib/cdmsURLopener.py
+++ b/Packages/cdms2/Lib/cdmsURLopener.py
@@ -3,16 +3,15 @@
 
 import urllib
 
-
 class CDMSURLopener(urllib.FancyURLopener):
 
-    # Override FancyURLopener error handling - raise an exception
-    # Can also define function http_error_DDD where DDD is the 3-digit error code,
-    # to handle specific errors.
-
-    def http_error_default(self, url, fp, errcode, errmsg, headers):
-        void = fp.read()
-        fp.close()
-        raise IOError('http error', errcode, errmsg, headers)
+	# Override FancyURLopener error handling - raise an exception
+        # Can also define function http_error_DDD where DDD is the 3-digit error code,
+        # to handle specific errors.
+	def http_error_default(self, url, fp, errcode, errmsg, headers):
+		void = fp.read()
+		fp.close()
+		raise IOError, ('http error', errcode, errmsg, headers)
 
 urllib._urlopener = CDMSURLopener()
+
diff --git a/Packages/cdms2/Lib/cdmsobj.py b/Packages/cdms2/Lib/cdmsobj.py
index 30961e26b..14c2d9bf0 100644
--- a/Packages/cdms2/Lib/cdmsobj.py
+++ b/Packages/cdms2/Lib/cdmsobj.py
@@ -2,12 +2,15 @@
 CDMS module-level functions and definitions
 """
 
-from . import cdmsNode
+import cdmsNode
 import cdtime
 import glob
 import os
 import re
+import string
 import sys
+import types
+#import internattr
 
 # Data types
 
@@ -26,7 +29,7 @@ CdArray = cdmsNode.CdArray
 
 Unlimited = 1                           # Unlimited axis designator
 
-Max32int = 2**31 - 1                      # Maximum 32-bit integer
+Max32int = 2**31-1                      # Maximum 32-bit integer
 
 # Regular expressions for each template specifier
 _Daynum = '[0-3][0-9]'
@@ -41,7 +44,7 @@ _Percent = '%'
 _Second = '[0-5][0-9]'
 _Year2 = '[0-9][0-9]'
 _Year4 = '[0-9]{4,4}'
-_Zulu = _Hour + '[z|Z]' + _Year4 + _Monthnum + _Daynum
+_Zulu = _Hour+'[z|Z]'+_Year4+_Monthnum+_Daynum
 
 # Positions for time lists
 _yr = 0
@@ -93,67 +96,42 @@ _specifierMap = {
     '%v': (_Name, 'name', 'var', None),
     '%y': (_Year2, 'year2', 'time', _yr),
     '%z': (_Zulu, 'zulu', 'time', None),
-}
-
-_monthListUpper = [
-    'JAN',
-     'FEB',
-     'MAR',
-     'APR',
-     'MAY',
-     'JUN',
-     'JUL',
-     'AUG',
-     'SEP',
-     'OCT',
-     'NOV',
-     'DEC']
+    }
+
+_monthListUpper = ['JAN','FEB','MAR','APR','MAY','JUN','JUL','AUG','SEP','OCT','NOV','DEC']
 _monthMapUpper = {
-    'JAN': 1,
-    'FEB': 2,
-    'MAR': 3,
-    'APR': 4,
-    'MAY': 5,
-    'JUN': 6,
-    'JUL': 7,
-    'AUG': 8,
-    'SEP': 9,
-    'OCT': 10,
-    'NOV': 11,
-    'DEC': 12,
-}
-
-_monthListLower = [
-    'jan',
-     'feb',
-     'mar',
-     'apr',
-     'may',
-     'jun',
-     'jul',
-     'aug',
-     'sep',
-     'oct',
-     'nov',
-     'dec']
+    'JAN':1,
+    'FEB':2,
+    'MAR':3,
+    'APR':4,
+    'MAY':5,
+    'JUN':6,
+    'JUL':7,
+    'AUG':8,
+    'SEP':9,
+    'OCT':10,
+    'NOV':11,
+    'DEC':12,
+    }
+
+_monthListLower = ['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec']
 _monthMapLower = {
-    'jan': 1,
-    'feb': 2,
-    'mar': 3,
-    'apr': 4,
-    'may': 5,
-    'jun': 6,
-    'jul': 7,
-    'aug': 8,
-    'sep': 9,
-    'oct': 10,
-    'nov': 11,
-    'dec': 12,
-}
-
-_specre = re.compile(
-    '(%%|%G|%H|%L|%M|%S|%Y|%d|%eG|%eH|%eL|%eM|%eS|%eY|%ed|%ef|%eg|%eh|%em|%en|%ey|%ez|%f|%g|%h|%m|%n|%v|%y|%z)')
-_filere = re.compile('[^' + os.sep + ']+')
+    'jan':1,
+    'feb':2,
+    'mar':3,
+    'apr':4,
+    'may':5,
+    'jun':6,
+    'jul':7,
+    'aug':8,
+    'sep':9,
+    'oct':10,
+    'nov':11,
+    'dec':12,
+    }
+
+_specre = re.compile('(%%|%G|%H|%L|%M|%S|%Y|%d|%eG|%eH|%eL|%eM|%eS|%eY|%ed|%ef|%eg|%eh|%em|%en|%ey|%ez|%f|%g|%h|%m|%n|%v|%y|%z)')
+_filere = re.compile('[^'+os.sep+']+')
 
 _debug = 0                              # Print debug info
 
@@ -162,14 +140,12 @@ AbsoluteTemplate = "Template must be a relative path: "
 # Module-level functions
 
 # Set debug mode, to 'on' or 'off'
-
-
 def setDebugMode(mode):
     global _debug
-    if mode == 'on':
-        _debug = 1
-    elif mode == 'off':
-        _debug = 0
+    if mode=='on':
+        _debug=1
+    elif mode=='off':
+        _debug=0
 
 # Map a template to a regular expression
 # Returns (regex,dimtypes), where regex is the regular expression
@@ -178,226 +154,206 @@ def setDebugMode(mode):
 # where specStrings is the specifier associated with the dimension type,
 # or for time, the list of specifiers in the order (yr,mo,dy,hr,mi,se)
 # where each element is the specifier for that time element
-
-
 def templateToRegex(template):
 
     matchspec = {}
-    dimtypes = {'var': None,
-                'time': [None, None, None, None, None, None],
-                'etime': [None, None, None, None, None, None],
-                'level': None,
-                'elevel': None,
+    dimtypes = {'var':None,
+                'time':[None,None,None,None,None,None],
+                'etime':[None,None,None,None,None,None],
+                'level':None,
+                'elevel':None,
                 }
 
     # Map spec to regex
     # (The default arg bring matchspec and dimtypes into the local scope)
     def retspec(matchobj, matchspec=matchspec, dimtypes=dimtypes):
         spec = matchobj.group(0)
-        pat, name, dimtype, pos = _specifierMap[spec]
+        pat,name,dimtype,pos = _specifierMap[spec]
 
-        if dimtype == 'var':
-            dimtypes['var'] = spec
-        elif dimtype in ('time', 'etime'):
+        if dimtype=='var':
+            dimtypes['var']=spec
+        elif dimtype in ('time','etime'):
             if pos is not None:
-                dimtypes[dimtype][pos] = spec
-            elif name in ('zulu', 'ezulu'):
-                pass  # Crack Zulu time
-        elif dimtype in ('level', 'elevel'):
-            dimtypes[dimtype] = spec
-
-        if spec in matchspec:
-            return '(?P=' + name + ')'
+                dimtypes[dimtype][pos]=spec
+            elif name in ('zulu','ezulu'):
+                pass # Crack Zulu time
+        elif dimtype in ('level','elevel'):
+            dimtypes[dimtype]=spec
+
+        if matchspec.has_key(spec):
+            return '(?P='+name+')'
         else:
             matchspec[spec] = 1
-            return '(?P<' + name + '>' + pat + ')'
-
-    templatere = _specre.sub(retspec, template)
-    return (templatere, dimtypes)
-
+            return '(?P<'+name+'>'+pat+')'
+        
+    templatere = _specre.sub(retspec,template)
+    return (templatere,dimtypes)
 
 def retglob(matchobj):
     return '*'
 
 # Generate a component time from a matchobj and list of specs
-
-
-def generateTime(matchobj, timespecs):
+def generateTime(matchobj,timespecs):
     iyr = 0
     imo = 1
     idy = 1
     ihr = 0
     imi = 0
     ise = 0
-    yrspec, mospec, dyspec, hrspec, mispec, sespec = timespecs
+    yrspec,mospec,dyspec,hrspec,mispec,sespec = timespecs
     if yrspec:
-        pat, name, dimtype, pos = _specifierMap[yrspec]
+        pat,name,dimtype,pos = _specifierMap[yrspec]
         yrstr = matchobj.group(name)
-        iyr = int(yrstr)
+        iyr = string.atoi(yrstr)
 
         # Map 2-digit year to [1950,2049)
-        if yrspec in ('%y', '%ey'):
-            if iyr < 50:
-                iyr = iyr + 2000
+        if yrspec in ('%y','%ey'):
+            if iyr<50:
+                iyr = iyr+2000
             else:
-                iyr = iyr + 1900
+                iyr = iyr+1900
     if mospec:
-        pat, name, dimtype, pos = _specifierMap[mospec]
+        pat,name,dimtype,pos = _specifierMap[mospec]
         mostr = matchobj.group(name)
-        if mospec in ('%G', '%eG'):
+        if mospec in ('%G','%eG'):
             imo = _monthMapUpper[mostr]
-        elif mospec in ('%g', '%eg'):
+        elif mospec in ('%g','%eg'):
             imo = _monthMapLower[mostr]
-        elif mospec in ('%m', '%em', '%n', '%en'):
-            imo = int(mostr)
+        elif mospec in ('%m','%em','%n','%en'):
+            imo = string.atoi(mostr)
     if dyspec:
-        pat, name, dimtype, pos = _specifierMap[dyspec]
+        pat,name,dimtype,pos = _specifierMap[dyspec]
         dystr = matchobj.group(name)
-        idy = int(dystr)
+        idy = string.atoi(dystr)
     if hrspec:
-        pat, name, dimtype, pos = _specifierMap[hrspec]
+        pat,name,dimtype,pos = _specifierMap[hrspec]
         hrstr = matchobj.group(name)
-        ihr = int(hrstr)
+        ihr = string.atoi(hrstr)
     if mispec:
-        pat, name, dimtype, pos = _specifierMap[mispec]
+        pat,name,dimtype,pos = _specifierMap[mispec]
         mistr = matchobj.group(name)
-        imi = int(mistr)
+        imi = string.atoi(mistr)
     if sespec:
-        pat, name, dimtype, pos = _specifierMap[sespec]
+        pat,name,dimtype,pos = _specifierMap[sespec]
         sestr = matchobj.group(name)
-        ise = int(sestr)
-    return cdtime.comptime(iyr, imo, idy, ihr, imi, ise)
+        ise = string.atoi(sestr)
+    return cdtime.comptime(iyr,imo,idy,ihr,imi,ise)   
 
 # Find all files in 'direc' which match 'template'.
 # template is a relative path, and may contain specifiers
 # in directory names. Returns a list [(f,m),..,(f,m)] where
 # f is a matching file name, and m is a list [var,time,etime,level,elevel]
 # of matching values in f. Any or all elems of the list may be None.
-
-
-def matchingFiles(direc, template):
+def matchingFiles(direc,template):
 
     if os.path.isabs(template):
-        raise AbsoluteTemplate(template)
+        raise AbsoluteTemplate, template
 
     # Generate a glob pattern
     normTemplate = os.path.normpath(template)
-    globPattern = _filere.sub(retglob, normTemplate)
+    globPattern = _filere.sub(retglob,normTemplate)
 
     # Map the template to a regular expression
-    templatere, dimtypes = templateToRegex(template)
+    templatere,dimtypes = templateToRegex(template)
     ctre = re.compile(templatere)
 
     # Get a list of candidate files
     try:
         os.chdir(direc)
     except os.error:
-        raise IOError('%s: %s' % (sys.exc_info()[1], direc))
-
+        raise IOError,'%s: %s'%(sys.exc_value,direc)
+        
     candlist = glob.glob(globPattern)
 
-    # Look for matches
+    # Look for matches 
     matchfiles = []
     for candfile in candlist:
         matchobj = ctre.match(candfile)
 
         # Create matching values
-        if matchobj is None:
-            continue
-        matchnames = [None, None, None, None, None]
+        if matchobj is None: continue
+        matchnames = [None,None,None,None,None]
         if dimtypes['var'] is not None:
             matchnames[_var] = matchobj.group('name')
-        if dimtypes['time'] != [None, None, None, None, None, None]:
-            matchnames[_time] = generateTime(matchobj, dimtypes['time'])
-        if dimtypes['etime'] != [None, None, None, None, None, None]:
-            matchnames[_etime] = generateTime(matchobj, dimtypes['etime'])
+        if dimtypes['time'] != [None,None,None,None,None,None]:
+            matchnames[_time] = generateTime(matchobj,dimtypes['time'])
+        if dimtypes['etime'] != [None,None,None,None,None,None]:
+            matchnames[_etime] = generateTime(matchobj,dimtypes['etime'])
         if dimtypes['level'] is not None:
-            matchnames[_level] = int(matchobj.group('level'))
+            matchnames[_level] = string.atoi(matchobj.group('level'))
         if dimtypes['elevel'] is not None:
-            matchnames[_elevel] = int(matchobj.group('elevel'))
-        matchfiles.append((candfile, matchnames))
+            matchnames[_elevel] = string.atoi(matchobj.group('elevel'))
+        matchfiles.append((candfile,matchnames))
 
     return matchfiles
 
 # Get a string time component from a spec and a component time
+def getTimeAsString(spec,time):
 
-
-def getTimeAsString(spec, time):
-
-    if spec in ('%G', '%eG'):
+    if spec in ('%G','%eG'):
         imo = time.month
-        specstr = _monthListUpper[imo - 1]
-    elif spec in ('%H', '%eH'):
+        specstr = _monthListUpper[imo-1]
+    elif spec in ('%H','%eH'):
         specstr = str(time.hour)
-    elif spec in ('%M', '%eM'):
+    elif spec in ('%M','%eM'):
         specstr = str(time.minute)
-    elif spec in ('%S', '%eS'):
+    elif spec in ('%S','%eS'):
         specstr = str(int(time.second))
-    elif spec in ('%Y', '%eY'):
-        specstr = str(time.year).zfill(4)
-    elif spec in ('%d', '%ed'):
+    elif spec in ('%Y','%eY'):
+        specstr = string.zfill(str(time.year),4)
+    elif spec in ('%d','%ed'):
         specstr = str(time.day)
-    elif spec in ('%f', '%ef'):
-        specstr = str(time.day).zfill(2)
-    elif spec in ('%g', '%eg'):
+    elif spec in ('%f','%ef'):
+        specstr = string.zfill(str(time.day),2)
+    elif spec in ('%g','%eg'):
         imo = time.month
-        specstr = _monthListLower[imo - 1]
-    elif spec in ('%h', '%eh'):
-        specstr = str(time.hour).zfill(2)
-    elif spec in ('%m', '%em'):
+        specstr = _monthListLower[imo-1]
+    elif spec in ('%h','%eh'):
+        specstr = string.zfill(str(time.hour),2)
+    elif spec in ('%m','%em'):
         specstr = str(time.month)
-    elif spec in ('%n', '%en'):
-        specstr = str(time.month).zfill(2)
-    elif spec in ('%y', '%ey'):
-        specstr = str(time.year % 100).zfill(2)
-    elif spec in ('%z', '%ez'):
-        specstr = getTimeAsString(
-            '%H',
-            time) + 'Z' + getTimeAsString(
-                '%Y',
-                time) + getTimeAsString(
-            '%n',
-             time) + getTimeAsString(
-                '%d',
-                 time)
+    elif spec in ('%n','%en'):
+        specstr = string.zfill(str(time.month),2)
+    elif spec in ('%y','%ey'):
+        specstr = string.zfill(str(time.year%100),2)
+    elif spec in ('%z','%ez'):
+        specstr = getTimeAsString('%H',time)+'Z'+getTimeAsString('%Y',time)+getTimeAsString('%n',time)+getTimeAsString('%d',time)
     return specstr
 
 # Generate a file path, given a template and matchname list.
 # matchnames is a list [varname,time,etime,level,elevel], where
 # any or all elems may be None.  If matchnames be a longer list,
 # it is not an error but the additional elements are ignored.
-
-
-def getPathFromTemplate(template, matchnames):
+def getPathFromTemplate(template,matchnames):
 
     # Map spec to value string
     # (Default arg brings matchnames into the local scope)
     def retpath(matchobj, matchnames=matchnames):
         spec = matchobj.group(0)
-        pat, name, dimtype, pos = _specifierMap[spec]
-        var, time, etime, level, elevel = matchnames[0:5]
+        pat,name,dimtype,pos = _specifierMap[spec]
+        var,time,etime,level,elevel = matchnames[0:5]
 
-        if dimtype == 'var':
+        if dimtype=='var':
             if var is None:
                 specstr = spec
             else:
                 specstr = var
-        elif dimtype == 'time':
+        elif dimtype=='time':
             if time is None:
                 specstr = spec
             else:
-                specstr = getTimeAsString(spec, time)
-        elif dimtype == 'etime':
+                specstr = getTimeAsString(spec,time)
+        elif dimtype=='etime':
             if etime is None:
                 specstr = spec
             else:
-                specstr = getTimeAsString(spec, etime)
-        elif dimtype == 'level':
+                specstr = getTimeAsString(spec,etime)
+        elif dimtype=='level':
             if level is None:
                 specstr = spec
             else:
                 specstr = str(level)
-        elif dimtype == 'elevel':
+        elif dimtype=='elevel':
             if elevel is None:
                 specstr = spec
             else:
@@ -405,7 +361,7 @@ def getPathFromTemplate(template, matchnames):
 
         return specstr
 
-    path = _specre.sub(retpath, template)
+    path = _specre.sub(retpath,template)
     return path
 
 # Search an object or list of objects for a string attribute which
@@ -416,17 +372,16 @@ def getPathFromTemplate(template, matchnames):
 # 'rectGrid','xlink', or None.  If tag is None, all object classes are
 # searched.  If attribute is None, all attributes are searched.
 
-
 def searchPattern(objlist, pattern, attribute=None, tag=None):
     if tag is not None:
-        tag = tag.lower()
+        tag = string.lower(tag)
     regexp = re.compile(pattern)
-    if not isinstance(objlist, list):
+    if type(objlist) is not types.ListType:
         objlist = [objlist]
 
     returnlist = []
     for obj in objlist:
-        returnlist = returnlist + obj.searchPattern(regexp, attribute, tag)
+        returnlist = returnlist + obj.searchPattern(regexp,attribute,tag)
 
     return returnlist
 
@@ -438,17 +393,16 @@ def searchPattern(objlist, pattern, attribute=None, tag=None):
 # 'rectGrid','xlink', or None.  If tag is None, all object classes are
 # searched.  If attribute is None, all attributes are searched.
 
-
 def matchPattern(objlist, pattern, attribute=None, tag=None):
     if tag is not None:
-        tag = tag.lower()
+        tag = string.lower(tag)
     regexp = re.compile(pattern)
-    if not isinstance(objlist, list):
+    if type(objlist) is not types.ListType:
         objlist = [objlist]
 
     returnlist = []
     for obj in objlist:
-        returnlist = returnlist + obj.matchPattern(regexp, attribute, tag)
+        returnlist = returnlist + obj.matchPattern(regexp,attribute,tag)
 
     return returnlist
 
@@ -467,19 +421,17 @@ def matchPattern(objlist, pattern, attribute=None, tag=None):
 #
 #   lambda obj: obj.partition_length > 1000
 #
-# is sufficient, it is not necessary to test for the existence of the
-# attribute.
-
+# is sufficient, it is not necessary to test for the existence of the attribute.
 
 def searchPredicate(objlist, predicate, tag=None):
     if tag is not None:
-        tag = tag.lower()
-    if not isinstance(objlist, list):
+        tag = string.lower(tag)
+    if type(objlist) is not types.ListType:
         objlist = [objlist]
 
     returnlist = []
     for obj in objlist:
-        returnlist = returnlist + obj.searchPredicate(predicate, tag)
+        returnlist = returnlist + obj.searchPredicate(predicate,tag)
 
     return returnlist
 
@@ -487,80 +439,74 @@ def searchPredicate(objlist, predicate, tag=None):
 # Classes
 
 # Generic CDMS object has a tree node, attributes
-
-
 class CdmsObj (object):
-# def __setattr__(self,name,value):
-# object.__setattr__(self,name,value)
-# if not name in self.__cdms_internals__ and not name[0]=='_':
-# self.attributes[name]=value
-# if name == 'shape' :
-# print self.__class__,name,value
+##     def __setattr__(self,name,value):
+##         object.__setattr__(self,name,value)
+##         if not name in self.__cdms_internals__ and not name[0]=='_':
+##             self.attributes[name]=value
+## ##             if name == 'shape' :
+## ##                 print self.__class__,name,value
 
     def _listatts(self):
-        dic = {}
-        for nm, val in self.__dict__.items():
-            if (nm[0] != '_' and not nm in self.__cdms_internals__) or nm in ['_FillValue']:
-                dic[nm] = val
+        dic={}
+        for nm,val in self.__dict__.items():
+            if (nm[0]!='_' and not nm in self.__cdms_internals__) or nm in ['_FillValue']:
+                dic[nm]=val
             if nm == '_units':
-                dic['units'] = val
+                dic['units']=val
         return dic
-
-    def _setatts(self, value):
+    def _setatts(self,value):
         return
 
-    attributes = property(_listatts, _setatts)
-
-    def __init__(self, node=None):
-        if not hasattr(self, '___cdms_internals__'):
-            self.__dict__['___cdms_internals__'] = [
-                '__cdms_internals__', '___cdms_internals__',
-                '_node_', '_obj_',
-                '_numericType_', '_grid_', '_bounds_',
-                'parent', 'attributes', 'shape', 'autoApiInfo']
-        self.attributes = {}
+    attributes = property(_listatts,_setatts)
+    
+        
+    def __init__(self, node = None):
+        if not hasattr(self,'___cdms_internals__'):
+            self.__dict__['___cdms_internals__']=[
+                '__cdms_internals__','___cdms_internals__',
+                '_node_','_obj_',
+                '_numericType_','_grid_','_bounds_',
+                'parent','attributes','shape','autoApiInfo']
+        self.attributes={}
         self._node_ = node
         if node is not None:
-            # Build an attribute dictionary from the node,
+            # Build an attribute dictionary from the node, 
             # CDML datatype constraints
 
-            if hasattr(node, 'datatype'):
+            if hasattr(node,'datatype'):
                 parenttype = node.datatype
             else:
                 parenttype = None
             atts = node.getExternalDict()
             adict = self.__dict__
             for attname in atts.keys():
-                (attval, datatype) = atts[attname]  # (XML value, datatype)
+                (attval,datatype)=atts[attname] # (XML value, datatype)
                 constraint = node.extra.get(attname)
                 if constraint is not None:
-                    (scaletype,
-                     reqtype) = constraint  # (CdScalar|CdArray, required type)
-                    if reqtype == CdFromObject:
+                    (scaletype,reqtype)=constraint # (CdScalar|CdArray, required type)
+                    if reqtype==CdFromObject:
                         reqtype = parenttype
-                    if reqtype != datatype and datatype == CdString and scaletype == CdScalar:
-                        if reqtype in (CdFloat, CdDouble):
+                    if reqtype!=datatype and datatype==CdString and scaletype==CdScalar:
+                        if reqtype in (CdFloat,CdDouble):
                             try:
-                                attval = float(attval)
+                                attval = string.atof(attval)
                             except:
-                                raise RuntimeError(
-                                    "%s=%s must be a float" %
-                                    (attname, attval))
-                        elif reqtype in (CdShort, CdInt, CdLong, CdInt64):
+                                raise RuntimeError,"%s=%s must be a float"%(attname,attval)
+                        elif reqtype in (CdShort,CdInt,CdLong,CdInt64):
                             try:
-                                attval = int(attval)
+                                attval = string.atoi(attval)
                             except:
-                                raise RuntimeError(
-                                    "%s=%s must be an integer" %
-                                    (attname, attval))
+                                raise RuntimeError,"%s=%s must be an integer"%(attname,attval)
                 adict[attname] = attval
                 self.attributes[attname] = attval
 
+
     def searchone(self, pattern, attname):
         """Return true if the attribute with name attname is a string
         attribute which contains the compiled regular expression pattern, or
         if attname is None and pattern matches at least one string
-        attribute. Return false if the attribute is not found or is not
+        attribute. Return false if the attribute is not found or is not 
         a string.
         :::
         Input:::
@@ -573,12 +519,12 @@ class CdmsObj (object):
         """
         if attname is None:
             for attval in self.attributes.values():
-                if isinstance(attval, basestring) and pattern.search(attval) is not None:
+                if type(attval) is types.StringType and pattern.search(attval) is not None:
                     return 1
             return 0
-        elif attname in self.attributes:
+        elif self.attributes.has_key(attname):
             attval = self.attributes[attname]
-            return isinstance(attval, basestring) and pattern.search(attval) is not None
+            return (type(attval) is types.StringType and pattern.search(attval) is not None)
         else:
             return 0
 
@@ -603,19 +549,18 @@ class CdmsObj (object):
         """
         if attname is None:
             for attval in self.attributes.values():
-                if isinstance(attval, basestring) and pattern.match(attval) is not None:
+                if type(attval) is types.StringType and pattern.match(attval) is not None:
                     return 1
             return 0
-        elif attname in self.attributes:
+        elif self.attributes.has_key(attname):
             attval = self.attributes[attname]
-            return isinstance(attval, basestring) and pattern.match(attval) is not None
+            return (type(attval) is types.StringType and pattern.match(attval) is not None)
         else:
             return 0
 
     # Search for a pattern in a string-valued attribute. If attribute is None,
-    # search all string attributes. If tag is not None, it must match the
-    # internal node tag.
-    def searchPattern(self, pattern, attribute, tag):
+    # search all string attributes. If tag is not None, it must match the internal node tag.
+    def searchPattern(self,pattern,attribute,tag):
         """
         Search for a pattern in a string-valued attribute. If attribute is None, search all string attributes. If tag is not None, it must match the internal node tag.
         :::
@@ -625,11 +570,11 @@ class CdmsObj (object):
         tag :: (str/None) (2) node tag
         :::
         Output:::
-        result :: (list) (0)
+        result :: (list) (0) 
         :::
         """
-        if tag is None or tag.lower() == self._node_.tag:
-            if self.searchone(pattern, attribute):
+        if tag is None or string.lower(tag)==self._node_.tag:
+            if self.searchone(pattern,attribute):
                 return [self]
             else:
                 return []
@@ -637,9 +582,8 @@ class CdmsObj (object):
             return []
 
     # Match a pattern in a string-valued attribute. If attribute is None,
-    # search all string attributes. If tag is not None, it must match the
-    # internal node tag.
-    def matchPattern(self, pattern, attribute, tag):
+    # search all string attributes. If tag is not None, it must match the internal node tag.
+    def matchPattern(self,pattern,attribute,tag):
         """
         Match for a pattern in a string-valued attribute. If attribute is None, search all string attributes. If tag is not None, it must match the internal node tag.
         :::
@@ -649,11 +593,11 @@ class CdmsObj (object):
         tag :: (str/None) (2) node tag
         :::
         Output:::
-        result :: (list) (0)
+        result :: (list) (0) 
         :::
         """
-        if tag is None or tag.lower() == self._node_.tag:
-            if self.matchone(pattern, attribute):
+        if tag is None or string.lower(tag)==self._node_.tag:
+            if self.matchone(pattern,attribute):
                 return [self]
             else:
                 return []
@@ -663,7 +607,7 @@ class CdmsObj (object):
     # Apply a truth-valued predicate. Return a list containing a single instance: [self]
     # if the predicate is true and either tag is None or matches the object node tag.
     # If the predicate returns false, return an empty list
-    def searchPredicate(self, predicate, tag):
+    def searchPredicate(self,predicate,tag):
         """
         Apply a truth-valued predicate. Return a list containing a single instance: [self] if the predicate is true and either tag is None or matches the object node tag. If the predicate returns false, return an empty list
         :::
@@ -672,12 +616,12 @@ class CdmsObj (object):
         tag :: (str/None) (1) node tag
         :::
         Output:::
-        result :: (list) (0)
+        result :: (list) (0) 
         :::
         """
-        if tag is None or tag.lower() == self._node_.tag:
+        if tag is None or string.lower(tag)==self._node_.tag:
             try:
-                if predicate(*(self,)) == 1:
+                if apply(predicate,(self,))==1:
                     result = [self]
             except:
                 result = []
@@ -685,7 +629,7 @@ class CdmsObj (object):
             result = []
         return result
 
-    def dump(self, path=None, format=1):
+    def dump(self,path=None,format=1):
         """ dump(self,path=None,format=1)
         Dump an XML representation of this object to a file.
         'path' is the result file name, None for standard output.
@@ -700,21 +644,24 @@ class CdmsObj (object):
         :::
         """
         if self._node_ is None:
-            raise CDMSError("No tree node found")
-        self._node_.dump(path, format)
+            raise CDMSError, "No tree node found"
+        self._node_.dump(path,format)
 
     def _getinternals(self):
         return self.___cdms_internals__
-
-    def _setinternals(self, value):
+    def _setinternals(self,value):
         self.___cdms_internals__ = value
-    __cdms_internals__ = property(_getinternals, _setinternals)
-# internattr.add_internal_attribute(CdmsObj)
+    __cdms_internals__ = property(_getinternals,_setinternals)
+#internattr.add_internal_attribute(CdmsObj)
 
 if __name__ == '__main__':
     x = CdmsObj(None)
     x.someatt = 1
     assert x.attributes['someatt'] == x.someatt
-    assert '_node' not in x.attributes
+    assert not x.attributes.has_key('_node')
     # need tests for the search routines...
     print "Test passed."
+
+
+
+
diff --git a/Packages/cdms2/Lib/cdurllib.py b/Packages/cdms2/Lib/cdurllib.py
index eefccb114..6ae1848b3 100644
--- a/Packages/cdms2/Lib/cdurllib.py
+++ b/Packages/cdms2/Lib/cdurllib.py
@@ -1,18 +1,13 @@
 """Customized URLopener"""
 
-import urllib
-import getpass
-import socket
-import string
-import sys
+import urllib, getpass, socket, string, sys
 
 MAXFTPCACHE = 10        # Trim the ftp cache beyond this size
 
-
 class CDURLopener(urllib.URLopener):
 
     def __init__(self, proxies=None):
-        urllib.URLopener.__init__(self, proxies)
+        urllib.URLopener.__init__(self,proxies)
         self._userObject = None
 
     # Attach an object to be returned with callbacks
@@ -22,15 +17,12 @@ class CDURLopener(urllib.URLopener):
     # Use FTP protocol
     def open_ftp(self, url):
         host, path = urllib.splithost(url)
-        if not host:
-            raise IOError, ('ftp error', 'no host given')
+        if not host: raise IOError, ('ftp error', 'no host given')
         host, port = urllib.splitport(host)
         user, host = urllib.splituser(host)
         # if user: user, passwd = splitpasswd(user)
-        if user:
-            passwd = getpass.getpass()
-        else:
-            passwd = None
+        if user: passwd = getpass.getpass()
+        else: passwd = None
         host = urllib.unquote(host)
         user = urllib.unquote(user or '')
         passwd = urllib.unquote(passwd or '')
@@ -44,8 +36,7 @@ class CDURLopener(urllib.URLopener):
         path = urllib.unquote(path)
         dirs = string.splitfields(path, '/')
         dirs, file = dirs[:-1], dirs[-1]
-        if dirs and not dirs[0]:
-            dirs = dirs[1:]
+        if dirs and not dirs[0]: dirs = dirs[1:]
         key = (user, host, port, string.joinfields(dirs, '/'))
         # XXX thread unsafe!
         if len(self.ftpcache) > MAXFTPCACHE:
@@ -56,14 +47,12 @@ class CDURLopener(urllib.URLopener):
                     del self.ftpcache[k]
                     v.close()
         try:
-            if key not in self.ftpcache:
-                print 'Creating ftpwrapper: ', user, host, port, dirs
+            if not self.ftpcache.has_key(key):
+                print 'Creating ftpwrapper: ',user,host,port,dirs
                 self.ftpcache[key] = \
                     urllib.ftpwrapper(user, passwd, host, port, dirs)
-            if not file:
-                type = 'D'
-            else:
-                type = 'I'
+            if not file: type = 'D'
+            else: type = 'I'
             for attr in attrs:
                 attr, value = urllib.splitvalue(attr)
                 if string.lower(attr) == 'type' and \
@@ -71,19 +60,18 @@ class CDURLopener(urllib.URLopener):
                     type = string.upper(value)
             (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
             if retrlen is not None and retrlen >= 0:
-                import mimetools
-                import StringIO
+                import mimetools, StringIO
                 headers = mimetools.Message(StringIO.StringIO(
                     'Content-Length: %d\n' % retrlen))
             else:
                 headers = noheaders()
             return urllib.addinfourl(fp, headers, "ftp:" + url)
-        except urllib.ftperrors() as msg:
+        except urllib.ftperrors(), msg:
             raise IOError, ('ftp error', msg), sys.exc_info()[2]
 
     def retrieve(self, url, filename=None, reporthook=None, blocksize=262144):
         url = urllib.unwrap(url)
-        if self.tempcache and url in self.tempcache:
+        if self.tempcache and self.tempcache.has_key(url):
             return self.tempcache[url]
         type, url1 = urllib.splittype(url)
         if not filename and (not type or type == 'file'):
@@ -92,7 +80,7 @@ class CDURLopener(urllib.URLopener):
                 hdrs = fp.info()
                 del fp
                 return url2pathname(urllib.splithost(url1)[1]), hdrs
-            except IOError as msg:
+            except IOError, msg:
                 pass
         fp = self.open(url)
         headers = fp.info()
@@ -113,28 +101,28 @@ class CDURLopener(urllib.URLopener):
         size = -1
         blocknum = 1
         if reporthook:
-            if "content-length" in headers:
+            if headers.has_key("content-length"):
                 size = int(headers["Content-Length"])
             stayopen = reporthook(0, bs, size, self._userObject)
-            if stayopen == 0:
+            if stayopen==0:
                 raise KeyboardInterrupt
         bytesread = 0
         block = fp.read(bs)
         if reporthook:
             stayopen = reporthook(1, bs, size, self._userObject)
-            if stayopen == 0:
+            if stayopen==0:
                 raise KeyboardInterrupt
         while block:
             tfp.write(block)
             bytesread = bytesread + len(block)
-# print blocknum, bytesread, size,
-# if blocknum*blocksize!=bytesread:
-# print ' (*)'
-# else:
-# print
+##             print blocknum, bytesread, size,
+##             if blocknum*blocksize!=bytesread:
+##                 print ' (*)'
+##             else:
+##                 print
             if block and reporthook:
                 stayopen = reporthook(blocknum, bs, size, self._userObject)
-                if stayopen == 0:
+                if stayopen==0:
                     raise KeyboardInterrupt
             blocknum = blocknum + 1
             block = fp.read(bs)
@@ -144,25 +132,23 @@ class CDURLopener(urllib.URLopener):
         del tfp
         return result
 
-
 def sampleReportHook(blocknum, blocksize, size, userObj):
-    sizekb = size / 1024
-    percent = min(100, int(100.0 * float(blocknum * blocksize) / float(size)))
-    print "Read: %3d%% of %dK" % (percent, sizekb)
+    sizekb = size/1024
+    percent = min(100,int(100.0*float(blocknum*blocksize)/float(size)))
+    print "Read: %3d%% of %dK"%(percent,sizekb)
     return 1
 
 if __name__ == '__main__':
 
     import sys
-    if len(sys.argv) != 4:
+    if len(sys.argv)!=4:
         print 'Usage: cdurllib.py URL filename blocksize'
         sys.exit(1)
 
     url = sys.argv[1]
     filename = sys.argv[2]
     blocksize = int(sys.argv[3])
-
+    
     urlopener = CDURLopener()
-    fname, headers = urlopener.retrieve(
-        url, filename, sampleReportHook, blocksize)
+    fname, headers = urlopener.retrieve(url, filename, sampleReportHook, blocksize)
     print fname, 'written'
diff --git a/Packages/cdms2/Lib/cdurlparse.py b/Packages/cdms2/Lib/cdurlparse.py
index d88dd4087..e743ebbc8 100644
--- a/Packages/cdms2/Lib/cdurlparse.py
+++ b/Packages/cdms2/Lib/cdurlparse.py
@@ -9,26 +9,25 @@ from string import joinfields, splitfields, find, rfind
 
 # A classification of schemes ('' means apply by default)
 uses_relative = ['ftp', 'http', 'ldap', 'gopher', 'nntp', 'wais', 'file',
-                 'https', 'shttp',
-                 'prospero', '']
+		 'https', 'shttp',
+		 'prospero', '']
 uses_netloc = ['ftp', 'http', 'ldap', 'gopher', 'nntp', 'telnet', 'wais',
-               'file',
-               'https', 'shttp', 'snews',
-               'prospero', '']
+	       'file',
+	       'https', 'shttp', 'snews',
+	       'prospero', '']
 non_hierarchical = ['gopher', 'hdl', 'mailto', 'news', 'telnet', 'wais',
-                    'snews',
-                    ]
+		    'snews',
+		    ]
 uses_params = ['ftp', 'hdl', 'prospero', 'http', 'ldap',
-               'https', 'shttp',
-               '']
+	       'https', 'shttp',
+	       '']
 uses_query = ['http', 'ldap', 'wais',
-              'https', 'shttp',
-              'gopher',
-              '']
-uses_fragment = [
-    'ftp', 'hdl', 'http', 'ldap', 'gopher', 'news', 'nntp', 'wais',
-                 'https', 'shttp', 'snews',
-                 'file', 'prospero', '']
+	      'https', 'shttp',
+	      'gopher',
+	      '']
+uses_fragment = ['ftp', 'hdl', 'http', 'ldap', 'gopher', 'news', 'nntp', 'wais',
+		 'https', 'shttp', 'snews',
+		 'file', 'prospero', '']
 
 # Characters valid in scheme names
 scheme_chars = string.letters + string.digits + '+-.'
@@ -36,7 +35,6 @@ scheme_chars = string.letters + string.digits + '+-.'
 MAX_CACHE_SIZE = 20
 _parse_cache = {}
 
-
 def clear_cache():
     """Clear the parse cache."""
     global _parse_cache
@@ -48,141 +46,134 @@ def clear_cache():
 # Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
 # Note that we don't break the components up in smaller bits
 # (e.g. netloc is a single string) and we don't expand % escapes.
-def urlparse(url, scheme='', allow_fragments=1):
-    key = url, scheme, allow_fragments
-    cached = _parse_cache.get(key, None)
-    if cached:
-        return cached
-    if len(_parse_cache) >= MAX_CACHE_SIZE:  # avoid runaway growth
-        clear_cache()
-    find = string.find
-    netloc = path = params = query = fragment = ''
-    i = find(url, ':')
-    if i > 0:
-        if url[:i] in ['http', 'ldap']:  # optimize the common case
-            scheme = string.lower(url[:i])
-            url = url[i + 1:]
-            if url[:2] == '//':
-                i = find(url, '/', 2)
-                if i < 0:
-                    i = len(url)
-                netloc = url[2:i]
-                url = url[i:]
-            if allow_fragments:
-                i = string.rfind(url, '#')
-                if i >= 0:
-                    fragment = url[i + 1:]
-                    url = url[:i]
-            i = find(url, '?')
-            if i >= 0:
-                query = url[i + 1:]
-                url = url[:i]
-            i = find(url, ';')
-            if i >= 0:
-                params = url[i + 1:]
-                url = url[:i]
-            tuple = scheme, netloc, url, params, query, fragment
-            _parse_cache[key] = tuple
-            return tuple
-        for c in url[:i]:
-            if c not in scheme_chars:
-                break
-        else:
-            scheme, url = string.lower(url[:i]), url[i + 1:]
-    if scheme in uses_netloc:
-        if url[:2] == '//':
-            i = find(url, '/', 2)
-            if i < 0:
-                i = len(url)
-            netloc, url = url[2:i], url[i:]
-    if allow_fragments and scheme in uses_fragment:
-        i = string.rfind(url, '#')
-        if i >= 0:
-            url, fragment = url[:i], url[i + 1:]
-    if scheme in uses_query:
-        i = find(url, '?')
-        if i >= 0:
-            url, query = url[:i], url[i + 1:]
-    if scheme in uses_params:
-        i = find(url, ';')
-        if i >= 0:
-            url, params = url[:i], url[i + 1:]
-    tuple = scheme, netloc, url, params, query, fragment
-    _parse_cache[key] = tuple
-    return tuple
+def urlparse(url, scheme = '', allow_fragments = 1):
+	key = url, scheme, allow_fragments
+	cached = _parse_cache.get(key, None)
+	if cached:
+		return cached
+	if len(_parse_cache) >= MAX_CACHE_SIZE:	# avoid runaway growth
+	    clear_cache()
+	find = string.find
+	netloc = path = params = query = fragment = ''
+	i = find(url, ':')
+	if i > 0:
+		if url[:i] in ['http','ldap']: # optimize the common case
+			scheme = string.lower(url[:i])
+			url = url[i+1:]
+			if url[:2] == '//':
+				i = find(url, '/', 2)
+				if i < 0:
+					i = len(url)
+				netloc = url[2:i]
+				url = url[i:]
+			if allow_fragments:
+				i = string.rfind(url, '#')
+				if i >= 0:
+					fragment = url[i+1:]
+					url = url[:i]
+			i = find(url, '?')
+			if i >= 0:
+				query = url[i+1:]
+				url = url[:i]
+			i = find(url, ';')
+			if i >= 0:
+				params = url[i+1:]
+				url = url[:i]
+			tuple = scheme, netloc, url, params, query, fragment
+			_parse_cache[key] = tuple
+			return tuple
+		for c in url[:i]:
+			if c not in scheme_chars:
+				break
+		else:
+			scheme, url = string.lower(url[:i]), url[i+1:]
+	if scheme in uses_netloc:
+		if url[:2] == '//':
+			i = find(url, '/', 2)
+			if i < 0:
+				i = len(url)
+			netloc, url = url[2:i], url[i:]
+	if allow_fragments and scheme in uses_fragment:
+		i = string.rfind(url, '#')
+		if i >= 0:
+			url, fragment = url[:i], url[i+1:]
+	if scheme in uses_query:
+		i = find(url, '?')
+		if i >= 0:
+			url, query = url[:i], url[i+1:]
+	if scheme in uses_params:
+		i = find(url, ';')
+		if i >= 0:
+			url, params = url[:i], url[i+1:]
+	tuple = scheme, netloc, url, params, query, fragment
+	_parse_cache[key] = tuple
+	return tuple
 
 # Put a parsed URL back together again.  This may result in a slightly
 # different, but equivalent URL, if the URL that was parsed originally
 # had redundant delimiters, e.g. a ? with an empty query (the draft
 # states that these are equivalent).
-
-
-def urlunparse(xxx_todo_changeme):
-    (scheme, netloc, url, params, query, fragment) = xxx_todo_changeme
-    if netloc or (scheme in uses_netloc and url[:2] == '//'):
-        if url[:1] != '/':
-            url = '/' + url
-        url = '//' + (netloc or '') + url
-    if scheme:
-        url = scheme + ':' + url
-    if params:
-        url = url + ';' + params
-    if query:
-        url = url + '?' + query
-    if fragment:
-        url = url + '#' + fragment
-    return url
+def urlunparse((scheme, netloc, url, params, query, fragment)):
+	if netloc or (scheme in uses_netloc and url[:2] == '//'):
+		if url[:1] != '/': url = '/' + url
+		url = '//' + (netloc or '') + url
+	if scheme:
+		url = scheme + ':' + url
+	if params:
+		url = url + ';' + params
+	if query:
+		url = url + '?' + query
+	if fragment:
+		url = url + '#' + fragment
+	return url
 
 # Join a base URL and a possibly relative URL to form an absolute
 # interpretation of the latter.
-
-
-def urljoin(base, url, allow_fragments=1):
-    if not base:
-        return url
-    bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
-        urlparse(base, '', allow_fragments)
-    scheme, netloc, path, params, query, fragment = \
-        urlparse(url, bscheme, allow_fragments)
-    if scheme != bscheme or scheme not in uses_relative:
-        return urlunparse((scheme, netloc, path,
-                           params, query, fragment))
-    if scheme in uses_netloc:
-        if netloc:
-            return urlunparse((scheme, netloc, path,
-                               params, query, fragment))
-        netloc = bnetloc
-    if path[:1] == '/':
-        return urlunparse((scheme, netloc, path,
-                           params, query, fragment))
-    if not path:
-        return urlunparse((scheme, netloc, bpath,
-                           params, query or bquery, fragment))
-    i = rfind(bpath, '/')
-    if i >= 0:
-        path = bpath[:i] + '/' + path
-    segments = splitfields(path, '/')
-    if segments[-1] == '.':
-        segments[-1] = ''
-    while '.' in segments:
-        segments.remove('.')
-    while True:
-        i = 1
-        n = len(segments) - 1
-        while i < n:
-            if segments[i] == '..' and segments[i - 1]:
-                del segments[i - 1:i + 1]
-                break
-            i = i + 1
-        else:
-            break
-    if len(segments) == 2 and segments[1] == '..' and segments[0] == '':
-        segments[-1] = ''
-    elif len(segments) >= 2 and segments[-1] == '..':
-        segments[-2:] = ['']
-    return urlunparse((scheme, netloc, joinfields(segments, '/'),
-                       params, query, fragment))
-
+def urljoin(base, url, allow_fragments = 1):
+	if not base:
+		return url
+	bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
+		urlparse(base, '', allow_fragments)
+	scheme, netloc, path, params, query, fragment = \
+		urlparse(url, bscheme, allow_fragments)
+	if scheme != bscheme or scheme not in uses_relative:
+		return urlunparse((scheme, netloc, path,
+				   params, query, fragment))
+	if scheme in uses_netloc:
+		if netloc:
+			return urlunparse((scheme, netloc, path,
+					   params, query, fragment))
+		netloc = bnetloc
+	if path[:1] == '/':
+		return urlunparse((scheme, netloc, path,
+				   params, query, fragment))
+	if not path:
+		return urlunparse((scheme, netloc, bpath,
+				   params, query or bquery, fragment))
+	i = rfind(bpath, '/')
+	if i >= 0:
+		path = bpath[:i] + '/' + path
+	segments = splitfields(path, '/')
+	if segments[-1] == '.':
+		segments[-1] = ''
+	while '.' in segments:
+		segments.remove('.')
+	while 1:
+		i = 1
+		n = len(segments) - 1
+		while i < n:
+			if segments[i] == '..' and segments[i-1]:
+				del segments[i-1:i+1]
+				break
+			i = i+1
+		else:
+			break
+	if len(segments) == 2 and segments[1] == '..' and segments[0] == '':
+		segments[-1] = ''
+	elif len(segments) >= 2 and segments[-1] == '..':
+		segments[-2:] = ['']
+	return urlunparse((scheme, netloc, joinfields(segments, '/'),
+			   params, query, fragment))
 
 def urldefrag(url):
     """Removes any existing fragment from URL.
@@ -231,37 +222,35 @@ test_input = """
 """
 # XXX The result for //g is actually http://g/; is this a problem?
 
-
 def test():
-    import sys
-    base = ''
-    if sys.argv[1:]:
-        fn = sys.argv[1]
-        if fn == '-':
-            fp = sys.stdin
-        else:
-            fp = open(fn)
-    else:
-        import StringIO
-        fp = StringIO.StringIO(test_input)
-    while True:
-        line = fp.readline()
-        if not line:
-            break
-        words = string.split(line)
-        if not words:
-            continue
-        url = words[0]
-        parts = urlparse(url)
-        print '%-10s : %s' % (url, parts)
-        abs = urljoin(base, url)
-        if not base:
-            base = abs
-        wrapped = '<URL:%s>' % abs
-        print '%-10s = %s' % (url, wrapped)
-        if len(words) == 3 and words[1] == '=':
-            if wrapped != words[2]:
-                print 'EXPECTED', words[2], '!!!!!!!!!!'
+	import sys
+	base = ''
+	if sys.argv[1:]:
+		fn = sys.argv[1]
+		if fn == '-':
+			fp = sys.stdin
+		else:
+			fp = open(fn)
+	else:
+		import StringIO
+		fp = StringIO.StringIO(test_input)
+	while 1:
+		line = fp.readline()
+		if not line: break
+		words = string.split(line)
+		if not words:
+			continue
+		url = words[0]
+		parts = urlparse(url)
+		print '%-10s : %s' % (url, parts)
+		abs = urljoin(base, url)
+		if not base:
+			base = abs
+		wrapped = '<URL:%s>' % abs
+		print '%-10s = %s' % (url, wrapped)
+		if len(words) == 3 and words[1] == '=':
+			if wrapped != words[2]:
+				print 'EXPECTED', words[2], '!!!!!!!!!!'
 
 if __name__ == '__main__':
-    test()
+	test()
diff --git a/Packages/cdms2/Lib/cdxmllib.py b/Packages/cdms2/Lib/cdxmllib.py
index bd7e2e45e..baa0f6dfb 100644
--- a/Packages/cdms2/Lib/cdxmllib.py
+++ b/Packages/cdms2/Lib/cdxmllib.py
@@ -12,7 +12,6 @@ import string
 
 version = '0.3'
 
-
 class Error(RuntimeError):
     pass
 
@@ -22,7 +21,7 @@ _S = '[ \t\r\n]+'                       # white space
 _opS = '[ \t\r\n]*'                     # optional white space
 _Name = '[a-zA-Z_:][-a-zA-Z0-9._:]*'    # valid XML name
 _QStr = "(?:'[^']*'|\"[^\"]*\")"        # quoted XML string
-illegal = re.compile('[^\t\r\n -\176\240-\377]')  # illegal chars in content
+illegal = re.compile('[^\t\r\n -\176\240-\377]') # illegal chars in content
 interesting = re.compile('[]&<]')
 
 amp = re.compile('&')
@@ -35,37 +34,37 @@ newline = re.compile('\n')
 attrfind = re.compile(
     _S + '(?P<name>' + _Name + ')'
     '(' + _opS + '=' + _opS +
-    '(?P<value>' + _QStr + '|[-a-zA-Z0-9.:+*%?!\(\)_#=~]+))?')
+    '(?P<value>'+_QStr+'|[-a-zA-Z0-9.:+*%?!\(\)_#=~]+))?')
 starttagopen = re.compile('<' + _Name)
 starttagend = re.compile(_opS + '(?P<slash>/?)>')
-starttagmatch = re.compile('<(?P<tagname>' + _Name + ')'
-                           '(?P<attrs>(?:' + attrfind.pattern + ')*)' +
-                           starttagend.pattern)
+starttagmatch = re.compile('<(?P<tagname>'+_Name+')'
+                      '(?P<attrs>(?:'+attrfind.pattern+')*)'+
+                      starttagend.pattern)
 endtagopen = re.compile('</')
 endbracket = re.compile(_opS + '>')
-endbracketfind = re.compile('(?:[^>\'"]|' + _QStr + ')*>')
+endbracketfind = re.compile('(?:[^>\'"]|'+_QStr+')*>')
 tagfind = re.compile(_Name)
 cdataopen = re.compile(r'<!\[CDATA\[')
 cdataclose = re.compile(r'\]\]>')
 # this matches one of the following:
 # SYSTEM SystemLiteral
 # PUBLIC PubidLiteral SystemLiteral
-_SystemLiteral = '(?P<%s>' + _QStr + ')'
+_SystemLiteral = '(?P<%s>'+_QStr+')'
 _PublicLiteral = '(?P<%s>"[-\'\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*"|' \
-    "'[-\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*')"
+                        "'[-\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*')"
 _ExternalId = '(?:SYSTEM|' \
-    'PUBLIC' + _S + _PublicLiteral % 'pubid' + \
-              ')' + _S + _SystemLiteral % 'syslit'
-doctype = re.compile('<!DOCTYPE' + _S + '(?P<name>' + _Name + ')'
-                     '(?:' + _S + _ExternalId + ')?' + _opS)
-xmldecl = re.compile('<\?xml' + _S +
-                     'version' + _opS + '=' + _opS + '(?P<version>' + _QStr + ')' +
-                     '(?:' + _S + 'encoding' + _opS + '=' + _opS +
-                     "(?P<encoding>'[A-Za-z][-A-Za-z0-9._]*'|"
-                     '"[A-Za-z][-A-Za-z0-9._]*"))?'
-                     '(?:' + _S + 'standalone' + _opS + '=' + _opS +
-                     '(?P<standalone>\'(?:yes|no)\'|"(?:yes|no)"))?' +
-                     _opS + '\?>')
+                 'PUBLIC'+_S+_PublicLiteral%'pubid'+ \
+              ')'+_S+_SystemLiteral%'syslit'
+doctype = re.compile('<!DOCTYPE'+_S+'(?P<name>'+_Name+')'
+                     '(?:'+_S+_ExternalId+')?'+_opS)
+xmldecl = re.compile('<\?xml'+_S+
+                     'version'+_opS+'='+_opS+'(?P<version>'+_QStr+')'+
+                     '(?:'+_S+'encoding'+_opS+'='+_opS+
+                        "(?P<encoding>'[A-Za-z][-A-Za-z0-9._]*'|"
+                        '"[A-Za-z][-A-Za-z0-9._]*"))?'
+                     '(?:'+_S+'standalone'+_opS+'='+_opS+
+                        '(?P<standalone>\'(?:yes|no)\'|"(?:yes|no)"))?'+
+                     _opS+'\?>')
 procopen = re.compile(r'<\?(?P<proc>' + _Name + ')' + _opS)
 procclose = re.compile(_opS + r'\?>')
 commentopen = re.compile('<!--')
@@ -76,10 +75,10 @@ attrtrans = string.maketrans(' \r\n\t', '    ')
 # definitions for XML namespaces
 _NCName = '[a-zA-Z_][-a-zA-Z0-9._]*'    # XML Name, minus the ":"
 ncname = re.compile(_NCName + '$')
-qname = re.compile('(?:(?P<prefix>' + _NCName + '):)?'  # optional prefix
+qname = re.compile('(?:(?P<prefix>' + _NCName + '):)?' # optional prefix
                    '(?P<local>' + _NCName + ')$')
 
-xmlns = re.compile('xmlns(?::(?P<ncname>' + _NCName + '))?$')
+xmlns = re.compile('xmlns(?::(?P<ncname>'+_NCName+'))?$')
 
 # XML parser base class -- find tags and call handler functions.
 # Usage: p = XMLParser(); p.feed(data); ...; p.close().
@@ -89,7 +88,6 @@ xmlns = re.compile('xmlns(?::(?P<ncname>' + _NCName + '))?$')
 # parser by calling self.handle_data() with some data as argument (the
 # data may be split up in arbitrary chunks).
 
-
 class XMLParser:
     attributes = {}                     # default, to be overridden
     elements = {}                       # default, to be overridden
@@ -105,18 +103,15 @@ class XMLParser:
     def __init__(self, **kw):
         self.__fixed = 0
         if 'accept_unquoted_attributes' in kw:
-            self.__accept_unquoted_attributes = kw[
-                'accept_unquoted_attributes']
+            self.__accept_unquoted_attributes = kw['accept_unquoted_attributes']
         if 'accept_missing_endtag_name' in kw:
-            self.__accept_missing_endtag_name = kw[
-                'accept_missing_endtag_name']
+            self.__accept_missing_endtag_name = kw['accept_missing_endtag_name']
         if 'map_case' in kw:
             self.__map_case = kw['map_case']
         if 'accept_utf8' in kw:
             self.__accept_utf8 = kw['accept_utf8']
         if 'translate_attribute_references' in kw:
-            self.__translate_attribute_references = kw[
-                'translate_attribute_references']
+            self.__translate_attribute_references = kw['translate_attribute_references']
         self.reset()
 
     def __fixelements(self):
@@ -154,7 +149,7 @@ class XMLParser:
         self.__seen_doctype = None
         self.__seen_starttag = 0
         self.__use_namespaces = 0
-        self.__namespaces = {'xml': None}   # xml is implicitly declared
+        self.__namespaces = {'xml':None}   # xml is implicitly declared
         # backward compatibility hack: if elements not overridden,
         # fill it in ourselves
         if self.elements is XMLParser.elements:
@@ -185,11 +180,11 @@ class XMLParser:
             del self.elements
 
     # Interface -- translate references
-    def translate_references(self, data, all=1):
+    def translate_references(self, data, all = 1):
         if not self.__translate_attribute_references:
             return data
         i = 0
-        while True:
+        while 1:
             res = amp.search(data, i)
             if res is None:
                 return data
@@ -197,7 +192,7 @@ class XMLParser:
             res = ref.match(data, s)
             if res is None:
                 self.syntax_error("bogus `&'")
-                i = s + 1
+                i = s+1
                 continue
             i = res.end(0)
             str = res.group(1)
@@ -209,23 +204,21 @@ class XMLParser:
                     str = chr(int(str[1:]))
                 if data[i - 1] != ';':
                     self.syntax_error("`;' missing after char reference")
-                    i = i - 1
+                    i = i-1
             elif all:
                 if str in self.entitydefs:
                     str = self.entitydefs[str]
                     rescan = 1
                 elif data[i - 1] != ';':
                     self.syntax_error("bogus `&'")
-                    i = s + 1  # just past the &
+                    i = s + 1 # just past the &
                     continue
                 else:
-                    self.syntax_error(
-                        "reference to unknown entity `&%s;'" %
-                        str)
+                    self.syntax_error("reference to unknown entity `&%s;'" % str)
                     str = '&' + str + ';'
             elif data[i - 1] != ';':
                 self.syntax_error("bogus `&'")
-                i = s + 1  # just past the &
+                i = s + 1 # just past the &
                 continue
 
             # when we get here, str contains the translated text and i points
@@ -276,72 +269,63 @@ class XMLParser:
                 self.handle_data(data)
                 self.lineno = self.lineno + data.count('\n')
             i = j
-            if i == n:
-                break
+            if i == n: break
             if rawdata[i] == '<':
                 if starttagopen.match(rawdata, i):
                     if self.literal:
                         data = rawdata[i]
                         self.handle_data(data)
                         self.lineno = self.lineno + data.count('\n')
-                        i = i + 1
+                        i = i+1
                         continue
                     k = self.parse_starttag(i)
-                    if k < 0:
-                        break
+                    if k < 0: break
                     self.__seen_starttag = 1
                     self.lineno = self.lineno + rawdata[i:k].count('\n')
                     i = k
                     continue
                 if endtagopen.match(rawdata, i):
                     k = self.parse_endtag(i)
-                    if k < 0:
-                        break
+                    if k < 0: break
                     self.lineno = self.lineno + rawdata[i:k].count('\n')
-                    i = k
+                    i =  k
                     continue
                 if commentopen.match(rawdata, i):
                     if self.literal:
                         data = rawdata[i]
                         self.handle_data(data)
                         self.lineno = self.lineno + data.count('\n')
-                        i = i + 1
+                        i = i+1
                         continue
                     k = self.parse_comment(i)
-                    if k < 0:
-                        break
+                    if k < 0: break
                     self.lineno = self.lineno + rawdata[i:k].count('\n')
                     i = k
                     continue
                 if cdataopen.match(rawdata, i):
                     k = self.parse_cdata(i)
-                    if k < 0:
-                        break
+                    if k < 0: break
                     self.lineno = self.lineno + rawdata[i:k].count('\n')
                     i = k
                     continue
                 res = xmldecl.match(rawdata, i)
                 if res:
                     if not self.__at_start:
-                        self.syntax_error(
-                            "<?xml?> declaration not at start of document")
+                        self.syntax_error("<?xml?> declaration not at start of document")
                     version, encoding, standalone = res.group('version',
                                                               'encoding',
                                                               'standalone')
                     if version[1:-1] != '1.0':
                         raise Error('only XML version 1.0 supported')
-                    if encoding:
-                        encoding = encoding[1:-1]
-                    if standalone:
-                        standalone = standalone[1:-1]
+                    if encoding: encoding = encoding[1:-1]
+                    if standalone: standalone = standalone[1:-1]
                     self.handle_xml(encoding, standalone)
                     i = res.end(0)
                     continue
                 res = procopen.match(rawdata, i)
                 if res:
                     k = self.parse_proc(i)
-                    if k < 0:
-                        break
+                    if k < 0: break
                     self.lineno = self.lineno + rawdata[i:k].count('\n')
                     i = k
                     continue
@@ -351,16 +335,14 @@ class XMLParser:
                         data = rawdata[i]
                         self.handle_data(data)
                         self.lineno = self.lineno + data.count('\n')
-                        i = i + 1
+                        i = i+1
                         continue
                     if self.__seen_doctype:
                         self.syntax_error('multiple DOCTYPE elements')
                     if self.__seen_starttag:
-                        self.syntax_error(
-                            'DOCTYPE not at beginning of document')
+                        self.syntax_error('DOCTYPE not at beginning of document')
                     k = self.parse_doctype(res)
-                    if k < 0:
-                        break
+                    if k < 0: break
                     self.__seen_doctype = res.group('name')
                     if self.__map_case:
                         self.__seen_doctype = self.__seen_doctype.lower()
@@ -371,14 +353,14 @@ class XMLParser:
                 if self.literal:
                     data = rawdata[i]
                     self.handle_data(data)
-                    i = i + 1
+                    i = i+1
                     continue
                 res = charref.match(rawdata, i)
                 if res is not None:
                     i = res.end(0)
-                    if rawdata[i - 1] != ';':
+                    if rawdata[i-1] != ';':
                         self.syntax_error("`;' missing in charref")
-                        i = i - 1
+                        i = i-1
                     if not self.stack:
                         self.syntax_error('data not in content')
                     self.handle_charref(res.group('char')[:-1])
@@ -387,15 +369,14 @@ class XMLParser:
                 res = entityref.match(rawdata, i)
                 if res is not None:
                     i = res.end(0)
-                    if rawdata[i - 1] != ';':
+                    if rawdata[i-1] != ';':
                         self.syntax_error("`;' missing in entityref")
-                        i = i - 1
+                        i = i-1
                     name = res.group('name')
                     if self.__map_case:
                         name = name.lower()
                     if name in self.entitydefs:
-                        self.rawdata = rawdata = rawdata[
-                            :res.start(0)] + self.entitydefs[name] + rawdata[i:]
+                        self.rawdata = rawdata = rawdata[:res.start(0)] + self.entitydefs[name] + rawdata[i:]
                         n = len(rawdata)
                         i = res.start(0)
                     else:
@@ -406,14 +387,14 @@ class XMLParser:
                 if self.literal:
                     data = rawdata[i]
                     self.handle_data(data)
-                    i = i + 1
+                    i = i+1
                     continue
-                if n - i < 3:
+                if n-i < 3:
                     break
                 if cdataclose.match(rawdata, i):
                     self.syntax_error("bogus `]]>'")
                 self.handle_data(rawdata[i])
-                i = i + 1
+                i = i+1
                 continue
             else:
                 raise Error('neither < nor & ??')
@@ -430,7 +411,7 @@ class XMLParser:
                 self.syntax_error('illegal character in content')
             self.handle_data(data)
             self.lineno = self.lineno + data.count('\n')
-            self.rawdata = rawdata[i + 1:]
+            self.rawdata = rawdata[i+1:]
             return self.goahead(end)
         self.rawdata = rawdata[i:]
         if end:
@@ -444,19 +425,19 @@ class XMLParser:
     # Internal -- parse comment, return length or -1 if not terminated
     def parse_comment(self, i):
         rawdata = self.rawdata
-        if rawdata[i:i + 4] != '<!--':
+        if rawdata[i:i+4] != '<!--':
             raise Error('unexpected call to handle_comment')
-        res = commentclose.search(rawdata, i + 4)
+        res = commentclose.search(rawdata, i+4)
         if res is None:
             return -1
-        if doubledash.search(rawdata, i + 4, res.start(0)):
+        if doubledash.search(rawdata, i+4, res.start(0)):
             self.syntax_error("`--' inside comment")
-        if rawdata[res.start(0) - 1] == '-':
+        if rawdata[res.start(0)-1] == '-':
             self.syntax_error('comment cannot end in three dashes')
         if not self.__accept_utf8 and \
-           illegal.search(rawdata, i + 4, res.start(0)):
+           illegal.search(rawdata, i+4, res.start(0)):
             self.syntax_error('illegal character in comment')
-        self.handle_comment(rawdata[i + 4: res.start(0)])
+        self.handle_comment(rawdata[i+4: res.start(0)])
         return res.end(0)
 
     # Internal -- handle DOCTYPE tag, return length or -1 if not terminated
@@ -469,15 +450,14 @@ class XMLParser:
         pubid, syslit = res.group('pubid', 'syslit')
         if pubid is not None:
             pubid = pubid[1:-1]         # remove quotes
-            pubid = ' '.join(pubid.split())  # normalize
-        if syslit is not None:
-            syslit = syslit[1:-1]  # remove quotes
+            pubid = ' '.join(pubid.split()) # normalize
+        if syslit is not None: syslit = syslit[1:-1] # remove quotes
         j = k = res.end(0)
         if k >= n:
             return -1
         if rawdata[k] == '[':
             level = 0
-            k = k + 1
+            k = k+1
             dq = sq = 0
             while k < n:
                 c = rawdata[k]
@@ -488,10 +468,10 @@ class XMLParser:
                 elif sq or dq:
                     pass
                 elif level <= 0 and c == ']':
-                    res = endbracket.match(rawdata, k + 1)
+                    res = endbracket.match(rawdata, k+1)
                     if res is None:
                         return -1
-                    self.handle_doctype(name, pubid, syslit, rawdata[j + 1:k])
+                    self.handle_doctype(name, pubid, syslit, rawdata[j+1:k])
                     return res.end(0)
                 elif c == '<':
                     level = level + 1
@@ -499,7 +479,7 @@ class XMLParser:
                     level = level - 1
                     if level < 0:
                         self.syntax_error("bogus `>' in DOCTYPE")
-                k = k + 1
+                k = k+1
         res = endbracketfind.match(rawdata, k)
         if res is None:
             return -1
@@ -511,31 +491,30 @@ class XMLParser:
     # Internal -- handle CDATA tag, return length or -1 if not terminated
     def parse_cdata(self, i):
         rawdata = self.rawdata
-        if rawdata[i:i + 9] != '<![CDATA[':
+        if rawdata[i:i+9] != '<![CDATA[':
             raise Error('unexpected call to parse_cdata')
-        res = cdataclose.search(rawdata, i + 9)
+        res = cdataclose.search(rawdata, i+9)
         if res is None:
             return -1
         if not self.__accept_utf8 and \
-           illegal.search(rawdata, i + 9, res.start(0)):
+           illegal.search(rawdata, i+9, res.start(0)):
             self.syntax_error('illegal character in CDATA')
         if not self.stack:
             self.syntax_error('CDATA not in content')
-        self.handle_cdata(rawdata[i + 9:res.start(0)])
+        self.handle_cdata(rawdata[i+9:res.start(0)])
         return res.end(0)
 
-    __xml_namespace_attributes = {'ns': None, 'src': None, 'prefix': None}
+    __xml_namespace_attributes = {'ns':None, 'src':None, 'prefix':None}
     # Internal -- handle a processing instruction tag
-
     def parse_proc(self, i):
         rawdata = self.rawdata
         end = procclose.search(rawdata, i)
         if end is None:
             return -1
         j = end.start(0)
-        if not self.__accept_utf8 and illegal.search(rawdata, i + 2, j):
+        if not self.__accept_utf8 and illegal.search(rawdata, i+2, j):
             self.syntax_error('illegal character in processing instruction')
-        res = tagfind.match(rawdata, i + 2)
+        res = tagfind.match(rawdata, i+2)
         if res is None:
             raise Error('unexpected call to parse_proc')
         k = res.end(0)
@@ -549,17 +528,13 @@ class XMLParser:
             # this must come after the <?xml?> declaration (if any)
             # and before the <!DOCTYPE> (if any).
             if self.__seen_doctype or self.__seen_starttag:
-                self.syntax_error(
-                    'xml:namespace declaration too late in document')
+                self.syntax_error('xml:namespace declaration too late in document')
             attrdict, namespace, k = self.parse_attributes(name, k, j)
             if namespace:
-                self.syntax_error(
-                    'namespace declaration inside namespace declaration')
+                self.syntax_error('namespace declaration inside namespace declaration')
             for attrname in attrdict.keys():
                 if not attrname in self.__xml_namespace_attributes:
-                    self.syntax_error(
-                        "unknown attribute `%s' in xml:namespace tag" %
-                        attrname)
+                    self.syntax_error("unknown attribute `%s' in xml:namespace tag" % attrname)
             if not 'ns' in attrdict or not 'prefix' in attrdict:
                 self.syntax_error('xml:namespace without required attributes')
             prefix = attrdict.get('prefix')
@@ -589,12 +564,10 @@ class XMLParser:
                 attrname = attrname.lower()
             i = res.end(0)
             if attrvalue is None:
-                self.syntax_error(
-                    "no value specified for attribute `%s'" %
-                    attrname)
+                self.syntax_error("no value specified for attribute `%s'" % attrname)
                 attrvalue = attrname
             elif attrvalue[:1] == "'" == attrvalue[-1:] or \
-                    attrvalue[:1] == '"' == attrvalue[-1:]:
+                 attrvalue[:1] == '"' == attrvalue[-1:]:
                 attrvalue = attrvalue[1:-1]
             elif not self.__accept_unquoted_attributes:
                 self.syntax_error("attribute `%s' value not quoted" % attrname)
@@ -604,7 +577,7 @@ class XMLParser:
                 ncname = res.group('ncname')
                 namespace[ncname or ''] = attrvalue or None
                 if not self.__use_namespaces:
-                    self.__use_namespaces = len(self.stack) + 1
+                    self.__use_namespaces = len(self.stack)+1
                 continue
             if '<' in attrvalue:
                 self.syntax_error("`<' illegal in attribute value")
@@ -618,7 +591,7 @@ class XMLParser:
     def parse_starttag(self, i):
         rawdata = self.rawdata
         # i points to start of tag
-        end = endbracketfind.match(rawdata, i + 1)
+        end = endbracketfind.match(rawdata, i+1)
         if end is None:
             return -1
         tag = starttagmatch.match(rawdata, i)
@@ -653,11 +626,10 @@ class XMLParser:
             if ns is not None:
                 nstag = ns + ' ' + nstag
             elif prefix != '':
-                nstag = prefix + ':' + nstag  # undo split
+                nstag = prefix + ':' + nstag # undo split
             self.stack[-1] = tagname, nsdict, nstag
         # translate namespace of attributes
-        attrnamemap = {}
-            # map from new name to old name (used for error reporting)
+        attrnamemap = {} # map from new name to old name (used for error reporting)
         for key in attrdict.keys():
             attrnamemap[key] = key
         if self.__use_namespaces:
@@ -690,9 +662,7 @@ class XMLParser:
         if attributes is not None:
             for key in attrdict.keys():
                 if not key in attributes:
-                    self.syntax_error(
-                        "unknown attribute `%s' in tag `%s'" %
-                        (attrnamemap[key], tagname))
+                    self.syntax_error("unknown attribute `%s' in tag `%s'" % (attrnamemap[key], tagname))
             for key, val in attributes.items():
                 if val is not None and not key in attrdict:
                     attrdict[key] = val
@@ -705,18 +675,18 @@ class XMLParser:
     # Internal -- parse endtag
     def parse_endtag(self, i):
         rawdata = self.rawdata
-        end = endbracketfind.match(rawdata, i + 1)
+        end = endbracketfind.match(rawdata, i+1)
         if end is None:
             return -1
-        res = tagfind.match(rawdata, i + 2)
+        res = tagfind.match(rawdata, i+2)
         if res is None:
             if self.literal:
                 self.handle_data(rawdata[i])
-                return i + 1
+                return i+1
             if not self.__accept_missing_endtag_name:
                 self.syntax_error('no name specified in end tag')
             tag = self.stack[-1][0]
-            k = i + 2
+            k = i+2
         else:
             tag = res.group(0)
             if self.__map_case:
@@ -724,7 +694,7 @@ class XMLParser:
             if self.literal:
                 if not self.stack or tag != self.stack[-1][0]:
                     self.handle_data(rawdata[i])
-                    return i + 1
+                    return i+1
             k = res.end(0)
         if endbracket.match(rawdata, k) is None:
             self.syntax_error('garbage in end tag')
@@ -757,9 +727,7 @@ class XMLParser:
                 return
         while len(self.stack) > found:
             if found < len(self.stack) - 1:
-                self.syntax_error(
-                    'missing close tag for %s' %
-                    self.stack[-1][2])
+                self.syntax_error('missing close tag for %s' % self.stack[-1][2])
             nstag = self.stack[-1][2]
             method = self.elements.get(nstag, (None, None))[1]
             if method is not None:
@@ -831,11 +799,8 @@ class XMLParser:
 
     # To be overridden -- handlers for unknown objects
     def unknown_starttag(self, tag, attrs): pass
-
     def unknown_endtag(self, tag): pass
-
     def unknown_charref(self, ref): pass
-
     def unknown_entityref(self, name):
         self.syntax_error("reference to unknown entity `&%s;'" % name)
 
@@ -848,34 +813,34 @@ class TestXMLParser(XMLParser):
 
     def handle_xml(self, encoding, standalone):
         self.flush()
-        print 'xml: encoding =', encoding, 'standalone =', standalone
+        print 'xml: encoding =',encoding,'standalone =',standalone
 
     def handle_doctype(self, tag, pubid, syslit, data):
         self.flush()
-        print 'DOCTYPE:', tag, repr(data)
+        print 'DOCTYPE:',tag, `data`
 
     def handle_data(self, data):
         self.testdata = self.testdata + data
-        if len(repr(self.testdata)) >= 70:
+        if len(`self.testdata`) >= 70:
             self.flush()
 
     def flush(self):
         data = self.testdata
         if data:
             self.testdata = ""
-            print 'data:', repr(data)
+            print 'data:', `data`
 
     def handle_cdata(self, data):
         self.flush()
-        print 'cdata:', repr(data)
+        print 'cdata:', `data`
 
     def handle_proc(self, name, data):
         self.flush()
-        print 'processing:', name, repr(data)
+        print 'processing:',name,`data`
 
     def handle_comment(self, data):
         self.flush()
-        r = repr(data)
+        r = `data`
         if len(r) > 68:
             r = r[:32] + '...' + r[-32:]
         print 'comment:', r
@@ -909,10 +874,8 @@ class TestXMLParser(XMLParser):
         XMLParser.close(self)
         self.flush()
 
-
-def test(args=None):
-    import sys
-    import getopt
+def test(args = None):
+    import sys, getopt
     from time import time
 
     if not args:
@@ -937,7 +900,7 @@ def test(args=None):
     else:
         try:
             f = open(file, 'r')
-        except IOError as msg:
+        except IOError, msg:
             print file, ":", msg
             sys.exit(1)
 
@@ -955,15 +918,15 @@ def test(args=None):
             for c in data:
                 x.feed(c)
             x.close()
-    except Error as msg:
+    except Error, msg:
         t1 = time()
         print msg
         if do_time:
-            print 'total time: %g' % (t1 - t0)
+            print 'total time: %g' % (t1-t0)
         sys.exit(1)
     t1 = time()
     if do_time:
-        print 'total time: %g' % (t1 - t0)
+        print 'total time: %g' % (t1-t0)
 
 
 if __name__ == '__main__':
diff --git a/Packages/cdms2/Lib/convention.py b/Packages/cdms2/Lib/convention.py
index 43086d8e4..4e8c9053e 100644
--- a/Packages/cdms2/Lib/convention.py
+++ b/Packages/cdms2/Lib/convention.py
@@ -1,6 +1,7 @@
 """ metadata conventions """
 
-from .error import CDMSError
+import string
+from error import CDMSError
 from UserList import UserList
 
 # On in order to turn off some warnings
@@ -8,20 +9,15 @@ WITH_GRIDSPEC_SUPPORT = True
 
 MethodNotImplemented = "Method not yet implemented"
 
-
 class AliasList (UserList):
-
     def __init__(self, alist):
-        UserList.__init__(self, alist)
-
-    def __setitem__(self, i, value):
-        self.data[i] = value.lower()
-
+        UserList.__init__(self,alist)
+    def __setitem__ (self, i, value):
+        self.data[i] = string.lower(value)
     def __setslice(self, i, j, values):
-        self.data[i:j] = map(lambda x: x.lower(), values)
-
+        self.data[i:j] = map(lambda x: string.lower(x), values)
     def append(self, value):
-        self.data.append(value.lower())
+        self.data.append(string.lower(value))
 
 level_aliases = AliasList(['plev'])
 longitude_aliases = AliasList([])
@@ -29,24 +25,23 @@ latitude_aliases = AliasList([])
 time_aliases = AliasList([])
 forecast_aliases = AliasList([])
 
-
 class AbstractConvention:
 
     def getAxisIds(self, vardict):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def getAxisAuxIds(self, vardict, axiskeys):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def getDsetnodeAuxAxisIds(self, dsetnode):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def axisIsLatitude(self, axis):
-        id = axis.id.lower()
+        id = string.lower(axis.id)
         return (id[0:3] == 'lat') or (id in latitude_aliases)
 
     def axisIsLongitude(self, axis):
-        id = axis.id.lower()
+        id = string.lower(axis.id)
         return (id[0:3] == 'lon') or (id in longitude_aliases)
 
     def getVarLatId(self, var, vardict=None):
@@ -55,7 +50,7 @@ class AbstractConvention:
 
         for obj in [d[0] for d in var.getDomain()]:
             if self.axisIsLatitude(obj):
-                if nlat == 0:
+                if nlat==0:
                     lat = obj
                 nlat += 1
         return (lat, nlat)
@@ -65,12 +60,11 @@ class AbstractConvention:
         nlon = 0
         for obj in [d[0] for d in var.getDomain()]:
             if self.axisIsLongitude(obj):
-                if nlon == 0:
+                if nlon==0:
                     lon = obj
                 nlon += 1
         return (lon, nlon)
 
-
 class NUGConvention(AbstractConvention):
 
     def __init__(self, version=None):
@@ -81,20 +75,18 @@ class NUGConvention(AbstractConvention):
         result = []
         for name in vardict.keys():
             dimensions = vardict[name].dimensions
-            if len(dimensions) == 1 and (name in dimensions):
+            if len(dimensions)==1 and (name in dimensions):
                 result.append(name)
         return result
 
     def getAxisAuxIds(self, vardict, axiskeys):
         return []
 
-
 class COARDSConvention(NUGConvention):
 
     def __init__(self, version=None):
         NUGConvention.__init__(self, version)
 
-
 class CFConvention(COARDSConvention):
 
     current = 'CF-1.0'
@@ -107,7 +99,7 @@ class CFConvention(COARDSConvention):
         coorddict = {}
         for var in vardict.values():
             if hasattr(var, 'coordinates'):
-                coordnames = var.coordinates.lower()
+                coordnames = string.split(var.coordinates)
                 for item in coordnames:
                     # Don't include if already a 1D coordinate axis.
                     if item in axiskeys:
@@ -120,11 +112,11 @@ class CFConvention(COARDSConvention):
                 # Note: not everything referenced by .coordinates attribute is
                 # in fact a coordinate axis, e.g., scalar coordinates
                 if not WITH_GRIDSPEC_SUPPORT:
-                    print 'Warning: coordinate attribute points to non-existent variable: %s' % key
+                    print 'Warning: coordinate attribute points to non-existent variable: %s'%key
                 del coorddict[key]
                 continue
             # Omit scalar dimensions, and dimensions greater than 2-D
-            if len(coord.shape) not in [1, 2]:
+            if len(coord.shape) not in [1,2]:
                 del coorddict[key]
         return coorddict.keys()
 
@@ -135,16 +127,15 @@ class CFConvention(COARDSConvention):
         for node in dsetdict.values():
             coordnames = node.getExternalAttr('coordinates')
             if coordnames is not None:
-                coordnames = coordnames.split()
+                coordnames = string.split(coordnames)
                 for item in coordnames:
                     # Don't include if already a 1D coordinate axis.
-                    if item in dsetdict and dsetdict[item].tag == 'axis':
+                    if dsetdict.has_key(item) and dsetdict[item].tag=='axis':
                         continue
                     # It's not an axis node, so must be a variable, so getDomain is defined.
-                    # Check the rank, don't include if not 1D or 2D (e.g.,
-                    # scalar coordinate)
+                    # Check the rank, don't include if not 1D or 2D (e.g., scalar coordinate)
                     domnode = dsetdict[item].getDomain()
-                    if domnode.getChildCount() not in [1, 2]:
+                    if domnode.getChildCount() not in [1,2]:
                         continue
                     coorddict[item] = 1
         return coorddict.keys()
@@ -158,21 +149,21 @@ class CFConvention(COARDSConvention):
             return (lat, nlat)
 
         if hasattr(var, 'coordinates'):
-            coordnames = var.coordinates.split()
+            coordnames = string.split(var.coordinates)
             for name in coordnames:
                 coord = vardict.get(name)
 
                 # Note: not everything referenced by .coordinates attribute is
                 # in fact a coordinate axis, e.g., scalar coordinates
                 if coord is not None and hasattr(coord, 'isLatitude') and coord.isLatitude():
-                    if nlat == 0:
+                    if nlat==0:
                         lat = coord
                     nlat += 1
         if lat is None:
             lat, nlat = AbstractConvention.getVarLatId(self, var, vardict)
 
         return (lat, nlat)
-
+                
     def getVarLonId(self, var, vardict):
         lon = None
         nlon = 0
@@ -182,37 +173,37 @@ class CFConvention(COARDSConvention):
             return (lon, nlon)
 
         if hasattr(var, 'coordinates'):
-            coordnames = var.coordinates.split()
+            coordnames = string.split(var.coordinates)
             for name in coordnames:
                 coord = vardict.get(name)
 
                 # Note: not everything referenced by .coordinates attribute is
                 # in fact a coordinate axis, e.g., scalar coordinates
                 if coord is not None and hasattr(coord, 'isLongitude') and coord.isLongitude():
-                    if nlon == 0:
+                    if nlon==0:
                         lon = coord
                     nlon += 1
         if lon is None:
             lon, nlon = AbstractConvention.getVarLonId(self, var, vardict)
 
         return (lon, nlon)
-
+                
     def axisIsLatitude(self, axis):
-        if (hasattr(axis, 'axis') and axis.axis == 'Y'):
+        if (hasattr(axis,'axis') and axis.axis=='Y'):
             return 1
-        elif (hasattr(axis, 'units') and axis.units.lower() in ['degrees_north', 'degree_north', 'degree_n', 'degrees_n', 'degreen', 'degreesn']):
+        elif (hasattr(axis, 'units') and string.lower(axis.units) in ['degrees_north', 'degree_north', 'degree_n', 'degrees_n', 'degreen', 'degreesn']):
             return 1
-        elif (hasattr(axis, 'standard_name') and axis.standard_name.lower() == 'latitude'):
+        elif (hasattr(axis, 'standard_name') and string.lower(axis.standard_name)=='latitude'):
             return 1
         else:
             return AbstractConvention.axisIsLatitude(self, axis)
-
+        
     def axisIsLongitude(self, axis):
-        if (hasattr(axis, 'axis') and axis.axis == 'X'):
+        if (hasattr(axis,'axis') and axis.axis=='X'):
             return 1
-        elif (hasattr(axis, 'units') and axis.units.lower() in ['degrees_east', 'degree_east', 'degree_e', 'degrees_e', 'degreee', 'degreese']):
+        elif (hasattr(axis, 'units') and string.lower(axis.units) in ['degrees_east', 'degree_east', 'degree_e', 'degrees_e', 'degreee', 'degreese']):
             return 1
-        elif (hasattr(axis, 'standard_name') and axis.standard_name.lower() == 'longitude'):
+        elif (hasattr(axis, 'standard_name') and string.lower(axis.standard_name)=='longitude'):
             return 1
         else:
             return AbstractConvention.axisIsLongitude(self, axis)
@@ -221,10 +212,10 @@ class CFConvention(COARDSConvention):
         """Get the bounds variable for the variable, from a dataset or file."""
         if hasattr(var, 'bounds'):
             boundsid = var.bounds
-            if boundsid in dset.variables:
+            if dset.variables.has_key(boundsid):
                 result = dset[boundsid]
             else:
-                print 'Warning: bounds variable not found in %s: %s' % (dset.id, boundsid)
+                print 'Warning: bounds variable not found in %s: %s'%(dset.id, boundsid)
                 result = None
         else:
             result = None
@@ -235,7 +226,7 @@ NUG = NUGConvention()
 COARDS = COARDSConvention()
 CF1 = CFConvention('CF-1')
 
-
 def getDatasetConvention(dset):
     "Return an appropriate convention object. dset is a file or dataset object"
     return CF1
+
diff --git a/Packages/cdms2/Lib/coord.py b/Packages/cdms2/Lib/coord.py
index 8935220af..6f7773fd1 100644
--- a/Packages/cdms2/Lib/coord.py
+++ b/Packages/cdms2/Lib/coord.py
@@ -1,25 +1,26 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 """
 CDMS CoordinateAxis objects
 """
 import sys
-from . import cdmsNode
+import cdmsNode
 import cdtime
 import copy
 import numpy
-# import internattr
-from .cdmsobj import CdmsObj
-from .axis import createAxis, TransientVirtualAxis
-from .error import CDMSError
-from .convention import AliasList, level_aliases, longitude_aliases, latitude_aliases, time_aliases,\
-    forecast_aliases
-from .fvariable import FileVariable
-from .variable import DatasetVariable
-from .tvariable import TransientVariable
-from .avariable import AbstractVariable
-from functools import reduce
+#import internattr
+import types
+import string
+from cdmsobj import CdmsObj
+from axis import createAxis, TransientVirtualAxis
+from error import CDMSError
+from convention import AliasList, level_aliases, longitude_aliases, latitude_aliases, time_aliases,\
+     forecast_aliases
+from fvariable import FileVariable
+from variable import DatasetVariable
+from tvariable import TransientVariable
+from avariable import AbstractVariable
 
 MethodNotImplemented = "Method not yet implemented"
 NoSuchAxisOrGrid = "No such axis or grid: "
@@ -29,38 +30,34 @@ std_axis_attributes = ['name', 'units', 'length', 'values', 'bounds']
 
 # Map between cdtime calendar and CF tags
 calendarToTag = {
-    cdtime.MixedCalendar: 'gregorian',
-    cdtime.NoLeapCalendar: 'noleap',
-    cdtime.GregorianCalendar: 'proleptic_gregorian',
-    cdtime.JulianCalendar: 'julian',
-    cdtime.Calendar360: '360_day'
-}
+    cdtime.MixedCalendar : 'gregorian',
+    cdtime.NoLeapCalendar : 'noleap',
+    cdtime.GregorianCalendar : 'proleptic_gregorian',
+    cdtime.JulianCalendar : 'julian',
+    cdtime.Calendar360 : '360_day'
+    }
 
 tagToCalendar = {
-    'gregorian': cdtime.MixedCalendar,
-    'standard': cdtime.GregorianCalendar,
-    'noleap': cdtime.NoLeapCalendar,
-    'julian': cdtime.JulianCalendar,
-    'proleptic_gregorian': cdtime.GregorianCalendar,
-    '360_day': cdtime.Calendar360,
-    '360': cdtime.Calendar360,
-    '365_day': cdtime.NoLeapCalendar,
-}
+    'gregorian' : cdtime.MixedCalendar,
+    'standard' : cdtime.GregorianCalendar,
+    'noleap' : cdtime.NoLeapCalendar,
+    'julian' : cdtime.JulianCalendar,
+    'proleptic_gregorian' : cdtime.GregorianCalendar,
+    '360_day' : cdtime.Calendar360,
+    '360' : cdtime.Calendar360,
+    '365_day' : cdtime.NoLeapCalendar,
+    }
 
 # This is not an error message, it is used to detect which things have
 # been left as default indices or coordinates.
 unspecified = "No value specified."
 
 # Create a transient axis
-
-
 def createCoordinateAxis(data, bounds=None, id=None, copy=0):
     return TransientAxis(data, bounds, id, copy=copy)
 
 # AbstractCoordinateAxis defines the common interface
 # for coordinate variables/axes.
-
-
 class AbstractCoordinateAxis(CdmsObj):
 
     axis_count = 0                      # Transient axis count
@@ -68,15 +65,15 @@ class AbstractCoordinateAxis(CdmsObj):
     def __init__(self, parent=None, variableNode=None, bounds=None):
         CdmsObj.__init__(self, variableNode)
         self._bounds_ = bounds
-
+        
     def isAbstractCoordinate(self):
         return 1
 
-    def clone(self, copyData=1):
+    def clone (self, copyData=1):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     # Designate axis as a latitude axis.
     # If persistent is true, write metadata to the container.
@@ -120,17 +117,17 @@ class AbstractCoordinateAxis(CdmsObj):
     # or None. If the axis does not have a calendar attribute, return the global
     # calendar.
     def getCalendar(self):
-        if hasattr(self, 'calendar'):
-            calendar = self.calendar.lower()
+        if hasattr(self,'calendar'):
+            calendar = string.lower(self.calendar)
         else:
             calendar = None
 
-        cdcal = tagToCalendar.get(calendar, None)
+        cdcal = tagToCalendar.get(calendar,None)
         return cdcal
 
     def getData(self):
-        raise CDMSError(MethodNotImplemented)
-
+        raise CDMSError, MethodNotImplemented
+ 
     # Return None if not explicitly defined
     def getExplicitBounds(self):
         if self._bounds_ is not None:
@@ -140,8 +137,7 @@ class AbstractCoordinateAxis(CdmsObj):
 
     def info(self, flag=None, device=None):
         "Write info about axis; include dimension values and weights if flag"
-        if device is None:
-            device = sys.stdout
+        if device is None: device = sys.stdout
         device.write(str(self))
 
     # Return true iff the axis is a latitude axis
@@ -151,9 +147,8 @@ class AbstractCoordinateAxis(CdmsObj):
 
     # Return true iff the axis is a level axis
     def isLevel(self):
-        id = self.id.lower()
-        if (hasattr(self, 'axis') and self.axis == 'Z'):
-            return 1
+        id = string.lower(self.id)
+        if (hasattr(self,'axis') and self.axis=='Z'): return 1
         return ((id[0:3] == 'lev') or (id[0:5] == 'depth') or (id in level_aliases))
 
     # Return true iff the axis is a longitude axis
@@ -163,31 +158,25 @@ class AbstractCoordinateAxis(CdmsObj):
 
     # Return true iff the axis is a time axis
     def isTime(self):
-        id = self.id.lower()
-        if (hasattr(self, 'axis') and self.axis == 'T'):
-            return 1
+        id = string.lower(self.id)
+        if (hasattr(self,'axis') and self.axis=='T'): return 1
         return (id[0:4] == 'time') or (id in time_aliases)
 
     # Return true iff the axis is a forecast axis
     def isForecast(self):
-        id = self.id.lower()
-        if (hasattr(self, 'axis') and self.axis == 'F'):
-            return 1
+        id = string.lower(self.id)
+        if (hasattr(self,'axis') and self.axis=='F'): return 1
         return (id[0:6] == 'fctau0') or (id in forecast_aliases)
 
-    def listall(self, all=None):
+    def listall (self, all=None):
         "Get list of info about this axis."
         aname = self.id
         result = []
         result.append('   id: ' + aname)
-        if self.isLatitude():
-            result.append('   Designated a latitude axis.')
-        if self.isLongitude():
-            result.append('   Designated a longitude axis.')
-        if self.isTime():
-            result.append('   Designated a time axis.')
-        if self.isLevel():
-            result.append('   Designated a level axis.')
+        if self.isLatitude(): result.append('   Designated a latitude axis.')
+        if self.isLongitude(): result.append('   Designated a longitude axis.')
+        if self.isTime(): result.append('   Designated a time axis.')
+        if self.isLevel(): result.append('   Designated a level axis.')
         try:
             units = self.units
             result.append('   units:  ' + units)
@@ -197,12 +186,11 @@ class AbstractCoordinateAxis(CdmsObj):
         result.append('   Shape: ' + str(d.shape))
         flag = 1
         for k in self.attributes.keys():
-            if k in std_axis_attributes:
-                continue
+            if k in std_axis_attributes: continue
             if flag:
                 result.append('   Other axis attributes:')
                 flag = 0
-            result.append('      ' + k + ': ' + str(self.attributes[k]))
+            result.append('      '+k+': '+str(self.attributes[k]))
         result.append('   Python id:  %s' % hex(id(self)))
 
         if all:
@@ -227,106 +215,86 @@ class AbstractCoordinateAxis(CdmsObj):
         if persistent:
             self.calendar = calendarToTag.get(calendar, None)
             if self.calendar is None:
-                raise CDMSError(InvalidCalendar + calendar)
+                raise CDMSError, InvalidCalendar + calendar
         else:
             self.__dict__['calendar'] = calendarToTag.get(calendar, None)
             if self.__dict__['calendar'] is None:
-                raise CDMSError(InvalidCalendar + calendar)
+                raise CDMSError, InvalidCalendar + calendar
 
-    def size(self, axis=None):
+    def size (self, axis = None):
         "Number of elements in array, or in a particular axis."
         s = self.shape
         if axis is None:
             if len(s) == 0:
                 return 1
             else:
-                return reduce(lambda x, y: x * y, s)
+                return reduce(lambda x,y: x*y, s)
         else:
             return s[axis]
-
+        
     def writeToFile(self, file):
 
         if self._bounds_ is not None:
-            if hasattr(self, "bounds"):
+            if hasattr(self,"bounds"):
                 boundsid = self.bounds
             else:
-                boundsid = "bounds_" + self.id
+                boundsid = "bounds_"+self.id
             self.bounds = boundsid
 
         fvar = file.write(self)
 
-        # Create the bounds variable
-        if (self._bounds_ is not None) and boundsid not in file.variables:
+        # Create the bounds variable 
+        if (self._bounds_ is not None) and not file.variables.has_key(boundsid):
             boundslen = self._bounds_.shape[-1]
             try:
                 boundid = self._bounds_.getAxis(-1).id
-                boundsaxis = file.getBoundsAxis(boundslen, boundid=boundid)
+                boundsaxis = file.getBoundsAxis(boundslen,boundid=boundid)
             except:
                 boundsaxis = file.getBoundsAxis(boundslen)
-
+            
             axislist = fvar.getAxisList()
             axislist.append(boundsaxis)
-            boundsvar = file.createVariable(
-                boundsid,
-                cdmsNode.NumericToCdType.get(self.dtype.char),
-                axislist)
+            boundsvar = file.createVariable(boundsid, cdmsNode.NumericToCdType.get(self.dtype.char), axislist)
             boundsvar[:] = self._bounds_.astype(boundsvar.dtype)
         return fvar
 
-
 class AbstractAxis2D(AbstractCoordinateAxis):
 
-    def __init__(self, parent=None, variableNode=None, bounds=None):
-        AbstractCoordinateAxis.__init__(
-            self,
-            parent,
-            variableNode,
-            bounds=bounds)
+    def __init__ (self, parent=None, variableNode=None, bounds=None):
+        AbstractCoordinateAxis.__init__(self, parent, variableNode, bounds=bounds)
 
-    def clone(self, copyData=1):
+    def clone (self, copyData=1):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
-        result = TransientAxis2D(
-            self[:],
-            copy=copyData,
-            axes=self.getAxisList(),
-            attributes=self.attributes,
-            bounds=self.getBounds())
+        result = TransientAxis2D(self[:], copy=copyData, axes=self.getAxisList(), attributes=self.attributes, bounds=self.getBounds())
         return result
 
     def setBounds(self, bounds):
         if bounds is not None:
-            if len(bounds.shape) != 3:
-                raise CDMSError('Bounds must have rank=3')
-            if bounds.shape[0:2] != self.shape:
-                raise CDMSError(
-                    'Bounds shape %s is inconsistent with axis shape %s' %
-                    (repr(bounds.shape), repr(self.shape)))
+            if len(bounds.shape)!=3:
+                raise CDMSError, 'Bounds must have rank=3'
+            if bounds.shape[0:2]!=self.shape:
+                raise CDMSError, 'Bounds shape %s is inconsistent with axis shape %s'%(`bounds.shape`,`self.shape`)
         AbstractCoordinateAxis.setBounds(self, bounds)
 
-    def subSlice(self, *specs, **keys):
+    def subSlice (self, *specs, **keys):
         # Take a subslice, returning a TransientAxis2D
         avar = AbstractVariable.subSlice(self, *specs, **keys)
         bounds = self.getBounds()
         if bounds is None:
             newbounds = None
         else:
-            newbounds = bounds[
-                specs]   # bounds can be a numarray or DatasetVariable
+            newbounds = bounds[specs]   # bounds can be a numarray or DatasetVariable
 
-        # Note: disable axis copy to preserve identity of grid and variable
-        # domains
-        result = TransientAxis2D(avar, bounds=newbounds, copyaxes=0)
+        # Note: disable axis copy to preserve identity of grid and variable domains
+        result = TransientAxis2D(avar, bounds=newbounds, copyaxes=0)    
         return result
 
 # Two-dimensional coordinate axis in a dataset.
-
-
 class DatasetAxis2D(AbstractAxis2D, DatasetVariable):
 
     # Note: node is a VariableNode
-
     def __init__(self, parent, id=None, variableNode=None, bounds=None):
         AbstractAxis2D.__init__(self, parent, variableNode, bounds=bounds)
         DatasetVariable.__init__(self, parent, id, variableNode)
@@ -334,16 +302,13 @@ class DatasetAxis2D(AbstractAxis2D, DatasetVariable):
 
     def __repr__(self):
         if self.parent is not None:
-            return "<DatasetAxis2D: %s, file: %s, shape: %s>" % (self.id, self.parent.id, repr(self.shape))
+            return "<DatasetAxis2D: %s, file: %s, shape: %s>"%(self.id, self.parent.id, `self.shape`)
         else:
-            return "<DatasetAxis2D: %s, file: **CLOSED**>" % self.id
+            return "<DatasetAxis2D: %s, file: **CLOSED**>"%self.id
 
-# internattr.initialize_internal_attributes(DatasetAxis2D) Copy internal
-# attrs from parents
+## internattr.initialize_internal_attributes(DatasetAxis2D) Copy internal attrs from parents
 
 # Two-dimensional coordinate axis in a file.
-
-
 class FileAxis2D(AbstractAxis2D, FileVariable):
 
     def __init__(self, parent, id, obj=None, bounds=None):
@@ -353,18 +318,15 @@ class FileAxis2D(AbstractAxis2D, FileVariable):
 
     def __repr__(self):
         if self.parent is not None:
-            return "<FileAxis2D: %s, file: %s, shape: %s>" % (self.id, self.parent.id, repr(self.shape))
+            return "<FileAxis2D: %s, file: %s, shape: %s>"%(self.id, self.parent.id, `self.shape`)
         else:
-            return "<FileAxis2D: %s, file: **CLOSED**>" % self.id
-
-# internattr.initialize_internal_attributes(FileAxis2D) # Copy internal
-# attrs from parents
+            return "<FileAxis2D: %s, file: **CLOSED**>"%self.id
 
+## internattr.initialize_internal_attributes(FileAxis2D) # Copy internal attrs from parents
 
 class TransientAxis2D(AbstractAxis2D, TransientVariable):
 
-    def __init__(
-        self, data, typecode=None, copy=0, savespace=0, mask=None, fill_value=None,
+    def __init__(self, data, typecode=None, copy=0, savespace=0, mask=None, fill_value=None,
                  axes=None, attributes=None, id=None, copyaxes=1, bounds=None):
         """Create a transient 2D axis.
         All arguments are as for TransientVariable.
@@ -372,12 +334,11 @@ class TransientAxis2D(AbstractAxis2D, TransientVariable):
           nvert is the max number of vertices per cell.
         """
         AbstractAxis2D.__init__(self, None, None, bounds=bounds)
-        TransientVariable.__init__(
-            self, data, typecode=typecode, copy=copy, savespace=savespace,
+        TransientVariable.__init__(self, data, typecode=typecode, copy=copy, savespace=savespace,
                                    mask=mask, fill_value=fill_value, axes=axes, attributes=attributes,
                                    id=id, copyaxes=copyaxes)
         if axes is not None:
             self.setBounds(bounds)
 
-# internattr.initialize_internal_attributes(TransientAxis2D) # Copy
-# internal attrs from parents
+## internattr.initialize_internal_attributes(TransientAxis2D) # Copy internal attrs from parents
+
diff --git a/Packages/cdms2/Lib/cudsinterface.py b/Packages/cdms2/Lib/cudsinterface.py
index 04673c28e..c97f5fdef 100644
--- a/Packages/cdms2/Lib/cudsinterface.py
+++ b/Packages/cdms2/Lib/cudsinterface.py
@@ -1,46 +1,42 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 "Emulation of old cu package"
-import sys
-from .error import CDMSError
-from .dataset import openDataset, createDataset
-from .tvariable import createVariable
+import string, types, sys
+from error import CDMSError
+from dataset import openDataset, createDataset
+from tvariable import createVariable
 import numpy
 
-
 class cuDataset():
-
     "A mixin class to support the old cu interface"
-
-    def __init__(self):
+    def __init__ (self):
         self.cleardefault()
 
-    def __call__(self, id, *args, **kwargs):
+    def __call__ (self, id, *args, **kwargs):
         """Call a variable object with the given id. Exception if not found.
            Call the variable with the other arguments.
         """
-# note defined here because this is the class all the dataset-type classes
-# inherit
+# note defined here because this is the class all the dataset-type classes inherit
         v = self.variables.get(id)
         if v is None:
             try:
-                if (self.is_gridspec_grid_file() and
-                     (id == '' or id == 'grid' or id == 'gridspec') and
-                     len(args) == 0 and len(kwargs) == 0
-                    ):
+                if ( self.is_gridspec_grid_file() and
+                     ( id=='' or id=='grid' or id=='gridspec' ) and
+                     len(args)==0 and len(kwargs)==0
+                     ):
                     return self.readg()
                 else:
-                    raise CDMSError("No such variable or grid, " + id)
-            except (AttributeError, TypeError):
-                raise CDMSError("No such variable, " + id)
+                    raise CDMSError, "No such variable or grid, " + id
+            except ( AttributeError, TypeError ):
+                raise CDMSError, "No such variable, " + id
         return v(*args, **kwargs)
 
     def __getitem__(self, key):
         """Implement f['varname'] for file/dataset f.
         """
         for d in [self.variables, self.axes, self.grids]:
-            if key in d:
+            if d.has_key(key):
                 result = d[key]
                 break
         else:
@@ -52,19 +48,19 @@ class cuDataset():
         try:
             v = self.variables[vname]
         except KeyError:
-            raise CDMSError("No variable named " + vname + " in file " +
-                  self.id)
+            raise CDMSError, "No variable named " + vname + " in file " + \
+                  self.id
         return v
 
-    def default_variable(self, vname):
+    def default_variable (self, vname):
         "Set the default variable name."
         self.__dict__['default_variable_name'] = vname
 
-    def cleardefault(self):
+    def cleardefault (self):
         "Clear the default variable name."
         self.default_variable("no_default_variable_name_specified")
-
-    def listall(self, vname=None, all=None):
+    
+    def listall (self, vname=None, all=None):
         """Get info about data from the file.
         :::
         Options:::
@@ -72,12 +68,11 @@ class cuDataset():
         all :: (None/True/False/int) (None) include axes information
         :::
         """
-        if vname is None:
-            vname = self.default_variable_name
+        if vname is None: vname = self.default_variable_name
         try:
             m = numpy.get_printoptions()['threshold']
             result = []
-            result.append('*** Description of slab %s in file %s***' %
+            result.append('*** Description of slab %s in file %s***' % \
                           (vname, self.id))
             result.append('Name: ' + vname)
             v = self._v(vname)
@@ -99,24 +94,23 @@ class cuDataset():
                 result.append('Last: ' + str(axis[-1]))
                 if all:
                     result.append(str(self.dimensionarray(axis.id, vname)))
-            result.append('*** End of description of %s ***' % vname)
+            result.append ('*** End of description of %s ***' %vname)
             return result
         finally:
-            numpy.set_printoptions(threshold=m)
-
-    def listattribute(self, vname=None):
+            numpy.set_printoptions (threshold=m)
+        
+    def listattribute (self, vname=None):
         """Get attributes of data from the file.
         :::
         Options:::
         vname :: (str/None) (None) variable name
         :::
         """
-        if vname is None:
-            vname = self.default_variable_name
+        if vname is None: vname = self.default_variable_name
         v = self._v(vname)
         return v.attributes.keys()
 
-    def listdimension(self, vname=None):
+    def listdimension (self, vname=None):
         """Return a list of the dimension names associated with a variable.
            If no argument, return the file.axes.keys()
         :::
@@ -124,20 +118,20 @@ class cuDataset():
         vname :: (str/None) (None) variable name
         :::
         """
-        if vname is None:
+        if vname is None: 
             return self.axes.keys()
         v = self._v(vname)
         d = v.getDomain()
         x = map(lambda n: n[0], d)
-        return map(lambda n: getattr(n, 'id'), x)
+        return map (lambda n: getattr(n, 'id'), x)
 
-    def listglobal(self):
+    def listglobal (self):
         """Returns a list of the global attributes in the file.
         :::
         """
         return self.attributes.keys()
 
-    def listvariable(self):
+    def listvariable (self):
         """Return a list of the variables in the file.
         :::
         """
@@ -145,37 +139,35 @@ class cuDataset():
 
     listvariables = listvariable
 
-    def showglobal(self, device=None):
+    def showglobal (self, device=None):
         """Show the global attributes in the file.
         :::
         Options:::
         device :: (None/file) (None) output device
         :::
         """
-        if device is None:
-            device = sys.stdout
+        if device is None: device=sys.stdout
         device.write("Global attributes in file ")
         device.write(self.id)
         device.write(":\n")
         device.write(str(self.listglobal()))
         device.write("\n")
 
-    def showvariable(self, device=None):
+    def showvariable (self, device=None):
         """Show the variables in the file.
         :::
         Options:::
         device :: (None/file) (None) output device
         :::
         """
-        if device is None:
-            device = sys.stdout
+        if device is None: device=sys.stdout
         device.write("Variables in file ")
         device.write(self.id)
         device.write(":\n")
         device.write(str(self.listvariable()))
         device.write("\n")
 
-    def showattribute(self, vname=None, device=None):
+    def showattribute (self, vname=None, device=None):
         """Show the attributes of vname.
         :::
         Options:::
@@ -183,10 +175,8 @@ class cuDataset():
         device :: (None/file) (None) output device
         :::
         """
-        if device is None:
-            device = sys.stdout
-        if vname is None:
-            vname = self.default_variable_name
+        if device is None: device=sys.stdout
+        if vname is None: vname = self.default_variable_name
         device.write("Attributes of ")
         device.write(vname)
         device.write(" in file ")
@@ -194,8 +184,8 @@ class cuDataset():
         device.write(":\n")
         device.write(str(self.listattribute(vname)))
         device.write("\n")
-
-    def showdimension(self, vname=None, device=None):
+        
+    def showdimension (self, vname=None, device=None):
         """Show the dimension names associated with a variable.
         :::
         Options:::
@@ -203,10 +193,8 @@ class cuDataset():
         device :: (None/file) (None) output device
         :::
         """
-        if device is None:
-            device = sys.stdout
-        if vname is None:
-            vname = self.default_variable_name
+        if device is None: device=sys.stdout
+        if vname is None: vname = self.default_variable_name
         device.write("Dimension names of ")
         device.write(vname)
         device.write(" in file ")
@@ -214,8 +202,8 @@ class cuDataset():
         device.write(":\n")
         device.write(str(self.listdimension(vname)))
         device.write("\n")
-
-    def showall(self, vname=None, all=None, device=None):
+        
+    def showall (self, vname=None, all=None, device=None):
         """Show a full description of the variable.
         :::
         Options:::
@@ -224,15 +212,13 @@ class cuDataset():
         device :: (None/file) (None) output device
         :::
         """
-        if device is None:
-            device = sys.stdout
-        if vname is None:
-            vname = self.default_variable_name
+        if device is None: device=sys.stdout
+        if vname is None: vname = self.default_variable_name
         alist = self.listall(vname, all=all)
-        device.write("\n".join(alist))
+        device.write(string.join(alist, "\n"))
         device.write("\n")
 
-    def dimensionobject(self, dname, vname=None):
+    def dimensionobject (self, dname, vname=None):
         """CDMS axis object for the dimension named dname.
         :::
         Options:::
@@ -245,12 +231,12 @@ class cuDataset():
         axis :: (cdms2.axis.FileAxis) (0) file axis whose id is vname
         :::
         """
-        if vname is None:
+        if vname is None: 
             try:
                 return self.axes[dname]
             except KeyError:
-                raise CDMSError("No axis named " + dname + " in file " +
-                                self.id + ".")
+                raise CDMSError, "No axis named " + dname + " in file " +\
+                                self.id + "."
         else:
             v = self._v(vname)
             d = v.getDomain()
@@ -258,10 +244,10 @@ class cuDataset():
                 if x[0].id == dname:
                     return x[0]
             else:
-                raise CDMSError(vname + " has no axis named " + dname +
-                                " in file " + self.id + ".")
-
-    def dimensionarray(self, dname, vname=None):
+                raise CDMSError, vname + " has no axis named " + dname + \
+                                " in file " + self.id + "."
+        
+    def dimensionarray (self, dname, vname=None):
         """Values of the dimension named dname.
         :::
         Options:::
@@ -275,8 +261,8 @@ class cuDataset():
         :::
         """
         return self.dimensionobject(dname, vname).getValue()
-
-    def getdimensionunits(self, dname, vname=None):
+    
+    def getdimensionunits (self, dname, vname=None):
         """Get the units for the given dimension.
         :::
         Options:::
@@ -292,7 +278,7 @@ class cuDataset():
         x = self.dimensionobject(dname, vname)
         return x.units
 
-    def getglobal(self, attribute):
+    def getglobal (self, attribute):
         """Get the value of the global attribute.
         :::
         Input:::
@@ -306,8 +292,8 @@ class cuDataset():
             return self.attributes[attribute]
         except KeyError:
             return None
-
-    def getattribute(self, vname, attribute):
+    
+    def getattribute (self, vname, attribute):
         """Get the value of attribute for variable vname
         :::
         Input:::
@@ -320,8 +306,8 @@ class cuDataset():
         """
         v = self._v(vname)
         return getattr(v, attribute)
-
-    def getslab(self, vname, *args, **keys):
+            
+    def getslab (self, vname, *args,**keys):
         """getslab('name', arg1, arg2, ....) returns a cdms variable
            containing the data.
 
@@ -351,42 +337,40 @@ class cuDataset():
         nargs = len(args)
         v = self._v(vname)
         if nargs == 0:
-            return v.subRegion()
+           return v.subRegion()
 # note CDMS treats None as a colon in getRegion and mapInterval
         ndims = v.rank()
-        cdms_args = [':'] * ndims
+        cdms_args = [':'] * ndims 
         i = 0
         idim = 0
         ne = 0
         while i < nargs:
             if not (idim < ndims):
-                raise CDMSError("Too many arguments to getslab.")
+                raise CDMSError, "Too many arguments to getslab."
             x = args[i]
-            if x == ':' or x is None:
+            if x == ':' or x == None:
                 i = i + 1
                 idim = idim + 1
                 continue
             elif x == Ellipsis:
-                if ne:
-                    raise CDMSError("Only one ellipsis allowed.")
+                if ne: raise CDMSError, "Only one ellipsis allowed."
                 idim = ndims - (nargs - i - 1)
                 i = i + 1
                 ne = 1
-            elif isinstance(x, tuple):
+            elif type(x) == types.TupleType:
                 cdms_args[idim] = x
                 idim = idim + 1
                 i = i + 1
             else:
-                if not ((i + 1) < nargs):
-                    raise CDMSError(
-                        "Arguments to getslab not paired properly.")
+                if not ((i+1) < nargs):
+                    raise CDMSError, "Arguments to getslab not paired properly."
                 low = float(x)
-                high = float(args[i + 1])
+                high = float(args[i+1])
                 cdms_args[idim] = (low, high, 'cc')
                 idim = idim + 1
                 i = i + 2
         sq = keys.get('squeeze', 0)
-        result = v.subRegion(*tuple(cdms_args), **{'squeeze': sq})
+        result = apply(v.subRegion, tuple(cdms_args), {'squeeze':sq})
         result.parent = self
         result.id = vname
         return result
@@ -407,8 +391,8 @@ class cuDataset():
         grid :: (cdms2.hgrid.TransientCurveGrid/cdms2.gengrid.TransientGenericGrid) (0) variable requested
         :::
         """
-
-        from . import hgrid, gengrid
+        
+        import hgrid, gengrid
 
         # Grid file
         if 'grid_dims' in self.variables.keys():
@@ -416,7 +400,7 @@ class cuDataset():
             whichType = "grid"
 
         # Destination grid from mapping file
-        elif whichGrid == "destination":
+        elif whichGrid=="destination":
             dims = self('dst_grid_dims')
             whichType = "mapping"
 
@@ -425,16 +409,16 @@ class cuDataset():
             dims = self('src_grid_dims')
             whichType = "mapping"
 
-        if len(dims) == 2:
+        if len(dims)==2:
             result = hgrid.readScripCurveGrid(self, dims, whichType, whichGrid)
-        elif len(dims) == 1:
-            result = gengrid.readScripGenericGrid(
-                self, dims, whichType, whichGrid)
+        elif len(dims)==1:
+            result = gengrid.readScripGenericGrid(self, dims, whichType, whichGrid)
         else:
-            raise CDMSError("Grid rank must be 1 or 2, found: %d" % len(dims))
+            raise CDMSError, "Grid rank must be 1 or 2, found: %d"%len(dims)
 
-        if checkGrid == 1:
+        if checkGrid==1:
             nonConvexCells = result.checkConvex()
             result.fixCutCells(nonConvexCells)
 
         return result
+
diff --git a/Packages/cdms2/Lib/database.py b/Packages/cdms2/Lib/database.py
index 682f14628..80f749fb4 100644
--- a/Packages/cdms2/Lib/database.py
+++ b/Packages/cdms2/Lib/database.py
@@ -1,16 +1,18 @@
 """CDMS database objects"""
 
-from .error import CDMSError
-from . import cdmsobj
-from . import cdurlparse
-# import internattr
+from error import CDMSError
+import cdmsobj
+import cdurlparse
+## import internattr
 import copy
 import os
 import re
+import string
 import sys
-from .CDMLParser import CDMLParser
-from .cdmsobj import CdmsObj
-from .dataset import Dataset
+import types
+from CDMLParser import CDMLParser
+from cdmsobj import CdmsObj
+from dataset import Dataset
 
 try:
     import ldap
@@ -31,11 +33,9 @@ MethodNotImplemented = "Method not yet implemented"
 PermissionError = "No permission to access"
 SchemeNotSupported = "Scheme not supported"
 
-_Att = re.compile('([a-zA-Z_:][-a-zA-Z0-9._:]*)=(.*)', re.DOTALL)
+_Att = re.compile('([a-zA-Z_:][-a-zA-Z0-9._:]*)=(.*)',re.DOTALL)
 
 # Open a database connection
-
-
 def connect(uri=None, user="", password=""):
     """
     Method:
@@ -64,48 +64,36 @@ def connect(uri=None, user="", password=""):
         try:
             uri = os.environ['CDMSROOT']
         except KeyError:
-            raise CDMSError(
-                ConnectError +
-                '%s\nSet environment variable CDMSROOT to default database location' %
-                uri)
-    (scheme, netloc, path, parameters, query,
-     fragment) = cdurlparse.urlparse(uri)
-
-    if scheme in ['', 'ldap']:
+            raise CDMSError, ConnectError + '%s\nSet environment variable CDMSROOT to default database location'%uri
+    (scheme,netloc,path,parameters,query,fragment)=cdurlparse.urlparse(uri)
+
+    if scheme in ['','ldap']:
         try:
             ldapdb = ldap.open(netloc)
         except:
-            raise CDMSError(ConnectError + "%s\n%s" % (uri, sys.exc_info()[1]))
+            raise CDMSError, ConnectError +"%s\n%s"%(uri,sys.exc_value)
 
         try:
-            ldapdb.simple_bind_s(user, password)
+            ldapdb.simple_bind_s(user,password)
         except:
-            raise CDMSError(
-                AuthenticationError + "%s\n%s" %
-                (uri, sys.exc_info()[1]))
+            raise CDMSError, AuthenticationError + "%s\n%s"%(uri,sys.exc_value)
 
         try:
-            result = ldapdb.search_s(
-                path[1:],
-                ldap.SCOPE_SUBTREE,
-                "objectclass=database")
+            result = ldapdb.search_s(path[1:], ldap.SCOPE_SUBTREE, "objectclass=database")
         except:
-            raise CDMSError(
-                DatabaseNotFound + "%s\n%s" %
-                (uri, sys.exc_info()[1]))
+            raise CDMSError, DatabaseNotFound + "%s\n%s"%(uri,sys.exc_value)
 
         try:
             dn, attrs = result[0]
         except:
-            raise CDMSError(PermissionError + uri)
-        newuri = "ldap://%s/%s" % (netloc, dn)
+            raise CDMSError, PermissionError + uri
+        newuri = "ldap://%s/%s"%(netloc,dn)
         db = LDAPDatabase(newuri, ldapdb)
         db.setExternalDict(attrs)
         return db
 
     else:
-        raise CDMSError(SchemeNotSupported + scheme)
-
+        raise CDMSError, SchemeNotSupported +  scheme
 
 def loadString(text, uri, parent=None, datapath=None):
     """ Create a dataset from a text string. <text> is the string in CDML format.
@@ -113,61 +101,59 @@ def loadString(text, uri, parent=None, datapath=None):
         <parent> is the containing database object, if any.
         <datapath> is the location of data files relative to the parent database URL.
     """
-    p = CDMLParser()
+    p=CDMLParser()
     p.feed(text)
     p.close()
-    return Dataset(uri, 'r', p.getRoot(), parent, datapath)
-
+    return Dataset(uri,'r',p.getRoot(),parent,datapath)
+    
 
 class AbstractDatabase(CdmsObj):
-
-    """AbstractDatabase defines the common database interface. Concrete database classes are
+    """AbstractDatabase defines the common database interface. Concrete database classes are 
        derived from this class.
     """
 
     def __init__(self, uri, path):
-        CdmsObj.__init__(self, None)
+        CdmsObj.__init__(self,None)
         self.uri = uri
         self.path = path
         self._cache_ = {}
         self._cdmlcache_ = {}
-        self._datacache_ = None  # datasetdn: obj # Remote file data cache
+        self._datacache_ = None # datasetdn: obj # Remote file data cache
         self.lcBaseDN = None            # Logical Collection base distinguished name
         self.useReplica = None          # Use replica catalog if true (request manager transfers only)
         self.userid = None              # User ID for request manager transfers
 
     def close(self):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def cachecdml(self, name, cdml):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def getDataset(self, name):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def getObjFromDataset(self, name):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def openDataset(self, dsetid, mode='r'):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
-    def searchFilter(self, filter, classtag=None,
-                     relbase=None, scope=Subtree, attnames=[]):
-        raise CDMSError(MethodNotImplemented)
+    def searchFilter(self, filter, classtag=None, relbase=None, scope=Subtree, attnames=[]):
+        raise CDMSError, MethodNotImplemented
 
     def enableCache(self):
         if self._datacache_ is None:
-            from . import cache
+            import cache
             self._datacache_ = cache.Cache()
         return self._datacache_
 
     def disableCache(self):
-        if self._datacache_ is not None:
+        if self._datacache_ != None:
             self._datacache_.delete()
             self._datacache_ = None
 
-    def useRequestManager(self, lcBaseDN, useReplica=1, userid="anonymous"):
-        from . import cache
+    def useRequestManager(self, lcBaseDN, useReplica=1, userid = "anonymous"):
+        import cache
         self.enableCache()
         cache.useRequestManagerTransfer()
         self.lcBaseDN = lcBaseDN
@@ -175,23 +161,20 @@ class AbstractDatabase(CdmsObj):
         self.userid = userid
 
     def usingRequestManager(self):
-        from . import cache
-        return (cache._transferMethod == cache._requestManagerTransfer)
+        import cache
+        return (cache._transferMethod==cache._requestManagerTransfer)
 
     def __repr__(self):
-        return "<Database '%s'>" % (self.uri)
+        return "<Database '%s'>"%(self.uri)
 
-# internattr.add_internal_attribute(AbstractDatabase, 'uri', 'path')
+## internattr.add_internal_attribute(AbstractDatabase, 'uri', 'path')
 
 # Database implemented via LDAP (Lightweight Directory Access Protocol)
-
-
 class LDAPDatabase(AbstractDatabase):
 
     def __init__(self, uri, db):
-        (scheme, netloc, path, parameters, query,
-         fragment) = cdurlparse.urlparse(uri)
-        AbstractDatabase.__init__(self, uri, path[1:])
+        (scheme,netloc,path,parameters,query,fragment)=cdurlparse.urlparse(uri)
+        AbstractDatabase.__init__(self,uri,path[1:])
         self.netloc = netloc
         self.db = db
 
@@ -210,64 +193,58 @@ class LDAPDatabase(AbstractDatabase):
           None
 
         """
-        if self.db is not None:
+        if self.db != None:
             self.db.unbind()
         self.db = None
         self.disableCache()
-
+        
     def __del__(self):
         # if cdmsobj._debug==1:
         #    print 'Deleting object',self
         self.close()
-
+    
     def normalizedn(self, dn):
         explodeddn = ldap.explode_dn(dn)
-        return ','.join(explodeddn)
+        return string.join(explodeddn,',')
 
     def cachecdml(self, name, cdml, datapath):
         normaldn = self.normalizedn(name)
-        self._cdmlcache_[normaldn] = (cdml, datapath)
+        self._cdmlcache_[normaldn] = (cdml,datapath)
 
     def getDataset(self, dn):
         normaldn = self.normalizedn(dn)
-        if normaldn in self._cache_:
+        if self._cache_.has_key(normaldn):
             dataset = self._cache_[normaldn]
-        elif normaldn in self._cdmlcache_:
-            (text, datapath) = self._cdmlcache_[normaldn]
-            uri = "ldap://%s/%s" % (self.netloc, normaldn)
-            if cdmsobj._debug == 1:
-                print 'Loading %s from cached CDML' % uri
-            dataset = loadString(text, uri, self, datapath)
+        elif self._cdmlcache_.has_key(normaldn):
+            (text,datapath) = self._cdmlcache_[normaldn]
+            uri = "ldap://%s/%s"%(self.netloc,normaldn)
+            if cdmsobj._debug==1:
+                print 'Loading %s from cached CDML'%uri
+            dataset = loadString(text,uri,self,datapath)
             self._cache_[normaldn] = dataset
         else:
-            if cdmsobj._debug == 1:
-                print 'Search filter: (objectclass=dataset), scope: base, base: "%s", attributes=["cdml"]' % (dn,)
-            result = self.db.search_s(
-                dn,
-                ldap.SCOPE_BASE,
-                "objectclass=dataset",
-                ["cdml",
-                 "datapath"])
-            resultdn, attrs = result[0]
+            if cdmsobj._debug==1:
+                print 'Search filter: (objectclass=dataset), scope: base, base: "%s", attributes=["cdml"]'%(dn,)
+            result = self.db.search_s(dn, ldap.SCOPE_BASE, "objectclass=dataset",["cdml","datapath"])
+            resultdn,attrs = result[0]
             text = attrs["cdml"][0]
-            uri = "ldap://%s/%s" % (self.netloc, normaldn)
+            uri = "ldap://%s/%s"%(self.netloc,normaldn)
             datapath = attrs.get("datapath")
-            if datapath:
-                datapath = datapath[0]
-            dataset = loadString(text, uri, self, datapath)
+            if datapath: datapath = datapath[0]
+            dataset = loadString(text,uri,self,datapath)
             self._cache_[normaldn] = dataset
         return dataset
-
+ 
     def getObjFromDataset(self, dn):
 
         # Get the parent dataset
         explodeddn = ldap.explode_dn(dn)
-        dsetdn = ','.join(explodeddn[1:])  # Dataset node is parent of variable
+        dsetdn = string.join(explodeddn[1:],',') # Dataset node is parent of variable
         dset = self.getDataset(dsetdn)
         rdn = explodeddn[0]
         matchobj = _Att.match(rdn)
         if matchobj is None:
-            raise CDMSError(InvalidEntryName + dn)
+            raise CDMSError, InvalidEntryName +  dn
         tag, id = matchobj.groups()
 
         # Get the correct dictionary for this tag
@@ -298,7 +275,7 @@ class LDAPDatabase(AbstractDatabase):
 
           dset = db.openDataset('ncep_reanalysis_mo')
         """
-        dn = "dataset=%s,%s" % (dsetid, self.path)
+        dn = "dataset=%s,%s"%(dsetid,self.path)
         dset = self.getDataset(dn)
         return dset
 
@@ -310,24 +287,23 @@ class LDAPDatabase(AbstractDatabase):
     def setExternalDict(self, ldapattrs):
         for attname in ldapattrs.keys():
             attvals = ldapattrs[attname]
-            if attname == 'objectclass':
+            if attname=='objectclass':
                 continue
-            elif attname == 'attr':       # Handle attr: name=value
+            elif attname=='attr':       # Handle attr: name=value
                 for attval in attvals:
                     matchobj = _Att.match(attval)
                     if matchobj is not None:
-                        newname, newval = matchobj.groups()
+                        newname,newval = matchobj.groups()
                         self.attributes[newname] = newval
 
             # If the attribute value is a multi-valued list, keep it as a list
             # otherwise copy the single value from the list.
-            if len(attvals) == 1:
+            if len(attvals)==1:
                 self.attributes[attname] = attvals[0]
             else:
                 self.attributes[attname] = attvals
 
-    def searchFilter(self, filter=None, tag=None,
-                     relbase=None, scope=Subtree, attnames=None, timeout=None):
+    def searchFilter(self, filter=None, tag=None, relbase=None, scope=Subtree, attnames=None, timeout=None):
         """
         Method:
 
@@ -358,7 +334,7 @@ class LDAPDatabase(AbstractDatabase):
                              "<=" |     # lexicographically less than or equal to
                              ">="       # lexicographically greater than or equal to
               value      ::= string, may include '*' as a wild card
-
+                             
           tag: string class tag ("dataset" | "variable" | "database" | "axis" | "grid").
             Restricts the search to a class of objects
           relbase: string search base, relative to the database path
@@ -390,26 +366,25 @@ class LDAPDatabase(AbstractDatabase):
           result = db.searchFilter(relbase="dataset=ncep_reanalysis_mo"), scope=cdms.Onelevel)
 
         """
-        if tag is None:
-            tag = '*'
-        newfilter = "(objectclass=%s)" % tag
+        if tag is None: tag='*'
+        newfilter = "(objectclass=%s)"%tag
         if filter is not None:
-            if filter[0] != '(':
-                filter = "(%s)" % filter
-            newfilter = "(&%s%s)" % (newfilter, filter)
+            if filter[0]!='(':
+                filter = "(%s)"%filter
+            newfilter = "(&%s%s)"%(newfilter,filter)
 
         if relbase is None:
             base = self.path
         else:
-            base = "%s,%s" % (relbase, self.path)
+            base = "%s,%s"%(relbase,self.path)
 
         if attnames is None:
             atts = None
         else:
-            atts = ["objectclass", "cdml", "id"] + attnames
-
-        if cdmsobj._debug == 1:
-            print 'Search filter:%s, scope %s, base: "%s", attributes=%s' % (newfilter, repr(scope), base, repr(atts))
+            atts = ["objectclass","cdml","id"]+attnames
+            
+        if cdmsobj._debug==1:
+            print 'Search filter:%s, scope %s, base: "%s", attributes=%s'%(newfilter,`scope`,base,`atts`)
         if timeout is None:
             result = self.db.search_s(base, scope, newfilter, atts)
         else:
@@ -419,12 +394,11 @@ class LDAPDatabase(AbstractDatabase):
 
     def listDatasets(self):
         """ Return a list of the dataset IDs in this database."""
-        entries = self.searchFilter(tag='dataset', scope=Onelevel)
+        entries = self.searchFilter(tag='dataset', scope=Onelevel )
         result = map(lambda x: x.attributes['id'][0], entries)
         return result
 
-# internattr.add_internal_attribute(LDAPDatabase, 'netloc', 'db')
-
+## internattr.add_internal_attribute(LDAPDatabase, 'netloc', 'db')
 
 class AbstractSearchResult:
 
@@ -437,7 +411,6 @@ class AbstractSearchResult:
     def searchPredicate(self, predicate, tag=None):
         MethodNotImplemented = "Method not yet implemented"
 
-
 class LDAPSearchResult(AbstractSearchResult):
 
     def __init__(self, db, LDAPresult):
@@ -446,15 +419,15 @@ class LDAPSearchResult(AbstractSearchResult):
 
         # Scan the result for CDML attributes, cache them in the database
         for dn, attrs in self.result:
-            if 'cdml' in attrs and 'datapath' in attrs:
+            if attrs.has_key('cdml') and attrs.has_key('datapath'):
                 cdml = attrs['cdml'][0]
                 datapath = attrs['datapath'][0]
-                self.db.cachecdml(dn, cdml, datapath)
+                self.db.cachecdml(dn,cdml,datapath)
                 del attrs['cdml']
 
     def __getitem__(self, key):
-        if key >= len(self):
-            raise IndexError('index out of bounds')
+        if key>=len(self):
+            raise IndexError, 'index out of bounds'
 
         dn, attributes = self.result[key]
 
@@ -495,15 +468,15 @@ class LDAPSearchResult(AbstractSearchResult):
 
         """
         if tag is not None:
-            tag = tag.lower()
+            tag = string.lower(tag)
 
         resultlist = []
         for entry in self:
             obj = entry.getObject()
-            if tag is None or tag == entry.tag:
+            if tag is None or tag==entry.tag:
                 try:
-                    if predicate(*(obj,)) == 1:
-                        resultlist.append((entry.name, entry.attributes))
+                    if apply(predicate,(obj,))==1:
+                        resultlist.append((entry.name,entry.attributes))
                 except:
                     pass
 
@@ -512,7 +485,6 @@ class LDAPSearchResult(AbstractSearchResult):
     def __len__(self):
         return len(self.result)
 
-
 class AbstractResultEntry:
 
     def __init__(self, db):
@@ -534,16 +506,15 @@ class AbstractResultEntry:
 
         """
 
-        if self.tag == "database":
+        if self.tag=="database":
             obj = self.db
-        elif self.tag == "dataset":
+        elif self.tag=="dataset":
             obj = self.db.getDataset(self.name)
         else:
             obj = self.db.getObjFromDataset(self.name)
 
         return obj
 
-
 class LDAPResultEntry(AbstractResultEntry):
 
     def __init__(self, db, dn, attributes):
@@ -556,6 +527,9 @@ class LDAPResultEntry(AbstractResultEntry):
         rdn = explodeddn[0]
         matchobj = _Att.match(rdn)
         if matchobj is None:
-            raise IndexError(InvalidEntryName + dn)
+            raise IndexError, InvalidEntryName + dn
 
         self.tag = matchobj.group(1)
+
+
+
diff --git a/Packages/cdms2/Lib/dataset.py b/Packages/cdms2/Lib/dataset.py
index f9f9562ec..977b1d8bb 100644
--- a/Packages/cdms2/Lib/dataset.py
+++ b/Packages/cdms2/Lib/dataset.py
@@ -1,38 +1,38 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by foo
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by foo
+## Further modified to be pure new numpy June 24th 2008
 
 """ CDMS dataset and file objects"""
-from .error import CDMSError
+from error import CDMSError
 import Cdunif
 import numpy
-from . import cdmsNode
-import os
-import sys
+import cdmsNode
+import os, sys
+import string
 import urllib
-from . import cdmsURLopener                    # Import after urllib, to handle errors
+import cdmsURLopener                    # Import after urllib, to handle errors
 import urlparse
-# import internattr
-from . import cdmsobj
+## import internattr
+import cdmsobj
 import re
-from .CDMLParser import CDMLParser
-from .cdmsobj import CdmsObj
-from .axis import Axis, FileAxis, FileVirtualAxis, isOverlapVector
-from .coord import FileAxis2D, DatasetAxis2D
-from .auxcoord import FileAuxAxis1D, DatasetAuxAxis1D
-from .grid import RectGrid, FileRectGrid
-from .hgrid import FileCurveGrid, DatasetCurveGrid
-from .gengrid import FileGenericGrid, DatasetGenericGrid
-from .variable import DatasetVariable
-from .fvariable import FileVariable
-from .tvariable import asVariable
-from .cdmsNode import CdDatatypes
-from . import convention
-from . import typeconv
+from CDMLParser import CDMLParser
+from cdmsobj import CdmsObj
+from axis import Axis, FileAxis, FileVirtualAxis, isOverlapVector
+from coord import FileAxis2D, DatasetAxis2D
+from auxcoord import FileAuxAxis1D, DatasetAuxAxis1D
+from grid import RectGrid, FileRectGrid
+from hgrid import FileCurveGrid, DatasetCurveGrid
+from gengrid import FileGenericGrid, DatasetGenericGrid
+from variable import DatasetVariable
+from fvariable import FileVariable
+from tvariable import asVariable
+from cdmsNode import CdDatatypes
+import convention
+import typeconv
 
 # Default is serial mode until setNetcdfUseParallelFlag(1) is called
 rk = 0
 sz = 1
-Cdunif.CdunifSetNCFLAGS("use_parallel", 0)
+Cdunif.CdunifSetNCFLAGS("use_parallel",0)
 CdMpi = False
 
 try:
@@ -43,19 +43,17 @@ except:
     rk = 0
 
 try:
-    from . import gsHost
+    import gsHost
     from pycf import libCFConfig as libcf
 except:
     libcf = None
 
 try:
-    from . import cache
+    import cache
 except ImportError:
     pass
 
 DuplicateAxis = "Axis already defined: "
-
-
 class DuplicateAxisError(CDMSError):
     pass
 DuplicateGrid = "Grid already defined: "
@@ -78,37 +76,12 @@ _IndexPat = r'(\d+|-)'
 _FilePath = r"([^\s\]\',]+)"
 # Two file map patterns, _IndexList4 is the original one, _IndexList5 supports
 # forecast data too...
-_IndexList4 = re.compile(
-    _ListStartPat +
-    _IndexPat +
-    _ListSepPat +
-    _IndexPat +
-    _ListSepPat +
-    _IndexPat +
-    _ListSepPat +
-    _IndexPat +
-    _ListSepPat +
-    _FilePath +
-    _ListEndPat)
-_IndexList5 = re.compile(
-    _ListStartPat +
-    _IndexPat +
-    _ListSepPat +
-    _IndexPat +
-    _ListSepPat +
-    _IndexPat +
-    _ListSepPat +
-    _IndexPat +
-    _ListSepPat +
-    _IndexPat +
-    _ListSepPat +
-    _FilePath +
-    _ListEndPat)
+_IndexList4 = re.compile(_ListStartPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_FilePath+_ListEndPat)
+_IndexList5 = re.compile(_ListStartPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_IndexPat+_ListSepPat+_FilePath+_ListEndPat)
 
 _NPRINT = 20
 _showCompressWarnings = True
 
-
 def setCompressionWarnings(value=None):
     """Turn on/off the warnings for compression
     Usage:
@@ -121,57 +94,49 @@ def setCompressionWarnings(value=None):
     global _showCompressWarnings
     if value is None:
         value = not _showCompressWarnings
-    if isinstance(value, str):
-        if not value.slower() in ['y', 'n', 'yes', 'no']:
-            raise CDMSError(
-                "setCompressionWarnings flags must be yes/no or 1/0, or None to invert it")
-        if value.lower()[0] == 'y':
+    if isinstance(value,str):
+        if not value.slower() in ['y','n','yes','no']:
+            raise CDMSError("setCompressionWarnings flags must be yes/no or 1/0, or None to invert it")
+        if value.lower()[0]=='y':
             value = 1
         else:
             value = 0
-    if not isinstance(value, (int, bool)):
-        raise CDMSError(
-            "setCompressionWarnings flags must be yes/no or 1/0, or None to invert it")
+    if not isinstance(value, (int,bool)):
+        raise CDMSError("setCompressionWarnings flags must be yes/no or 1/0, or None to invert it")
 
-    if value in [1, True]:
+    if value in [1,True]:
         _showCompressWarnings = True
-    elif value in [0, False]:
+    elif value in [0,False]:
         _showCompressWarnings = False
     else:
-        raise CDMSError(
-            "setCompressionWarnings flags must be yes\/no or 1\/0, or None to invert it")
+        raise CDMSError("setCompressionWarnings flags must be yes\/no or 1\/0, or None to invert it")
 
     return _showCompressWarnings
 
-
 def setNetcdfUseNCSwitchModeFlag(value):
     """ Tells cdms2 to switch constantly between netcdf define/write modes"""
 
-    if value not in [True, False, 0, 1]:
-        raise CDMSError(
-            "Error UseNCSwitchMode flag must be 1(can use)/0(do not use) or true/False")
-    if value in [0, False]:
-        Cdunif.CdunifSetNCFLAGS("use_define_mode", 0)
+    if value not in [True,False,0,1]:
+        raise CDMSError("Error UseNCSwitchMode flag must be 1(can use)/0(do not use) or true/False")
+    if value in [0,False]:
+        Cdunif.CdunifSetNCFLAGS("use_define_mode",0)
     else:
-        Cdunif.CdunifSetNCFLAGS("use_define_mode", 1)
-
+        Cdunif.CdunifSetNCFLAGS("use_define_mode",1)
 
 def setNetcdfUseParallelFlag(value):
     """ Sets NetCDF classic flag value"""
     global CdMpi
-    if value not in [True, False, 0, 1]:
-        raise CDMSError(
-            "Error UseParallel flag must be 1(can use)/0(do not use) or true/False")
-    if value in [0, False]:
-        Cdunif.CdunifSetNCFLAGS("use_parallel", 0)
+    if value not in [True,False,0,1]:
+        raise CDMSError("Error UseParallel flag must be 1(can use)/0(do not use) or true/False")
+    if value in [0,False]:
+        Cdunif.CdunifSetNCFLAGS("use_parallel",0)
     else:
-        Cdunif.CdunifSetNCFLAGS("use_parallel", 1)
+        Cdunif.CdunifSetNCFLAGS("use_parallel",1)
         CdMpi = True
         if not MPI.Is_initialized():
             MPI.Init()
         rk = MPI.COMM_WORLD.Get_rank()
 
-
 def getMpiRank():
     ''' Return number of processor available '''
     if CdMpi:
@@ -180,7 +145,6 @@ def getMpiRank():
     else:
         return 0
 
-
 def getMpiSize():
     if CdMpi:
         sz = MPI.COMM_WORLD.Get_size()
@@ -188,90 +152,75 @@ def getMpiSize():
     else:
         return 1
 
-
 def setNetcdf4Flag(value):
     """ Sets NetCDF classic flag value"""
-    if value not in [True, False, 0, 1]:
+    if value not in [True,False,0,1]:
         raise CDMSError("Error NetCDF4 flag must be 1/0 or true/False")
-    if value in [0, False]:
-        Cdunif.CdunifSetNCFLAGS("netcdf4", 0)
+    if value in [0,False]:
+        Cdunif.CdunifSetNCFLAGS("netcdf4",0)
     else:
-        Cdunif.CdunifSetNCFLAGS("netcdf4", 1)
+        Cdunif.CdunifSetNCFLAGS("netcdf4",1)
 
-
-def setNetcdfClassicFlag(value):
+def setNetcdfClassicFlag(value):        
     """ Sets NetCDF classic flag value"""
-    if value not in [True, False, 0, 1]:
+    if value not in [True,False,0,1]:
         raise CDMSError("Error NetCDF Classic flag must be 1/0 or true/False")
-    if value in [0, False]:
-        Cdunif.CdunifSetNCFLAGS("classic", 0)
+    if value in [0,False]:
+        Cdunif.CdunifSetNCFLAGS("classic",0)
     else:
-        Cdunif.CdunifSetNCFLAGS("classic", 1)
-
+        Cdunif.CdunifSetNCFLAGS("classic",1)
 
-def setNetcdfShuffleFlag(value):
+def setNetcdfShuffleFlag(value):        
     """ Sets NetCDF shuffle flag value"""
-    if value not in [True, False, 0, 1]:
+    if value not in [True,False,0,1]:
         raise CDMSError("Error NetCDF Shuffle flag must be 1/0 or true/False")
-    if value in [0, False]:
-        Cdunif.CdunifSetNCFLAGS("shuffle", 0)
+    if value in [0,False]:
+        Cdunif.CdunifSetNCFLAGS("shuffle",0)
     else:
-        Cdunif.CdunifSetNCFLAGS("shuffle", 1)
-
+        Cdunif.CdunifSetNCFLAGS("shuffle",1)
 
 def setNetcdfDeflateFlag(value):
     """ Sets NetCDF deflate flag value"""
-    if value not in [True, False, 0, 1]:
+    if value not in [True,False,0,1]:
         raise CDMSError("Error NetCDF deflate flag must be 1/0 or true/False")
-    if value in [0, False]:
-        Cdunif.CdunifSetNCFLAGS("deflate", 0)
+    if value in [0,False]:
+        Cdunif.CdunifSetNCFLAGS("deflate",0)
     else:
-        Cdunif.CdunifSetNCFLAGS("deflate", 1)
-
-
+        Cdunif.CdunifSetNCFLAGS("deflate",1)
+        
 def setNetcdfDeflateLevelFlag(value):
     """ Sets NetCDF deflate level flag value"""
-    if value not in [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]:
-        raise CDMSError(
-            "Error NetCDF deflate_level flag must be an integer < 10")
-    Cdunif.CdunifSetNCFLAGS("deflate_level", value)
-
+    if value not in [0,1,2,3,4,5,6,7,8,9]:
+        raise CDMSError("Error NetCDF deflate_level flag must be an integer < 10")
+    Cdunif.CdunifSetNCFLAGS("deflate_level",value)
 
 def getNetcdfUseNCSwitchModeFlag():
     """ Returns NetCDF UseParallel flag value"""
     return Cdunif.CdunifGetNCFLAGS("use_define_mode")
 
-
 def getNetcdfUseParallelFlag():
     """ Returns NetCDF UseParallel flag value"""
     return Cdunif.CdunifGetNCFLAGS("use_parallel")
 
-
 def getNetcdf4Flag():
     """ Returns NetCDF4 flag value"""
     return Cdunif.CdunifGetNCFLAGS("netcdf4")
 
-
 def getNetcdfClassicFlag():
     """ Returns NetCDF classic flag value"""
     return Cdunif.CdunifGetNCFLAGS("classic")
 
-
 def getNetcdfShuffleFlag():
     """ Returns NetCDF shuffle flag value"""
     return Cdunif.CdunifGetNCFLAGS("shuffle")
 
-
 def getNetcdfDeflateFlag():
     """ Returns NetCDF deflate flag value"""
     return Cdunif.CdunifGetNCFLAGS("deflate")
 
-
 def getNetcdfDeflateLevelFlag():
     """ Returns NetCDF deflate level flag value"""
     return Cdunif.CdunifGetNCFLAGS("deflate_level")
-
-
 def useNetcdf3():
     """ Turns off (0) NetCDF flags for shuffle/defalte/defaltelevel
     Output files are generated as NetCDF3 Classic after that
@@ -282,13 +231,11 @@ def useNetcdf3():
 
 # Create a tree from a file path.
 # Returns the parse tree root node.
-
-
 def load(path):
     fd = open(path)
     text = fd.read()
     fd.close()
-    p = CDMLParser()
+    p=CDMLParser()
     p.feed(text)
     p.close()
     return p.getRoot()
@@ -297,16 +244,13 @@ def load(path):
 # URI is of the form scheme://netloc/path;parameters?query#fragment
 # where fragment may be an XPointer.
 # Returns the parse tree root node.
-
-
 def loadURI(uri):
-    (scheme, netloc, path, parameters,
-     query, fragment) = urlparse.urlparse(uri)
-    uripath = urlparse.urlunparse((scheme, netloc, path, '', '', ''))
+    (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(uri)
+    uripath = urlparse.urlunparse((scheme,netloc,path,'','',''))
     fd = urllib.urlopen(uripath)
     text = fd.read()
     fd.close()
-    p = CDMLParser()
+    p=CDMLParser()
     p.feed(text)
     p.close()
     return p.getRoot()
@@ -314,19 +258,14 @@ def loadURI(uri):
 # Create a dataset
 # 'path' is the XML file name, or netCDF filename for simple file create
 # 'template' is a string template for the datafile(s), for dataset creation
-
-
-def createDataset(path, template=None):
-    return openDataset(path, 'w', template)
+def createDataset(path,template=None):
+    return openDataset(path,'w',template)
 
 # Open an existing dataset
 # 'uri' is a Uniform Resource Identifier, referring to a cdunif file, XML file,
 #   or LDAP URL of a catalog dataset entry.
 # 'mode' is 'r', 'r+', 'a', or 'w'
-
-
-def openDataset(uri, mode='r', template=None,
-                dods=1, dpath=None, hostObj=None):
+def openDataset(uri,mode='r',template=None,dods=1,dpath=None, hostObj=None):
     """
     Options:::
 mode :: (str) ('r') mode to open the file in read/write/append
@@ -341,46 +280,42 @@ Output:::
 file :: (cdms2.dataset.CdmsFile) (0) file to read from
 :::
     """
-    uri = uri.strip()
-    (scheme, netloc, path, parameters,
-     query, fragment) = urlparse.urlparse(uri)
-    if scheme in ('', 'file'):
+    uri = string.strip(uri)
+    (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(uri)
+    if scheme in ('','file'):
         if netloc:
             # In case of relative path...
             path = netloc + path
         path = os.path.expanduser(path)
         path = os.path.normpath(os.path.join(os.getcwd(), path))
 
-        root, ext = os.path.splitext(path)
-        if ext in ['.xml', '.cdml']:
-            if mode != 'r':
-                raise ModeNotSupported(mode)
+        root,ext = os.path.splitext(path)
+        if ext in ['.xml','.cdml']:
+            if mode!='r': raise ModeNotSupported(mode)
             datanode = load(path)
         else:
             # If the doesn't exist allow it to be created
-            # Ok mpi has issues with bellow we need to test this only with 1
-            # rank
+            ##Ok mpi has issues with bellow we need to test this only with 1 rank
             if not os.path.exists(path):
-                return CdmsFile(path, mode, mpiBarrier=CdMpi)
-            elif mode == "w":
+                return CdmsFile(path,mode,mpiBarrier=CdMpi)
+            elif mode=="w":
                 try:
                     os.remove(path)
                 except:
                     pass
-                return CdmsFile(path, mode, mpiBarrier=CdMpi)
-
+                return CdmsFile(path,mode,mpiBarrier=CdMpi)
+            
             # The file exists
-            file1 = CdmsFile(path, "r")
+            file1 = CdmsFile(path,"r")
             if libcf is not None:
                 if hasattr(file1, libcf.CF_FILETYPE):
                     if getattr(file1, libcf.CF_FILETYPE) == libcf.CF_GLATT_FILETYPE_HOST:
                         file = gsHost.open(path, mode)
-                    elif mode == 'r' and hostObj is None:
-                        # helps performance on machines where file open (in
-                        # CdmsFile) is costly
+                    elif mode=='r' and hostObj is None:
+                        # helps performance on machines where file open (in CdmsFile) is costly
                         file = file1
                     else:
-                        file = CdmsFile(path, mode, hostObj=hostObj)
+                        file = CdmsFile(path, mode, hostObj = hostObj)
                     file1.close()
                 else:
                     file1.close()
@@ -390,18 +325,17 @@ file :: (cdms2.dataset.CdmsFile) (0) file to read from
                 file1.close()
                 return CdmsFile(path, mode)
     elif scheme in ['http', 'gridftp']:
-
+        
         if (dods):
-            if mode != 'r':
-                raise ModeNotSupported(mode)
+            if mode!='r': raise ModeNotSupported(mode)
             # DODS file?
             try:
-                file = CdmsFile(uri, mode)
+                file = CdmsFile(uri,mode)
                 return file
-            except Exception as err:
-                msg = "Error in DODS open of: " + uri
-                if os.path.exists(os.path.join(os.path.expanduser("~"), ".dodsrc")):
-                    msg += "\nYou have a .dodsrc in your HOME directory, try to remove it"
+            except Exception,err:
+                msg = "Error in DODS open of: "+uri
+                if os.path.exists(os.path.join(os.path.expanduser("~"),".dodsrc")):
+                  msg+="\nYou have a .dodsrc in your HOME directory, try to remove it"
                 raise CDMSError(msg)
         else:
             try:
@@ -409,8 +343,8 @@ file :: (cdms2.dataset.CdmsFile) (0) file to read from
                 return datanode
             except:
                 datanode = loadURI(uri)
-                raise CDMSError("Error in loadURI of: " + uri)
-
+                raise CDMSError("Error in loadURI of: "+uri)
+            
     else:
         raise SchemeNotSupported(scheme)
 
@@ -424,13 +358,13 @@ file :: (cdms2.dataset.CdmsFile) (0) file to read from
     # Note: In general, dset.datapath is relative to the URL of the
     #   enclosing database, but here the database is null, so the
     #   datapath should be absolute.
-    if dpath is None:
+    if dpath==None:
         direc = datanode.getExternalAttr('directory')
         head = os.path.dirname(path)
         if direc and os.path.isabs(direc):
             dpath = direc
         elif direc:
-            dpath = os.path.join(head, direc)
+            dpath = os.path.join(head,direc)
         else:
             dpath = head
 
@@ -438,8 +372,6 @@ file :: (cdms2.dataset.CdmsFile) (0) file to read from
     return dataset
 
 # Functions for parsing the file map.
-
-
 def parselist(text, f):
     """Parse a string of the form [A, A, ...].
     f is a function which parses A and returns (A, nconsumed)
@@ -448,13 +380,13 @@ def parselist(text, f):
     n = 0
     m = _ListStart.match(text)
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near " + text[0:_NPRINT])
+        raise CDMSError("Parsing cdms_filemap near "+text[0:_NPRINT])
     result = []
     n += m.end()
     s, nconsume = f(text[n:])
     result.append(s)
     n += nconsume
-    while True:
+    while 1:
         m = _ListSep.match(text[n:])
         if m is None:
             break
@@ -465,11 +397,10 @@ def parselist(text, f):
         n += nconsume
     m = _ListEnd.match(text[n:])
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near " + text[n:n + _NPRINT])
+        raise CDMSError("Parsing cdms_filemap near "+text[n:n+_NPRINT])
     n += m.end()
     return result, n
 
-
 def parseIndexList(text):
     """Parse a string of the form [i,j,k,l,...,path] where
     i,j,k,l,... are indices or '-', and path is a filename.
@@ -481,48 +412,45 @@ def parseIndexList(text):
         m = _IndexList5.match(text)
         nindices = 5
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near " + text[0:_NPRINT])
-    result = [None] * (nindices + 1)
+        raise CDMSError("Parsing cdms_filemap near "+text[0:_NPRINT])
+    result = [None]*(nindices+1)
     for i in range(nindices):
-        s = m.group(i + 1)
-        if s != '-':
-            result[i] = int(s)
-    result[nindices] = m.group(nindices + 1)
+        s = m.group(i+1)
+        if s!='-':
+            result[i] = string.atoi(s)
+    result[nindices] = m.group(nindices+1)
     return result, m.end()
 
-
 def parseName(text):
     m = _Name.match(text)
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near " + text[0:_NPRINT])
+        raise CDMSError("Parsing cdms_filemap near "+text[0:_NPRINT])
     return m.group(), m.end()
 
-
 def parseVarMap(text):
     """Parse a string of the form [ namelist, slicelist ]"""
     n = 0
     m = _ListStart.match(text)
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near " + text[0:_NPRINT])
+        raise CDMSError("Parsing cdms_filemap near "+text[0:_NPRINT])
     result = []
     n += m.end()
-    s, nconsume = parselist(text[n:], parseName)
+    s, nconsume = parselist(text[n:],parseName)
     result.append(s)
     n += nconsume
     m = _ListSep.match(text[n:])
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near " + text[n:n + _NPRINT])
+        raise CDMSError("Parsing cdms_filemap near "+text[n:n+_NPRINT])
     n += m.end()
     s, nconsume = parselist(text[n:], parseIndexList)
     result.append(s)
     n += nconsume
     m = _ListEnd.match(text[n:])
     if m is None:
-        raise CDMSError("Parsing cdms_filemap near " + text[n:n + _NPRINT])
+        raise CDMSError("Parsing cdms_filemap near "+text[n:n+_NPRINT])
     n += m.end()
     return result, n
 
-
 def parseFileMap(text):
     """Parse a CDMS filemap. having the form:
     filemap :== [ varmap, varmap, ...]
@@ -532,43 +460,38 @@ def parseFileMap(text):
     indexlist :== [i,j,k,l,path]
     """
     result, n = parselist(text, parseVarMap)
-    if n < len(text):
-        raise CDMSError("Parsing cdms_filemap near " + text[n:n + _NPRINT])
+    if n<len(text):
+        raise CDMSError("Parsing cdms_filemap near "+text[n:n+_NPRINT])
     return result
 
 
 # A CDMS dataset consists of a CDML/XML file and one or more data files
-from .cudsinterface import cuDataset
-
-
+from cudsinterface import cuDataset
 class Dataset(CdmsObj, cuDataset):
-
-    def __init__(self, uri, mode,
-                 datasetNode=None, parent=None, datapath=None):
-        if datasetNode is not None and datasetNode.tag != 'dataset':
+    def __init__(self, uri, mode, datasetNode=None, parent=None, datapath=None):
+        if datasetNode is not None and datasetNode.tag !='dataset':
             raise CDMSError('Node is not a dataset node')
-        CdmsObj.__init__(self, datasetNode)
-        for v in ['datapath',
-                  'variables',
-                  'axes',
-                  'grids',
-                  'xlinks',
-                  'dictdict',
-                  'default_variable_name',
-                  'parent',
-                  'uri',
-                  'mode']:
+        CdmsObj.__init__(self,datasetNode)
+        for v in [ 'datapath',
+                   'variables',
+                   'axes',
+                   'grids',
+                   'xlinks',
+                   'dictdict',
+                   'default_variable_name',
+                   'parent',
+                   'uri',
+                   'mode']:
             if not v in self.__cdms_internals__:
-                val = self.__cdms_internals__ + [v, ]
+                val = self.__cdms_internals__+[v,]
                 self.___cdms_internals__ = val
-
+                
         cuDataset.__init__(self)
         self.parent = parent
         self.uri = uri
         self.mode = mode
             # Path of data files relative to parent db.
-            # Note: .directory is the location of data relative to the location
-            # of the XML file
+            # Note: .directory is the location of data relative to the location of the XML file
         self.datapath = datapath
         self.variables = {}
         self.axes = {}
@@ -576,20 +499,19 @@ class Dataset(CdmsObj, cuDataset):
         self.xlinks = {}
         self._gridmap_ = {}
             # Gridmap:(latname,lonname,order,maskname,gridclass) => grid
-        (scheme, netloc, xmlpath, parameters,
-         query, fragment) = urlparse.urlparse(uri)
+        (scheme,netloc,xmlpath,parameters,query,fragment)=urlparse.urlparse(uri)
         self._xmlpath_ = xmlpath
         # Dictionary of dictionaries, keyed on node tags
-        self.dictdict = {'variable': self.variables,
-                         'axis': self.axes,
-                         'rectGrid': self.grids,
-                         'curveGrid': self.grids,
-                         'genericGrid': self.grids,
-                         'xlink': self.xlinks
-                         }
+        self.dictdict = {'variable':self.variables, 
+                         'axis':self.axes, 
+                         'rectGrid':self.grids, 
+                         'curveGrid':self.grids,
+                         'genericGrid':self.grids,
+                         'xlink':self.xlinks
+                        }
         # Dataset IDs are external, so may not have been defined yet.
-        if not hasattr(self, 'id'):
-            self.id = '<None>'
+        if not hasattr(self,'id'):
+            self.id='<None>'
         self._status_ = 'open'
         self._convention_ = convention.getDatasetConvention(self)
 
@@ -598,7 +520,7 @@ class Dataset(CdmsObj, cuDataset):
             coordsaux = self._convention_.getDsetnodeAuxAxisIds(datasetNode)
 
             for node in datasetNode.getIdDict().values():
-                if node.tag == 'variable':
+                if node.tag=='variable':
                     if node.id in coordsaux:
                         if node.getDomain().getChildCount() == 1:
                             obj = DatasetAuxAxis1D(self, node.id, node)
@@ -606,22 +528,22 @@ class Dataset(CdmsObj, cuDataset):
                             obj = DatasetAxis2D(self, node.id, node)
                     else:
                         obj = DatasetVariable(self, node.id, node)
-                    self.variables[node.id] = obj
-                elif node.tag == 'axis':
-                    obj = Axis(self, node)
-                    self.axes[node.id] = obj
-                elif node.tag == 'rectGrid':
-                    obj = RectGrid(self, node)
-                    self.grids[node.id] = obj
-                elif node.tag == 'xlink':
+                    self.variables[node.id]=obj
+                elif node.tag=='axis':
+                    obj = Axis(self,node)
+                    self.axes[node.id]=obj
+                elif node.tag=='rectGrid':
+                    obj = RectGrid(self,node)
+                    self.grids[node.id]=obj
+                elif node.tag=='xlink':
                     obj = Xlink(node)
-                    self.xlinks[node.id] = obj
+                    self.xlinks[node.id]=obj
                 else:
                     dict = self.dictdict.get(node.tag)
                     if dict is not None:
-                        dict[node.id] = node
+                        dict[node.id]=node
                     else:
-                        self.dictdict[node.tag] = {node.id: node}
+                        self.dictdict[node.tag] = {node.id:node}
 
             # Initialize grid domains
             for grid in self.grids.values():
@@ -633,11 +555,7 @@ class Dataset(CdmsObj, cuDataset):
                     maskname = ""
                 else:
                     maskname = mask.id
-                self._gridmap_[(
-                               latname,
-                               lonname,
-                               grid.getOrder(),
-                               maskname)] = grid
+                self._gridmap_[(latname, lonname, grid.getOrder(), maskname)] = grid
 
             # Initialize variable domains.
             for var in self.variables.values():
@@ -647,43 +565,40 @@ class Dataset(CdmsObj, cuDataset):
 
                 # Get grid information for the variable. gridkey has the form
                 # (latname,lonname,order,maskname,abstract_class).
-                gridkey, lat, lon = var.generateGridkey(
-                    self._convention_, self.variables)
+                gridkey, lat, lon = var.generateGridkey(self._convention_, self.variables)
 
                 # If the variable is gridded, lookup the grid. If no such grid exists,
-                # create a unique gridname, create the grid, and add to the
-                # gridmap.
+                # create a unique gridname, create the grid, and add to the gridmap.
                 if gridkey is None:
                     grid = None
                 else:
                     grid = self._gridmap_.get(gridkey)
                     if grid is None:
-                        if hasattr(var, 'grid_type'):
+                        if hasattr(var,'grid_type'):
                             gridtype = var.grid_type
                         else:
                             gridtype = "generic"
 
                         candidateBasename = None
                         if gridkey[4] == 'rectGrid':
-                            gridshape = (len(lat), len(lon))
+                            gridshape = (len(lat),len(lon))
                         elif gridkey[4] == 'curveGrid':
                             gridshape = lat.shape
                         elif gridkey[4] == 'genericGrid':
                             gridshape = lat.shape
-                            candidateBasename = 'grid_%d' % gridshape
+                            candidateBasename = 'grid_%d'%gridshape
                         else:
-                            gridshape = (len(lat), len(lon))
+                            gridshape = (len(lat),len(lon))
 
                         if candidateBasename is None:
-                            candidateBasename = 'grid_%dx%d' % gridshape
-                        if candidateBasename not in self.grids:
+                            candidateBasename = 'grid_%dx%d'%gridshape
+                        if not self.grids.has_key(candidateBasename):
                             gridname = candidateBasename
                         else:
                             foundname = 0
-                            for i in range(97, 123):  # Lower-case letters
-                                candidateName = candidateBasename + \
-                                    '_' + chr(i)
-                                if candidateName not in self.grids:
+                            for i in range(97,123): # Lower-case letters
+                                candidateName = candidateBasename+'_'+chr(i)
+                                if not self.grids.has_key(candidateName):
                                     gridname = candidateName
                                     foundname = 1
                                     break
@@ -691,16 +606,11 @@ class Dataset(CdmsObj, cuDataset):
                             if not foundname:
                                 print 'Warning: cannot generate a grid for variable', var.id
                                 continue
-
+                            
                         # Create the grid
                         if gridkey[4] == 'rectGrid':
-                            node = cdmsNode.RectGridNode(
-                                gridname,
-                                lat.id,
-                                lon.id,
-                                gridtype,
-                                gridkey[2])
-                            grid = RectGrid(self, node)
+                            node = cdmsNode.RectGridNode(gridname, lat.id, lon.id, gridtype, gridkey[2])
+                            grid = RectGrid(self,node)
                             grid.initDomain(self.axes, self.variables)
                         elif gridkey[4] == 'curveGrid':
                             grid = DatasetCurveGrid(lat, lon, gridname, self)
@@ -741,39 +651,34 @@ class Dataset(CdmsObj, cuDataset):
                     # but now there _may_ be an additional item before path...
                     for varm1 in varmap:
                         tstart, tend, levstart, levend = varm1[0:4]
-                        if (len(varm1) >= 6):
+                        if (len(varm1)>=6):
                             forecast = varm1[4]
                         else:
                             forecast = None
                         path = varm1[-1]
-                        self._filemap_[(
-                                       varname,
-                                       tstart,
-                                       levstart,
-                                       forecast)] = path
+                        self._filemap_[(varname, tstart, levstart, forecast)] = path
                         if tstart is not None:
-                            timemap[
-                                (tstart, tend)] = 1  # Collect unique (tstart, tend) tuples
+                            timemap[(tstart, tend)] = 1 # Collect unique (tstart, tend) tuples
                         if levstart is not None:
                             levmap[(levstart, levend)] = 1
                         if forecast is not None:
-                            fcmap[(forecast, forecast)] = 1
+                            fcmap[(forecast,forecast)] = 1
                     tkeys = timemap.keys()
-                    if len(tkeys) > 0:
+                    if len(tkeys)>0:
                         tkeys.sort()
                         tpart = map(lambda x: list(x), tkeys)
                     else:
                         tpart = None
                     levkeys = levmap.keys()
-                    if len(levkeys) > 0:
+                    if len(levkeys)>0:
                         levkeys.sort()
                         levpart = map(lambda x: list(x), levkeys)
                     else:
                         levpart = None
                     fckeys = fcmap.keys()
-                    if len(fckeys) > 0:
+                    if len(fckeys)>0:
                         fckeys.sort()
-                    if varname in self.variables:
+                    if self.variables.has_key(varname):
                         self.variables[varname]._varpart_ = [tpart, levpart]
 
     def getConvention(self):
@@ -781,7 +686,7 @@ class Dataset(CdmsObj, cuDataset):
         return self._convention_
 
     # Get a dictionary of objects with the given tag
-    def getDictionary(self, tag):
+    def getDictionary(self,tag):
         return self.dictdict[tag]
 
     # Synchronize writes with data/metadata files
@@ -802,24 +707,24 @@ class Dataset(CdmsObj, cuDataset):
         self.parent = None
         self._status_ = 'closed'
 
-# Note: Removed to allow garbage collection of reference cycles
-# def __del__(self):
-# if cdmsobj._debug==1:
-# print 'Deleting dataset',self.id
-# self.close()
-
+##  Note: Removed to allow garbage collection of reference cycles
+##     def __del__(self):
+##         if cdmsobj._debug==1:
+##             print 'Deleting dataset',self.id
+##         self.close()
+    
     # Create an axis
     # 'name' is the string name of the Axis
     # 'ar' is the 1-D data array, or None for an unlimited axis
     # Return an axis object.
-    def createAxis(self, name, ar):
+    def createAxis(self,name,ar):
         pass
 
     # Create an implicit rectilinear grid. lat, lon, and mask are objects.
     # order and type are strings
-    def createRectGrid(self, id, lat, lon, order, type="generic", mask=None):
+    def createRectGrid(self,id, lat, lon, order, type="generic", mask=None):
         node = cdmsNode.RectGridNode(id, lat.id, lon.id, type, order, mask.id)
-        grid = RectGrid(self, node)
+        grid = RectGrid(self,node)
         grid.initDomain(self.axes, self.variables)
         self.grids[grid.id] = grid
 #        self._gridmap_[gridkey] = grid
@@ -829,31 +734,31 @@ class Dataset(CdmsObj, cuDataset):
     # 'datatype' is a CDMS datatype
     # 'axisnames' is a list of axes or grids
     # Return a variable object.
-    def createVariable(self, name, datatype, axisnames):
+    def createVariable(self,name,datatype,axisnames):
         pass
 
     # Search for a pattern in a string-valued attribute. If attribute is None,
     # search all string attributes. If tag is 'dataset', just check the dataset,
     # else check all nodes in the dataset of class type matching the tag. If tag
     # is None, search the dataset and all objects contained in it.
-    def searchPattern(self, pattern, attribute, tag):
+    def searchPattern(self,pattern,attribute,tag):
         resultlist = []
         if tag is not None:
-            tag = tag.lower()
-        if tag in ('dataset', None):
-            if self.searchone(pattern, attribute) == 1:
+            tag = string.lower(tag)
+        if tag in ('dataset',None):
+            if self.searchone(pattern,attribute)==1:
                 resultlist = [self]
             else:
                 resultlist = []
         if tag is None:
             for dict in self.dictdict.values():
                 for obj in dict.values():
-                    if obj.searchone(pattern, attribute):
+                    if obj.searchone(pattern,attribute):
                         resultlist.append(obj)
-        elif tag != 'dataset':
+        elif tag!='dataset':
             dict = self.dictdict[tag]
             for obj in dict.values():
-                if obj.searchone(pattern, attribute):
+                if obj.searchone(pattern,attribute):
                     resultlist.append(obj)
         return resultlist
 
@@ -861,24 +766,24 @@ class Dataset(CdmsObj, cuDataset):
     # search all string attributes. If tag is 'dataset', just check the dataset,
     # else check all nodes in the dataset of class type matching the tag. If tag
     # is None, search the dataset and all objects contained in it.
-    def matchPattern(self, pattern, attribute, tag):
+    def matchPattern(self,pattern,attribute,tag):
         resultlist = []
         if tag is not None:
-            tag = tag.lower()
-        if tag in ('dataset', None):
-            if self.matchone(pattern, attribute) == 1:
+            tag = string.lower(tag)
+        if tag in ('dataset',None):
+            if self.matchone(pattern,attribute)==1:
                 resultlist = [self]
             else:
                 resultlist = []
         if tag is None:
             for dict in self.dictdict.values():
                 for obj in dict.values():
-                    if obj.matchone(pattern, attribute):
+                    if obj.matchone(pattern,attribute):
                         resultlist.append(obj)
-        elif tag != 'dataset':
+        elif tag!='dataset':
             dict = self.dictdict[tag]
             for obj in dict.values():
-                if obj.matchone(pattern, attribute):
+                if obj.matchone(pattern,attribute):
                     resultlist.append(obj)
         return resultlist
 
@@ -889,13 +794,13 @@ class Dataset(CdmsObj, cuDataset):
     # If 'variable', 'axis', etc., it is applied only to that type of object
     # in the dataset. If None, it is applied to all objects, including
     # the dataset itself.
-    def searchPredicate(self, predicate, tag):
+    def searchPredicate(self,predicate,tag):
         resultlist = []
         if tag is not None:
-            tag = tag.lower()
-        if tag in ('dataset', None):
+            tag = string.lower(tag)
+        if tag in ('dataset',None):
             try:
-                if predicate(*(self,)) == 1:
+                if apply(predicate,(self,))==1:
                     resultlist.append(self)
             except AttributeError:
                 pass
@@ -903,15 +808,15 @@ class Dataset(CdmsObj, cuDataset):
             for dict in self.dictdict.values():
                 for obj in dict.values():
                     try:
-                        if predicate(*(obj,)) == 1:
+                        if apply(predicate,(obj,))==1:
                             resultlist.append(obj)
                     except AttributeError:
                         pass
-        elif tag != "dataset":
+        elif tag!="dataset":
             dict = self.dictdict[tag]
             for obj in dict.values():
                 try:
-                    if predicate(*(obj,)) == 1:
+                    if apply(predicate,(obj,))==1:
                         resultlist.append(obj)
                 except:
                     pass
@@ -923,7 +828,8 @@ class Dataset(CdmsObj, cuDataset):
         for var in self.variables.values():
             for path, stuple in var.getPaths():
                 pathdict[path] = 1
-        result = sorted(pathdict.keys())
+        result = pathdict.keys()
+        result.sort()
         return result
 
     # Open a data file associated with this dataset.
@@ -933,36 +839,33 @@ class Dataset(CdmsObj, cuDataset):
 
         # Opened via a local XML file?
         if self.parent is None:
-            path = os.path.join(self.datapath, filename)
-            if cdmsobj._debug == 1:
-                sys.stdout.write(path + '\n')
-                sys.stdout.flush()
-            f = Cdunif.CdunifFile(path, mode)
+            path = os.path.join(self.datapath,filename)
+            if cdmsobj._debug==1:
+                sys.stdout.write(path+'\n'); sys.stdout.flush()
+            f = Cdunif.CdunifFile(path,mode)
             return f
 
         # Opened via a database
         else:
             dburls = self.parent.url
-            if not isinstance(dburls, type([])):
+            if type(dburls)!=type([]):
                 dburls = [dburls]
 
             # Try first to open as a local file
             for dburl in dburls:
                 if os.path.isabs(self.directory):
-                    fileurl = os.path.join(self.directory, filename)
+                    fileurl = os.path.join(self.directory,filename)
                 else:
                     try:
-                        fileurl = os.path.join(dburl, self.datapath, filename)
+                        fileurl = os.path.join(dburl,self.datapath,filename)
                     except:
-                        print 'Error joining', repr(dburl), self.datapath, filename
+                        print 'Error joining',`dburl`,self.datapath,filename
                         raise
-                (scheme, netloc, path, parameters, query,
-                 fragment) = urlparse.urlparse(fileurl)
-                if scheme in ['file', ''] and os.path.isfile(path):
-                    if cdmsobj._debug == 1:
-                        sys.stdout.write(fileurl + '\n')
-                        sys.stdout.flush()
-                    f = Cdunif.CdunifFile(path, mode)
+                (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(fileurl)
+                if scheme in ['file',''] and os.path.isfile(path):
+                    if cdmsobj._debug==1:
+                        sys.stdout.write(fileurl+'\n'); sys.stdout.flush()
+                    f = Cdunif.CdunifFile(path,mode)
                     return f
 
             # See if request manager is being used for file transfer
@@ -972,40 +875,32 @@ class Dataset(CdmsObj, cuDataset):
                 lcbase = db.lcBaseDN
                 lcpath = self.getLogicalCollectionDN(lcbase)
 
-                # File location is logical collection path combined with
-                # relative filename
+                # File location is logical collection path combined with relative filename
                 fileDN = (self.uri, filename)
-                path = cache.getFile(
-                    filename,
-                    fileDN,
-                    lcpath=lcpath,
-                    userid=db.userid,
-                    useReplica=db.useReplica)
+                path = cache.getFile(filename, fileDN, lcpath=lcpath, userid=db.userid, useReplica=db.useReplica)
                 try:
-                    f = Cdunif.CdunifFile(path, mode)
+                    f = Cdunif.CdunifFile(path,mode)
                 except:
                     # Try again, in case another process clobbered this file
-                    path = cache.getFile(fileurl, fileDN)
-                    f = Cdunif.CdunifFile(path, mode)
+                    path = cache.getFile(fileurl,fileDN)
+                    f = Cdunif.CdunifFile(path,mode)
                 return f
 
             # Try to read via FTP:
 
             for dburl in dburls:
-                fileurl = os.path.join(dburl, self.datapath, filename)
-                (scheme, netloc, path, parameters, query,
-                 fragment) = urlparse.urlparse(fileurl)
-                if scheme == 'ftp':
+                fileurl = os.path.join(dburl,self.datapath,filename)
+                (scheme,netloc,path,parameters,query,fragment)=urlparse.urlparse(fileurl)
+                if scheme=='ftp':
                     cache = self.parent.enableCache()
-                    fileDN = (self.uri, filename)  # Global file name
-                    path = cache.getFile(fileurl, fileDN)
+                    fileDN = (self.uri, filename) # Global file name
+                    path = cache.getFile(fileurl,fileDN)
                     try:
-                        f = Cdunif.CdunifFile(path, mode)
+                        f = Cdunif.CdunifFile(path,mode)
                     except:
-                        # Try again, in case another process clobbered this
-                        # file
-                        path = cache.getFile(fileurl, fileDN)
-                        f = Cdunif.CdunifFile(path, mode)
+                        # Try again, in case another process clobbered this file
+                        path = cache.getFile(fileurl,fileDN)
+                        f = Cdunif.CdunifFile(path,mode)
                     return f
 
             # File not found
@@ -1018,9 +913,9 @@ class Dataset(CdmsObj, cuDataset):
         if hasattr(self, "lc"):
             dn = self.lc
         else:
-            dn = "lc=%s" % self.id
+            dn = "lc=%s"%self.id
         if base is not None:
-            dn = "%s,%s" % (dn, base)
+            dn = "%s,%s"%(dn,base)
         return dn
 
     def getVariable(self, id):
@@ -1032,8 +927,7 @@ class Dataset(CdmsObj, cuDataset):
         axes defined on latitude or longitude, excluding weights and bounds."""
         retval = self.variables.values()
         if spatial:
-            retval = filter(lambda x: x.id[0:7] != "bounds_" and x.id[0:8] != "weights_" and (
-                (x.getLatitude() is not None) or (x.getLongitude() is not None) or (x.getLevel() is not None)), retval)
+            retval = filter(lambda x: x.id[0:7]!="bounds_" and x.id[0:8]!="weights_" and ((x.getLatitude() is not None) or (x.getLongitude() is not None) or (x.getLevel() is not None)), retval)
         return retval
 
     def getAxis(self, id):
@@ -1045,40 +939,38 @@ class Dataset(CdmsObj, cuDataset):
         return self.grids.get(id)
 
     def __repr__(self):
-        return "<Dataset: '%s', URI: '%s', mode: '%s', status: %s>" % (self.id, self.uri, self.mode, self._status_)
-
-# internattr.add_internal_attribute (Dataset, 'datapath',
-# 'variables',
-# 'axes',
-# 'grids',
-# 'xlinks',
-# 'dictdict',
-# 'default_variable_name',
-# 'parent',
-# 'uri',
-# 'mode')
-
+        return "<Dataset: '%s', URI: '%s', mode: '%s', status: %s>"%(self.id, self.uri, self.mode, self._status_)
+
+## internattr.add_internal_attribute (Dataset, 'datapath',
+##                                             'variables',
+##                                             'axes',
+##                                             'grids',
+##                                             'xlinks',
+##                                             'dictdict',
+##                                             'default_variable_name',
+##                                             'parent',
+##                                             'uri',
+##                                             'mode')
 
 class CdmsFile(CdmsObj, cuDataset):
-
-    def __init__(self, path, mode, hostObj=None, mpiBarrier=False):
+    def __init__(self, path, mode, hostObj = None, mpiBarrier=False):
 
         if mpiBarrier:
             MPI.COMM_WORLD.Barrier()
 
         CdmsObj.__init__(self, None)
         cuDataset.__init__(self)
-        value = self.__cdms_internals__ + ['datapath',
-                                           'variables',
-                                           'axes',
-                                           'grids',
-                                           'xlinks',
-                                           'dictdict',
-                                           'default_variable_name',
-                                           'id',
-                                           'uri',
-                                           'parent',
-                                           'mode']
+        value = self.__cdms_internals__+['datapath',
+                                'variables',
+                                'axes',
+                                'grids',
+                                'xlinks',
+                                'dictdict',
+                                'default_variable_name',
+                                'id',
+                                'uri',
+                                'parent',
+                                'mode']
         self.___cdms_internals__ = value
         self.id = path
         if "://" in path:
@@ -1087,14 +979,14 @@ class CdmsFile(CdmsObj, cuDataset):
             self.uri = "file://" + os.path.abspath(os.path.expanduser(path))
         self._mode_ = mode
         try:
-            if mode[0].lower() == "w":
+            if mode[0].lower()=="w":
                 try:
                     os.remove(path)
                 except:
                     pass
-            _fileobj_ = Cdunif.CdunifFile(path, mode)
-        except Exception as err:
-            raise CDMSError('Cannot open file %s (%s)' % (path, err))
+            _fileobj_ = Cdunif.CdunifFile (path, mode)
+        except Exception,err:
+            raise CDMSError('Cannot open file %s (%s)'%(path,err))
         self._file_ = _fileobj_   # Cdunif file object
         self.variables = {}
         self.axes = {}
@@ -1102,40 +994,33 @@ class CdmsFile(CdmsObj, cuDataset):
         self.xlinks = {}
         self._gridmap_ = {}
 
-        # self.attributes returns the Cdunif file dictionary.
-# self.replace_external_attributes(self._file_.__dict__)
-        for att in self._file_.__dict__.keys():
-            self.__dict__.__setitem__(att, self._file_.__dict__[att])
-            self.attributes[att] = self._file_.__dict__[att]
+        # self.attributes returns the Cdunif file dictionary. 
+##         self.replace_external_attributes(self._file_.__dict__)
+        for att in  self._file_.__dict__.keys():
+            self.__dict__.__setitem__(att,self._file_.__dict__[att])
+            self.attributes[att]=self._file_.__dict__[att]
         self._boundAxis_ = None         # Boundary axis for cell vertices
-        if self._mode_ == 'w':
+        if self._mode_=='w':
             self.Conventions = convention.CFConvention.current
         self._status_ = 'open'
         self._convention_ = convention.getDatasetConvention(self)
 
         try:
-
+            
             # A mosaic variable with coordinates attached, but the coordinate variables reside in a
-            # different file. Add the coordinate variables to the mosaic
-            # variables list.
+            # different file. Add the coordinate variables to the mosaic variables list.
             if not hostObj is None:
                 for name in self._file_.variables.keys():
                     if 'coordinates' in dir(self._file_.variables[name]):
-                        coords = self._file_.variables[
-                            name].coordinates.split()
+                        coords = self._file_.variables[name].coordinates.split()
                         for coord in coords:
                             if not coord in self._file_.variables.keys():
-                                cdunifvar = Cdunif.CdunifFile(
-                                    hostObj.gridVars[coord][0],
-                                    mode)
-                                self._file_.variables[
-                                    coord] = cdunifvar.variables[
-                                        coord]
-
+                                cdunifvar = Cdunif.CdunifFile(hostObj.gridVars[coord][0], mode)
+                                self._file_.variables[coord] = cdunifvar.variables[coord]
+                
             # Get lists of 1D and auxiliary coordinate axes
             coords1d = self._convention_.getAxisIds(self._file_.variables)
-            coordsaux = self._convention_.getAxisAuxIds(
-                self._file_.variables, coords1d)
+            coordsaux = self._convention_.getAxisAuxIds(self._file_.variables, coords1d)
 
             # Build variable list
             for name in self._file_.variables.keys():
@@ -1144,15 +1029,12 @@ class CdmsFile(CdmsObj, cuDataset):
                     if name in coordsaux:
                         # Put auxiliary coordinate axes with variables, since there may be
                         # a dimension with the same name.
-                        if len(cdunifvar.shape) == 2:
-                            self.variables[name] = FileAxis2D(
-                                self, name, cdunifvar)
+                        if len(cdunifvar.shape)==2:
+                            self.variables[name] = FileAxis2D(self, name, cdunifvar)
                         else:
-                            self.variables[name] = FileAuxAxis1D(
-                                self, name, cdunifvar)
+                            self.variables[name] = FileAuxAxis1D(self, name, cdunifvar)
                     else:
-                        self.variables[name] = FileVariable(
-                            self, name, cdunifvar)
+                        self.variables[name] = FileVariable(self,name,cdunifvar)
 
             # Build axis list
             for name in self._file_.dimensions.keys():
@@ -1162,7 +1044,7 @@ class CdmsFile(CdmsObj, cuDataset):
                     cdunifvar = self._file_.variables[name]
                 else:
                     cdunifvar = None
-                self.axes[name] = FileAxis(self, name, cdunifvar)
+                self.axes[name] = FileAxis(self,name,cdunifvar)
 
             # Attach boundary variables
             for name in coordsaux:
@@ -1170,12 +1052,7 @@ class CdmsFile(CdmsObj, cuDataset):
                 bounds = self._convention_.getVariableBounds(self, var)
                 var.setBounds(bounds)
 
-            self.dictdict = {
-                'variable': self.variables,
-                'axis': self.axes,
-                'rectGrid': self.grids,
-                'curveGrid': self.grids,
-                'genericGrid': self.grids}
+            self.dictdict = {'variable':self.variables, 'axis':self.axes, 'rectGrid':self.grids, 'curveGrid':self.grids, 'genericGrid':self.grids}
 
             # Initialize variable domains
             for var in self.variables.values():
@@ -1185,44 +1062,41 @@ class CdmsFile(CdmsObj, cuDataset):
             for var in self.variables.values():
                 # Get grid information for the variable. gridkey has the form
                 # (latname,lonname,order,maskname, abstract_class).
-                gridkey, lat, lon = var.generateGridkey(
-                    self._convention_, self.variables)
+                gridkey, lat, lon = var.generateGridkey(self._convention_, self.variables)
 
                 # If the variable is gridded, lookup the grid. If no such grid exists,
-                # create a unique gridname, create the grid, and add to the
-                # gridmap.
+                # create a unique gridname, create the grid, and add to the gridmap.
                 if gridkey is None:
                     grid = None
                 else:
                     grid = self._gridmap_.get(gridkey)
                     if grid is None:
 
-                        if hasattr(var, 'grid_type'):
+                        if hasattr(var,'grid_type'):
                             gridtype = var.grid_type
                         else:
                             gridtype = "generic"
 
                         candidateBasename = None
                         if gridkey[4] == 'rectGrid':
-                            gridshape = (len(lat), len(lon))
+                            gridshape = (len(lat),len(lon))
                         elif gridkey[4] == 'curveGrid':
                             gridshape = lat.shape
                         elif gridkey[4] == 'genericGrid':
                             gridshape = lat.shape
-                            candidateBasename = 'grid_%d' % gridshape
+                            candidateBasename = 'grid_%d'%gridshape
                         else:
-                            gridshape = (len(lat), len(lon))
+                            gridshape = (len(lat),len(lon))
 
                         if candidateBasename is None:
-                            candidateBasename = 'grid_%dx%d' % gridshape
-                        if candidateBasename not in self.grids:
+                            candidateBasename = 'grid_%dx%d'%gridshape
+                        if not self.grids.has_key(candidateBasename):
                             gridname = candidateBasename
                         else:
                             foundname = 0
-                            for i in range(97, 123):  # Lower-case letters
-                                candidateName = candidateBasename + \
-                                    '_' + chr(i)
-                                if candidateName not in self.grids:
+                            for i in range(97,123): # Lower-case letters
+                                candidateName = candidateBasename+'_'+chr(i)
+                                if not self.grids.has_key(candidateName):
                                     gridname = candidateName
                                     foundname = 1
                                     break
@@ -1233,36 +1107,20 @@ class CdmsFile(CdmsObj, cuDataset):
 
                         # Create the grid
                         if gridkey[4] == 'rectGrid':
-                            grid = FileRectGrid(
-                                self,
-                                gridname,
-                                lat,
-                                lon,
-                                gridkey[2],
-                                gridtype)
+                            grid = FileRectGrid(self, gridname, lat, lon, gridkey[2], gridtype)
                         else:
-                            if gridkey[3] != '':
-                                if gridkey[3] in self.variables:
+                            if gridkey[3]!='':
+                                if self.variables.has_key(gridkey[3]):
                                     maskvar = self.variables[gridkey[3]]
                                 else:
-                                    print 'Warning: mask variable %s not found' % gridkey[3]
+                                    print 'Warning: mask variable %s not found'%gridkey[3]
                                     maskvar = None
                             else:
                                 maskvar = None
                             if gridkey[4] == 'curveGrid':
-                                grid = FileCurveGrid(
-                                    lat,
-                                    lon,
-                                    gridname,
-                                    parent=self,
-                                    maskvar=maskvar)
+                                grid = FileCurveGrid(lat, lon, gridname, parent=self, maskvar=maskvar)
                             else:
-                                grid = FileGenericGrid(
-                                    lat,
-                                    lon,
-                                    gridname,
-                                    parent=self,
-                                    maskvar=maskvar)
+                                grid = FileGenericGrid(lat, lon, gridname, parent=self, maskvar=maskvar)
                         self.grids[grid.id] = grid
                         self._gridmap_[gridkey] = grid
 
@@ -1273,45 +1131,45 @@ class CdmsFile(CdmsObj, cuDataset):
             raise
 
     # setattr writes external global attributes to the file
-    def __setattr__(self, name, value):
-# s = self.get_property_s(name)
-# if s is not None:
-# print '....handler'
-# s(self, name, value)
-# return
-        self.__dict__[name] = value  # attributes kept in sync w/file
-        if not name in self.__cdms_internals__ and name[0] != '_':
+    def __setattr__ (self, name, value):
+##         s = self.get_property_s(name)
+##         if s is not None:
+##             print '....handler'
+##             s(self, name, value)
+##             return
+        self.__dict__[name] =  value #attributes kept in sync w/file
+        if not name in self.__cdms_internals__ and name[0]!='_':
             setattr(self._file_, name, value)
-            self.attributes[name] = value
-
-# getattr reads external global attributes from the file
-# def __getattr__ (self, name):
-# g = self.get_property_g(name)
-# if g is not None:
-# return g(self, name)
-# if name in self.__cdms_internals__:
-# try:
-# return self.__dict__[name]
-# except KeyError:
-# raise AttributeError("%s instance has no attribute %s." % \
-# (self.__class__.__name__, name))
-# else:
-# return getattr(self._file_,name)
+            self.attributes[name]=value
+
+##     # getattr reads external global attributes from the file
+##     def __getattr__ (self, name):
+## ##         g = self.get_property_g(name)
+## ##         if g is not None:
+## ##             return g(self, name)
+##         if name in self.__cdms_internals__:
+##             try:
+##                 return self.__dict__[name]
+##             except KeyError:
+##                 raise AttributeError("%s instance has no attribute %s." % \
+##                            (self.__class__.__name__, name))
+##         else:
+##             return getattr(self._file_,name)
 
     # delattr deletes external global attributes in the file
     def __delattr__(self, name):
-# d = self.get_property_d(name)
-# if d is not None:
-# d(self, name)
-# return
+##         d = self.get_property_d(name)
+##         if d is not None:
+##             d(self, name)
+##             return
         try:
             del self.__dict__[name]
         except KeyError:
-            raise AttributeError("%s instance has no attribute %s." %
-                                 (self.__class__.__name__, name))
+            raise AttributeError("%s instance has no attribute %s." % \
+                  (self.__class__.__name__, name))
         if not name in self.__cdms_internals__:
             delattr(self._file_, name)
-            if(name in self.attributes.keys()):
+            if( name in self.attributes.keys() ):
                 del(self.attributes[name])
 
     def sync(self):
@@ -1321,12 +1179,12 @@ class CdmsFile(CdmsObj, cuDataset):
    None :: (None) (0) yep
    :::
    """
-        if self._status_ == "closed":
+        if self._status_=="closed":
             raise CDMSError(FileWasClosed + self.id)
         self._file_.sync()
 
     def close(self):
-        if self._status_ == "closed":
+        if self._status_=="closed":
             return
         if hasattr(self, 'dictdict'):
             for dict in self.dictdict.values():
@@ -1337,21 +1195,21 @@ class CdmsFile(CdmsObj, cuDataset):
         self._file_.close()
         self._status_ = 'closed'
 
-# Note: Removed to allow garbage collection of reference cycles
-# def __del__(self):
-# if cdmsobj._debug==1:
-# print 'Deleting file',self.id
-# If the object has been deallocated due to open error,
-# it will not have an attribute .dictdict
-# if hasattr(self,"dictdict") and self.dictdict != {}:
-# self.close()
+##  Note: Removed to allow garbage collection of reference cycles
+##     def __del__(self):
+##         if cdmsobj._debug==1:
+##             print 'Deleting file',self.id
+##         # If the object has been deallocated due to open error,
+##         # it will not have an attribute .dictdict
+##         if hasattr(self,"dictdict") and self.dictdict != {}:
+##             self.close()
 
     # Create an axis
     # 'name' is the string name of the Axis
     # 'ar' is the 1-D data array, or None for an unlimited axis
     # Set unlimited to true to designate the axis as unlimited
     # Return an axis object.
-    def createAxis(self, name, ar, unlimited=0):
+    def createAxis(self,name,ar,unlimited=0):
         """
         Create an axis
         'name' is the string name of the Axis
@@ -1370,36 +1228,36 @@ class CdmsFile(CdmsObj, cuDataset):
         axis :: (cdms2.axis.FileAxis) (0) file axis whose id is name
         :::
         """
-        if self._status_ == "closed":
+        if self._status_=="closed":
             raise CDMSError(FileWasClosed + self.id)
         cufile = self._file_
-        if ar is None or (unlimited == 1 and getNetcdfUseParallelFlag() == 0):
-            cufile.createDimension(name, None)
+        if ar is None or (unlimited==1 and getNetcdfUseParallelFlag()==0):
+            cufile.createDimension(name,None)
             if ar is None:
                 typecode = numpy.float
             else:
                 typecode = ar.dtype.char
         else:
-            cufile.createDimension(name, len(ar))
+            cufile.createDimension(name,len(ar))
             typecode = ar.dtype.char
 
         # Compatibility: revert to old typecode for cdunif
         typecode = typeconv.oldtypecodes[typecode]
-        cuvar = cufile.createVariable(name, typecode, (name,))
+        cuvar = cufile.createVariable(name,typecode,(name,))
 
         # Cdunif should really create this extra dimension info:
         #   (units,typecode,filename,varname_local,dimension_type,ncid)
-        cufile.dimensioninfo[name] = ('', typecode, name, '', 'global', -1)
+        cufile.dimensioninfo[name] = ('',typecode,name,'','global',-1)
 
         # Note: like netCDF-3, cdunif does not support 64-bit integers.
         # If ar has dtype int64 on a 64-bit machine, cuvar will be a 32-bit int,
         # and ar must be downcast.
         if ar is not None:
-            if ar.dtype.char != 'l':
+            if ar.dtype.char!='l':
                 cuvar[0:len(ar)] = numpy.ma.filled(ar)
             else:
                 cuvar[0:len(ar)] = numpy.ma.filled(ar).astype(cuvar.typecode())
-        axis = FileAxis(self, name, cuvar)
+        axis = FileAxis(self,name,cuvar)
         self.axes[name] = axis
         return axis
 
@@ -1416,24 +1274,23 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         Input:::
         name :: (str) (0) dimension name
-        axislen :: (int) (1)
+        axislen :: (int) (1) 
         :::
         Output:::
         axis :: (cdms2.axis.FileVirtualAxis) (0) file axis whose id is name
         :::
         """
-        if self._status_ == "closed":
+        if self._status_=="closed":
             raise CDMSError(FileWasClosed + self.id)
         cufile = self._file_
         cufile.createDimension(name, axislen)
-        cufile.dimensioninfo[name] = ('', 'f', name, '', 'global', -1)
+        cufile.dimensioninfo[name] = ('','f',name,'','global',-1)
         axis = FileVirtualAxis(self, name, axislen)
         self.axes[name] = axis
         return axis
 
     # Copy axis description and data from another axis
-    def copyAxis(self, axis, newname=None,
-                 unlimited=0, index=None, extbounds=None):
+    def copyAxis(self, axis, newname=None, unlimited=0, index=None, extbounds=None):
         """
         Copy axis description and data from another axis
         :::
@@ -1450,25 +1307,24 @@ class CdmsFile(CdmsObj, cuDataset):
         axis :: (cdms2.axis.FileAxis/cdms2.axis.FileVirtualAxis) (0) copy of input axis
         :::
         """
-        if newname is None:
-            newname = axis.id
+        if newname is None: newname=axis.id
 
         # If the axis already exists and has the same values, return existing
-        if newname in self.axes:
+        if self.axes.has_key(newname):
             newaxis = self.axes[newname]
             if newaxis.isVirtual():
-                if len(axis) != len(newaxis):
-                    raise DuplicateAxisError(DuplicateAxis + newname)
-            elif unlimited == 0 or (unlimited == 1 and getNetcdfUseParallelFlag() != 0):
-                if len(axis) != len(newaxis) or numpy.alltrue(numpy.less(numpy.absolute(newaxis[:] - axis[:]), 1.e-5)) == 0:
-                    raise DuplicateAxisError(DuplicateAxis + newname)
+                if len(axis)!=len(newaxis):
+                    raise DuplicateAxisError(DuplicateAxis+newname)
+            elif unlimited==0 or (unlimited==1 and getNetcdfUseParallelFlag()!=0):
+                if len(axis)!=len(newaxis) or numpy.alltrue(numpy.less(numpy.absolute(newaxis[:]-axis[:]),1.e-5))==0:
+                    raise DuplicateAxisError(DuplicateAxis+newname)
             else:
                 if index is None:
-                    isoverlap, index = isOverlapVector(axis[:], newaxis[:])
+                    isoverlap, index = isOverlapVector(axis[:],newaxis[:])
                 else:
                     isoverlap = 1
                 if isoverlap:
-                    newaxis[index:index + len(axis)] = axis[:]
+                    newaxis[index:index+len(axis)] = axis[:]
                     if extbounds is None:
                         axisBounds = axis.getBounds()
                     else:
@@ -1476,7 +1332,7 @@ class CdmsFile(CdmsObj, cuDataset):
                     if axisBounds is not None:
                         newaxis.setBounds(axisBounds)
                 else:
-                    raise DuplicateAxisError(DuplicateAxis + newname)
+                    raise DuplicateAxisError(DuplicateAxis+newname)
 
         elif axis.isVirtual():
             newaxis = self.createVirtualAxis(newname, len(axis))
@@ -1491,8 +1347,8 @@ class CdmsFile(CdmsObj, cuDataset):
                 else:
                     boundsid = None
                 newaxis.setBounds(bounds, persistent=1, boundsid=boundsid)
-            for attname, attval in axis.attributes.items():
-                if attname not in ["datatype", "id", "length", "isvar", "name_in_file", "partition"]:
+            for attname,attval in axis.attributes.items():
+                if attname not in ["datatype", "id","length","isvar","name_in_file","partition"]:
                     setattr(newaxis, attname, attval)
         return newaxis
 
@@ -1538,7 +1394,7 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         """
         if newname is None:
-            if hasattr(grid, 'id'):
+            if hasattr(grid,'id'):
                 newname = grid.id
             else:
                 newname = 'Grid'
@@ -1555,26 +1411,20 @@ class CdmsFile(CdmsObj, cuDataset):
         lon.designateLongitude(persistent=1)
 
         # If the grid name already exists, and is the same, just return it
-        if newname in self.grids:
+        if self.grids.has_key(newname):
             newgrid = self.grids[newname]
             newlat = newgrid.getLatitude()
             newlon = newgrid.getLongitude()
             if ((newlat is not lat) or
                 (newlon is not lon) or
                 (newgrid.getOrder() != grid.getOrder()) or
-                    (newgrid.getType() != grid.getType())):
+                (newgrid.getType() != grid.getType())):
                 raise DuplicateGrid(newname)
 
         # else create a new grid and copy metadata
         else:
             newmask = grid.getMask()    # Get the mask array
-            newgrid = self.createRectGrid(
-                newname,
-                lat,
-                lon,
-                grid.getOrder(),
-                grid.getType(),
-                None)
+            newgrid = self.createRectGrid(newname, lat, lon, grid.getOrder(), grid.getType(), None)
             newgrid.setMask(newmask)    # Set the mask array, non-persistently
             for attname in grid.attributes.keys():
                 setattr(newgrid, attname, getattr(grid, attname))
@@ -1587,7 +1437,7 @@ class CdmsFile(CdmsObj, cuDataset):
     # 'axesOrGrids' is a list of axes, grids. (Note: this should be
     #   generalized to allow subintervals of axes and/or grids)
     # Return a variable object.
-    def createVariable(self, name, datatype, axesOrGrids, fill_value=None):
+    def createVariable(self,name,datatype,axesOrGrids,fill_value=None):
         """
         Create a variable
         'name' is the string name of the Variable
@@ -1607,7 +1457,7 @@ class CdmsFile(CdmsObj, cuDataset):
         axis :: (cdms2.fvariable.FileVariable) (0) file variable
         :::
         """
-        if self._status_ == "closed":
+        if self._status_=="closed":
             raise CDMSError(FileWasClosed + self.id)
         cufile = self._file_
         if datatype in CdDatatypes:
@@ -1615,77 +1465,75 @@ class CdmsFile(CdmsObj, cuDataset):
         else:
             numericType = datatype
 
-        # dimensions = map(lambda x: x.id, axes)
+        #dimensions = map(lambda x: x.id, axes)
         # Make a list of names of axes for _Cdunif
         dimensions = []
         for obj in axesOrGrids:
             if isinstance(obj, FileAxis):
                 dimensions.append(obj.id)
             elif isinstance(obj, FileRectGrid):
-                dimensions = dimensions + \
-                    [obj.getAxis(0).id, obj.getAxis(1).id]
+                dimensions = dimensions + [obj.getAxis(0).id, obj.getAxis(1).id]
             else:
                 raise InvalidDomain
 
         try:
             # Compatibility: revert to old typecode for cdunif
             numericType = typeconv.oldtypecodes[numericType]
-            cuvar = cufile.createVariable(name, numericType, tuple(dimensions))
-        except Exception as err:
+            cuvar = cufile.createVariable(name,numericType,tuple(dimensions))
+        except Exception,err:
             print err
-            raise CDMSError("Creating variable " + name)
-        var = FileVariable(self, name, cuvar)
+            raise CDMSError("Creating variable "+name)
+        var = FileVariable(self,name,cuvar)
         var.initDomain(self.axes)
         self.variables[name] = var
-        if fill_value is not None:
-            var.setMissing(fill_value)
+        if fill_value is not None: var.setMissing(fill_value)
         return var
 
     # Create a variable from an existing variable, and copy the metadata
-# def createVariableCopy(self, var, newname=None):
+##     def createVariableCopy(self, var, newname=None):
 
-# if newname is None: newname=var.id
-# if self.variables.has_key(newname):
-# raise DuplicateVariable(newname)
+##         if newname is None: newname=var.id
+##         if self.variables.has_key(newname):
+##             raise DuplicateVariable(newname)
 
 
-# Create axes if necessary
-# axislist = []
-# for (axis,start,length,true_length) in var.getDomain():
-# try:
-# newaxis = self.copyAxis(axis)
-# except DuplicateAxisError:
+##         # Create axes if necessary
+##         axislist = []
+##         for (axis,start,length,true_length) in var.getDomain():
+##             try:
+##                 newaxis = self.copyAxis(axis)
+##             except DuplicateAxisError:
 
-# Create a unique axis name
-# setit = 0
-# for i in range(97,123): # Lower-case letters
-# try:
-# newaxis = self.copyAxis(axis,axis.id+'_'+chr(i))
-# setit = 1
-# break
-# except DuplicateAxisError:
-# continue
+##                 # Create a unique axis name
+##                 setit = 0
+##                 for i in range(97,123): # Lower-case letters
+##                     try:
+##                         newaxis = self.copyAxis(axis,axis.id+'_'+chr(i))
+##                         setit = 1
+##                         break
+##                     except DuplicateAxisError:
+##                         continue
 
-# if setit==0: raise DuplicateAxisError(DuplicateAxis+axis.id)
+##                 if setit==0: raise DuplicateAxisError(DuplicateAxis+axis.id)
 
-# axislist.append(newaxis)
+##             axislist.append(newaxis)
 
-# Create the new variable
-# datatype = cdmsNode.NumericToCdType.get(var.dtype.char)
-# newvar = self.createVariable(newname, datatype, axislist)
+##         # Create the new variable
+##         datatype = cdmsNode.NumericToCdType.get(var.dtype.char)
+##         newvar = self.createVariable(newname, datatype, axislist)
 
-# Copy variable metadata
-# for attname in var.attributes.keys():
-# if attname not in ["id", "datatype"]:
-# setattr(newvar, attname, getattr(var, attname))
+##         # Copy variable metadata
+##         for attname in var.attributes.keys():
+##             if attname not in ["id", "datatype"]:
+##                 setattr(newvar, attname, getattr(var, attname))
 
-# return newvar
+##         return newvar
 
     # Search for a pattern in a string-valued attribute. If attribute is None,
     # search all string attributes. If tag is 'cdmsFile', just check the dataset,
     # else check all nodes in the dataset of class type matching the tag. If tag
     # is None, search the dataset and all objects contained in it.
-    def searchPattern(self, pattern, attribute, tag):
+    def searchPattern(self,pattern,attribute,tag):
         """
         Search for a pattern in a string-valued attribute. If attribute is None, search all string attributes. If tag is not None, it must match the internal node tag.
         :::
@@ -1695,26 +1543,26 @@ class CdmsFile(CdmsObj, cuDataset):
         tag :: (str/None) (2) node tag
         :::
         Output:::
-        result :: (list) (0)
+        result :: (list) (0) 
         :::
         """
         resultlist = []
         if tag is not None:
-            tag = tag.lower()
-        if tag in ('cdmsFile', None, 'dataset'):
-            if self.searchone(pattern, attribute) == 1:
+            tag = string.lower(tag)
+        if tag in ('cdmsFile',None,'dataset'):
+            if self.searchone(pattern,attribute)==1:
                 resultlist = [self]
             else:
                 resultlist = []
         if tag is None:
             for dict in self.dictdict.values():
                 for obj in dict.values():
-                    if obj.searchone(pattern, attribute):
+                    if obj.searchone(pattern,attribute):
                         resultlist.append(obj)
-        elif tag not in ('cdmsFile', 'dataset'):
+        elif tag not in ('cdmsFile','dataset'):
             dict = self.dictdict[tag]
             for obj in dict.values():
-                if obj.searchone(pattern, attribute):
+                if obj.searchone(pattern,attribute):
                     resultlist.append(obj)
         return resultlist
 
@@ -1722,7 +1570,7 @@ class CdmsFile(CdmsObj, cuDataset):
     # search all string attributes. If tag is 'cdmsFile', just check the dataset,
     # else check all nodes in the dataset of class type matching the tag. If tag
     # is None, search the dataset and all objects contained in it.
-    def matchPattern(self, pattern, attribute, tag):
+    def matchPattern(self,pattern,attribute,tag):
         """
         Match for a pattern in a string-valued attribute. If attribute is None, search all string attributes. If tag is not None, it must match the internal node tag.
         :::
@@ -1732,26 +1580,26 @@ class CdmsFile(CdmsObj, cuDataset):
         tag :: (str/None) (2) node tag
         :::
         Output:::
-        result :: (list) (0)
+        result :: (list) (0) 
         :::
         """
         resultlist = []
         if tag is not None:
-            tag = tag.lower()
-        if tag in ('cdmsFile', None, 'dataset'):
-            if self.matchone(pattern, attribute) == 1:
+            tag = string.lower(tag)
+        if tag in ('cdmsFile',None,'dataset'):
+            if self.matchone(pattern,attribute)==1:
                 resultlist = [self]
             else:
                 resultlist = []
         if tag is None:
             for dict in self.dictdict.values():
                 for obj in dict.values():
-                    if obj.matchone(pattern, attribute):
+                    if obj.matchone(pattern,attribute):
                         resultlist.append(obj)
-        elif tag not in ('cdmsFile', 'dataset'):
+        elif tag not in ('cdmsFile','dataset'):
             dict = self.dictdict[tag]
             for obj in dict.values():
-                if obj.matchone(pattern, attribute):
+                if obj.matchone(pattern,attribute):
                     resultlist.append(obj)
         return resultlist
 
@@ -1762,7 +1610,7 @@ class CdmsFile(CdmsObj, cuDataset):
     # If 'variable', 'axis', etc., it is applied only to that type of object
     # in the dataset. If None, it is applied to all objects, including
     # the dataset itself.
-    def searchPredicate(self, predicate, tag):
+    def searchPredicate(self,predicate,tag):
         """
         Apply a truth-valued predicate. Return a list containing a single instance: [self] if the predicate is true and either tag is None or matches the object node tag. If the predicate returns false, return an empty list
         :::
@@ -1771,15 +1619,15 @@ class CdmsFile(CdmsObj, cuDataset):
         tag :: (str/None) (1) node tag
         :::
         Output:::
-        result :: (list) (0)
+        result :: (list) (0) 
         :::
         """
         resultlist = []
         if tag is not None:
-            tag = tag.lower()
-        if tag in ('cdmsFile', None, 'dataset'):
+            tag = string.lower(tag)
+        if tag in ('cdmsFile',None,'dataset'):
             try:
-                if predicate(*(self,)) == 1:
+                if apply(predicate,(self,))==1:
                     resultlist.append(self)
             except AttributeError:
                 pass
@@ -1787,22 +1635,21 @@ class CdmsFile(CdmsObj, cuDataset):
             for dict in self.dictdict.values():
                 for obj in dict.values():
                     try:
-                        if predicate(*(obj,)) == 1:
+                        if apply(predicate,(obj,))==1:
                             resultlist.append(obj)
                     except AttributeError:
                         pass
-        elif tag not in ('dataset', 'cdmsFile'):
+        elif tag not in ('dataset','cdmsFile'):
             dict = self.dictdict[tag]
             for obj in dict.values():
                 try:
-                    if predicate(*(obj,)) == 1:
+                    if apply(predicate,(obj,))==1:
                         resultlist.append(obj)
                 except:
                     pass
         return resultlist
 
-    def createVariableCopy(
-        self, var, id=None, attributes=None, axes=None, extbounds=None,
+    def createVariableCopy(self, var, id=None, attributes=None, axes=None, extbounds=None,
                               extend=0, fill_value=None, index=None, newname=None, grid=None):
         """Define a new variable, with the same axes and attributes as in <var>.
         This does not copy the data itself.
@@ -1838,10 +1685,10 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         """
         if newname is None:
-            newname = var.id
+            newname=var.id
         if id is not None:
             newname = id
-        if newname in self.variables:
+        if self.variables.has_key(newname):
             raise DuplicateVariable(newname)
 
         # Determine the extended axis name if any
@@ -1850,12 +1697,12 @@ class CdmsFile(CdmsObj, cuDataset):
         else:
             sourceAxislist = axes
 
-        if var.rank() == 0:      # scalars are not extensible
+        if var.rank()==0:      # scalars are not extensible
             extend = 0
-
-        if extend in (1, None):
+            
+        if extend in (1,None):
             firstAxis = sourceAxislist[0]
-            if firstAxis is not None and (extend == 1 or firstAxis.isTime()):
+            if firstAxis is not None and (extend==1 or firstAxis.isTime()):
                 extendedAxis = firstAxis.id
             else:
                 extendedAxis = None
@@ -1865,31 +1712,24 @@ class CdmsFile(CdmsObj, cuDataset):
         # Create axes if necessary
         axislist = []
         for axis in sourceAxislist:
-            if extendedAxis is None or axis.id != extendedAxis:
+            if extendedAxis is None or axis.id!=extendedAxis:
                 try:
                     newaxis = self.copyAxis(axis)
                 except DuplicateAxisError:
 
                     # Create a unique axis name
                     setit = 0
-                    for i in range(97, 123):  # Lower-case letters
+                    for i in range(97,123): # Lower-case letters
                         try:
-                            newaxis = self.copyAxis(
-                                axis,
-                                axis.id + '_' + chr(i))
+                            newaxis = self.copyAxis(axis,axis.id+'_'+chr(i))
                             setit = 1
                             break
                         except DuplicateAxisError:
                             continue
 
-                    if setit == 0:
-                        raise DuplicateAxisError(DuplicateAxis + axis.id)
+                    if setit==0: raise DuplicateAxisError(DuplicateAxis+axis.id)
             else:
-                newaxis = self.copyAxis(
-                    axis,
-                    unlimited=1,
-                    index=index,
-                    extbounds=extbounds)
+                newaxis = self.copyAxis(axis, unlimited=1, index=index, extbounds=extbounds)
 
             axislist.append(newaxis)
 
@@ -1897,29 +1737,25 @@ class CdmsFile(CdmsObj, cuDataset):
         if attributes is None:
             attributes = var.attributes
             try:
-                attributes['missing_value'] = var.missing_value
-            except Exception as err:
+                attributes['missing_value']=var.missing_value
+            except Exception,err:
                 print err
                 pass
             try:
                 if fill_value is None:
-                    if('_FillValue' in attributes.keys()):
-                        attributes['_FillValue'] = numpy.array(
-                            var._FillValue).astype(var.dtype)
-                        attributes['missing_value'] = numpy.array(
-                            var._FillValue).astype(var.dtype)
-                    if('missing_value' in attributes.keys()):
-                        attributes['_FillValue'] = numpy.array(
-                            var.missing_value).astype(var.dtype)
-                        attributes['missing_value'] = numpy.array(
-                            var.missing_value).astype(var.dtype)
+                    if( '_FillValue' in attributes.keys() ):
+                       attributes['_FillValue']=numpy.array(var._FillValue).astype(var.dtype)
+                       attributes['missing_value']=numpy.array(var._FillValue).astype(var.dtype)
+                    if( 'missing_value' in attributes.keys() ):
+                       attributes['_FillValue']=numpy.array(var.missing_value).astype(var.dtype)
+                       attributes['missing_value']=numpy.array(var.missing_value).astype(var.dtype)
                 else:
-                    attributes['_FillValue'] = fill_value
-                    attributes['missing_value'] = fill_value
+                    attributes['_FillValue']=fill_value
+                    attributes['missing_value']=fill_value
             except:
                 pass
-            if "name" in attributes:
-                if attributes['name'] != var.id:
+            if attributes.has_key("name"):
+                if attributes['name']!=var.id:
                     del(attributes['name'])
 
         # Create grid as necessary
@@ -1928,7 +1764,7 @@ class CdmsFile(CdmsObj, cuDataset):
         if grid is not None:
             coords = grid.writeToFile(self)
             if coords is not None:
-                coordattr = "%s %s" % (coords[0].id, coords[1].id)
+                coordattr = "%s %s"%(coords[0].id, coords[1].id)
                 if attributes is None:
                     attributes = {'coordinates': coordattr}
                 else:
@@ -1937,19 +1773,19 @@ class CdmsFile(CdmsObj, cuDataset):
         # Create the new variable
         datatype = cdmsNode.NumericToCdType.get(var.typecode())
         newvar = self.createVariable(newname, datatype, axislist)
-        for attname, attval in attributes.items():
+        for attname,attval in attributes.items():
             if attname not in ["id", "datatype", "parent"]:
                 setattr(newvar, attname, attval)
                 if (attname == "_FillValue") or (attname == "missing_value"):
-                    setattr(newvar, "_FillValue", attval)
-                    setattr(newvar, "missing_value", attval)
+                   setattr(newvar, "_FillValue", attval)
+                   setattr(newvar, "missing_value", attval)
 
         if fill_value is not None:
             newvar.setMissing(fill_value)
 
         return newvar
 
-    def write(self, var, attributes=None, axes=None, extbounds=None, id=None,
+    def write(self, var, attributes=None, axes=None, extbounds=None, id=None, \
               extend=None, fill_value=None, index=None, typecode=None, dtype=None, pack=False):
         """Write var to the file. If the variable is not yet defined in the file,
         a definition is created. By default, the time dimension of the variable is defined as the
@@ -1990,17 +1826,15 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         """
         if _showCompressWarnings:
-            if (Cdunif.CdunifGetNCFLAGS("shuffle") != 0) or (Cdunif.CdunifGetNCFLAGS("deflate") != 0) or (Cdunif.CdunifGetNCFLAGS("deflate_level") != 0):
+            if  (Cdunif.CdunifGetNCFLAGS("shuffle")!=0) or (Cdunif.CdunifGetNCFLAGS("deflate")!=0) or (Cdunif.CdunifGetNCFLAGS("deflate_level")!=0):
                 import warnings
-                warnings.warn(
-                    "Files are written with compression and shuffling\nYou can query different values of compression using the functions:\ncdms2.getNetcdfShuffleFlag() returning 1 if shuffling is enabled, 0 otherwise\ncdms2.getNetcdfDeflateFlag() returning 1 if deflate is used, 0 otherwise\ncdms2.getNetcdfDeflateLevelFlag() returning the level of compression for the deflate method\n\nIf you want to turn that off or set different values of compression use the functions:\nvalue = 0\ncdms2.setNetcdfShuffleFlag(value) ## where value is either 0 or 1\ncdms2.setNetcdfDeflateFlag(value) ## where value is either 0 or 1\ncdms2.setNetcdfDeflateLevelFlag(value) ## where value is a integer between 0 and 9 included\n\nTurning all values to 0 will produce NetCDF3 Classic files\nTo Force NetCDF4 output with classic format and no compressing use:\ncdms2.setNetcdf4Flag(1)\nNetCDF4 file with no shuffling or defalte and noclassic will be open for parallel i/o",
-                    Warning)
-
+                warnings.warn("Files are written with compression and shuffling\nYou can query different values of compression using the functions:\ncdms2.getNetcdfShuffleFlag() returning 1 if shuffling is enabled, 0 otherwise\ncdms2.getNetcdfDeflateFlag() returning 1 if deflate is used, 0 otherwise\ncdms2.getNetcdfDeflateLevelFlag() returning the level of compression for the deflate method\n\nIf you want to turn that off or set different values of compression use the functions:\nvalue = 0\ncdms2.setNetcdfShuffleFlag(value) ## where value is either 0 or 1\ncdms2.setNetcdfDeflateFlag(value) ## where value is either 0 or 1\ncdms2.setNetcdfDeflateLevelFlag(value) ## where value is a integer between 0 and 9 included\n\nTurning all values to 0 will produce NetCDF3 Classic files\nTo Force NetCDF4 output with classic format and no compressing use:\ncdms2.setNetcdf4Flag(1)\nNetCDF4 file with no shuffling or defalte and noclassic will be open for parallel i/o",Warning)
+                
         # Make var an AbstractVariable
         if dtype is None and typecode is not None:
             dtype = typeconv.convtypecode2(typecode)
         typecode = dtype
-        if typecode is not None and var.dtype.char != typecode:
+        if typecode is not None and var.dtype.char!=typecode:
             var = var.astype(typecode)
         var = asVariable(var, writeable=0)
 
@@ -2009,26 +1843,25 @@ class CdmsFile(CdmsObj, cuDataset):
             varid = var.id
         else:
             varid = id
-        if varid in self.variables:
+        if self.variables.has_key(varid):
             if pack:
-                raise CDMSError(
-                    "You cannot pack an existing variable %s " %
-                    varid)
+              raise CDMSError("You cannot pack an existing variable %s " % varid)
             v = self.variables[varid]
         else:
-            if pack is not False:
-                typ = numpy.int16
-                n = 16
-            else:
-                typ = var.dtype
-            v = self.createVariableCopy(
-                var.astype(typ), attributes=attributes, axes=axes, extbounds=extbounds,
-                id=varid, extend=extend, fill_value=fill_value, index=index)
+          if pack is not False:
+              typ = numpy.int16
+              n = 16
+          else:
+            typ = var.dtype
+          v = self.createVariableCopy(var.astype(typ), attributes=attributes, axes=axes, extbounds=extbounds,
+              id=varid, extend=extend, fill_value=fill_value, index=index)
+
+
 
         # If var has typecode numpy.int, and v is created from var, then v will have
         # typecode numpy.int32. (This is a Cdunif 'feature'). This causes a downcast error
         # for numpy versions 23+, so make the downcast explicit.
-        if var.typecode() == numpy.int and v.typecode() == numpy.int32 and pack is False:
+        if var.typecode()==numpy.int and v.typecode()==numpy.int32 and pack is False:
             var = var.astype(numpy.int32)
 
         # Write
@@ -2038,22 +1871,21 @@ class CdmsFile(CdmsObj, cuDataset):
             sourceAxislist = axes
 
         vrank = var.rank()
-        if vrank == 0:      # scalars are not extensible
+        if vrank==0:      # scalars are not extensible
             extend = 0
         else:
             vec1 = sourceAxislist[0]
-
-        if extend == 0 or (extend is None and not vec1.isTime()):
-            if vrank > 0:
+            
+        if extend==0 or (extend is None and not vec1.isTime()):
+            if vrank>0:
                 if pack is not False:
-                    v[:] = numpy.zeros(var.shape, typ)
+                  v[:] = numpy.zeros(var.shape,typ)
                 else:
-                    v[:] = var.astype(v.dtype)
+                  v[:] = var.astype(v.dtype)
             else:
                 v.assignValue(var.getValue())
         else:
-            # Determine if the first dimension of var overlaps the first
-            # dimension of v
+            # Determine if the first dimension of var overlaps the first dimension of v
             vec2 = v.getAxis(0)
             if extbounds is None:
                 bounds1 = vec1.getBounds()
@@ -2063,51 +1895,47 @@ class CdmsFile(CdmsObj, cuDataset):
                 isoverlap, index = isOverlapVector(vec1[:], vec2[:])
             else:
                 isoverlap = 1
-            if isoverlap == 1:
-                v[index:index + len(vec1)] = var.astype(v.dtype)
-                vec2[index:index + len(vec1)] = vec1[:].astype(vec2[:].dtype)
+            if isoverlap==1:
+                v[index:index+len(vec1)] = var.astype(v.dtype)
+                vec2[index:index+len(vec1)] = vec1[:].astype(vec2[:].dtype)
                 if bounds1 is not None:
                     vec2.setBounds(bounds1, persistent=1, index=index)
             else:
-                raise CDMSError(
-                    'Cannot write variable %s: the values of dimension %s=%s, do not overlap the extended dimension %s values: %s' %
-                    (varid, vec1.id, repr(vec1[:]), vec2.id, repr(vec2[:])))
+                raise CDMSError('Cannot write variable %s: the values of dimension %s=%s, do not overlap the extended dimension %s values: %s'%(varid, vec1.id,`vec1[:]`,vec2.id,`vec2[:]`))
 
-        # pack implementation source:
-        # https://www.unidata.ucar.edu/software/netcdf/docs/BestPractices.html
+        # pack implementation source: https://www.unidata.ucar.edu/software/netcdf/docs/BestPractices.html
         if pack:
-            M = var.max()
-            m = var.min()
-            scale_factor = (M - m) / (pow(2, n) - 2)
-            add_offset = (M + m) / 2.
-            missing = -pow(2, n - 1)
-            v.setMissing(-pow(2, n - 1))
-            scale_factor = scale_factor.astype(var.dtype)
-            add_offset = add_offset.astype(var.dtype)
-            tmp = (var - add_offset) / scale_factor
-            tmp = numpy.round(tmp)
-            tmp = tmp.astype(typ)
-            v[:] = tmp.filled()
-            v.scale_factor = scale_factor.astype(var.dtype)
-            v.add_offset = add_offset.astype(var.dtype)
-            if not hasattr(var, "valid_min"):
-                v.valid_min = m.astype(var.dtype)
-            if not hasattr(var, "valid_max"):
-                v.valid_max = M.astype(var.dtype)
+          M = var.max()
+          m = var.min()
+          scale_factor = (M-m)/(pow(2,n)-2)
+          add_offset = (M+m)/2.
+          missing = -pow(2,n-1)
+          v.setMissing(-pow(2,n-1))
+          scale_factor = scale_factor.astype(var.dtype)
+          add_offset = add_offset.astype(var.dtype)
+          tmp = (var-add_offset)/scale_factor
+          tmp= numpy.round(tmp)
+          tmp=tmp.astype(typ)
+          v[:] = tmp.filled()
+          v.scale_factor = scale_factor.astype(var.dtype)
+          v.add_offset = add_offset.astype(var.dtype)
+          if not hasattr(var,"valid_min"):
+            v.valid_min = m.astype(var.dtype)
+          if not hasattr(var,"valid_max"):
+            v.valid_max = M.astype(var.dtype)
         return v
 
-    def write_it_yourself(self, obj):
+    def write_it_yourself( self, obj ):
         """Tell obj to write itself to self (already open for writing), using its
         writeg method (AbstractCurveGrid has such a method, for example).  If no
         such method be available, writeToFile will be used.  If that is not
         available, then self.write(obj) will be called to try to write obj as
         a variable."""
-        # This method was formerly called writeg and just wrote an
-        # AbstractCurveGrid.
-        if (hasattr(obj, 'writeg') and callable(getattr(obj, 'writeg'))):
-            obj.writeg(self)
-        elif (hasattr(obj, 'writeToFile') and callable(getattr(obj, 'writeToFile'))):
-            obj.writeToFile(self)
+        # This method was formerly called writeg and just wrote an AbstractCurveGrid.
+        if ( hasattr(obj,'writeg') and callable(getattr(obj,'writeg')) ):
+            obj.writeg( self )
+        elif ( hasattr(obj,'writeToFile') and callable(getattr(obj,'writeToFile')) ):
+            obj.writeToFile( self )
         else:
             self.write(obj)
 
@@ -2120,7 +1948,7 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         Output:::
         variable :: (cdms2.fvariable.FileVariable/None) (0) file variable
-        :::
+        :::        
         """
         return self.variables.get(id)
 
@@ -2133,12 +1961,11 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         Output:::
         variables :: ([cdms2.fvariable.FileVariable]) (0) file variables
-        :::
+        :::        
 """
         retval = self.variables.values()
         if spatial:
-            retval = filter(lambda x: x.id[0:7] != "bounds_" and x.id[0:8] != "weights_" and (
-                (x.getLatitude() is not None) or (x.getLongitude() is not None) or (x.getLevel() is not None)), retval)
+            retval = filter(lambda x: x.id[0:7]!="bounds_" and x.id[0:8]!="weights_" and ((x.getLatitude() is not None) or (x.getLongitude() is not None) or (x.getLevel() is not None)), retval)
         return retval
 
     def getAxis(self, id):
@@ -2149,7 +1976,7 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         Output:::
         axis :: (cdms2.axis.FileAxis/None) (0) file axis
-        :::
+        :::        
         """
         return self.axes.get(id)
 
@@ -2162,11 +1989,11 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         Output:::
         grid :: (cdms2.hgrid.FileCurveGrid/cdms2.gengrid.FileGenericGrid/cdms2.grid.FileRectGrid/None) (0) file axis
-        :::
+        :::        
         """
         return self.grids.get(id)
 
-    def getBoundsAxis(self, n, boundid=None):
+    def getBoundsAxis(self, n,boundid=None):
         """Get a bounds axis of length n. Create the bounds axis if necessary.
         :::
         Input:::
@@ -2177,31 +2004,30 @@ class CdmsFile(CdmsObj, cuDataset):
         :::
         """
         if boundid is None:
-            if n == 2:
+            if n==2:
                 boundid = "bound"
             else:
-                boundid = "bound_%d" % n
-
-        if boundid in self.axes:
+                boundid = "bound_%d"%n
+            
+        if self.axes.has_key(boundid):
             boundaxis = self.axes[boundid]
         else:
             boundaxis = self.createVirtualAxis(boundid, n)
         return boundaxis
 
     def __repr__(self):
-        filerep = repr(self._file_)
-        loc = filerep.find("file")
-        if loc == -1:
-            loc = 0
-        return "<CDMS " + filerep[loc:-1] + ", status: %s>" % self._status_
-
-# internattr.add_internal_attribute (CdmsFile, 'datapath',
-# 'variables',
-# 'axes',
-# 'grids',
-# 'xlinks',
-# 'dictdict',
-# 'default_variable_name',
-# 'id',
-# 'parent',
-# 'mode')
+        filerep = `self._file_`
+        loc = string.find(filerep,"file")
+        if loc==-1: loc=0
+        return "<CDMS "+filerep[loc:-1]+", status: %s>"%self._status_
+
+## internattr.add_internal_attribute (CdmsFile, 'datapath',
+##                                             'variables',
+##                                             'axes',
+##                                             'grids',
+##                                             'xlinks',
+##                                             'dictdict',
+##                                             'default_variable_name',
+##                                             'id',
+##                                             'parent',
+##                                             'mode')
diff --git a/Packages/cdms2/Lib/error.py b/Packages/cdms2/Lib/error.py
index a8e3091b7..df73aa688 100644
--- a/Packages/cdms2/Lib/error.py
+++ b/Packages/cdms2/Lib/error.py
@@ -1,7 +1,5 @@
 "Error object for cdms module"
 
-
 class CDMSError (Exception):
-
-    def __init__(self, args="Unspecified error from package cdms"):
+    def __init__ (self, args="Unspecified error from package cdms"):
         self.args = (args,)
diff --git a/Packages/cdms2/Lib/forecast.py b/Packages/cdms2/Lib/forecast.py
index f3a6a6cd6..0c08ab922 100644
--- a/Packages/cdms2/Lib/forecast.py
+++ b/Packages/cdms2/Lib/forecast.py
@@ -7,58 +7,54 @@ import cdms2
 import copy
 from cdms2 import CDMSError
 
-
-def two_times_from_one(t):
+def two_times_from_one( t ):
     """Input is a time representation, either as the long int used in the cdscan
     script, or a string in the format "2010-08-25 15:26:00", or as a cdtime comptime
     (component time) object.
     Output is the same time, both as a long _and_ as a comptime."""
-    if t == 0:
-        t = 0
-    if isinstance(t, str):
+    if t==0:
+        t = 0L
+    if isinstance(t,str):
         t = cdtime.s2c(t)
-    if (isinstance(t, long) or isinstance(t, int)) and t > 1000000000:
+    if (isinstance(t,long) or isinstance(t,int)) and t>1000000000L :
         tl = t
-        year = tl / 1000000000
-        rem = tl % 1000000000
-        month = rem / 10000000
-        rem = rem % 10000000
-        day = rem / 100000
+        year = tl / 1000000000L
+        rem =  tl % 1000000000L
+        month = rem / 10000000L
+        rem =   rem % 10000000L
+        day =     rem / 100000
         allsecs = rem % 100000
-        sec = allsecs % 60
-        allmins = allsecs / 60
-        min = allmins % 60
-        hour = allmins / 60
-        tc = cdtime.comptime(year, month, day, hour, min, sec)
+        sec =     allsecs%60
+        allmins = allsecs/60
+        min =  allmins%60
+        hour = allmins/60
+        tc = cdtime.comptime(year,month,day,hour,min,sec)
     else:
         # I'd like to check that t is type comptime, but although Python
         # prints the type as <type 'comptime'> it won't recognize as a type
         # comptime or anything similar.  Note that cdtime.comptime is a C
         # function available from Python.
         tc = t
-        tl = tc.year * 1000000000
-        tl += tc.month * 10000000
-        tl += tc.day * 100000
+        tl = tc.year * 1000000000L
+        tl += tc.month * 10000000L
+        tl += tc.day   * 100000
         tl += tc.hour * 3600
-        tl += tc.minute * 60
+        tl += tc.minute *60
         tl += tc.second.__int__()
-    return tl, tc
-
+    return tl,tc
 
-def comptime(t):
+def comptime( t ):
     """Input is a time representation, either as the long int used in the cdscan
     script, or a string in the format "2010-08-25 15:26:00", or as a cdtime comptime
     (component time) object.
     Output is the same time a cdtime.comptime (component time)."""
-    tl, tc = two_times_from_one(t)
+    tl,tc = two_times_from_one( t )
     return tc
 
-
 class forecast():
-
     """represents a forecast starting at a single time"""
 
-    def __init__(self, tau0time, dataset_list, path="."):
+    def __init__( self, tau0time, dataset_list, path="." ):
         """tau0time is the first time of the forecast, i.e. the time at which tau=0.
         dataset_list is used to get the forecast file from the forecast time.
         Each list item should look like this example:
@@ -69,33 +65,33 @@ class forecast():
         N.B.  This is like a CdmsFile.  Creating a forecast means opening a file,
         so later on you should call forecast.close() to close it.
         """
-        self.fctl, self.fct = two_times_from_one(tau0time)
+        self.fctl, self.fct = two_times_from_one( tau0time )
 
-        filenames = [l[5] for l in dataset_list if l[4] == self.fctl]
-        if len(filenames) > 0:
+        filenames = [ l[5] for l in dataset_list if l[4]==self.fctl ]
+        if len(filenames)>0:
             filename = filenames[0]
         else:
-            raise CDMSError("Cannot find filename for forecast %d" % self.fctl)
+            raise CDMSError, "Cannot find filename for forecast %d"%self.fctl
         self.filename = path + '/' + filename
-        self.file = cdms2.open(self.filename)
+        self.file = cdms2.open( self.filename )
 
-    def close(self):
+    def close( self ):
         self.file.close()
 
-    def __call__(self, varname):
+    def __call__( self, varname ):
         """Reads the specified variable from this forecast's file."""
         return self.file(varname)
 
-    def __getitem__(self, varname):
+    def __getitem__( self, varname ):
         """Reads variable attributes from this forecast's file."""
         return self.file.__getitem__(varname)
 
     def __repr__(self):
-        return "<forecast from %s>" % (self.fct)
+        return "<forecast from %s>"%(self.fct)
     __str__ = __repr__
 
 
-def available_forecasts(dataset_file, path="."):
+def available_forecasts( dataset_file, path="." ):
     """Returns a list of forecasts (as their generating times) which are
     available through the specified cdscan-generated dataset xml file.
     The forecasts are given in 64-bit integer format, but can be converted
@@ -103,18 +99,16 @@ def available_forecasts(dataset_file, path="."):
     This function may help in choosing the right arguments for initializing
     a "forecasts" (forecast set) object.
     """
-    dataset = cdms2.openDataset(dataset_file, dpath=path)
-    fm = cdms2.dataset.parseFileMap(dataset.cdms_filemap)
-    alltimesl = [f[4] for f in fm[0][1]]  # 64-bit (long) integers
+    dataset=cdms2.openDataset( dataset_file, dpath=path )
+    fm=cdms2.dataset.parseFileMap(dataset.cdms_filemap)
+    alltimesl =[ f[4] for f in fm[0][1] ]  # 64-bit (long) integers
     dataset.close()
     return alltimesl
 
-
 class forecasts():
-
     """represents a set of forecasts"""
 
-    def __init__(self, dataset_file, forecast_times, path="."):
+    def __init__( self, dataset_file, forecast_times, path="." ):
         """Creates a set of forecasts.  Normally you do it by something like
         f = forecasts( 'file.xml', (min_time, max_time) )
         or
@@ -123,9 +117,9 @@ class forecasts():
         f = forecasts( 'file.xml', [ time1, time2, time3, time4, time5 ] )
 
         where the two or three arguments are::
-
+        
         1. the name of a dataset xml file generated by "cdscan --forecast ..."
-
+        
         2. Times here are the times when the forecasts began (tau=0, aka reference time).
         (i) If you use a 2-item tuple, forecasts will be chosen which start at a time
         t between the min and max times, e.g. min_time <= t < max_time .
@@ -144,78 +138,72 @@ class forecasts():
         2006012300000 for the first second of January 23, 2006, or as
         component times (comptime) in the cdtime module, or as
         a string in the format "2010-08-25 15:26:00".
-
+        
         3. An optional path for the data files; use this if the xml file
         contains filenames without complete paths.
-
+         
         As for the forecast class, this opens files when initiated, so when you
         are finished with the forecasts, you should close the files by calling
         forecasts.close() .
         """
 
         # Create dataset_list to get a forecast file from each forecast time.
-        self.dataset = cdms2.openDataset(dataset_file, dpath=path)
-        fm = cdms2.dataset.parseFileMap(self.dataset.cdms_filemap)
-        self.alltimesl = [f[4] for f in fm[0][1]]  # 64-bit (long) integers
+        self.dataset=cdms2.openDataset( dataset_file, dpath=path )
+        fm=cdms2.dataset.parseFileMap(self.dataset.cdms_filemap)
+        self.alltimesl =[ f[4] for f in fm[0][1] ]  # 64-bit (long) integers
         dataset_list = fm[0][1]
         for f in fm[1:]:
             dataset_list.extend(f[1])
 
-        mytimesl = self.forecast_times_to_list(forecast_times)
+        mytimesl = self.forecast_times_to_list( forecast_times )
         if mytimesl == []:
-            raise CDMSError(
-                "bad forecast_times argument to forecasts.__init__")
-        self.fcs = [forecast(t, dataset_list, path) for t in mytimesl]
+            raise CDMSError, "bad forecast_times argument to forecasts.__init__"
+        self.fcs = [ forecast( t, dataset_list, path ) for t in mytimesl ]
 
-    def forecast_times_to_list(self, forecast_times):
+    def forecast_times_to_list( self, forecast_times ):
         """For internal list, translates a "forecast_times" argument of __init__ or
         other methods, into a list of times."""
-        if isinstance(forecast_times, tuple):
-            if len(forecast_times) <= 2:
+        if type(forecast_times) is tuple:
+            if len(forecast_times)<=2:
                 openclosed = 'co'
             else:
                 openclosed = forecast_times[2]
-            mytimesl = self.time_interval_to_list(
-                forecast_times[
-                    0],
-                forecast_times[
-                    1],
-                openclosed)
+            mytimesl = self.time_interval_to_list( forecast_times[0], forecast_times[1], openclosed )
             return mytimesl
-        elif isinstance(forecast_times, list):
+        elif type(forecast_times) is list:
             return forecast_times
         elif forecast_times == 'All':
             return self.alltimesl
         else:
             return []
 
-    def time_interval_to_list(self, tlo, thi, openclosed='co'):
+    def time_interval_to_list( self, tlo, thi, openclosed='co' ):
         """For internal use, translates a time interval to a list of times.
         """
-        if not isinstance(tlo, long):  # make tlo a long integer
-            tlo, tdummy = two_times_from_one(tlo)
-        if not isinstance(thi, long):  # make thi a long integer
-            thi, tdummy = two_times_from_one(thi)
+        if type(tlo) is not long:  # make tlo a long integer
+            tlo, tdummy = two_times_from_one( tlo )
+        if type(thi) is not long:  # make thi a long integer
+            thi, tdummy = two_times_from_one( thi )
         oclo = openclosed[0]
         ochi = openclosed[1]
-        if oclo == 'c':
-            mytimesl = [t for t in self.alltimesl if t >= tlo]
+        if oclo=='c':
+            mytimesl = [ t for t in self.alltimesl if t>=tlo ]
         else:
-            mytimesl = [t for t in self.alltimesl if t > tlo]
-        if ochi == 'c':
-            mytimesl = [t for t in mytimesl if t <= thi]
+            mytimesl = [ t for t in self.alltimesl if t>tlo ]
+        if ochi=='c':
+            mytimesl = [ t for t in mytimesl if t<=thi ]
         else:
-            mytimesl = [t for t in mytimesl if t < thi]
+            mytimesl = [ t for t in mytimesl if t<thi ]
         return mytimesl
 
-    def reduce_inplace(self, min_time, max_time, openclosed='co'):
+    def reduce_inplace( self, min_time, max_time, openclosed='co' ):
         """ For a forecasts object f, f( min_time, max_time ) will reduce the
         scope of f, to forecasts whose start time t has min_time<=t<max_time.
         This is done in place, i.e. any other forecasts in f will be discarded.
         If slice notation were possible for forecasts (it's not because we need
         too many bits to represent time), this function would do the same as
         f = f[min_time : max_time ]
-
+        
         The optional openclosed argument lets you specify the treatment of
         the endpoints min_time, max_time.  The first character should be 'c' if you want
         to include min_time in the new scope of f, or 'o' to exclude it.  Similarly,
@@ -227,15 +215,15 @@ class forecasts():
 
         Times can be the usual long integers, strings, or cdtime component times.
         """
-        mytimesl = self.time_interval_to_list(min_time, max_time, openclosed)
-        self.fcs = [f for f in self.fcs if (f.fctl in mytimesl)]
+        mytimesl = self.time_interval_to_list( min_time, max_time, openclosed )
+        self.fcs = [ f for f in self.fcs if ( f.fctl in mytimesl ) ]
 
-    def close(self):
+    def close( self ):
         self.dataset.close()
         for fc in self.fcs:
             fc.close()
 
-    def __call__(self, varname, forecast_times='All'):
+    def __call__( self, varname, forecast_times='All' ):
         """Reads the specified variable for all the specified forecasts.
         Creates and returns a new variable which is dimensioned by forecast
         as well as the original variable's dimensions.
@@ -259,58 +247,55 @@ class forecasts():
         if forecast_times == 'All':
             varfcs = self.fcs
         else:
-            mytimesl = self.forecast_times_to_list(forecast_times)
-            varfcs = [f for f in self.fcs if (f.fctl in mytimesl)]
-        vars = [fc(varname) for fc in varfcs]
+            mytimesl = self.forecast_times_to_list( forecast_times )
+            varfcs = [ f for f in self.fcs if ( f.fctl in mytimesl ) ]
+        vars = [ fc(varname) for fc in varfcs ]
 
         # Create the variable from the data, with mask:
         v0 = vars[0]
-        a = numpy.asarray([v.data for v in vars])
+        a = numpy.asarray([ v.data for v in vars ])
         if v0._mask == False:
             m = False
-            v = cdms2.tvariable.TransientVariable(a)
+            v = cdms2.tvariable.TransientVariable( a )
         else:
-            m = numpy.asarray([v._mask for v in vars])
-            v = cdms2.tvariable.TransientVariable(
-                a, mask=m, fill_value=v0._fill_value)
+            m = numpy.asarray([ v._mask for v in vars])
+            v = cdms2.tvariable.TransientVariable( a, mask=m, fill_value=v0._fill_value )
 
         # Domain-related attributes:
             # We get the tomain from __getitem__ to make sure that fcs[var] is consistent
             # with fcs(var)
-        fvd = self.__getitem__(varname, varfcs).domain
+        fvd = self.__getitem__(varname,varfcs).domain
         v._TransientVariable__domain = fvd
         # former domain code, not using __getitem:
         # ltvd = len(v0._TransientVariable__domain)
         # v._TransientVariable__domain[1:ltvd+1] = v0._TransientVariable__domain[0:ltvd]
-        # v._TransientVariable__domain[0] = self.forecast_axis( varname, varfcs
-        # )
-        if hasattr(v0, 'coordinates'):
+        # v._TransientVariable__domain[0] = self.forecast_axis( varname, varfcs )
+        if hasattr( v0, 'coordinates' ):
             v.coordinates = 'iforecast ' + v0.coordinates
 
         # Other attributes, all those for which I've seen nontrivial values in a
         # real example (btw, the _isfield one was wrong!) :
         # It would be better to do a list comprehension over v0.attribures.keys(),
-        # if I could be sure that that wouldn't transfer something
-        # inappropriate.
-        if hasattr(v0, 'id'):
+        # if I could be sure that that wouldn't transfer something inappropriate.
+        if hasattr( v0, 'id' ):
             v.id = v0.id
-        if hasattr(v0, 'long_name'):
+        if hasattr( v0, 'long_name' ):
             v.long_name = v0.long_name
-        if hasattr(v0, 'standard_name'):
+        if hasattr( v0, 'standard_name' ):
             v.standard_name = v0.standard_name
-        if hasattr(v0, 'base_name'):
+        if hasattr( v0, 'base_name' ):
             v.base_name = v0.base_name
-        if hasattr(v0, 'units'):
+        if hasattr( v0, 'units' ):
             v.units = v0.units
-        if hasattr(v0, '_isfield'):
+        if hasattr( v0, '_isfield' ):
             v._isfield = v0._isfield
         return v
 
-    def forecast_axis(self, varname, fcss=None):
+    def forecast_axis( self, varname, fcss=None ):
         """returns a tuple (axis,start,length,true_length) where axis is in the
         forecast direction.  If a list of forecasts be specified, the axis' data will
         be limited to them."""
-        if fcss is None:
+        if fcss==None:
             fcss = self.fcs
         axis = None
         domitem1 = None
@@ -319,7 +304,7 @@ class forecasts():
 
         var = self.dataset[varname]
         # ... var is a DatasetVariable, used here just for two of its domain's axes
-        dom = copy.deepcopy(getattr(var, 'domain', []))
+        dom = copy.deepcopy(getattr(var,'domain',[]))
         # ...this 'domain' attribute has an element with an axis, etc.
         # representing all forecasts; so we want to cut it down to match
         # those forecasts in fcss.
@@ -330,33 +315,33 @@ class forecasts():
             # axis is a axis.Axis and the rest of the tuple is int's.
             # I don't know what true_length is, but it doesn't seem to get used
             # anywhere, and is normally the same as length.
-            if getattr(domitem[0], 'id', None) == 'fctau0':
+            if getattr(domitem[0],'id',None)=='fctau0':
                 # Force the axis to match fcss :
                 # More precisely the long int times fcss[i].fctl should match
-                # the axis data. The axis partition and .length need changing
-                # too.
+                # the axis data. The axis partition and .length need changing too.
                 domitem1 = 0
                 domitem2 = len(fcss)
                 domitem3 = len(fcss)
                 axis = copy.copy(domitem[0])
-                axis._data_ = [f.fctl for f in fcss]
+                axis._data_ = [ f.fctl for f in fcss ]
                 axis.length = len(axis._data_)
                 axis.partition = axis.partition[0:axis.length]
                 axis.axis = 'F'
                 axis.standard_name = 'forecast_reference_time'
                 timeaxis = var.getTime()
-                if not hasattr(axis, 'calendar') and timeaxis:
+                if not hasattr(axis,'calendar') and timeaxis:
                     axis.calendar = timeaxis.calendar
+                
+        return ( axis, domitem1, domitem2, domitem3 )
 
-        return (axis, domitem1, domitem2, domitem3)
 
-    def __getitem__(self, varname, fccs=None):
+    def __getitem__( self, varname, fccs=None ):
         """returns whatever the forecast set has that matches the given
         attribute, normally a DatasetVariable.  The optional argument fccs
         is a list of forecasts to be passed on to forecast_axis().
         """
-        if not isinstance(varname, str):
-            raise CDMSError("bad argument to forecasts[]")
+        if type(varname) is not str :
+            raise CDMSError, "bad argument to forecasts[]"
 
         var = self.dataset[varname]
         # var is a DatasetVariable and consists of lots of attributes.
@@ -364,19 +349,20 @@ class forecasts():
         # The attribute which needs to be changed is 'domain' - it will normally
         # have an element with an axis, etc. representing all forecasts; so we
         # want to cut it down to match those forecasts in self.fcs.
-        dom = copy.deepcopy(getattr(var, 'domain', []))
+        dom = copy.deepcopy(getattr(var,'domain',[]))
         for i in range(len(dom)):
             domitem = dom[i]
-            if getattr(domitem[0], 'id', None) == 'fctau0':
-                dom[i] = self.forecast_axis(varname, fccs)
-        setattr(var, 'domain', dom)
-
+            if getattr(domitem[0],'id',None)=='fctau0':
+                dom[i] = self.forecast_axis(varname,fccs)
+        setattr(var,'domain',dom)
+                
         return var
 
     def __repr__(self):
         l = len(self.fcs)
-        if l == 0:
+        if l==0:
             return "<forecasts - None>"
         else:
-            return "<forecasts from %s,...,%s>" % (self.fcs[0].fct, self.fcs[l - 1].fct)
+            return "<forecasts from %s,...,%s>"%(self.fcs[0].fct,self.fcs[l-1].fct)
     __str__ = __repr__
+
diff --git a/Packages/cdms2/Lib/fvariable.py b/Packages/cdms2/Lib/fvariable.py
index 2f41df8a6..4271e12c6 100644
--- a/Packages/cdms2/Lib/fvariable.py
+++ b/Packages/cdms2/Lib/fvariable.py
@@ -1,35 +1,33 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 "CDMS File-based variables."
 import numpy
-from . import typeconv
+import typeconv
+import types
 import re
 
-from .cdmsobj import Max32int
-from .variable import DatasetVariable
-from .error import CDMSError
-from .sliceut import reverseSlice
-from .avariable import AbstractVariable
+from cdmsobj import Max32int
+from variable import DatasetVariable
+from error import CDMSError
+from sliceut import reverseSlice
+from avariable import AbstractVariable
 from cdms2 import Cdunif
 from Cdunif import CdunifError
 
 FileClosed = "Cannot read from closed file, variable: "
 FileClosedWrite = "Cannot write to a closed file, variable: "
 
-
 class FileVariable(DatasetVariable):
-
     "A variable in a single file."
-
-    def __init__(self, parent, varname, cdunifobj=None):
+    def __init__(self,parent,varname,cdunifobj=None):
         DatasetVariable.__init__(self, parent, varname)
         self._obj_ = cdunifobj
         if cdunifobj is not None:
             for attname, attval in cdunifobj.__dict__.items():
                 self.__dict__[attname] = attval
-                self.attributes[attname] = attval
-        val = self.__cdms_internals__ + ['name_in_file', ]
+                self.attributes[attname]=attval
+        val = self.__cdms_internals__+['name_in_file',]
         self.___cdms_internals__ = val
 
     # Initialize the domain
@@ -41,7 +39,7 @@ class FileVariable(DatasetVariable):
             start = 0
             length = len(axis)
             truelen = length
-            self.domain.append((axis, start, length, truelen))
+            self.domain.append((axis,start,length,truelen))
 
     def typecode(self):
         # Compatibility: convert to new typecode
@@ -49,44 +47,43 @@ class FileVariable(DatasetVariable):
         tc = typeconv.convtypecode2(tc).char
         return tc
 
-    def assignValue(self, data):
+    def assignValue(self,data):
         if self.parent is None:
-            raise CDMSError(FileClosedWrite + self.id)
+            raise CDMSError, FileClosedWrite+self.id
         if numpy.ma.isMaskedArray(data):
-            if data.mask is not numpy.ma.nomask and not numpy.ma.allclose(data.mask, 0):
-                saveFill = data.fill_value
-                if self.getMissing() is None:
-                    self.setMissing(saveFill)
-                else:
-                    data.set_fill_value(self.getMissing())
+          if data.mask is not numpy.ma.nomask and not numpy.ma.allclose(data.mask,0):
+            saveFill = data.fill_value
+            if self.getMissing() is None:
+                self.setMissing(saveFill)
+            else:
+                data.set_fill_value(self.getMissing())
         self._obj_.assignValue(numpy.ma.filled(data))
         if numpy.ma.isMaskedArray(data):
-            if data.mask is not numpy.ma.nomask and not numpy.ma.allclose(data.mask, 0):
-                data.set_fill_value(saveFill)
+          if data.mask is not numpy.ma.nomask and not numpy.ma.allclose(data.mask,0):
+            data.set_fill_value(saveFill)
 
-    def expertSlice(self, initslicelist):
+    def expertSlice (self, initslicelist):
         # Handle negative slices
-        revlist = []
-            # Slices to apply to result if reversals needed
+        revlist = []                    # Slices to apply to result if reversals needed
         slist = []                      # Slices with positive strides
         haveReversals = 0               # True iff result array needs reversing
-        i = 0
+        i=0
         for s in initslicelist:
-            if s.step < 0:
+            if s.step<0:
                 axislen = self.shape[i]
-                slist.append(reverseSlice(s, axislen))
-                revlist.append(slice(None, None, -1))
+                slist.append(reverseSlice(s,axislen))
+                revlist.append(slice(None,None,-1))
                 haveReversals = 1
             else:
                 slist.append(s)
-                revlist.append(slice(None, None, 1))
+                revlist.append(slice(None,None,1))
             i += 1
 
         if self.parent is None:
-            raise CDMSError(FileClosed + self.id)
+            raise CDMSError, FileClosed+self.id
         if self.rank() == 0:
             return self._obj_.getValue()
-        result = self._obj_.getitem(*slist)
+        result = apply(self._obj_.getitem,slist)
 
         # If slices with negative strides were input, apply the appropriate
         # reversals.
@@ -97,43 +94,43 @@ class FileVariable(DatasetVariable):
 
     def __setitem__(self, index, value):
         if self.parent is None:
-            raise CDMSError(FileClosedWrite + self.id)
+            raise CDMSError, FileClosedWrite+self.id
         if numpy.ma.isMaskedArray(value):
-            if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask, 0):
-                saveFill = value.fill_value
-                if self.getMissing() is None:
-                    self.setMissing(saveFill)
-                else:
-                    value.set_fill_value(self.getMissing())
-        self._obj_.setitem(*(index, numpy.ma.filled(value)))
+          if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask,0):
+            saveFill = value.fill_value
+            if self.getMissing() is None:
+                self.setMissing(saveFill)
+            else:
+                value.set_fill_value(self.getMissing())
+        apply(self._obj_.setitem,(index,numpy.ma.filled(value)))
         if numpy.ma.isMaskedArray(value):
-            if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask, 0):
-                value.set_fill_value(saveFill)
+          if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask,0):
+            value.set_fill_value(saveFill)
 
     def __setslice__(self, low, high, value):
         if self.parent is None:
-            raise CDMSError(FileClosedWrite + self.id)
+            raise CDMSError, FileClosedWrite+self.id
 
         # Hack to prevent netCDF overflow error on 64-bit architectures
         high = min(Max32int, high)
-        if high == Max32int and self.rank() == 0:
-            high = 1
-
+        if high == Max32int and self.rank()==0:
+          high=1
+        
         if numpy.ma.isMaskedArray(value):
-            if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask, 0):
-                saveFill = value.fill_value
-                if self.getMissing() is None:
-                    self.setMissing(saveFill)
-                else:
-                    value.set_fill_value(self.getMissing())
-        self._obj_.setslice(*(low, high, numpy.ma.filled(value)))
+          if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask,0):
+            saveFill = value.fill_value
+            if self.getMissing() is None:
+                self.setMissing(saveFill)
+            else:
+                value.set_fill_value(self.getMissing())
+        apply(self._obj_.setslice,(low,high,numpy.ma.filled(value)))
         if numpy.ma.isMaskedArray(value):
-            if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask, 0):
-                value.set_fill_value(saveFill)
+          if value.mask is not numpy.ma.nomask and not numpy.ma.allclose(value.mask,0):
+            value.set_fill_value(saveFill)
 
-    def _getShape(self):
+    def _getShape (self):
         if self.parent is None:
-            raise CDMSError(FileClosed + self.id)
+            raise CDMSError, FileClosed+self.id
         return self._obj_.shape
 
     # Write external attributes to the file.
@@ -142,15 +139,13 @@ class FileVariable(DatasetVariable):
     # that the value is propagated to the external file.
     def __setattr__(self, name, value):
         if hasattr(self, "parent") and self.parent is None:
-            raise CDMSError(FileClosedWrite + self.id)
+            raise CDMSError, FileClosedWrite+self.id
         if (not name in self.__cdms_internals__) and (value is not None):
             try:
                 setattr(self._obj_, name, value)
             except CdunifError:
-                raise CDMSError(
-                    "Setting %s.%s=%s" %
-                    (self.id, name, repr(value)))
-            self.attributes[name] = value
+                raise CDMSError, "Setting %s.%s=%s"%(self.id,name,`value`)
+            self.attributes[name]=value
         self.__dict__[name] = value
 
     # Delete external file attributes.
@@ -162,23 +157,23 @@ class FileVariable(DatasetVariable):
             try:
                 delattr(self._obj_, name)
             except CdunifError:
-                raise CDMSError("Deleting %s.%s" % (self.id, name))
+                raise CDMSError, "Deleting %s.%s"%(self.id,name)
             del(self.attributes[name])
         del self.__dict__[name]
 
     def getValue(self, squeeze=1):
         """Return the entire set of values."""
         if self.parent is None:
-            raise CDMSError(FileClosed + self.id)
-        if self.rank() > 0:
+            raise CDMSError, FileClosed+self.id
+        if self.rank()>0:
             return self.getSlice(Ellipsis, squeeze=squeeze)
         else:
             return self._obj_.getValue()
-
+    
     def __len__(self):
         " Length of first dimension. "
         if self.parent is None:
-            raise CDMSError(FileClosed + self.id)
+            raise CDMSError, FileClosed+self.id
         return len(self._obj_)
 
 #    def __repr__(self):
@@ -187,4 +182,5 @@ class FileVariable(DatasetVariable):
 #        else:
 #            return "<Variable: %s, file: **CLOSED**>"%self.id
 
-    shape = property(_getShape, None)
+
+    shape = property(_getShape,None)
diff --git a/Packages/cdms2/Lib/gengrid.py b/Packages/cdms2/Lib/gengrid.py
index 5fad339b7..1204733bc 100644
--- a/Packages/cdms2/Lib/gengrid.py
+++ b/Packages/cdms2/Lib/gengrid.py
@@ -1,15 +1,15 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 """CDMS Generic Grids"""
 
 import numpy
-# import PropertiedClasses
-from . import bindex
-from .error import CDMSError
-from .grid import LongitudeType, LatitudeType, VerticalType, TimeType, CoordTypeToLoc
-from .hgrid import AbstractHorizontalGrid
-from .axis import allclose
+## import PropertiedClasses
+import bindex
+from error import CDMSError
+from grid import LongitudeType, LatitudeType, VerticalType, TimeType, CoordTypeToLoc
+from hgrid import AbstractHorizontalGrid
+from axis import allclose
 
 MethodNotImplemented = "Method not yet implemented"
 
@@ -19,7 +19,7 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         """Create a generic grid.
         """
         if latAxis.shape != lonAxis.shape:
-            raise CDMSError('Latitude and longitude axes must have the same shape.')
+            raise CDMSError, 'Latitude and longitude axes must have the same shape.'
         AbstractHorizontalGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask, node)
         self._index_ = None
 
@@ -35,17 +35,17 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
     def getMesh(self, transpose=None):
         """Generate a mesh array for the meshfill graphics method.
         'transpose' is for compatibility with other grid types, is ignored."""
-        from . import MV2 as MV
+        import MV2 as MV
         if self._mesh_ is None:
-            LAT = 0
-            LON = 1
+            LAT=0
+            LON=1
             latbounds, lonbounds = self.getBounds()
             if latbounds is None or lonbounds is None:
-                raise CDMSError('No boundary data is available for grid %s'%self.id)
+                raise CDMSError, 'No boundary data is available for grid %s'%self.id
             nvert = latbounds.shape[-1]
-            mesh = numpy.zeros((self.size(), 2, nvert), latbounds.dtype.char)
-            mesh[:, LAT,:] = MV.filled(latbounds)
-            mesh[:, LON,:] = MV.filled(lonbounds)
+            mesh = numpy.zeros((self.size(),2,nvert),latbounds.dtype.char)
+            mesh[:,LAT,:] = MV.filled(latbounds)
+            mesh[:,LON,:] = MV.filled(lonbounds)
             self._mesh_ = mesh
         return self._mesh_
 
@@ -101,9 +101,9 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         gridcenterlon.units = "degrees"
         gridimask = cufile.createVariable("grid_imask", numpy.int, ("grid_size",))
         gridimask.units = "unitless"
-        gridcornerlat = cufile.createVariable("grid_corner_lat", numpy.float, ("grid_size", "grid_corners"))
+        gridcornerlat = cufile.createVariable("grid_corner_lat", numpy.float, ("grid_size","grid_corners"))
         gridcornerlat.units = "degrees"
-        gridcornerlon = cufile.createVariable("grid_corner_lon", numpy.float, ("grid_size", "grid_corners"))
+        gridcornerlon = cufile.createVariable("grid_corner_lon", numpy.float, ("grid_size","grid_corners"))
         gridcornerlon.units = "degrees"
 
         griddims[:] = numpy.array([ngrid], numpy.int32)
@@ -162,8 +162,8 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
                 i = k
             k += 1
 
-        if i == -1:
-            raise RuntimeError('Grid lat/lon domains do not match variable domain')
+        if i==-1:
+            raise RuntimeError, 'Grid lat/lon domains do not match variable domain'
 
         return ((islice, ), (inewaxis, ))
 
@@ -193,10 +193,10 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         lonspec = spec[CoordTypeToLoc[LongitudeType]]
         latlin = numpy.ma.filled(self._lataxis_)
         lonlin = numpy.ma.filled(self._lonaxis_)
-        lonlin = numpy.ma.where(numpy.ma.greater_equal(lonlin, 360.0), lonlin-360.0, lonlin)
+        lonlin = numpy.ma.where(numpy.ma.greater_equal(lonlin,360.0), lonlin-360.0, lonlin)
         points = bindex.intersectHorizontalGrid(latspec, lonspec, latlin, lonlin, index)
-        if len(points) == 0:
-            raise CDMSError('No data in the specified region, longitude=%s, latitude=%s'%(repr(lonspec), repr(latspec)))
+        if len(points)==0:
+            raise CDMSError, 'No data in the specified region, longitude=%s, latitude=%s'%(`lonspec`, `latspec`)
 
         fullmask = numpy.ones(ncell)
         numpy.put(fullmask, points, 0)
@@ -205,7 +205,7 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         submask = fullmask[imin:imax]
 
         cellid = self.getAxis(0).id
-        indexspecs = {cellid:slice(imin, imax)}
+        indexspecs = {cellid:slice(imin,imax)}
 
         return submask, indexspecs
 
@@ -253,9 +253,9 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
             result = self.clone()
             for i in missing:
                 for item in axes:
-                    if (len(selfaxes[i]) == len(item)) and allclose(selfaxes[i], item):
-                        result._lataxis_.setAxis(i, item)
-                        result._lonaxis_.setAxis(i, item)
+                    if (len(selfaxes[i])==len(item)) and allclose(selfaxes[i], item):
+                        result._lataxis_.setAxis(i,item)
+                        result._lonaxis_.setAxis(i,item)
                         break
                 else:
                     result = None
@@ -268,7 +268,7 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         having the same length as the number of cells in the grid, similarly
         for flatlon."""
         if self._flataxes_ is None:
-            from . import MV2 as MV
+            import MV2 as MV
             alat = MV.filled(self.getLatitude())
             alon = MV.filled(self.getLongitude())
             self._flataxes_ = (alat, alon)
@@ -280,11 +280,11 @@ class AbstractGenericGrid(AbstractHorizontalGrid):
         result = self.clone()
         result.id = gridid
         return result
-    shape = property(_getShape, None)
+    shape = property(_getShape,None)
     
-# PropertiedClasses.set_property (AbstractGenericGrid, 'shape',
-# AbstractGenericGrid._getShape, nowrite=1,
-# nodelete=1)
+## PropertiedClasses.set_property (AbstractGenericGrid, 'shape', 
+##                                   AbstractGenericGrid._getShape, nowrite=1,
+##                                   nodelete=1)
 
 class DatasetGenericGrid(AbstractGenericGrid):
 
@@ -295,7 +295,7 @@ class DatasetGenericGrid(AbstractGenericGrid):
         self.parent = parent
 
     def __repr__(self):
-        return "<DatasetGenericGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
+        return "<DatasetGenericGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
 
 class FileGenericGrid(AbstractGenericGrid):
 
@@ -306,7 +306,7 @@ class FileGenericGrid(AbstractGenericGrid):
         self.parent = parent
 
     def __repr__(self):
-        return "<FileGenericGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
+        return "<FileGenericGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
 
 class TransientGenericGrid(AbstractGenericGrid):
 
@@ -321,7 +321,7 @@ class TransientGenericGrid(AbstractGenericGrid):
         AbstractGenericGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask)
 
     def __repr__(self):
-        return "<TransientGenericGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
+        return "<TransientGenericGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
 
     def toGenericGrid(self, gridid=None):
         if gridid is None:
@@ -338,20 +338,21 @@ def readScripGenericGrid(fileobj, dims, whichType, whichGrid):
     whichType is the type of file, either "grid" or "mapping"
     if whichType is "mapping", whichGrid is the choice of grid, either "source" or "destination"
     """
-    from .auxcoord import TransientAuxAxis1D
-    from .coord import TransientVirtualAxis
+    import string
+    from auxcoord import TransientAuxAxis1D
+    from coord import TransientVirtualAxis
 
     convention = 'SCRIP'
     if 'S' in fileobj.variables.keys():
         convention = 'NCAR'
-        if whichType == "grid":
+        if whichType=="grid":
             gridCornerLatName = 'grid_corner_lat'
             gridCornerLonName = 'grid_corner_lon'
             gridMaskName = 'grid_imask'
             gridCenterLatName = 'grid_center_lat'
             gridCenterLonName = 'grid_center_lon'
             titleName = 'title'
-        elif whichGrid == "destination":
+        elif whichGrid=="destination":
             gridCornerLatName = 'yv_b'
             gridCornerLonName = 'xv_b'
             gridMaskName = 'mask_b'
@@ -366,14 +367,14 @@ def readScripGenericGrid(fileobj, dims, whichType, whichGrid):
             gridCenterLonName = 'xc_a'
             titleName = 'source_grid'
     else:
-        if whichType == "grid":
+        if whichType=="grid":
             gridCornerLatName = 'grid_corner_lat'
             gridCornerLonName = 'grid_corner_lon'
             gridMaskName = 'grid_imask'
             gridCenterLatName = 'grid_center_lat'
             gridCenterLonName = 'grid_center_lon'
             titleName = 'title'
-        elif whichGrid == "destination":
+        elif whichGrid=="destination":
             gridCornerLatName = 'dst_grid_corner_lat'
             gridCornerLonName = 'dst_grid_corner_lon'
             gridMaskName = 'dst_grid_imask'
@@ -399,38 +400,38 @@ def readScripGenericGrid(fileobj, dims, whichType, whichGrid):
         ni = dims[0]
 
     boundsshape = (ni, ncorners)
-    if hasattr(cornerLat, 'units') and cornerLat.units.lower()[0:6] == 'radian':
+    if hasattr(cornerLat, 'units') and string.lower(cornerLat.units)[0:6]=='radian':
         cornerLat = (cornerLat*(180.0/numpy.pi)).reshape(boundsshape)
         cornerLon = (cornerLon*(180.0/numpy.pi)).reshape(boundsshape)
 
-    iaxis = TransientVirtualAxis("i", ni)
+    iaxis = TransientVirtualAxis("i",ni)
 
-    if gridMaskName in vardict:
+    if vardict.has_key(gridMaskName):
         # SCRIP convention: 0 for invalid data
         # numpy.ma convention: 1 for invalid data
         mask = 1 - fileobj(gridMaskName)
     else:
         mask = None
         
-    if gridCenterLatName in vardict:
+    if vardict.has_key(gridCenterLatName):
         centerLat = fileobj(gridCenterLatName)
-        if hasattr(centerLat, "units") and centerLat.units.lower() == 'radians':
+        if hasattr(centerLat, "units") and string.lower(centerLat.units)=='radians':
             centerLat *= (180.0/numpy.pi)
     else:
-        centerLat = cornerLat[:,:, 0]
+        centerLat = cornerLat[:,:,0]
 
-    if gridCenterLonName in vardict:
+    if vardict.has_key(gridCenterLonName):
         centerLon = fileobj(gridCenterLonName)
-        if hasattr(centerLon, "units") and centerLon.units.lower() == 'radians':
+        if hasattr(centerLon, "units") and string.lower(centerLon.units)=='radians':
             centerLon *= (180.0/numpy.pi)
     else:
-        centerLon = cornerLon[:,:, 0]
+        centerLon = cornerLon[:,:,0]
 
-    if hasattr(fileobj, titleName):
+    if hasattr(fileobj,titleName):
         gridid = getattr(fileobj, titleName)
-        gridid = sgridid.strip().replace(' ', '_')
+        gridid = string.replace(string.strip(gridid), ' ','_')
     else:
-        gridid = "<None>"
+        gridid="<None>"
 
     lataxis = TransientAuxAxis1D(centerLat, axes=(iaxis,), bounds=cornerLat,
                               attributes={'units':'degrees_north'}, id="latitude")
diff --git a/Packages/cdms2/Lib/grid.py b/Packages/cdms2/Lib/grid.py
index e2612d315..9930f263a 100644
--- a/Packages/cdms2/Lib/grid.py
+++ b/Packages/cdms2/Lib/grid.py
@@ -1,14 +1,15 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 """CDMS Grid objects"""
+import types
 import re
-from .error import CDMSError
+from error import CDMSError
 import numpy #, PropertiedClasses, internattr
 # import regrid2._regrid
 import copy, string, sys
-from .cdmsobj import CdmsObj
-from .axis import TransientAxis, createAxis, createUniformLatitudeAxis, createUniformLongitudeAxis, getAutoBounds, createGaussianAxis, lookupArray, isSubsetVector
+from cdmsobj import CdmsObj
+from axis import TransientAxis, createAxis, createUniformLatitudeAxis, createUniformLongitudeAxis, getAutoBounds, createGaussianAxis, lookupArray, isSubsetVector
 import cdtime
 
 MethodNotImplemented = "Method not yet implemented"
@@ -20,10 +21,10 @@ _classifyGrids = 1                      # Determine the type of grid from the gr
 # (if any). If 'off', the value of .grid_type overrides the classification.
 def setClassifyGrids(mode):
     global _classifyGrids
-    if mode == 'on':
-        _classifyGrids = 1
-    elif mode == 'off':
-        _classifyGrids = 0
+    if mode=='on':
+        _classifyGrids=1
+    elif mode=='off':
+        _classifyGrids=0
 
 # Create a transient rectilinear grid
 def createRectGrid(lat, lon, order="yx", type="generic", mask=None):
@@ -33,7 +34,7 @@ def createRectGrid(lat, lon, order="yx", type="generic", mask=None):
 def createUniformGrid(startLat, nlat, deltaLat, startLon, nlon, deltaLon, order="yx", mask=None):
     lat = createUniformLatitudeAxis(startLat, nlat, deltaLat)
     lon = createUniformLongitudeAxis(startLon, nlon, deltaLon)
-    return createRectGrid(lat, lon, order, "uniform", mask)
+    return createRectGrid(lat,lon,order,"uniform",mask)
 
 # Generate a grid for calculating the global mean. The grid is a single
 # zone covering the range of the input grid
@@ -41,41 +42,41 @@ def createGlobalMeanGrid(grid):
     inlat = grid.getLatitude()
     inlatBounds, inlonBounds = grid.getBounds()
     outlatArray = numpy.array([(inlat[0] + inlat[-1])/2.0])
-    outlatBounds = numpy.array([[inlatBounds[0, 0], inlatBounds[-1, 1]]])
+    outlatBounds = numpy.array([[inlatBounds[0,0], inlatBounds[-1,1]]])
     outlat = createAxis(outlatArray, outlatBounds)
     outlat.units = inlat.units
 
     inlon = grid.getLongitude()
     outlonArray = numpy.array([(inlon[0] + inlon[-1])/2.0])
-    outlonBounds = numpy.array([[inlonBounds[0, 0], inlonBounds[-1, 1]]])
+    outlonBounds = numpy.array([[inlonBounds[0,0], inlonBounds[-1,1]]])
     outlon = createAxis(outlonArray, outlonBounds)
     outlon.units = inlon.units
 
-    return createRectGrid(outlat, outlon, grid.getOrder())
+    return createRectGrid(outlat,outlon,grid.getOrder())
 
 # Generate a grid for zonal averaging. The grid has the same latitudes
 # as the input grid, and a single longitude.
 def createZonalGrid(grid):
     inlat = grid.getLatitude()
     outlatBounds, inlonBounds = grid.getBounds()
-    outlat = createAxis(inlat[:], outlatBounds)
+    outlat = createAxis(inlat[:],outlatBounds)
     outlat.units = inlat.units
 
     inlon = grid.getLongitude()
     outlonArray = numpy.array([(inlon[0] + inlon[-1])/2.0])
-    outlonBounds = numpy.array([[inlonBounds[0, 0], inlonBounds[-1, 1]]])
+    outlonBounds = numpy.array([[inlonBounds[0,0], inlonBounds[-1,1]]])
     outlon = createAxis(outlonArray, outlonBounds)
     outlon.units = inlon.units
 
-    return createRectGrid(outlat, outlon, grid.getOrder())
+    return createRectGrid(outlat,outlon,grid.getOrder())
 
 # Generate a generic (untyped) grid from lat, lon vectors
 def createGenericGrid(latArray, lonArray, latBounds=None, lonBounds=None, order="yx", mask=None):
-    lat = createAxis(latArray, latBounds)
+    lat = createAxis(latArray,latBounds)
     lat.units = "degrees_north"
-    lon = createAxis(lonArray, lonBounds)
+    lon = createAxis(lonArray,lonBounds)
     lon.units = "degrees_east"
-    return createRectGrid(lat, lon, order, "generic", mask)
+    return createRectGrid(lat,lon,order,"generic",mask)
 
 def createGaussianGrid(nlats, xorigin=0.0, order="yx"):
     """ createGaussianGrid(nlats, xorigin=0.0)
@@ -96,7 +97,7 @@ TimeType = 'time'
 CoordinateTypes = [LongitudeType, LatitudeType, VerticalType, TimeType]
 
 # Note: no time dimensions in grids.
-CoordTypeToLoc = {LongitudeType: 0, LatitudeType: 1, VerticalType: 2}
+CoordTypeToLoc = {LongitudeType:0, LatitudeType:1, VerticalType:2}
 
 def defaultRegion():
     """Return a specification for a default (full) region."""
@@ -125,27 +126,27 @@ def setRegionSpecs(grid, coordSpec, coordType, resultSpec):
 
     Note that time coordinate types are not permitted.
     """
-
-    if (coordSpec is None) or (coordSpec == ':'):
+    
+    if (coordSpec is None) or (coordSpec==':'):
         canonSpec = None
-    elif isinstance(coordSpec, tuple):
-        if len(coordSpec) == 2:
-            canonSpec = (coordSpec[0], coordSpec[1], 'cc', None)
-        elif len(coordSpec) == 3:
-            canonSpec = (coordSpec[0], coordSpec[1], coordSpec[2], None)
-        elif len(coordSpec) != 4:
-            raise CDMSError('Invalid coordinate specification: %s'%repr(coordSpec))
-    elif isinstance(coordSpec, (int, float)):
+    elif type(coordSpec) is types.TupleType:
+        if len(coordSpec)==2:
+            canonSpec = (coordSpec[0],coordSpec[1],'cc',None)
+        elif len(coordSpec)==3:
+            canonSpec = (coordSpec[0],coordSpec[1],coordSpec[2],None)
+        elif len(coordSpec)!=4:
+            raise CDMSError, 'Invalid coordinate specification: %s'%`coordSpec`
+    elif type(coordSpec) in [types.IntType, types.FloatType]:
         canonSpec = (coordSpec, coordSpec, 'cc', None)
     else:
-        raise CDMSError('Invalid coordinate specification: %s'%repr(coordSpec))
+        raise CDMSError, 'Invalid coordinate specification: %s'%`coordSpec`
 
     coordLoc = CoordTypeToLoc[coordType]
     if coordLoc is None:
-        raise CDMSError('Invalid coordinate type: %s'%coordType)
+        raise CDMSError, 'Invalid coordinate type: %s'%coordType
 
     if resultSpec[coordLoc] is not None:
-        raise CDMSError('Multiple specifications for coordinate type %s'%coordType)
+        raise CDMSError, 'Multiple specifications for coordinate type %s'%coordType
     resultSpec[coordLoc] = canonSpec
 
 class AbstractGrid (CdmsObj):
@@ -153,14 +154,13 @@ class AbstractGrid (CdmsObj):
     def __init__ (self, node):
         CdmsObj.__init__ (self, node)
         self.id = '<None>' # String identifier
-        if node is not None and hasattr(node, 'id'):
-            self.id = node.id
+        if node is not None and hasattr(node,'id'): self.id = node.id
         self.parent = None #Dataset containing this grid
         self._flataxes_ = None
         self._mesh_ = None
 
     def listall (self, all=None):
-        result = []
+        result=[]
         result.append('Grid has Python id %s.' % hex(id(self)))
         return result
 
@@ -171,27 +171,26 @@ class AbstractGrid (CdmsObj):
 
     def info(self, flag=None, device=None):
         "Write info about slab; include dimension values and weights if flag"
-        if device is None:
-            device = sys.stdout
+        if device is None: device = sys.stdout
         device.write(str(self))
 
     def writeToFile(self, file):
         """Write self to a CdmsFile file, returning CF coordinates attribute, or None if not applicable"""
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def subSlice(self, *specs, **keys):
         """Get a subgrid based on an argument list <specs> of slices."""
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def hasCoordType(self, coordType):
         """Return 1 iff self has the coordinate type."""
         return 0
 
     def getAxisList(self):
-        axes = []
-        for i in range(len(self._order_)):
-            axes.append(self.getAxis(i))
-        return axes
+      axes =[]
+      for i in range(len(self._order_)):
+        axes.append(self.getAxis(i))
+      return axes
 
     def isClose(self, g):
         """Return 1 if g is 'close enough' to self to be considered equal, 0 if not."""
@@ -207,31 +206,31 @@ class AbstractGrid (CdmsObj):
 
     def clone(self, copyData=1):
         """Make a copy of self."""
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def flatAxes(self):
         """Return (flatlat, flatlon) where flatlat is a raveled NumPy array
         having the same length as the number of cells in the grid, similarly
         for flatlon."""
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def size(self):
         "Return number of cells in the grid"
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def writeScrip(self, cdunifFile):
         "Write a grid to a SCRIP file"
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
 class AbstractRectGrid(AbstractGrid):
     """AbstractRectGrid defines the interface for rectilinear grids:
        grids which can be decomposed into 1-D latitude and longitude axes
     """
-    gridtypes = ['gaussian', 'uniform', 'equalarea', 'generic']
+    gridtypes = ['gaussian','uniform','equalarea','generic']
 
     def __init__ (self, node):
         AbstractGrid.__init__ (self, node)
-        val = self.__cdms_internals__ + ['id', ]
+        val = self.__cdms_internals__ + ['id',]
         self.___cdms_internals__ = val
 
     def listall (self, all=None):
@@ -246,14 +245,14 @@ class AbstractRectGrid(AbstractGrid):
 
     def _getshape (self):
         if self._order_ == "yx":
-            return (len(self._lataxis_), len(self._lonaxis_))
+            return (len(self._lataxis_),len(self._lonaxis_))
         else:
-            return (len(self._lonaxis_), len(self._lataxis_))
+            return (len(self._lonaxis_),len(self._lataxis_))
 
     # Get the n-th axis. naxis is 0 or 1.
     def getAxis(self, naxis):
         ind = self._order_[naxis]
-        if ind == 'x':
+        if ind=='x':
             axis = self.getLongitude()
         else:
             axis = self.getLatitude()
@@ -261,7 +260,7 @@ class AbstractRectGrid(AbstractGrid):
 
     def getBounds(self):
         latbnds, lonbnds = (self._lataxis_.getExplicitBounds(), self._lonaxis_.getExplicitBounds())
-        if (latbnds is None or lonbnds is None) and getAutoBounds() in [1, 2]:
+        if (latbnds is None or lonbnds is None) and getAutoBounds() in [1,2]:
             nlatbnds, nlonbnds = self.genBounds()
             if latbnds is None:
                 latbnds = nlatbnds
@@ -277,10 +276,10 @@ class AbstractRectGrid(AbstractGrid):
         return self._lonaxis_
 
     def getMask(self):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
-    def setMask(self, mask, permanent=0):
-        raise CDMSError(MethodNotImplemented)
+    def setMask(self,mask,permanent=0):
+        raise CDMSError, MethodNotImplemented
 
     def getOrder(self):
         return self._order_
@@ -289,10 +288,8 @@ class AbstractRectGrid(AbstractGrid):
         return self._gridtype_
 
     def setType(self, gridtype):
-        if gridtype == 'linear':
-            gridtype = 'uniform'
-        if gridtype == 'unknown':
-            gridtype = 'generic'
+        if gridtype=='linear': gridtype='uniform'
+        if gridtype=='unknown': gridtype='generic'
         # assert gridtype in AbstractRectGrid.gridtypes, 'Grid type must be one of %s'%`AbstractRectGrid.gridtypes`
         self._gridtype_ = gridtype
 
@@ -304,24 +301,22 @@ class AbstractRectGrid(AbstractGrid):
 
         latBounds, lonBounds = self.getBounds()
         latBounds = (numpy.pi/180.0) * latBounds
-        latWeights = 0.5 * numpy.absolute(numpy.sin(latBounds[:, 1]) - numpy.sin(latBounds[:, 0]))
+        latWeights = 0.5 * numpy.absolute(numpy.sin(latBounds[:,1]) - numpy.sin(latBounds[:,0]))
 
-        lonWeights = numpy.absolute((lonBounds[:, 1] - lonBounds[:, 0]))/360.0
+        lonWeights = numpy.absolute((lonBounds[:,1] - lonBounds[:,0]))/360.0
 
         return latWeights, lonWeights
 
     # Create a transient grid for the index (tuple) intervals.
-    def subGrid(self, latinterval, loninterval):
-        if latinterval is None:
-            latinterval = (0, len(self._lataxis_))
-        if loninterval is None:
-            loninterval = (0, len(self._lonaxis_))
-
-        latobj = self._lataxis_.subaxis(latinterval[0], latinterval[1])
-        lonobj = self._lonaxis_.subaxis(loninterval[0], loninterval[1])
+    def subGrid(self,latinterval, loninterval):
+        if latinterval is None: latinterval = (0, len(self._lataxis_))
+        if loninterval is None: loninterval = (0, len(self._lonaxis_))
+            
+        latobj = self._lataxis_.subaxis(latinterval[0],latinterval[1])
+        lonobj = self._lonaxis_.subaxis(loninterval[0],loninterval[1])
         maskArray = self.getMask()
         if maskArray is not None:
-            if self._order_ == "yx":
+            if self._order_=="yx":
                 submask = maskArray[latinterval[0]:latinterval[1], loninterval[0]:loninterval[1]]
             else:
                 submask = maskArray[loninterval[0]:loninterval[1], latinterval[0]:latinterval[1]]
@@ -329,7 +324,7 @@ class AbstractRectGrid(AbstractGrid):
             submask = None
 
         return TransientRectGrid(latobj, lonobj, self._order_, self._gridtype_, submask)
-
+        
     # Same as subGrid, for coordinates
     def subGridRegion(self, latRegion, lonRegion):
         latInterval = self._lataxis_.mapInterval(latRegion)
@@ -338,7 +333,7 @@ class AbstractRectGrid(AbstractGrid):
 
     # Return a transient grid which is the transpose of this grid
     def transpose(self):
-        if self._order_ == "yx":
+        if self._order_=="yx":
             neworder = "xy"
         else:
             neworder = "yx"
@@ -364,69 +359,65 @@ class AbstractRectGrid(AbstractGrid):
 
         CLOSE_ENOUGH = 1.e-3
         lat = self.getLatitude()
-        if len(lat) == 1:
-            return ('generic', 1, 0)
+        if len(lat)==1:
+            return ('generic',1,0)
 
         latar = lat[:]
-        if lat[0] < lat[-1]:              # increasing?
-            hassouth = (abs(lat[0]+90.0) < 1.e-2)
-            hasnorth = (abs(lat[-1]-90.0) < 1.e-2)
-            if hassouth:
-                latar = latar[1:]
-            if hasnorth:
-                latar = latar[:-1]
+        if lat[0]<lat[-1]:              # increasing?
+            hassouth = (abs(lat[0]+90.0)<1.e-2)
+            hasnorth = (abs(lat[-1]-90.0)<1.e-2)
+            if hassouth: latar = latar[1:]
+            if hasnorth: latar = latar[:-1]
         else:                           # decreasing
-            hassouth = (abs(lat[-1]+90.0) < 1.e-2)
-            hasnorth = (abs(lat[0]-90.0) < 1.e-2)
-            if hassouth:
-                latar = latar[:-1]
-            if hasnorth:
-                latar = latar[1:]
+            hassouth = (abs(lat[-1]+90.0)<1.e-2)
+            hasnorth = (abs(lat[0]-90.0)<1.e-2)
+            if hassouth: latar = latar[:-1]
+            if hasnorth: latar = latar[1:]
         nlats = len(latar)
 
         # Get the related Gaussian latitude
-        gausslatns, wts, bnds = regrid2._regrid.gridattr(len(latar), 'gaussian')
+        gausslatns, wts, bnds = regrid2._regrid.gridattr(len(latar),'gaussian')
         gausslatsn = gausslatns[::-1]
         diffs = latar[1:]-latar[:-1]
-        equalareans, wts, bnds = regrid2._regrid.gridattr(len(latar), 'equalarea')
+        equalareans, wts, bnds = regrid2._regrid.gridattr(len(latar),'equalarea')
         equalareasn = equalareans[::-1]
 
         # Get the Gaussian lats for len+1, in case this is a boundary
-        dumlat, dumwt, bndsplusns = regrid2._regrid.gridattr(len(latar)+1, 'gaussian')
+        dumlat, dumwt, bndsplusns = regrid2._regrid.gridattr(len(latar)+1,'gaussian')
         bndsplussn = bndsplusns[::-1]
 
         # Look for N-S equality
         isoffset = 0
-        if numpy.alltrue(numpy.less(numpy.absolute(latar[:]-gausslatns), CLOSE_ENOUGH)):
+        if numpy.alltrue(numpy.less(numpy.absolute(latar[:]-gausslatns),CLOSE_ENOUGH)):
             actualType = 'gaussian'
 
-        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-gausslatsn), CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-gausslatsn),CLOSE_ENOUGH)):
             actualType = 'gaussian'
 
         # Check for zone (offset) variable
-        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-bndsplusns[1:-1]), CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-bndsplusns[1:-1]),CLOSE_ENOUGH)):
             actualType = 'gaussian'
             isoffset = 1
             nlats = nlats+1
 
-        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-bndsplussn[1:-1]), CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-bndsplussn[1:-1]),CLOSE_ENOUGH)):
             actualType = 'gaussian'
             isoffset = 1
             nlats = nlats+1
 
-        elif numpy.alltrue(numpy.less(numpy.absolute(diffs-diffs[0]), CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(diffs-diffs[0]),CLOSE_ENOUGH)):
             actualType = 'uniform'
 
-        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-equalareans), CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-equalareans),CLOSE_ENOUGH)):
             actualType = 'equalarea'
 
-        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-equalareasn), CLOSE_ENOUGH)):
+        elif numpy.alltrue(numpy.less(numpy.absolute(latar[:]-equalareasn),CLOSE_ENOUGH)):
             actualType = 'equalarea'
 
         else:
             actualType = 'generic'
 
-        return (actualType, nlats, isoffset)
+        return (actualType,nlats,isoffset)
 
     # Generate a best guess at grid info within a family of grids (list of grids)
     # Return a tuple (type,coverage,nlats,isoffset, basegrid, latindex) where:
@@ -446,23 +437,21 @@ class AbstractRectGrid(AbstractGrid):
         coverage = 'global'
         basegrid = None
         latindex = None
-        if gridtype == 'generic':
+        if gridtype=='generic':
             # Look for truncated grids: such that grid is a subset of grid2
             found = 0
             for grid2 in gridlist:
-                if self.id == grid2.id:
-                    continue
+                if self.id==grid2.id: continue
                 lat = self.getLatitude()
                 lon = self.getLongitude()
                 lat2 = grid2.getLatitude()
                 lon2 = grid2.getLongitude()
-                if len(lat) > len(lat2) or len(lon) > len(lon2):
-                    continue
-                latIsSubset, latindex = isSubsetVector(lat[:], lat2[:], 1.e-2)
-                lonIsSubset, lonindex = isSubsetVector(lon[:], lon2[:], 1.e-2)
+                if len(lat)>len(lat2) or len(lon)>len(lon2): continue
+                latIsSubset, latindex = isSubsetVector(lat[:],lat2[:],1.e-2)
+                lonIsSubset, lonindex = isSubsetVector(lon[:],lon2[:],1.e-2)
                 if latIsSubset and lonIsSubset:
                     found = 1
-                    if len(lat2) > nlats:
+                    if len(lat2)>nlats:
                         coverage = 'regional'
                     nlats = len(lat2)
                     basegrid = grid2.id
@@ -474,13 +463,13 @@ class AbstractRectGrid(AbstractGrid):
     def genBounds(self):
         import regrid2._regrid
 
-        if hasattr(self, "parent") and self.parent is not None:
+        if hasattr(self,"parent") and self.parent is not None:
             gridfamily = self.parent.grids.values()
         else:
             gridfamily = []
 
         gridtype, coverage, nlats, isoffset, basegrid, latindex = self.classifyInFamily(gridfamily)
-        if _classifyGrids == 0:
+        if _classifyGrids==0:
             gridtypenew = self.getType()
             if gridtypenew in AbstractRectGrid.gridtypes:
                 gridtype = gridtypenew
@@ -488,33 +477,31 @@ class AbstractRectGrid(AbstractGrid):
         # Get latitude bounds
         lat = self.getLatitude()
         ascending = (lat[0] < lat[-1])
-        if gridtype == 'gaussian':
+        if gridtype=='gaussian':
             pts, wts, bnds = regrid2._regrid.gridattr(nlats, 'gaussian')
-            if ascending:
-                bnds = bnds[::-1]
-            latbnds = numpy.zeros((len(lat), 2), numpy.float)
-            latbnds[:, 0] = bnds[:-1]
-            latbnds[:, 1] = bnds[1:]
-            latbnds[0,:] = numpy.maximum(-90.0, numpy.minimum(90.0, latbnds[0,:]))
-            latbnds[-1,:] = numpy.maximum(-90.0, numpy.minimum(90.0, latbnds[-1,:]))
-        elif gridtype == 'equalarea':
+            if ascending: bnds = bnds[::-1]
+            latbnds = numpy.zeros((len(lat),2),numpy.float)
+            latbnds[:,0] = bnds[:-1]
+            latbnds[:,1] = bnds[1:]
+            latbnds[0,:] = numpy.maximum(-90.0, numpy.minimum(90.0,latbnds[0,:]))
+            latbnds[-1,:] = numpy.maximum(-90.0, numpy.minimum(90.0,latbnds[-1,:]))
+        elif gridtype=='equalarea':
             pts, wts, bnds = regrid2._regrid.gridattr(nlats, 'equalarea')
-            if ascending:
-                bnds = bnds[::-1]
-            latbnds = numpy.zeros((len(lat), 2), numpy.float)
-            latbnds[:, 0] = bnds[:-1]
-            latbnds[:, 1] = bnds[1:]
-            latbnds[0,:] = numpy.maximum(-90.0, numpy.minimum(90.0, latbnds[0,:]))
-            latbnds[-1,:] = numpy.maximum(-90.0, numpy.minimum(90.0, latbnds[-1,:]))
+            if ascending: bnds = bnds[::-1]
+            latbnds = numpy.zeros((len(lat),2),numpy.float)
+            latbnds[:,0] = bnds[:-1]
+            latbnds[:,1] = bnds[1:]
+            latbnds[0,:] = numpy.maximum(-90.0, numpy.minimum(90.0,latbnds[0,:]))
+            latbnds[-1,:] = numpy.maximum(-90.0, numpy.minimum(90.0,latbnds[-1,:]))
         else:
             latbnds = lat.genGenericBounds()
 
         # Get longitude bounds
         lon = self.getLongitude()
-        if len(lon) > 1:
+        if len(lon)>1:
             lonbnds = lon.genGenericBounds()
         else:
-            lonbnds = numpy.array([[lon[0]-180.0, lon[0]+180.0]], numpy.float)
+            lonbnds = numpy.array([[lon[0]-180.0, lon[0]+180.0]],numpy.float)
 
         return (latbnds, lonbnds)
 
@@ -524,27 +511,27 @@ class AbstractRectGrid(AbstractGrid):
     def getMesh(self):
         """Generate a mesh array for the meshfill graphics method."""
         if self._mesh_ is None:
-            LAT = 0
-            LON = 1
+            LAT=0
+            LON=1
             latbounds, lonbounds = self.getBounds()
             if latbounds is None or lonbounds is None:
-                raise CDMSError('No boundary data is available for grid %s'%self.id)
+                raise CDMSError, 'No boundary data is available for grid %s'%self.id
             ny = len(self._lataxis_)
             nx = len(self._lonaxis_)
             lenmesh = ny*nx
-            mesh = numpy.zeros((lenmesh, 2, 4), latbounds.dtype.char)
-            broadlat = numpy.repeat(latbounds[:, numpy.newaxis,:], nx, axis=1)
-            broadlat.shape = (lenmesh, 2)
-            broadlon = numpy.repeat(lonbounds[numpy.newaxis,:,:], ny, axis=0)
-            broadlon.shape = (lenmesh, 2)
-            mesh[:, LAT, 0] = broadlat[:, 0]
-            mesh[:, LAT, 1] = broadlat[:, 0]
-            mesh[:, LAT, 2] = broadlat[:, 1]
-            mesh[:, LAT, 3] = broadlat[:, 1]
-            mesh[:, LON, 0] = broadlon[:, 0]
-            mesh[:, LON, 1] = broadlon[:, 1]
-            mesh[:, LON, 2] = broadlon[:, 1]
-            mesh[:, LON, 3] = broadlon[:, 0]
+            mesh = numpy.zeros((lenmesh,2,4),latbounds.dtype.char)
+            broadlat = numpy.repeat(latbounds[:,numpy.newaxis,:],nx,axis=1)
+            broadlat.shape = (lenmesh,2)
+            broadlon = numpy.repeat(lonbounds[numpy.newaxis,:,:],ny,axis=0)
+            broadlon.shape=(lenmesh,2)
+            mesh[:,LAT,0] = broadlat[:,0]
+            mesh[:,LAT,1] = broadlat[:,0]
+            mesh[:,LAT,2] = broadlat[:,1]
+            mesh[:,LAT,3] = broadlat[:,1]
+            mesh[:,LON,0] = broadlon[:,0]
+            mesh[:,LON,1] = broadlon[:,1]
+            mesh[:,LON,2] = broadlon[:,1]
+            mesh[:,LON,3] = broadlon[:,0]
             self._mesh_ = mesh
         return self._mesh_
 
@@ -556,7 +543,7 @@ class AbstractRectGrid(AbstractGrid):
         if self._flataxes_ is None:
             alat = self.getLatitude()[:]
             alon = self.getLongitude()[:]
-            alatflat = numpy.repeat(alat[:, numpy.newaxis], len(alon), axis=1)
+            alatflat = numpy.repeat(alat[:,numpy.newaxis], len(alon), axis=1)
             alonflat = numpy.repeat(alon[numpy.newaxis,:], len(alat), axis=0)
             self._flataxes_ = (numpy.ravel(alatflat), numpy.ravel(alonflat))
         return self._flataxes_
@@ -578,8 +565,8 @@ class AbstractRectGrid(AbstractGrid):
         'gridid' is the string identifier of the resulting curvilinear grid object.
         """
 
-        from .coord import TransientVirtualAxis, TransientAxis2D
-        from .hgrid import TransientCurveGrid
+        from coord import TransientVirtualAxis, TransientAxis2D
+        from hgrid import TransientCurveGrid
 
         lat = self._lataxis_[:]
         lon = self._lonaxis_[:]
@@ -612,37 +599,37 @@ class AbstractRectGrid(AbstractGrid):
             ax, ay = lon, lat
             bx, by = blon, blat
             nx, ny = nlon, nlat
-
+            
         centerX = numpy.outer(numpy.ones(ny), ax)
         centerY = numpy.outer(ay, numpy.ones(nx))
 
         # Create corner latitudes (in yx order), ensuring counterclockwise direction
         cy = numpy.zeros((ny, 4), numpy.float)
-        if (by[0, 0] <= by[0, 1]):
+        if (by[0,0]<= by[0,1]):
             incr = 1
         else:
             incr = 0
-        cy[:, 0] = by[:, 1-incr]
-        cy[:, 1] = by[:, 1-incr]
-        cy[:, 2] = by[:, incr]
-        cy[:, 3] = by[:, incr]
-        cornerY = numpy.repeat(cy[:, numpy.newaxis,:], nx, axis=1)
-
+        cy[:,0] = by[:,1-incr]
+        cy[:,1] = by[:,1-incr]
+        cy[:,2] = by[:,incr]
+        cy[:,3] = by[:,incr]
+        cornerY = numpy.repeat(cy[:,numpy.newaxis,:], nx, axis=1)
+        
         # Create corner longitudes (in yx order), ensuring counterclockwise direction
         cx = numpy.zeros((nx, 4), numpy.float)
-        if (bx[0, 0] <= bx[0, 1]):
+        if (bx[0,0]<= bx[0,1]):
             incr = 1
         else:
             incr = 0
-        cx[:, 0] = bx[:, 1-incr]
-        cx[:, 1] = bx[:, incr]
-        cx[:, 2] = bx[:, incr]
-        cx[:, 3] = bx[:, 1-incr]
+        cx[:,0] = bx[:,1-incr]
+        cx[:,1] = bx[:,incr]
+        cx[:,2] = bx[:,incr]
+        cx[:,3] = bx[:,1-incr]
         cornerX = numpy.repeat(cx[numpy.newaxis,:,:], ny, axis=0)
 
-        iaxis = TransientVirtualAxis("i", ny) # First axis
-        jaxis = TransientVirtualAxis("j", nx) # Second axis
-
+        iaxis = TransientVirtualAxis("i",ny) # First axis
+        jaxis = TransientVirtualAxis("j",nx) # Second axis
+        
         centerLat = centerY
         centerLon = centerX
         cornerLat = cornerY
@@ -652,7 +639,7 @@ class AbstractRectGrid(AbstractGrid):
             centerLon = centerY
             cornerLat = cornerX
             cornerLon = cornerY
-
+            
 
         lataxis = TransientAxis2D(centerLat, axes=(iaxis, jaxis), bounds=cornerLat,
                                   attributes={'units':latunits}, id="latitude")
@@ -667,40 +654,39 @@ class AbstractRectGrid(AbstractGrid):
         gengrid = curvegrid.toGenericGrid(gridid=gridid)
         return gengrid
 
-    shape = property(_getshape, None)
-
-# PropertiedClasses.set_property (AbstractRectGrid, 'shape',
-# AbstractRectGrid._getshape,
-# nowrite=1,
-# nodelete=1)
+    shape = property(_getshape,None)
+   
+## PropertiedClasses.set_property (AbstractRectGrid, 'shape', 
+##                                 AbstractRectGrid._getshape, 
+##                                 nowrite=1,
+##                                 nodelete=1)
 
-# internattr.add_internal_attribute (AbstractRectGrid, 'id', 'parent')
+## internattr.add_internal_attribute (AbstractRectGrid, 'id', 'parent')
 
 class RectGrid(AbstractRectGrid):
 
     def __init__(self,parent,rectgridNode=None):
         if rectgridNode is not None and rectgridNode.tag != 'rectGrid':
-            raise CDMSError('Node is not a grid node')
-        AbstractRectGrid.__init__(self, rectgridNode)
+            raise CDMSError, 'Node is not a grid node'
+        AbstractRectGrid.__init__(self,rectgridNode)
         self.parent = parent
 
     # Set pointers to related structural elements: lon, lat axes, order, mask
     def initDomain(self, axisdict, vardict):
-        if self.latitude not in axisdict:
-            raise CDMSError('No such latitude: %s'%repr(self.latitude))
-        if self.longitude not in axisdict:
-            raise CDMSError('No such longitude: %s'%repr(self.longitude))
+        if not axisdict.has_key(self.latitude):
+            raise CDMSError, 'No such latitude: %s'%`self.latitude`
+        if not axisdict.has_key(self.longitude):
+            raise CDMSError, 'No such longitude: %s'%`self.longitude`
         self._lataxis_ = axisdict[self.latitude]
         self._lonaxis_ = axisdict[self.longitude]
         self._order_ = self.order
         self._gridtype_ = self.attributes.get('type')
-        if self._gridtype_ is None:
-            self._gridtype_ = "generic"
-        if hasattr(self, "mask"):
+        if self._gridtype_ is None: self._gridtype_ = "generic"
+        if hasattr(self,"mask"):
             self._maskVar_ = vardict.get(self.mask)
         else:
             self._maskVar_ = None
-
+    
     def getMask(self):
         if self._maskVar_ is None:
             # return numpy.ones(self.shape)
@@ -711,7 +697,7 @@ class RectGrid(AbstractRectGrid):
     def getMaskVar(self):
         return self._maskVar_
 
-# internattr.add_internal_attribute(RectGrid)
+## internattr.add_internal_attribute(RectGrid)
 
 class FileRectGrid(AbstractRectGrid):
 
@@ -721,8 +707,8 @@ class FileRectGrid(AbstractRectGrid):
         self.parent = parent
         self._lataxis_ = latobj
         self._lonaxis_ = lonobj
-        if not order in ["yx", "xy"]:
-            raise CDMSError('Grid order must be "yx" or "xy"')
+        if not order in ["yx","xy"]:
+            raise CDMSError, 'Grid order must be "yx" or "xy"'
         self._order_ = order
         self.setType(gridtype)
         self._maskVar_ = maskobj        # FileVariable of mask
@@ -746,25 +732,24 @@ class FileRectGrid(AbstractRectGrid):
     # Set the mask to array 'mask'. If persistent == 1, modify permanently
     # in the file, else set as a temporary mask.
     def setMask(self,mask,persistent=0):
-        if persistent != 0:
-            raise CDMSError(MethodNotImplemented)
+        if persistent!=0: raise CDMSError, MethodNotImplemented
         if mask is None:
             self._tempMask_ = None
         else:
-            assert isinstance(mask, numpy.ndarray), 'Mask must be a numpy array'
-            assert mask.shape == self.shape, 'Mask must have shape %s'%repr(self.shape)
+            assert type(mask)==numpy.ndarray, 'Mask must be a numpy array'
+            assert mask.shape==self.shape,'Mask must have shape %s'%`self.shape`
             self._tempMask_ = copy.copy(mask)
 
     def getMaskVar(self):
         return self._maskVar_
 
-# internattr.add_internal_attribute(FileRectGrid)
+## internattr.add_internal_attribute(FileRectGrid)
 
 # In-memory rectilinear grid
 class TransientRectGrid(AbstractRectGrid):
     "Grids that live in memory only."
     def __init__(self, latobj, lonobj, order, gridtype, maskarray=None):
-        AbstractRectGrid.__init__(self, None)
+        AbstractRectGrid.__init__(self,None)
         if latobj.__class__ != TransientAxis:
             latobj = TransientAxis(latobj[:], latobj.getBounds())
         if lonobj.__class__ != TransientAxis:
@@ -773,8 +758,8 @@ class TransientRectGrid(AbstractRectGrid):
         self._lataxis_.designateLatitude()
         self._lonaxis_ = lonobj
         self._lonaxis_.designateLongitude()
-        if not order in ["yx", "xy"]:
-            raise CDMSError('Grid order must be "yx" or "xy"')
+        if not order in ["yx","xy"]:
+            raise CDMSError, 'Grid order must be "yx" or "xy"'
         self._order_ = order
         self.setType(gridtype)
         self.setMask(maskarray)        # numpy mask array
@@ -790,17 +775,17 @@ class TransientRectGrid(AbstractRectGrid):
     # with persistent versions, is ignored.
     def setMask(self,mask, persistent=0):
         if mask is not None:
-            if not isinstance(mask, numpy.ndarray):
-                raise CDMSError('Mask must be a numpy array')
+            if type(mask)!=numpy.ndarray:
+               raise CDMSError, 'Mask must be a numpy array'
             if mask.shape != self.shape:
-                raise CDMSError('Mask must have shape %s'%repr(self.shape))
+               raise CDMSError, 'Mask must have shape %s'%`self.shape`
         self._maskArray_ = copy.copy(mask)
 
     def setBounds(self, latBounds, lonBounds):
         self._lataxis_.setBounds(latBounds)
         self._lonaxis_.setBounds(lonBounds)
 
-# internattr.add_internal_attribute(TransientRectGrid)
+## internattr.add_internal_attribute(TransientRectGrid)
 
 def isGrid(grid):
     """
@@ -816,8 +801,9 @@ def writeScripGrid(path, grid, gridTitle=None):
     grid is a CDMS grid object.
     gridTitle is a string ID for the grid.
     """
-
+    
     import Cdunif
-    f = Cdunif.CdunifFile(path, 'w')
+    f = Cdunif.CdunifFile(path,'w')
     grid.writeScrip(f, gridTitle)
     f.close()
+
diff --git a/Packages/cdms2/Lib/gsHost.py b/Packages/cdms2/Lib/gsHost.py
index 4933e7f49..343a3c842 100644
--- a/Packages/cdms2/Lib/gsHost.py
+++ b/Packages/cdms2/Lib/gsHost.py
@@ -5,7 +5,7 @@ A file-like object to access a host file, the single entry point
 to an entire gridspec data file layout.
 
 Dave Kindig and Alex Pletzer, Tech-X (2011)
-This code is provided with the hope that it will be useful.
+This code is provided with the hope that it will be useful. 
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
@@ -24,8 +24,7 @@ except:
     # raise ImportError, 'Error: could not import pycf'
     print 'Error: could not import pycf'
 
-
-def open(hostfile, mode='r'):
+def open(hostfile, mode = 'r'):
     """
     Open host file
     @param hostfile host file
@@ -35,19 +34,17 @@ def open(hostfile, mode='r'):
     outHostFile = Host(hostfile, mode)
     return outHostFile
 
-
 class Host:
-
     """
     A LibCF/GRIDSPEC host file object. This acts as the single point of entry to
     a GRIDSPEC aggregation. Variables and grids can be requested solely through
-    the Host object, which is a hybrid between a variable and file object.
+    the Host object, which is a hybrid between a variable and file object. 
     Host relies on the libcf shared object. As such, if there is a problem
     consult http://www.unidata.ucar.edu/software/libcf/docs/libcf/ for details
     on building host files and all related GRIDSPEC files.
     """
 
-    def __init__(self, hostfile, mode='r'):
+    def __init__(self, hostfile, mode = 'r'):
         """
         Constructor
         @param hostfile path to the host
@@ -57,41 +54,42 @@ class Host:
         self.__initialize()
         self.uri = hostfile
         self.mode = mode
-
+        
         # Data dir based on location of hostfile
         if mode != 'r':
-            raise CDMSError('Only read mode is supported for host file')
+            raise CDMSError, 'Only read mode is supported for host file'
 
         for sosuffix in '.so', '.dylib', '.dll', '.a':
             self.libcfdll = CDLL(LIBCF + sosuffix)
             if self.libcfdll:
                 break
 
-        if self.libcfdll is None:
-            raise CDMSError('libcf not installed or incorrect path\n  ')
+        if self.libcfdll == None: 
+            raise CDMSError, 'libcf not installed or incorrect path\n  '
 
         libcfdll = self.libcfdll
 
         status = libcfdll.nccf_def_host_from_file(hostfile,
-                                                  byref(self.hostId_ct))
+                                               byref(self.hostId_ct))
         if status != 0:
-            raise CDMSError("ERROR: not a valid host file %s (status=%d)" %
-                            (hostfile, status))
+            raise CDMSError, \
+                "ERROR: not a valid host file %s (status=%d)" % \
+                (hostfile, status)
 
         # Attach global attrs
-        libcfdll.nccf_def_global_from_file(hostfile,
-                                           byref(self.globalId_ct))
+        libcfdll.nccf_def_global_from_file( hostfile, \
+                                            byref(self.globalId_ct))
 
         # get the global attributes from the file
         natts = c_int(-1)
-        attName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME + 1))
-        attValu_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME + 1))
-        self.libcfdll.nccf_inq_global_natts(self.globalId_ct, byref(natts))
+        attName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME+1))
+        attValu_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME+1))
+        self.libcfdll.nccf_inq_global_natts( self.globalId_ct, byref(natts))
         for i in range(natts.value):
-            self.libcfdll.nccf_inq_global_attval(self.globalId_ct,
-                                                 i, attName_ct,
-                                                 attValu_ct)
-            if attName_ct.value not in self.attributes:
+            self.libcfdll.nccf_inq_global_attval(self.globalId_ct, \
+                                                     i, attName_ct, \
+                                                     attValu_ct)
+            if not self.attributes.has_key( attName_ct.value ):
                 self.attributes[attName_ct.value] = attValu_ct.value
 
         self.id = hostfile
@@ -99,28 +97,28 @@ class Host:
         i_ct = c_int()
         status = libcfdll.nccf_inq_host_ngrids(self.hostId_ct, byref(i_ct))
         self.nGrids = i_ct.value
-        status = libcfdll.nccf_inq_host_nstatdatafiles(self.hostId_ct,
-                                                       byref(i_ct))
+        status = libcfdll.nccf_inq_host_nstatdatafiles(self.hostId_ct, \
+                                                           byref(i_ct))
         self.nStatDataFiles = i_ct.value
-        status = libcfdll.nccf_inq_host_ntimedatafiles(self.hostId_ct,
-                                                       byref(i_ct))
+        status = libcfdll.nccf_inq_host_ntimedatafiles(self.hostId_ct, \
+                                                           byref(i_ct))
 
         self.nTimeDataFiles = i_ct.value
-        status = libcfdll.nccf_inq_host_ntimeslices(self.hostId_ct,
-                                                    byref(i_ct))
+        status = libcfdll.nccf_inq_host_ntimeslices(self.hostId_ct, \
+                                                        byref(i_ct))
         self.nTimeSliceFiles = i_ct.value
 
-        fName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME + 1))
-        gName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME + 1))
+        fName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME+1))
+        gName_ct = c_char_p(" " * (libCFConfig.NC_MAX_NAME+1))
 
-        self.dimensions = {"nGrids": self.nGrids,
+        self.dimensions = {"nGrids": self.nGrids, 
                            "nStatDataFiles": self.nStatDataFiles,
                            "nTimeDataFiles": self.nTimeDataFiles,
-                           "nTimeSliceFiles": self.nTimeSliceFiles}
+                           "nTimeSliceFiles":self.nTimeSliceFiles }
 
         # Mosaic filename (use getMosaic to return the connectivity)
         mosaicFilename = c_char_p(" " * (libCFConfig.NC_MAX_NAME + 1))
-        status = libcfdll.nccf_inq_host_mosaicfilename(self.hostId_ct,
+        status = libcfdll.nccf_inq_host_mosaicfilename(self.hostId_ct, 
                                                        mosaicFilename)
         self.mosaicFilename = mosaicFilename.value
 
@@ -133,9 +131,9 @@ class Host:
         # static data
         for vfindx in range(self.nStatDataFiles):
             for gfindx in range(self.nGrids):
-                status = libcfdll.nccf_inq_host_statfilename(self.hostId_ct,
-                                                             vfindx, gfindx,
-                                                             fName_ct)
+                status = libcfdll.nccf_inq_host_statfilename(self.hostId_ct, 
+                                                          vfindx, gfindx, 
+                                                          fName_ct)
                 statFilenames.append(fName_ct.value)
                 f = cdms2.open(fName_ct.value, 'r')
                 varNames = f.listvariable()
@@ -144,12 +142,12 @@ class Host:
                     # Add coordinate names a local list of coordinates
                     if 'coordinates' in dir(f[vn]):
                         for coord in f[vn].coordinates.split():
-                            if not coord in coordinates:
+                            if not coord in coordinates: 
                                 coordinates.append(coord)
-                    if vn not in self.statVars:
+                    if not self.statVars.has_key(vn):
                         # allocate
-                        self.statVars[vn] = ["" for ig in
-                                             range(self.nGrids)]
+                        self.statVars[vn] = ["" for ig in \
+                                                 range(self.nGrids)] 
 
                     # set file name
                     self.statVars[vn][gfindx] = fName_ct.value
@@ -160,11 +158,11 @@ class Host:
             for tfindx in range(self.nTimeSliceFiles):
                 for gfindx in range(self.nGrids):
                     status = \
-                        libcfdll.nccf_inq_host_timefilename(self.hostId_ct,
-                                                            vfindx,
-                                                            tfindx,
-                                                            gfindx,
-                                                            fName_ct)
+                        libcfdll.nccf_inq_host_timefilename(self.hostId_ct, 
+                                                            vfindx, \
+                                                                tfindx, \
+                                                                gfindx, \
+                                                              fName_ct)
                     timeFilenames.append(fName_ct.value)
                     f = cdms2.open(fName_ct.value, 'r')
                     varNames = f.listvariable()
@@ -172,13 +170,13 @@ class Host:
                         # Add coordinate names a local list of coordinates
                         if 'coordinates' in dir(f[vn]):
                             for coord in f[vn].coordinates.split():
-                                if not coord in coordinates:
+                                if not coord in coordinates: 
                                     coordinates.append(coord)
-                        if vn not in self.timeVars:
+                        if not self.timeVars.has_key(vn):
                             # allocate
                             self.timeVars[vn] = \
-                                [["" for it in range(self.nTimeSliceFiles)]
-                                 for ig in range(self.nGrids)]
+                                [["" for it in range(self.nTimeSliceFiles)] \
+                                     for ig in range(self.nGrids)]
                         # set file name
                         self.timeVars[vn][gfindx][tfindx] = fName_ct.value
                     f.close()
@@ -186,12 +184,12 @@ class Host:
         # Grid names and data. Must come after time and static file dictionaries
         # because they define the coordinates.
         for gfindx in range(self.nGrids):
-            status = libcfdll.nccf_inq_host_gridfilename(self.hostId_ct,
-                                                         gfindx,
-                                                         fName_ct)
-            status = libcfdll.nccf_inq_host_gridname(self.hostId_ct,
-                                                     gfindx,
-                                                     gName_ct)
+            status = libcfdll.nccf_inq_host_gridfilename(self.hostId_ct, 
+                                                      gfindx, 
+                                                      fName_ct)
+            status = libcfdll.nccf_inq_host_gridname(self.hostId_ct, 
+                                                      gfindx, 
+                                                      gName_ct)
 
             varNames = cdms2.open(fName_ct.value, 'r').listvariable()
             for vn in varNames:
@@ -206,22 +204,23 @@ class Host:
         # Populate the variables dictionary, avoid the grids
         self.variables = {}
         for item in self.statVars.keys():
-            self.variables[item] = StaticFileVariable(self, item)
+            self.variables[item] = StaticFileVariable(self, item) 
         for item in self.timeVars.keys():
             self.variables[item] = TimeFileVariable(self, item)
 
+
     def __initialize(self):
         """
-        private method to inititialze the hostObj and for use in reseting
+        private method to initialize the hostObj and for use in resetting 
         the hostObj on close
         """
 
-        self.mode = ''
+        self.mode     = ''
         self.libcfdll = None
-        self.uri = ''
-        self.id = ''
+        self.uri      = ''
+        self.id       = ''
         self._status_ = ''
-
+        
         # ctypes variables
         self.hostId_ct = c_int(-1)
         self.globalId_ct = c_int(-1)
@@ -229,10 +228,10 @@ class Host:
         # number of grid files
         self.nGrids = 0
 
-        # number of static var files
+        # number of static var files 
         self.nStatDataFiles = 0
 
-        # number of time dependent var files
+        # number of time dependent var files 
         self.nTimeDataFiles = 0
 
         # number of time files
@@ -252,14 +251,14 @@ class Host:
         self.statVars = {}
 
         # global attributes
-        self.attributes = {}
+        self.attributes = {}   
 
     def getMosaic(self):
         """
         Get the mosaic filename
         @return mfn Mosaic filename
         """
-        from .gsMosaic import Mosaic
+        from gsMosaic import Mosaic
         mfn = Mosaic(self.mosaicFilename, "r")
 
         return mfn
@@ -283,10 +282,10 @@ class Host:
         """
         return self.gridName.values()
 
-    def getStatFilenames(self, varName=None):
+    def getStatFilenames(self, varName = None):
         """
         Return a list of static variable filenames
-        @param varName variable name (or None if all the static file names are to
+        @param varName variable name (or None if all the static file names are to 
                        be returned)
         @return list the file names corresponding to varName
         """
@@ -295,7 +294,7 @@ class Host:
         # return all the static var filenames
         return self.statVars.values()
 
-    def getTimeFilenames(self, varName=None):
+    def getTimeFilenames(self, varName = None):
         """
         Return a list of time dependent variable filenames
         @param varName variable name. None for all variables
@@ -312,7 +311,7 @@ class Host:
         @return list of coordinate names
         """
         return self.gridVars.keys()
-
+    
     def getNumGrids(self):
         """
         Get number of grids (tiles)
@@ -323,7 +322,7 @@ class Host:
 
     def getNumStatDataFiles(self):
         """
-        Get number of static data files
+        Get number of static data files 
         @return number static files
         """
         return self.nStatDataFiles
@@ -335,7 +334,7 @@ class Host:
         """
         return self.nTimeDataFiles
 
-    def listvariable(self, gstype=None):
+    def listvariable(self, gstype = None):
         """
         @param type Grid, Static, Time Dependent or None
         @return list of all variables, including static and time dependent, Default = None
@@ -357,9 +356,9 @@ class Host:
         # Raise error
         else:
             text = 'type must be "Static", "Time", None or empty'
-            raise CDMSError(text)
+            raise CDMSError, text
 
-    def listvariables(self, type=None):
+    def listvariables(self, type = None):
         """
         Synonymous to listvariable
         @param type Grid, Static, Time Dependent or None
@@ -374,9 +373,9 @@ class Host:
         @return attributes list
         """
         fName = ""
-        if varName in self.statVars:
+        if self.statVars.has_key(varName):
             fName = self.statVars[varName][0]
-        elif varName in self.timeVars:
+        elif self.timeVars.has_key(varName):
             fName = self.timeVars[varName][0][0]
         if fName:
             var = cdms2.open(fName, 'r')(varName)
@@ -398,12 +397,12 @@ class Host:
         @return [nGrids, (n0, n1, ...)]
         """
         return self.dimensions.keys()
-
+        
     def listglobal(self):
         """
         List global attributes of host file
         @return a list of the global attributes in the file
-        """
+        """ 
         return self.attributes.keys()
 
     def getglobal(self, attName):
@@ -411,10 +410,10 @@ class Host:
         Get the value of the global attribute
         @param [attName] - global attribute name
         @return attribute value
-        """
+        """        
         return self.attributes[attName]
 
-    def listall(self, varName=None, all=None):
+    def listall(self, varName = None, all = None):
         """
         Get info about data from the file.
         @param varName variable name
@@ -422,12 +421,11 @@ class Host:
         @return information about file.
         """
 
-        if varName is None:
-            return None
+        if varName is None: return None 
         var = self.getVariable(varName)
-        return var.listall(all=all)
+        return var.listall(all = all)
 
-    def showall(self, varName=None, all=None, device=None):
+    def showall(self, varName = None, all = None, device = None):
         """
         Get info about data from the file.
         @param varName variable name
@@ -435,12 +433,9 @@ class Host:
         @param device output device
         @return information about file.
         """
-        import sys
-        import string
-        if device is None:
-            device = sys.stdout
-        if varName is None:
-            return None
+        import sys, string
+        if device is None: device=sys.stdout
+        if varName is None: return None 
         var = self.getVariable(varName)
         alist = var.listall(all=all)
         device.write(string.join(alist, "\n"))
@@ -453,22 +448,22 @@ class Host:
         self.__initialize()
         self._status_ = 'closed'
 
-    def __repr__(self):
+    def __repr__(self): 
         """
         Python repr()
         @return res Print statement
         """
         res = "< '%s',  URI: '%s', MODE: '%s', STATUS: '%s',\n libcf: %s >" % \
-            (self.__class__, self.uri, self.mode,
+            ( self.__class__, self.uri, self.mode, 
               self._status_, self.libcfdll)
-        return res
+        return res 
 
     def __del__(self):
         """
         Free the host file from memory
         """
-        if self.hostId_ct.value >= 0:
-            self.libcfdll.nccf_free_host(self.hostId_ct)
+        if self.hostId_ct.value >= 0: 
+            self.libcfdll.nccf_free_host( self.hostId_ct )
         self.hostId_ct.value = -1
 
 # NOTE: There is no __call__ method for host files.
@@ -480,12 +475,12 @@ class Host:
         @return list of cdms2 file variables, one for each grid
         """
         # Static variables
-        if varName in self.statVars:
+        if self.statVars.has_key(varName):
             staticFV = StaticFileVariable(self, varName)
             return staticFV
 
         # Time variables
-        elif varName in self.timeVars:
+        elif self.timeVars.has_key(varName):
             timeVariables = TimeFileVariable(self, varName)
             return timeVariables
 
@@ -513,9 +508,8 @@ class Host:
         @return value
         """
         return self.attributes[name]
-
-#
-
+    
+##############################################################################
 
 def test():
     import sys
@@ -526,20 +520,20 @@ def test():
     from optparse import OptionParser
     parser = OptionParser()
     parser.add_option("-f", "--file", dest="hostFilename",
-                      help="host file name")
+                  help="host file name")
 
     options, args = parser.parse_args()
     if not options.hostFilename:
-        print """need to provide a host file, use -h
+        print """need to provide a host file, use -h 
 to get a full list of options"""
         sys.exit(1)
 
     print 'open file..., create grdspec file object...'
     gf = cdms2.open(options.hostFilename)
-    if gf._status_ == 'closed':
+    if gf._status_ == 'closed': 
         print "File not opened"
         sys.exit(1)
-    print
+    print 
     print "type=", type(gf)
     print 'listvariable...'
     print gf.listvariable()
@@ -558,15 +552,15 @@ to get a full list of options"""
     print 'acess time dependent data...', "V" in gf.listvariables()
     print gf['V'][0].size
 
+
     # Test the mosaic
     print 'getMosaic...', 'getMosaic' in dir(gf)
     mosaic = gf.getMosaic()
-    for c in mosaic.coordinate_names:
+    for c in mosaic.coordinate_names: 
         print c
-    for t in mosaic.tile_contacts:
+    for t in mosaic.tile_contacts: 
         print "%s -> %s" % (t, mosaic.tile_contacts[t])
 
-#
+##############################################################################
 
-if __name__ == "__main__":
-    test()
+if __name__ == "__main__": test()
diff --git a/Packages/cdms2/Lib/gsMosaic.py b/Packages/cdms2/Lib/gsMosaic.py
index 983030222..2592ada8c 100644
--- a/Packages/cdms2/Lib/gsMosaic.py
+++ b/Packages/cdms2/Lib/gsMosaic.py
@@ -3,7 +3,7 @@
 """
 A file-like object to access mosaic.
 Dave Kindig and Alex Pletzer, Tech-X (2011)
-This code is provided with the hope that it will be useful.
+This code is provided with the hope that it will be useful. 
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
@@ -11,7 +11,7 @@ No guarantee is provided whatsoever. Use at your own risk.
 from re import search, sub
 from ctypes import c_char_p, c_int, CDLL, byref
 
-# numpy
+# numpy 
 from numpy import zeros, reshape
 
 # CDAT
@@ -24,13 +24,12 @@ from cdms2.error import CDMSError
 try:
     from pycf import libCFConfig, __path__
 except:
-    raise ImportError('Error: could not import pycf')
+    raise ImportError, 'Error: could not import pycf'
 
-LIBCFDIR = __path__[0] + "/pylibcf"
-libCF = libCFConfig
+LIBCFDIR  = __path__[0] + "/pylibcf"
+libCF  = libCFConfig
 
-
-def open(uri, mode='r'):
+def open(uri, mode = 'r'):
     """
     Open mosaic file
     @param mosaicfile mosaic file
@@ -41,7 +40,6 @@ def open(uri, mode='r'):
     outMosaicFile = Mosaic(uri, mode)
     return outMosaicFile
 
-
 def getSlab(strg):
     """
     From a string return a tuple of slice objects
@@ -60,29 +58,26 @@ def getSlab(strg):
             step = 1
             startIndex = int(m.group(1))
             endIndex = int(m.group(2))
-            if endIndex < startIndex:
-                step = -1
+            if endIndex < startIndex: step = -1
             slc = slice(startIndex, endIndex, step)
             res.append(slc)
     return tuple(res)
 
-
 class Mosaic:
-
     """
     Define a mosaic.
     """
 
-    def __init__(self, uri, mode='r'):
+    def __init__(self, uri, mode = 'r'):
         """
         Constructor
         @param uri Filename with path
         @param mode read/write. Currently only read is supported
         """
 
-        self.id = uri
-        self.mode = mode
-        self.uri = uri
+        self.id      = uri
+        self.mode    = mode
+        self.uri     = uri
         self._status = 'Open'
 
         self.mosaicId_ct = c_int(-1)
@@ -94,44 +89,41 @@ class Mosaic:
 
         libcfdll = self.lib
 
-        self.file_type = ""
-        self.contact_map = {}
-        self.tile_contacts = {}
+        self.file_type           = ""
+        self.contact_map         = {}
+        self.tile_contacts       = {}
         self.tile_contacts_compl = {}
-        self.coordinate_names = []
-        self.tile_names = []
+        self.coordinate_names    = []
+        self.tile_names          = []
 
-        status = libcfdll.nccf_def_mosaic_from_file(uri, "",
+        status = libcfdll.nccf_def_mosaic_from_file(uri, "", 
                                                     byref(self.mosaicId_ct))
 
         if status != 0:
-            raise CDMSError("ERROR: %s is not a valid mosaic file (status = %d)" %
-                            (uri, status))
+            raise CDMSError, "ERROR: %s is not a valid mosaic file (status = %d)" % \
+                (uri, status)
 
         # Get some sizes
-        nGrids = c_int(-1)
-        ndims = c_int(-1)
-        ncontacts = c_int(-1)
+        nGrids         = c_int(-1)
+        ndims          = c_int(-1)
+        ncontacts      = c_int(-1)
         libcfdll.nccf_inq_mosaic_ndims(self.mosaicId_ct, byref(ndims))
         libcfdll.nccf_inq_mosaic_ngrids(self.mosaicId_ct, byref(nGrids))
         libcfdll.nccf_inq_mosaic_ncontacts(self.mosaicId_ct, byref(ncontacts))
 
         # Build the character arrays
         separator_ct = libCF.CF_TILE_SEPARATOR
-        contact_map_ct = c_char_p(" " * (libCF.NC_MAX_NAME + 1))
-        tile_contact_ct = c_char_p(" " * (libCF.NC_MAX_NAME + 1))
-        tile_name_ct = c_char_p(" " * (libCF.NC_MAX_NAME + 1))
+        contact_map_ct  = c_char_p(" " * (libCF.NC_MAX_NAME+1))
+        tile_contact_ct = c_char_p(" " * (libCF.NC_MAX_NAME+1))
+        tile_name_ct    = c_char_p(" " * (libCF.NC_MAX_NAME+1))
         coord_ct = (c_char_p * ndims.value)()
 
         for iDim in range(ndims.value):
-            coord_ct[iDim] = " " * (libCF.NC_MAX_NAME + 1)
+            coord_ct[iDim] = " " * (libCF.NC_MAX_NAME+1)
 
         # Get the grid names
         for igrid in range(nGrids.value):
-            libcfdll.nccf_inq_mosaic_gridname(
-                self.mosaicId_ct,
-                igrid,
-                tile_name_ct)
+            libcfdll.nccf_inq_mosaic_gridname(self.mosaicId_ct, igrid, tile_name_ct)
             tname = str(tile_name_ct)
             self.tile_names.append(tname)
 
@@ -143,24 +135,24 @@ class Mosaic:
 
         # Get the contact map information
         for iContact in range(ncontacts.value):
-            status = libcfdll.nccf_inq_mosaic_contactmap(self.mosaicId_ct,
-                                                         iContact, contact_map_ct)
-            status = libcfdll.nccf_inq_mosaic_tilecontact(self.mosaicId_ct,
-                                                          iContact, tile_contact_ct)
+            status = libcfdll.nccf_inq_mosaic_contactmap(self.mosaicId_ct, \
+                                                       iContact, contact_map_ct)
+            status = libcfdll.nccf_inq_mosaic_tilecontact(self.mosaicId_ct, \
+                                                        iContact, tile_contact_ct)
 
-            tN1, tN2 = tile_contact_ct.value.split(separator_ct)
+            tN1, tN2             = tile_contact_ct.value.split(separator_ct)
             tileName1, tileName2 = tN1.strip(), tN2.strip()
-            s1, s2 = contact_map_ct.value.split(separator_ct)
+            s1, s2               = contact_map_ct.value.split(separator_ct)
 
             # slice objects
             slab1 = getSlab(s1.strip())
             slab2 = getSlab(s2.strip())
 
             # Create the tile contact dictionary. Non symmetric.
-            if tileName1 not in self.tile_contacts:
+            if not self.tile_contacts.has_key(tileName1):
                 self.tile_contacts[tileName1] = {}
             # The complement to tile_contacts
-            if tileName2 not in self.tile_contacts_compl:
+            if not self.tile_contacts_compl.has_key(tileName2):
                 self.tile_contacts_compl[tileName2] = {}
 
             # Attach the contact map (slab) between the tiles
@@ -192,13 +184,13 @@ class Mosaic:
             for sl in slab:
                 b = sl.start
                 e = sl.stop + 1
-                newsl = slice(max(b - 1, 0), max(e - 1, -1), sl.step)
+                newsl = slice(max(b-1, 0), max(e-1, -1), sl.step)
                 newslab.append(newsl)
             newslabs.append(tuple(newslab))
         slab1, slab2 = newslabs
-
+    
         return (slab1, slab2)
-
+    
     def getSeamGrids(self, coordData):
         """
         Retrieve the seem grids between two cell centered tiles
@@ -211,33 +203,32 @@ class Mosaic:
                 # Get the seam data
                 result.append(self.getSeamData(tn1, tn2, coordData))
 
-                # Get the triangle data. Need to find the three cells
+                # Get the triangle data. Need to find the three cells 
                 # comprising a corner.
                 if tn2 in self.tile_contacts.keys():
-                    t1n = self.tile_contacts[tn1].keys()
-                    t2n = self.tile_contacts[tn2].keys()
+                    t1n = self.tile_contacts[tn1].keys() 
+                    t2n = self.tile_contacts[tn2].keys() 
 
                     # Look for a tile in the main list. Now compare the adjacent
                     # tiles to 1 and 2 until there is match. Now we have tile 3
                     for tn3 in t1n:
                         if tn3 in t1n and tn3 in t2n:
                             cornerIndex = self.getCornerData(tn1, tn2, tn3)
-
                             def getCornerInfo(data, cornerindex):
                                 lon = data.getLongitude()
                                 lat = data.getLatitude()
                                 c1 = data[cornerindex]
                                 n1 = lon[cornerindex]
                                 t1 = lat[cornerindex]
-                                lonatts = {'units': lon.units,
-                                           'standard_name': lon.standard_name}
-                                latatts = {'units': lat.units,
-                                           'standard_name': lat.standard_name}
+                                lonatts = {'units':lon.units, 
+                                           'standard_name':lon.standard_name}
+                                latatts = {'units':lat.units, 
+                                           'standard_name':lat.standard_name}
 
                                 return c1, n1, t1, lonatts, latatts
 
                             def popCorner(d1, d2, d3, dtype):
-                                corner = zeros((2, 2), dtype=dtype)
+                                corner = zeros((2, 2), dtype = dtype)
                                 if 'data' in dir(d1):
                                     corner[0, 0] = d1.data
                                     corner[0, 1] = d2.data
@@ -251,9 +242,7 @@ class Mosaic:
                                 return corner
 
                             c1, n1, t1, lonAtts, latAtts = \
-                                getCornerInfo(
-                                    coordData[tn1],
-                                            cornerIndex[0])
+                                        getCornerInfo(coordData[tn1], cornerIndex[0])
                             c2, n2, t2, lonAtts, latAtts = \
                                 getCornerInfo(coordData[tn2], cornerIndex[1])
                             c3, n3, t3, lonAtts, latAtts = \
@@ -264,27 +253,21 @@ class Mosaic:
                             lon_dtype = coordData[tn1].getLongitude().dtype
                             lat_dtype = coordData[tn1].getLatitude().dtype
                             corner = popCorner(c1, c2, c3, dtype)
-                            lon = popCorner(n1, n2, n3, lon_dtype)
-                            lat = popCorner(t1, t2, t3, lat_dtype)
-                            gridid = 'corner_%d_%d_%d' % (coordData[tn1].gridIndex,
-                                                          coordData[
-                                tn2].gridIndex,
-                                coordData[tn3].gridIndex)
-                            gridAtts = {
-                                'lon': lonAtts,
-                                'lat': latAtts,
-                                'gridid': gridid}
-                            cornerGrid = self.createSeamGrid(
-                                lon, lat, gridAtts)
-
-                            cornerTV = cdms2.createVariable(corner,
-                                                            axes=cornerGrid.getAxisList(
-                                                            ),
-                                                            grid=cornerGrid,
-                                                            attributes=coordData[
-                                                            tn1].attributes,
-                                                            id=gridid)
-
+                            lon    = popCorner(n1, n2, n3, lon_dtype)
+                            lat    = popCorner(t1, t2, t3, lat_dtype)
+                            gridid = 'corner_%d_%d_%d' % (coordData[tn1].gridIndex, \
+                                                           coordData[tn2].gridIndex, 
+                                                           coordData[tn3].gridIndex)
+                            gridAtts = {'lon':lonAtts, 'lat':latAtts, 'gridid':gridid}
+                            cornerGrid = self.createSeamGrid(lon, lat, gridAtts)
+
+                            cornerTV = cdms2.createVariable(corner, 
+                                             axes = cornerGrid.getAxisList(), 
+                                             grid = cornerGrid, 
+                                             attributes = coordData[tn1].attributes, 
+                                             id = gridid)
+                    
+                                
         return (result, cornerTV)
 
     def getCornerData(self, tileName1, tileName2, tileName3):
@@ -293,9 +276,9 @@ class Mosaic:
         @tileName1 Tile name of first grid (tile)
         @tileName2 Tile name of second grid (tile)
         @tileName3 Tile name of third grid (tile)
-        @return tuple of data marking the corners of the corner grid
+        @return tuple of data marking the corners of the corner grid        
         """
-
+        
         # Get the slabs and account for cell centers
         s1, s2 = self.tile_contacts[tileName1][tileName2]
         s3, s4 = self.tile_contacts[tileName1][tileName3]
@@ -310,18 +293,12 @@ class Mosaic:
         c5, c6 = self.getContactCornerIndex(s5, s6)
 
         # Set the tuple containing the corner indices in j, i order.
-        if c1 == 0 and c3 == 1:
-            pair1 = (s1[c1].start, s3[c3].start)
-        if c1 == 1 and c3 == 0:
-            pair1 = (s3[c3].start, s1[c1].start)
-        if c2 == 0 and c5 == 1:
-            pair2 = (s2[c2].start, s5[c5].start)
-        if c2 == 1 and c5 == 0:
-            pair2 = (s5[c5].start, s2[c2].start)
-        if c4 == 0 and c6 == 1:
-            pair3 = (s4[c4].start, s6[c6].start)
-        if c4 == 1 and c6 == 0:
-            pair3 = (s6[c6].start, s4[c4].start)
+        if c1 == 0 and c3 == 1: pair1 = (s1[c1].start, s3[c3].start)
+        if c1 == 1 and c3 == 0: pair1 = (s3[c3].start, s1[c1].start)
+        if c2 == 0 and c5 == 1: pair2 = (s2[c2].start, s5[c5].start)
+        if c2 == 1 and c5 == 0: pair2 = (s5[c5].start, s2[c2].start)
+        if c4 == 0 and c6 == 1: pair3 = (s4[c4].start, s6[c6].start)
+        if c4 == 1 and c6 == 0: pair3 = (s6[c6].start, s4[c4].start)
 
         return (pair1, pair2, pair3)
 
@@ -349,7 +326,7 @@ class Mosaic:
         @return attrs Attributes for eash plus the gridid
         """
         pass
-
+        
     def createSeamGrid(self, x, y, attrs):
         """
         Return the coordinate data associated with variable.
@@ -360,37 +337,34 @@ class Mosaic:
         LONSTR = 'lon'
         LATSTR = 'lat'
 
+
         # Get the dimensions
         xdim = x.shape
         ydim = y.shape
 
-        if xdim != ydim:
-            raise CDMSError("Dimension of coordinates grids don't match")
+        if xdim != ydim: 
+            raise CDMSError, "Dimension of coordinates grids don't match"
 
         nj = xdim[0]
         ni = xdim[1]
 
         # Define the axes, verifying the lon and lat grids
-        jaxis = TransientVirtualAxis("j", nj)
-        iaxis = TransientVirtualAxis("i", ni)
-
-        if search(LONSTR, attrs['lon']['standard_name']):
-            lon = x
-        if search(LONSTR, attrs['lat']['standard_name']):
-            lon = y
-        if search(LATSTR, attrs['lon']['standard_name']):
-            lat = x
-        if search(LATSTR, attrs['lat']['standard_name']):
-            lat = y
-
-        lataxis = TransientAxis2D(lat,
-                                  axes=(jaxis, iaxis),
-                                  attributes=attrs['lat'],
-                                  id=attrs['lat']['standard_name'])
-        lonaxis = TransientAxis2D(lon,
-                                  axes=(jaxis, iaxis),
-                                  attributes=attrs['lon'],
-                                  id=attrs['lon']['standard_name'])
+        jaxis = TransientVirtualAxis("j",nj)
+        iaxis = TransientVirtualAxis("i",ni)
+
+        if search(LONSTR, attrs['lon']['standard_name']): lon = x
+        if search(LONSTR, attrs['lat']['standard_name']): lon = y
+        if search(LATSTR, attrs['lon']['standard_name']): lat = x
+        if search(LATSTR, attrs['lat']['standard_name']): lat = y
+
+        lataxis = TransientAxis2D(lat, 
+                       axes=(jaxis, iaxis), 
+                       attributes=attrs['lat'], 
+                       id=attrs['lat']['standard_name'])
+        lonaxis = TransientAxis2D(lon, 
+                       axes=(jaxis, iaxis), 
+                       attributes=attrs['lon'], 
+                       id=attrs['lon']['standard_name'])
 
         # Define the combined grid
         grid = TransientCurveGrid(lataxis, lonaxis, id=attrs['gridid'])
@@ -406,7 +380,7 @@ class Mosaic:
         """
 
         slab1, slab2 = self.tile_contacts[tileName][otherTileName]
-
+    
         # Convert to cell centered slabs
         slab1, slab2 = self.getCellCenteredSlab(slab1, slab2)
         d1 = inputData[tileName]
@@ -423,12 +397,11 @@ class Mosaic:
 
         data1, lon1, lat1 = createNewVar(inputData[tileName], slab1)
         data2, lon2, lat2 = createNewVar(inputData[otherTileName], slab2)
-
+        
         # Remove dimensions of size 1.
         shape = []
         for d in data1.shape:
-            if d != 1:
-                shape.append(d)
+            if d != 1: shape.append(d)
 
         newshape = tuple(shape + [2])
         shape = tuple(shape)
@@ -442,22 +415,21 @@ class Mosaic:
         newLat[:, 0] = reshape(lat1[:], shape)
         newLat[:, 1] = reshape(lat2, shape)
         gridid = 'seam_tile%d_tile%d' % (data1.gridIndex, data2.gridIndex)
-        gridAtts = {
-            'lon': {'units': l1.units, 'standard_name': l1.standard_name},
-                    'lat':
-                        {'units': t1.units, 'standard_name': t1.standard_name},
-                    'gridid': gridid}
+        gridAtts = {'lon':{'units':l1.units, 'standard_name':l1.standard_name}, \
+                    'lat':{'units':t1.units, 'standard_name':t1.standard_name}, \
+                    'gridid':gridid}
         seamGrid = self.createSeamGrid(newLon, newLat, gridAtts)
 
         dataAtts = {'gridid': gridid}
-        newData = cdms2.createVariable(newVar,
-                                       axes=seamGrid.getAxisList(),
-                                       grid=seamGrid,
-                                       attributes=d1.attributes,
-                                       id=dataAtts['gridid'])
+        newData = cdms2.createVariable(newVar, 
+                         axes = seamGrid.getAxisList(), 
+                         grid = seamGrid, 
+                         attributes = d1.attributes, 
+                         id = dataAtts['gridid'])
 
         return newData
 
+    
     def getCoordinateNames(self):
         """
         Get the coordinate names for a mosaic
@@ -467,7 +439,7 @@ class Mosaic:
 
     def __repr__(self):
         res = "<Mosaic: '%s',  URI: '%s', mode: '%s', status: '%s' >" % \
-            (self.id, self.uri, self.mode, self._status)
+            ( self.id, self.uri, self.mode, self._status)
         return res
 
     def __call__(self):
@@ -476,8 +448,7 @@ class Mosaic:
     def __del__(self):
         self.lib.nccf_free_mosaic(self.mosaicId_ct)
 
-#
-
+#############################################################################
 
 def test():
     import os.path
@@ -489,7 +460,7 @@ def test():
     """
     parser = OptionParser()
     parser.add_option("-f", "--file", dest="mfile",
-                      help="full path to mosaic file")
+                  help="full path to mosaic file")
 
     options, args = parser.parse_args()
     if not options.mfile:
@@ -503,16 +474,12 @@ def test():
     m = open(options.mfile)
 
     print "\nCoordinate Names"
-    for c in m.coordinate_names:
-        print c
+    for c in m.coordinate_names: print c
 
     print "\nTile Contacts"
-    for t in m.tile_contacts:
-        print "%s -> %s" % (t, m.tile_contacts[t])
+    for t in m.tile_contacts: print "%s -> %s" % (t, m.tile_contacts[t])
     print "\nTile Contacts Complement"
-    for t in m.tile_contacts_compl:
-        print "%s -> %s" % (t, m.tile_contacts_compl[t])
+    for t in m.tile_contacts_compl: print "%s -> %s" % (t, m.tile_contacts_compl[t])
     print
 
-if __name__ == "__main__":
-    test()
+if __name__ == "__main__": test()
diff --git a/Packages/cdms2/Lib/gsStaticVariable.py b/Packages/cdms2/Lib/gsStaticVariable.py
index 5a1910fe2..0332e1415 100644
--- a/Packages/cdms2/Lib/gsStaticVariable.py
+++ b/Packages/cdms2/Lib/gsStaticVariable.py
@@ -3,12 +3,13 @@
 """
 A variable-like object extending over multiple tiles
 Dave Kindig and Alex Pletzer, Tech-X Corp. (2011)
-This code is provided with the hope that it will be useful.
+This code is provided with the hope that it will be useful. 
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
 import operator
 import cdms2
+import types
 from cdms2.error import CDMSError
 from cdms2.hgrid import AbstractCurveGrid, TransientCurveGrid, FileCurveGrid
 from cdms2.coord import TransientAxis2D, TransientVirtualAxis
@@ -17,7 +18,6 @@ from cdms2.coord import FileAxis2D
 from cdms2.gengrid import FileGenericGrid
 from cdms2.fvariable import FileVariable
 from cdms2.axis import FileAxis
-from functools import reduce
 
 try:
     from pycf import libCFConfig as libcf
@@ -25,7 +25,6 @@ except:
 #    raise ImportError, 'Error: could not import pycf'
     pass
 
-
 def createTransientGrid(gFName, coordinates):
     """
     Return the coordinate data associated with variable.
@@ -37,7 +36,7 @@ def createTransientGrid(gFName, coordinates):
 
     fh = cdms2.open(gFName)
     gridid = None
-    if libcf.CF_GRIDNAME in fh.attributes.keys():
+    if libcf.CF_GRIDNAME in fh.attributes.keys(): 
         gridid = getattr(fh, libcf.CF_GRIDNAME)
     xn, yn = coordinates.split()
 
@@ -48,8 +47,8 @@ def createTransientGrid(gFName, coordinates):
     xdim = x.shape
     ydim = y.shape
 
-    if xdim != ydim:
-        raise CDMSError("Dimension of coordinates grids don't match")
+    if xdim != ydim: 
+        raise CDMSError, "Dimension of coordinates grids don't match"
 
     ni = xdim[1]
     nj = xdim[0]
@@ -57,39 +56,32 @@ def createTransientGrid(gFName, coordinates):
     lonstr = 'lon'
     latstr = 'lat'
 
-    if re.search(lonstr, x.standard_name):
-        lon = x
-    if re.search(lonstr, y.standard_name):
-        lon = y
-    if re.search(latstr, x.standard_name):
-        lat = x
-    if re.search(latstr, y.standard_name):
-        lat = y
+    if re.search(lonstr, x.standard_name): lon = x
+    if re.search(lonstr, y.standard_name): lon = y
+    if re.search(latstr, x.standard_name): lat = x
+    if re.search(latstr, y.standard_name): lat = y
 
     # Define the axes, verifying the lon and lat grids
     iaxis = TransientVirtualAxis("i", ni)
     jaxis = TransientVirtualAxis("j", nj)
 
-    lataxis = TransientAxis2D(lat,
-                              axes=(iaxis, jaxis),
-                              attributes={'units': lat.units},
-                              id=lat.standard_name)
-    lonaxis = TransientAxis2D(lon,
-                              axes=(iaxis, jaxis),
-                              attributes={'units': lon.units},
-                              id=lon.standard_name)
+    lataxis = TransientAxis2D(lat, 
+                   axes=(iaxis, jaxis), 
+                   attributes={'units': lat.units}, 
+                   id=lat.standard_name)
+    lonaxis = TransientAxis2D(lon, 
+                   axes=(iaxis, jaxis), 
+                   attributes={'units': lon.units}, 
+                   id=lon.standard_name)
 
     # Define the combined grid
     grid = TransientCurveGrid(lataxis, lonaxis, id=gridid)
     return grid
 
-
 class StaticVariable:
-
     """
     Constructor
     """
-
     def __init__(self, StaticVariable, hostObj, varName):
         """
         Constructor - Contains methods applicable to both file and transient static variables
@@ -97,12 +89,12 @@ class StaticVariable:
         @param hostObj The host file object
         @param varName for the id
         """
-        StaticVariable.id = varName
+        StaticVariable.id     = varName
         StaticVariable.nGrids = hostObj.nGrids
 
         StaticVariable.vars = []
         if StaticVariable.nGrids > 0:
-            StaticVariable.vars = [None] * StaticVariable.nGrids
+            StaticVariable.vars = [None]*StaticVariable.nGrids
 
     def __getitem__(self, gridIndex):
         """
@@ -160,13 +152,10 @@ class StaticVariable:
             return self.vars[0].typecode()
         return None
 
-
 class StaticFileVariable(StaticVariable):
-
     """
     Static variable extending over multiple grid files
     """
-
     def __init__(self, hostObj, varName):
         """
         Create a list of file variable with grid attached
@@ -185,11 +174,8 @@ class StaticFileVariable(StaticVariable):
             gn = gridFilenames[gridIndex]
 
             # Open the files
-            f = cdms2.open(fn, mode)
-                           # Need f and u because they serve slightly different
-                           # purposes
-            u = CdunifFile(fn, mode)
-                           # f.axes exists while axes is not a part of u
+            f = cdms2.open(fn, mode)   # Need f and u because they serve slightly different purposes
+            u = CdunifFile(fn, mode)   # f.axes exists while axes is not a part of u
 #            u.variables[varName].gridIndex = gridIndex
             g = CdunifFile(gn, mode)
 
@@ -202,22 +188,20 @@ class StaticFileVariable(StaticVariable):
             coordsaux = f._convention_.getAxisAuxIds(u.variables, coords1d)
 
             # Convert the variable into a FileVariable
-            f.variables[varName] = FileVariable(
-                f, varName, u.variables[varName])
+            f.variables[varName] = FileVariable(f, varName, u.variables[varName])
 
             # Add the coordinates to the file
             for coord in coords:
                 f.variables[coord] = g.variables[coord]
                 f.variables[coord] = FileAxis2D(f, coord, g.variables[coord])
-
+            
             # Build the axes
             for key in f.axes.keys():
                 f.axes[key] = FileAxis(f, key, None)
 
             # Set the boundaries
             for coord in coords:
-                bounds = f._convention_.getVariableBounds(
-                    f, f.variables[coord])
+                bounds = f._convention_.getVariableBounds(f, f.variables[coord])
                 f.variables[coord].setBounds(bounds)
 
             # Initialize the domain
@@ -225,35 +209,26 @@ class StaticFileVariable(StaticVariable):
                 var.initDomain(f.axes)
 
             # Add the grid
-            gridkey, lat, lon = f.variables[
-                varName].generateGridkey(f._convention_, f.variables)
+            gridkey, lat, lon = f.variables[varName].generateGridkey(f._convention_, f.variables)
             gridname = "grid_%dx%d" % lat.shape
-# grid = FileGenericGrid(lat, lon, gridname, parent = f, maskvar = None)
-            grid = FileCurveGrid(
-                lat,
-                lon,
-                gridname,
-                parent=f,
-                maskvar=None)
+#            grid = FileGenericGrid(lat, lon, gridname, parent = f, maskvar = None)
+            grid = FileCurveGrid(lat, lon, gridname, parent = f, maskvar = None)
             f.variables[varName]._grid_ = grid
             self.vars[gridIndex] = f.variables[varName]
         self._repr_string = "StaticFileVariable"
 
-    def listall(self, all=None):
+    def listall(self, all = None):
         """
         Gain access to cdms2 listall method. Requires a StaticFileVariable
         @param all
         @returns list
         """
-        return self[0].listall(all=all)
-
+        return self[0].listall(all = all)
 
 class StaticTransientVariable(StaticVariable):
-
     """
     Static variable extending over multiple grid files
     """
-
     def __init__(self, hostObj, varName):
         """
         Constructor
@@ -273,17 +248,15 @@ class StaticTransientVariable(StaticVariable):
             # name of the file containing coordinate data
             gFName = gridFilenames[gridIndex]
 
-            fh = cdms2.open(fName, hostObj=hostObj)
+            fh = cdms2.open(fName, hostObj = hostObj)
             gh = cdms2.open(gFName)
 
             vr = fh(varName)
-            vr.gridIndex = gridIndex
+            vr.gridIndex    = gridIndex
 
             grid = None
             if 'coordinates' in vr.attributes.keys():
-                grid = createTransientGrid(
-                    gFName,
-                    vr.attributes['coordinates'])
+                grid = createTransientGrid(gFName, vr.attributes['coordinates'])
             atts = dict(vr.attributes)
             atts.update(gh.attributes)
             if libcf.CF_GRIDNAME in fh.attributes.keys():
@@ -291,19 +264,18 @@ class StaticTransientVariable(StaticVariable):
 
             # Create the variable
             if grid:
-                var = cdms2.createVariable(vr,
-                                           axes=grid.getAxisList(),
-                                           grid=grid,
-                                           attributes=atts,
-                                           id=vr.standard_name)
-            else:
+                var = cdms2.createVariable(vr, 
+                                axes = grid.getAxisList(), 
+                                grid = grid, 
+                                attributes = atts, 
+                                id = vr.standard_name)
+            else: 
                 var = vr
             self.vars[gridIndex] = var
         self._repr_string = "StaticTransientVariable"
 
-
 def test():
     pass
 
-if __name__ == '__main__':
-    test()
+if __name__ == '__main__': test()
+ 
diff --git a/Packages/cdms2/Lib/gsTimeVariable.py b/Packages/cdms2/Lib/gsTimeVariable.py
index 0a0c4d571..1f4268fb7 100644
--- a/Packages/cdms2/Lib/gsTimeVariable.py
+++ b/Packages/cdms2/Lib/gsTimeVariable.py
@@ -23,13 +23,10 @@ from cdms2.fvariable import FileVariable
 from cdms2.axis import FileAxis, TransientAxis
 from cdms2.axis import concatenate as axisConcatenate
 
-
 class TimeAggregatedFileVariable:
-
     """
     Constructor Class for aggregating a time dependant variable across files.
     """
-
     def __init__(self, gridIndex, listOfFVs, hostObj):
         """
         @param gridIndex Index of requested grid
@@ -39,12 +36,10 @@ class TimeAggregatedFileVariable:
         self.fvs = listOfFVs
         self.gridIndex = gridIndex
         self.hostObj = hostObj
-        self.nTimeStepFiles = hostObj.nTimeSliceFiles * \
-            hostObj.nTimeDataFiles * hostObj.nGrids
+        self.nTimeStepFiles = hostObj.nTimeSliceFiles * hostObj.nTimeDataFiles * hostObj.nGrids
         it = self.getTimeAxisIndex(self.fvs[0].getAxisList())
         self.nTimeStepsPerFile = (self.fvs[0].shape)[it]
-        self.nTimeStepsPerVariable = hostObj.nTimeSliceFiles * \
-            self.nTimeStepsPerFile
+        self.nTimeStepsPerVariable = hostObj.nTimeSliceFiles * self.nTimeStepsPerFile
 
     def __call__(self, *args, **kwargs):
         """
@@ -70,7 +65,7 @@ class TimeAggregatedFileVariable:
         @param slc Integer, slice or tuple of slices. If tuple 0 is time
         @return sliced variable
         """
-
+        
         if isinstance(slc, int):
             # return FileVariable
             return self.fvs[slc]
@@ -81,12 +76,10 @@ class TimeAggregatedFileVariable:
             axes = self.fvs[0].getAxisList()
             timeAxisIndex = self.getTimeAxisIndex(axes)
             if timeAxisIndex is None:
-                CDMSError, "No time axis in :\n" + axes
+                CDMSError, "No time axis in :\n"  + axes
             if isinstance(slc[timeAxisIndex], slice):
-                (fileInds, timeStepInds) = self.getTimeFileIndex(
-                    slc[timeAxisIndex])
-                tv = self.createTransientVariableFromIndices(
-                    fileInds, timeStepInds)
+                (fileInds, timeStepInds) = self.getTimeFileIndex(slc[timeAxisIndex])
+                tv = self.createTransientVariableFromIndices(fileInds, timeStepInds)
                 newslc = self.buildSlice(slc, tv.getAxisList())
                 return tv[newslc]
             elif isinstance(slc[timeAxisIndex], int):
@@ -94,15 +87,13 @@ class TimeAggregatedFileVariable:
                 timeIndex = slc[timeAxisIndex] % nTSF
 
                 # Get just the file needed for the index slice requested.
-                tv = self.createTransientVariableFromIndices(
-                    fileIndex, timeIndex)
+                tv = self.createTransientVariableFromIndices(fileIndex, timeIndex)
                 newslc = self.buildSlice(slc, axes)
                 return tv[newslc]
 
         elif isinstance(slc, slice):
             (fileInds, timeStepInds) = self.getTimeFileIndex(slc)
-            tv = self.createTransientVariableFromIndices(
-                fileInds, timeStepInds)
+            tv = self.createTransientVariableFromIndices(fileInds, timeStepInds)
             return tv
 
     def __len__(self):
@@ -124,10 +115,8 @@ class TimeAggregatedFileVariable:
         timI2 = []
         filI2 = []
 
-        if timeslc.step is None:
-            step = 1
-        else:
-            step = timeslc.step
+        if timeslc.step is None: step = 1
+        else: step = timeslc.step
         stop = timeslc.stop
         if timeslc.stop >= nTSV:
             stop = nTSV
@@ -139,7 +128,7 @@ class TimeAggregatedFileVariable:
                 timI1.append(tt[indx])
                 filI1.append(ii[indx])
             else:
-                if ii[indx] == ii[indx - 1]:
+                if ii[indx] == ii[indx-1]:
                     timI1.append(tt[indx])
                     filI1.append(ii[indx])
                 else:
@@ -163,8 +152,7 @@ class TimeAggregatedFileVariable:
         @return the index - None if time not found
         """
         for indx, axis in enumerate(inAxes):
-            if axis.isTime():
-                return indx
+            if axis.isTime(): return indx
             return None
 
     def buildSlice(self, inslc, inAxes):
@@ -180,9 +168,8 @@ class TimeAggregatedFileVariable:
         newslc = []
         for cslc, axis in zip(inslc, inAxes):
             if axis.isTime():
-                if isinstance(cslc, int):
-                    # Omit slice - the new variable has only the shape of the
-                    # grid.
+                if type(cslc) is int:
+                    # Omit slice - the new variable has only the shape of the grid.
                     continue
                 else:
                     newslc.append(slice(None, None, None))
@@ -226,13 +213,13 @@ class TimeAggregatedFileVariable:
         Aggregate a time file variable. Start and End Indices use slice notation.
         @param fileIndices the file indices to aggregate across
         @param timeIndices which time steps with in each file
-        @return aggregated time dep. variable. Has shape of full grid.
+        @return aggregated time dep. variable. Has shape of full grid. 
                 Subset the grid after exiting.
         """
         from numpy import reshape
         firsttime = True
         nTSF = self.nTimeStepsPerFile
-        if not isinstance(fileIndices, int):
+        if type(fileIndices) is not int:
             for files, times in zip(fileIndices, timeIndices):
                 for indx, file in enumerate(files):
                     # Should make these slices.
@@ -244,21 +231,19 @@ class TimeAggregatedFileVariable:
                     # Insert the new time axis.
                     axisTime = self.fvs[file].getTime()
                     timeAxis = TransientAxis([file * nTSF + times[indx]],
-                                             attributes=axisTime.attributes,
-                                             id=axisTime.id)
-                    axes = self.buildAxes(
-                        timeAxis,
-                        self.fvs[file].getAxisList())
+                                              attributes = axisTime.attributes,
+                                              id = axisTime.id)
+                    axes = self.buildAxes(timeAxis, self.fvs[file].getAxisList())
 
                     # shape --> tm1.shape = (1, :, :)
                     tm1 = reshape(cvar, tuple([1] + list(cvar.shape)))
 
                     # Attach needed items
                     var = cdms2.createVariable(tm1,
-                                               axes=axes,
-                                               grid=grid,
-                                               attributes=atts,
-                                               id=cvar.standard_name)
+                            axes = axes,
+                            grid = grid,
+                            attributes = atts,
+                            id = cvar.standard_name)
 
                     # Create cdms2 transient variable
                     if firsttime:
@@ -268,31 +253,26 @@ class TimeAggregatedFileVariable:
                         # insert the new time axis.
                         taA = new.getTime()
                         newTime = axisConcatenate((taA, timeAxis),
-                                                  attributes=axisTime.attributes,
-                                                  id=axisTime.id)
-                        axes = self.buildAxes(
-                            newTime,
-                            self.fvs[file].getAxisList())
+                                                  attributes = axisTime.attributes,
+                                                  id = axisTime.id)
+                        axes = self.buildAxes(newTime, self.fvs[file].getAxisList())
 
                         tmp = MV2concatenate((new, var))
                         new = cdms2.createVariable(tmp,
-                                                   axes=axes,
-                                                   grid=grid,
-                                                   attributes=atts,
-                                                   id=cvar.standard_name)
+                                axes = axes,
+                                grid = grid,
+                                attributes = atts,
+                                id = cvar.standard_name)
 
         else:
             new = self.fvs[fileIndices][timeIndices]
 
         return new
 
-
 class TimeFileVariable:
-
     """
     Construct an aggregated time dependant variable.
     """
-
     def __init__(self, hostObj, varName):
         """
         Create a list of file variable with grid attached
@@ -317,12 +297,8 @@ class TimeFileVariable:
 
                 # Open the files
                 fn = hostObj.timeVars[varName][gridIndex][timeFileIndex]
-                f = cdms2.open(
-                    fn,
-                    mode)   # Need f and u because they serve slightly different purposes
-                u = CdunifFile(
-                    fn,
-                    mode)   # f.axes exists while axes is not a part of u
+                f = cdms2.open(fn, mode)   # Need f and u because they serve slightly different purposes
+                u = CdunifFile(fn, mode)   # f.axes exists while axes is not a part of u
 #                u.variables[varName].gridIndex = gridIndex
 
                 # Turn the coordinates into a list
@@ -334,14 +310,12 @@ class TimeFileVariable:
                 coordsaux = f._convention_.getAxisAuxIds(u.variables, coords1d)
 
                 # Convert the variable into a FileVariable
-                f.variables[varName] = FileVariable(
-                    f, varName, u.variables[varName])
+                f.variables[varName] = FileVariable(f, varName, u.variables[varName])
 
                 # Add the coordinates to the file
                 for coord in coords:
                     f.variables[coord] = g.variables[coord]
-                    f.variables[coord] = FileAxis2D(
-                        f, coord, g.variables[coord])
+                    f.variables[coord] = FileAxis2D(f, coord, g.variables[coord])
 
                 # Build the axes
                 for key in f.axes.keys():
@@ -349,8 +323,7 @@ class TimeFileVariable:
 
                 # Set the boundaries
                 for coord in coords:
-                    bounds = f._convention_.getVariableBounds(
-                        f, f.variables[coord])
+                    bounds = f._convention_.getVariableBounds(f, f.variables[coord])
                     f.variables[coord].setBounds(bounds)
 
                 # Initialize the domain
@@ -358,16 +331,10 @@ class TimeFileVariable:
                     var.initDomain(f.axes)
 
                 # Add the grid
-                gridkey, lat, lon = f.variables[
-                    varName].generateGridkey(f._convention_, f.variables)
+                gridkey, lat, lon = f.variables[varName].generateGridkey(f._convention_, f.variables)
                 gridname = ("grid%d_" % gridIndex) + "%dx%d" % lat.shape
-# grid = FileGenericGrid(lat, lon, gridname, parent = f, maskvar = None)
-                grid = FileCurveGrid(
-                    lat,
-                    lon,
-                    gridname,
-                    parent=f,
-                    maskvar=None)
+#                grid = FileGenericGrid(lat, lon, gridname, parent = f, maskvar = None)
+                grid = FileCurveGrid(lat, lon, gridname, parent = f, maskvar = None)
                 f.variables[varName]._grid_ = grid
                 vars.append(f.variables[varName])
 
@@ -376,23 +343,23 @@ class TimeFileVariable:
 
         self._repr_string = "TimeFileVariable"
 
-    def listall(self, all=None):
+    def listall(self, all = None):
         """
         Gain access to cdms2 listall method. Requires a TimeFileVariable
         @param all
         @returns list
         """
-        return self[0][0].listall(all=all)
+        return self[0][0].listall(all = all)
 
-    def showall(self, all=None, device=None):
+    def showall(self, all = None, device = None):
         """
         Gain access to cdms2 showall method
         Requires a TimeFileVariable
         @param all
-        @param device
+        @param device 
         @returns list
         """
-        return self[0][0][:].showall(all=all, device=device)
+        return self[0][0][:].showall(all = all, device = device)
 
     def __getitem__(self, gridIndex):
         """
@@ -400,13 +367,11 @@ class TimeFileVariable:
         """
         return self.vars[gridIndex]
 
-#
-# DEPRECIATED - Testing required to fully remove #################
-#
-
+###############################################################################
+############## DEPRECIATED - Testing required to fully remove #################
+###############################################################################
 
 class TimeTransientVariable:
-
     def __init__(self, hostObj, varName, **slicekwargs):
         """
         Constructor
@@ -437,18 +402,15 @@ class TimeTransientVariable:
 
                 for timeFileIndex in range(hostObj.nTimeDataFiles):
 
-                    fName = hostObj.timeDepVars[
-                        varName][
-                            gridIndex][
-                                timeFileIndex]
+                    fName = hostObj.timeDepVars[varName][gridIndex][timeFileIndex]
                     fh = cdms2.open(fName, hostObj=hostObj)
 
                     # TransientVariable
                     var = fh(varName, **slicekwargs)
 
                     # Attach the grid to the variable
-                    grid = cdms2.gsStaticVariable.createTransientGrid(gFName,
-                                                                      var.attributes['coordinates'])
+                    grid = cdms2.gsStaticVariable.createTransientGrid(gFName, \
+                                         var.attributes['coordinates'])
                     axis0 = var.getAxis(0)
                     gridaxes = grid.getAxisList()
                     axes = [axis0] + list(gridaxes)
@@ -458,21 +420,21 @@ class TimeTransientVariable:
                     # Create cdms2 transient variable
                     if timeFileIndex == 0:
                         new = cdms2.createVariable(var,
-                                                   axes=axes,
-                                                   grid=grid,
-                                                   attributes=atts,
-                                                   id=var.standard_name)
+                                axes = axes,
+                                grid = grid,
+                                attributes = atts,
+                                id = var.standard_name)
                     else:
-                        tmp = MV2concatenate((new, var))
+                        tmp =MV2concatenate((new, var))
                         axis0 = tmp.getAxis(0)
                         gridaxes = grid.getAxisList()
                         axes = [axis0, gridaxes[0], gridaxes[1]]
 #                        new.append(tmp)
                         new = cdms2.createVariable(tmp,
-                                                   axes=axes,
-                                                   grid=grid,
-                                                   attributes=atts,
-                                                   id=var.standard_name)
+                                axes = axes,
+                                grid = grid,
+                                attributes = atts,
+                                id = var.standard_name)
                     fh.close()
 
                 # Add the variable to the index
@@ -481,10 +443,10 @@ class TimeTransientVariable:
         self._repr_string = "TimeTransientVariable"
 
 
-#
+###################################################################
 
 def test():
     pass
 
-if __name__ == '__main__':
-    test()
+if __name__ == '__main__': test()
+
diff --git a/Packages/cdms2/Lib/gui.py b/Packages/cdms2/Lib/gui.py
index 44fab4cb6..65e13cd8c 100644
--- a/Packages/cdms2/Lib/gui.py
+++ b/Packages/cdms2/Lib/gui.py
@@ -5,7 +5,6 @@ from cdms import cache
 
 _progressParent = None                  # Parent frame of progress gui
 
-
 def setProgressParent(parent):
     """
     Enable the FTP progress GUI, and set the parent frame.
@@ -13,9 +12,7 @@ def setProgressParent(parent):
     """
     global _progressParent
     _progressParent = parent
-    cache.useWindow()
-                    # Notify cache module that window dialogs should be used.
-
+    cache.useWindow()                   # Notify cache module that window dialogs should be used.
 
 def getProgressParent():
     """
@@ -24,7 +21,6 @@ def getProgressParent():
     """
     return _progressParent
 
-
 def updateProgressGui(blocknum, blocksize, size, prog):
     """
     Callback function for the FTP progress dialog.
@@ -35,28 +31,25 @@ def updateProgressGui(blocknum, blocksize, size, prog):
 
     Return: 0 to signal that a cancel has been received, 1 to continue reading.
     """
-    sizekb = size / 1024
-    percent = min(100, int(100.0 * float(blocknum * blocksize) / float(size)))
-    if percent < 100:
-        noInterrupt = prog.Update(
-            percent, "Read: %3d%% of %dK" %
-            (percent, sizekb))
+    sizekb = size/1024L
+    percent = min(100,int(100.0*float(blocknum*blocksize)/float(size)))
+    if percent<100:
+        noInterrupt = prog.Update(percent,"Read: %3d%% of %dK"%(percent,sizekb))
     else:
         noInterrupt = 1                    # Don't interrupt - finish up cleanly
         prog.Destroy()
-    if noInterrupt == 0:
+    if noInterrupt==0:
         prog.Destroy()
     return noInterrupt
 
-
 class CdProgressDialog(wxProgressDialog):
 
     # <frame> is the parent frame.
     # filename is the file being read.
-
     def __init__(self, frame, filename):
-        wxProgressDialog.__init__(self, "FTP: %s" % filename,
-                                  "Connecting ...",
-                                  100,
-                                  frame,
-                                  wxPD_CAN_ABORT | wxPD_APP_MODAL | wxPD_REMAINING_TIME)
+        wxProgressDialog.__init__(self,"FTP: %s"%filename,
+                         "Connecting ...",
+                         100,
+                         frame,
+                         wxPD_CAN_ABORT | wxPD_APP_MODAL | wxPD_REMAINING_TIME)
+
diff --git a/Packages/cdms2/Lib/hgrid.py b/Packages/cdms2/Lib/hgrid.py
index aa09ca8c5..56758ad72 100644
--- a/Packages/cdms2/Lib/hgrid.py
+++ b/Packages/cdms2/Lib/hgrid.py
@@ -1,5 +1,5 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 """CDMS HorizontalGrid objects"""
 
@@ -7,20 +7,19 @@ import numpy
 import cdms2
 import os
 import os.path
-# import PropertiedClasses
-from .error import CDMSError
-from .grid import AbstractGrid, LongitudeType, LatitudeType, VerticalType, TimeType, CoordTypeToLoc
-from .coord import TransientVirtualAxis
-from .axis import getAutoBounds, allclose
-import bindex, _bindex
-from functools import reduce
+## import PropertiedClasses
+from error import CDMSError
+from grid import AbstractGrid, LongitudeType, LatitudeType, VerticalType, TimeType, CoordTypeToLoc
+from coord import TransientVirtualAxis
+from axis import getAutoBounds, allclose
+import bindex,_bindex
 
 MethodNotImplemented = "Method not yet implemented"
 
 def _flatten(boundsar):
     boundsshape = boundsar.shape
-    if len(boundsshape) > 2:
-        newshape = (reduce((lambda x, y: x*y), boundsshape[:-1], 1), boundsshape[-1])
+    if len(boundsshape)>2:
+        newshape = (reduce((lambda x,y: x*y), boundsshape[:-1], 1), boundsshape[-1])
         boundsar.shape = newshape
     return boundsar
 
@@ -41,17 +40,17 @@ class AbstractHorizontalGrid(AbstractGrid):
 
     # Generate default bounds
     def genBounds(self):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     # Get the n-th axis. naxis is 0 or 1.
     def getAxis(self, naxis):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def getBounds(self):
         """Get the grid cell boundaries, as a tuple (latitudeBounds, longitudeBounds)
         """
         latbnds, lonbnds = (self._lataxis_.getExplicitBounds(), self._lonaxis_.getExplicitBounds())
-        if (latbnds is None or lonbnds is None) and getAutoBounds() in [1, 2]:
+        if (latbnds is None or lonbnds is None) and getAutoBounds() in [1,2]:
             nlatbnds, nlonbnds = self.genBounds()
             if latbnds is None:
                 latbnds = nlatbnds
@@ -77,27 +76,27 @@ class AbstractHorizontalGrid(AbstractGrid):
 
     def getMesh(self):
         """Get the mesh array used by the meshfill plot."""
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def getWeightsArray(self):
         """Return normalized area weights, as an array of the same
         shape as the grid.
         """
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def listall (self, all=None):
-        result = []
+        result=[]
         result.append('Grid has Python id %s.' % hex(id(self)))
         return result
 
-    def setMask(self, mask, permanent=0):
+    def setMask(self,mask,permanent=0):
         self._maskVar_ = mask
 
     def subGridRegion(self, latRegion, lonRegion):
-        raise CDMSError(MethodNotImplemented)
+        raise CDMSError, MethodNotImplemented
 
     def hasCoordType(self, coordType):
-        return ((coordType == LatitudeType) or (coordType == LongitudeType))
+        return ((coordType==LatitudeType) or (coordType==LongitudeType))
 
     def checkConvex(self):
         """Check that each cell of the grid is convex in lon-lat space, with nodes defined counter-clockwise.
@@ -117,10 +116,10 @@ class AbstractHorizontalGrid(AbstractGrid):
         for n0 in range(nnode):
             n1 = (n0+1)%nnode
             n2 = (n1+1)%nnode
-            vec0lon = lonb[:, n1] - lonb[:, n0]
-            vec0lat = latb[:, n1] - latb[:, n0]
-            vec1lon = lonb[:, n2] - lonb[:, n1]
-            vec1lat = latb[:, n2] - latb[:, n1]
+            vec0lon = lonb[:,n1] - lonb[:,n0]
+            vec0lat = latb[:,n1] - latb[:,n0]
+            vec1lon = lonb[:,n2] - lonb[:,n1]
+            vec1lat = latb[:,n2] - latb[:,n1]
             cross = vec0lon*vec1lat - vec0lat*vec1lon
 
             mask = where(less(cross, 0.0), 1, 0)
@@ -171,24 +170,24 @@ class AbstractHorizontalGrid(AbstractGrid):
             for node in range(2*nnode):
                 n0 = node%nnode
                 n1 = (n0+1)%nnode
-                vec0lon = lonb2[k, n1]-lonb2[k, n0]
-                if vec0lon > threshold:
-                    lonb2[k, n1] -= 360.0
-                elif vec0lon < -threshold:
-                    lonb2[k, n1] += 360.0
+                vec0lon = lonb2[k,n1]-lonb2[k,n0]
+                if vec0lon>threshold:
+                    lonb2[k,n1] -= 360.0
+                elif vec0lon<-threshold:
+                    lonb2[k,n1] += 360.0
 
             # If the cross-product test still fails, restore
             # the original values and add to the nonConvexCells list
             for n0 in range(nnode):
                 n1 = (n0+1)%nnode
                 n2 = (n1+1)%nnode
-                vec0lon = lonb2[k, n1] - lonb2[k, n0]
-                vec0lat = latb2[k, n1] - latb2[k, n0]
-                vec1lon = lonb2[k, n2] - lonb2[k, n1]
-                vec1lat = latb2[k, n2] - latb2[k, n1]
+                vec0lon = lonb2[k,n1] - lonb2[k,n0]
+                vec0lat = latb2[k,n1] - latb2[k,n0]
+                vec1lon = lonb2[k,n2] - lonb2[k,n1]
+                vec1lat = latb2[k,n2] - latb2[k,n1]
                 cross = vec0lon*vec1lat - vec0lat*vec1lon
 
-                if cross < 0:
+                if cross<0:
                     lonb2[k] = savelons
                     newbadcells.append(nonConvexCells[k])
                     break
@@ -208,7 +207,7 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         """Create a curvilinear grid.
         """
         if latAxis.shape != lonAxis.shape:
-            raise CDMSError('Latitude and longitude axes must have the same shape.')
+            raise CDMSError, 'Latitude and longitude axes must have the same shape.'
         AbstractHorizontalGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask, node)
         self._index_ = None
 
@@ -218,7 +217,7 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         return TransientCurveGrid(newlat, newlon, id=self.id)
 
     def __repr__(self):
-        return "<CurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
+        return "<CurveGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
     __str__ = __repr__
 
     def getMesh(self, transpose=None):
@@ -227,20 +226,20 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         latbounds and lonbounds according to the tuple, (1,0,2) in this case.
         """
         if self._mesh_ is None:
-            LAT = 0
-            LON = 1
+            LAT=0
+            LON=1
             latbounds, lonbounds = self.getBounds()
-# following work aronud a numpy.ma bug
-# latbounds=latbounds.filled()
-# lonbounds=lonbounds.filled()
+##             ## following work aronud a numpy.ma bug
+##             latbounds=latbounds.filled()
+##             lonbounds=lonbounds.filled()
             if latbounds is None or lonbounds is None:
-                raise CDMSError('No boundary data is available for grid %s'%self.id)
-            if (transpose is not None) and (transpose[1] == 0):
-                latbounds = numpy.transpose(latbounds, (1, 0, 2))
-                lonbounds = numpy.transpose(lonbounds, (1, 0, 2))
-            mesh = numpy.zeros((self.size(), 2, latbounds.shape[-1]), latbounds.dtype.char)
-            mesh[:, LAT,:] = numpy.reshape(latbounds, (self.size(), latbounds.shape[-1]))
-            mesh[:, LON,:]  = numpy.reshape(lonbounds, (self.size(), latbounds.shape[-1]))
+                raise CDMSError, 'No boundary data is available for grid %s'%self.id
+            if (transpose is not None) and (transpose[1]==0):
+                latbounds = numpy.transpose(latbounds, (1,0,2))
+                lonbounds = numpy.transpose(lonbounds, (1,0,2))
+            mesh = numpy.zeros((self.size(),2,latbounds.shape[-1]),latbounds.dtype.char)
+            mesh[:,LAT,:] = numpy.reshape(latbounds,(self.size(),latbounds.shape[-1]))
+            mesh[:,LON,:]  = numpy.reshape(lonbounds,(self.size(),latbounds.shape[-1]))
             self._mesh_ = mesh
         return self._mesh_
 
@@ -292,9 +291,9 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         mask.shape = (ngrid,)
 
         clat = numpy.ma.filled(copy.copy(blat))
-        clat.shape = (ngrid, 4)
+        clat.shape = (ngrid,4)
         clon = numpy.ma.filled(copy.copy(blon))
-        clon.shape = (ngrid, 4)
+        clon.shape = (ngrid,4)
 
         # Write the file
         if gridTitle is None:
@@ -310,12 +309,12 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         gridcenterlon.units = "degrees"
         gridimask = cufile.createVariable("grid_imask", 'i', ("grid_size",))
         gridimask.units = "unitless"
-        gridcornerlat = cufile.createVariable("grid_corner_lat", 'd', ("grid_size", "grid_corners"))
+        gridcornerlat = cufile.createVariable("grid_corner_lat", 'd', ("grid_size","grid_corners"))
         gridcornerlat.units = "degrees"
-        gridcornerlon = cufile.createVariable("grid_corner_lon", 'd', ("grid_size", "grid_corners"))
+        gridcornerlon = cufile.createVariable("grid_corner_lon", 'd', ("grid_size","grid_corners"))
         gridcornerlon.units = "degrees"
 
-        griddims[:] = numpy.array([nj, ni], numpy.int32)
+        griddims[:] = numpy.array([nj,ni], numpy.int32)
         gridcenterlat[:] = centerLat
         gridcenterlon[:] = centerLon
         gridimask[:] = mask
@@ -325,9 +324,9 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
     def toGenericGrid(self, gridid=None):
 
         import copy
-        from .auxcoord import TransientAuxAxis1D
-        from .coord import TransientVirtualAxis
-        from .gengrid import TransientGenericGrid
+        from auxcoord import TransientAuxAxis1D
+        from coord import TransientVirtualAxis
+        from gengrid import TransientGenericGrid
 
         lat = numpy.ma.filled(self._lataxis_)
         latunits = self._lataxis_.units
@@ -346,11 +345,11 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
             mask.shape = (ngrid,)
 
         cornerLat = numpy.ma.filled(copy.copy(blat))
-        cornerLat.shape = (ngrid, 4)
+        cornerLat.shape = (ngrid,4)
         cornerLon = numpy.ma.filled(copy.copy(blon))
-        cornerLon.shape = (ngrid, 4)
+        cornerLon.shape = (ngrid,4)
 
-        iaxis = TransientVirtualAxis("cell", ngrid)
+        iaxis = TransientVirtualAxis("cell",ngrid)
 
         lataxis = TransientAuxAxis1D(centerLat, axes=(iaxis,), bounds=cornerLat,
                                   attributes={'units':latunits}, id="latitude")
@@ -382,21 +381,21 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         The file, normally a CdmsFile, should already be open for writing
         and will be closed."""
         import time
-        from .tvariable import TransientVariable
+        from tvariable import TransientVariable
         
         # Set attributes
-        if ( hasattr(file, 'Conventions') ):
-            if ( file.Conventions.find('Gridspec') < 0 ):
+        if ( hasattr(file,'Conventions') ):
+            if ( file.Conventions.find('Gridspec')<0 ):
                 file.Conventions = file.Conventions + ' Gridspec-0.0'
         else:
             file.Conventions = 'Gridspec-0.0'
-        if ( hasattr(file, 'gs_filetypes') ):
-            if ( file.gs_filetypes.find('Curvilinear_Tile') < 0 ):
+        if ( hasattr(file,'gs_filetypes') ):
+            if ( file.gs_filetypes.find('Curvilinear_Tile')<0 ):
                 file.gs_filetypes = file.gs_filetypes + ' Curvilinear_Tile'
         else:
             file.gs_filetypes = 'Curvilinear_Tile'
-        t = time.time()
-        id = int((t-int(t))*1.0e9)
+        t=time.time()
+        id=int((t-int(t))*1.0e9)
         file.gs_id = id
         file.gs_originalfilename = os.path.basename( file.id )
 
@@ -412,19 +411,19 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
                        getattr( file, 'history', '' ) + newhistory
 
         # former tile variable and attributes
-        if ( hasattr(self, 'long_name') and self.long_name != None ):
+        if ( hasattr(self,'long_name') and self.long_name!=None ):
             file.long_name = self.long_name
         else:
             file.long_name = 'gridspec_tile'
         # gs_geometryType is no longer required of Gridspec files, but as yet
         # there is no other proposal for describing the geometry (July 2010)
-        if ( hasattr(self, 'gs_geometryType') and self.gs_geometryType != None):
+        if ( hasattr(self,'gs_geometryType') and self.gs_geometryType!=None):
             file.gs_geometryType = self.gs_geometryType
         else:
             file.gs_geometryType = 'spherical'
         # gs_discretizationType is no longer required of Gridspec files, but it's
         # harmless and may come in useful
-        if ( hasattr(self, 'gs_discretizationType') and self.gs_discretizationType != None ):
+        if ( hasattr(self,'gs_discretizationType') and self.gs_discretizationType!=None ):
             file.gs_discretizationType = self.gs_discretizationType
         else:
             file.gs_discretizationType = 'logically_rectangular'
@@ -435,11 +434,11 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         # Set up and write variables.  When written, cdms writes not only the arrays
         # but also their coordinates, e.g. gs_nip.
         
-        x = self._lonaxis_
-        if ( not hasattr(x, 'units') ):
+        x=self._lonaxis_
+        if ( not hasattr(x,'units') ):
             print "Warning, no units found for longitude"
             x.units = 'degree_east'
-        if ( not hasattr(x, 'standard_name') ):
+        if ( not hasattr(x,'standard_name') ):
             print "Warning, no standard_name found for longitude axis"
             x.standard_name = 'longitude'
         if ( x.standard_name == 'geographic_longitude'):
@@ -449,11 +448,11 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         # _lonaxis_ is a TransientAxis2D, hence a TransientVariable
         # But I don't know where the attribute _TransientVariable__domain comes from
         
-        y = self._lataxis_
-        if ( not hasattr(y, 'units') ):
+        y=self._lataxis_
+        if ( not hasattr(y,'units') ):
             print "Warning, no units found for latitude"
             y.units = 'degree_north'
-        if ( not hasattr(y, 'standard_name') ):
+        if ( not hasattr(y,'standard_name') ):
             print "Warning, no standard_name found for latitude axis"
             y.standard_name = 'latitude'
         if ( y.standard_name == 'geographic_latitude'):
@@ -461,18 +460,18 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
             y.standard_name = 'latitude'
         y.id = file.gs_latv
 
-        if( not hasattr(x, '_TransientVariable__domain') ):
+        if( not hasattr(x,'_TransientVariable__domain') ):
             # There probably doesn't exist enough information to write a correct
             # grid, but this will help.
             x._TransientVariable__domain = [ (x,), (y,) ]
-        x._TransientVariable__domain[0][0].id = 'gs_njp'
-        x._TransientVariable__domain[1][0].id = 'gs_nip'
-        if ( not hasattr(y, '_TransientVariable__domain') ) :
+        x._TransientVariable__domain[0][0].id='gs_njp'
+        x._TransientVariable__domain[1][0].id='gs_nip'
+        if ( not hasattr(y,'_TransientVariable__domain') ) :
             # There probably doesn't exist enough information to write a correct
             # grid, but this will help.
             y._TransientVariable__domain = [ (x,), (y,) ]
-        y._TransientVariable__domain[0][0].id = 'gs_njp'
-        y._TransientVariable__domain[1][0].id = 'gs_nip'
+        y._TransientVariable__domain[0][0].id='gs_njp'
+        y._TransientVariable__domain[1][0].id='gs_nip'
 
         file.write(x)
         file.write(y)
@@ -487,12 +486,12 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         # The functionality (other than checking gsfile) is now done by the writeg
         # method above.
         if ( not hasattr( self, "gsfile" ) ):
-            self.gsfile = None
-            self.gspath = None
-        if ( self.gsfile != None ):
+            self.gsfile=None
+            self.gspath=None
+        if ( self.gsfile!=None ):
             return ( tcg.gsfile, tcg.gspath )
         else:
-            raise RuntimeError('The libCF/Gridspec API does not provide for writing CurveGrids<<<')
+            raise RuntimeError, 'The libCF/Gridspec API does not provide for writing CurveGrids<<<'
 
     def init_from_gridspec( self, filename ):
         """reads to grid from a Gridspec-compliant file.  The filename should be a
@@ -576,8 +575,8 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
                 j = k
             k += 1
 
-        if i == -1 or j == -1:
-            raise RuntimeError('Grid lat/lon domains do not match variable domain')
+        if i==-1 or j==-1:
+            raise RuntimeError, 'Grid lat/lon domains do not match variable domain'
 
         return ((islice, jslice), (inewaxis, jnewaxis))
 
@@ -585,14 +584,14 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         """Get the grid index"""
         if self._index_ is None:
             # Trying to stick in Stephane Raynaud's patch for autodetection
-            nj, ni = self._lataxis_.shape
+            nj,ni = self._lataxis_.shape
             dlon = numpy.max(self._lonaxis_)-numpy.min(self._lonaxis_)
-            dx = max(dlon/ni, dlon/nj)
+            dx = max(dlon/ni,dlon/nj)
             dlat = numpy.max(self._lataxis_)-numpy.min(self._lataxis_)
-            dy = max(dlat/ni, dlat/nj)
+            dy = max(dlat/ni,dlat/nj)
             latlin = numpy.ravel(numpy.ma.filled(self._lataxis_))
             lonlin = numpy.ravel(numpy.ma.filled(self._lonaxis_))
-            _bindex.setDeltas(dx, dy)
+            _bindex.setDeltas(dx,dy)
             self._index_ = bindex.bindexHorizontalGrid(latlin, lonlin)
 
         return self._index_
@@ -615,12 +614,12 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         latlin = numpy.ravel(numpy.ma.filled(self._lataxis_))
         lonlin = numpy.ravel(numpy.ma.filled(self._lonaxis_))
         points = bindex.intersectHorizontalGrid(latspec, lonspec, latlin, lonlin, index)
-        if len(points) == 0:
-            raise CDMSError('No data in the specified region, longitude=%s, latitude=%s'%(repr(lonspec), repr(latspec)))
+        if len(points)==0:
+            raise CDMSError, 'No data in the specified region, longitude=%s, latitude=%s'%(`lonspec`, `latspec`)
 
         fullmask = numpy.ones(ni*nj)
         numpy.put(fullmask, points, 0)
-        fullmask = numpy.reshape(fullmask, (ni, nj))
+        fullmask = numpy.reshape(fullmask, (ni,nj))
         
         iind = points/nj
         jind = points - iind*nj
@@ -629,7 +628,7 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
 
         yid = self.getAxis(0).id
         xid = self.getAxis(1).id
-        indexspecs = {yid:slice(imin, imax), xid:slice(jmin, jmax)}
+        indexspecs = {yid:slice(imin,imax), xid:slice(jmin,jmax)}
 
         return submask, indexspecs
 
@@ -678,9 +677,9 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
             used = []                   # axes already matched
             for i in missing:
                 for item in axes:
-                    if (item not in used) and len(selfaxes[i]) == len(item) and allclose(selfaxes[i], item):
-                        result._lataxis_.setAxis(i, item)
-                        result._lonaxis_.setAxis(i, item)
+                    if (item not in used) and len(selfaxes[i])==len(item) and allclose(selfaxes[i], item):
+                        result._lataxis_.setAxis(i,item)
+                        result._lonaxis_.setAxis(i,item)
                         used.append(item)
                         break
                 else:
@@ -694,18 +693,18 @@ class AbstractCurveGrid(AbstractHorizontalGrid):
         having the same length as the number of cells in the grid, similarly
         for flatlon."""
         if self._flataxes_ is None:
-            from . import MV2 as MV
+            import MV2 as MV
             alat = MV.filled(self.getLatitude())
             alon = MV.filled(self.getLongitude())
             alatflat = numpy.ravel(alat)
             alonflat = numpy.ravel(alon)
             self._flataxes_ = (alatflat, alonflat)
         return self._flataxes_
-    shape = property(_getShape, None)
+    shape = property(_getShape,None)
     
-# PropertiedClasses.set_property (AbstractCurveGrid, 'shape',
-# AbstractCurveGrid._getShape, nowrite=1,
-# nodelete=1)
+## PropertiedClasses.set_property (AbstractCurveGrid, 'shape', 
+##                                   AbstractCurveGrid._getShape, nowrite=1,
+##                                   nodelete=1)
 
 class DatasetCurveGrid(AbstractCurveGrid):
 
@@ -716,7 +715,7 @@ class DatasetCurveGrid(AbstractCurveGrid):
         self.parent = parent
 
     def __repr__(self):
-        return "<DatasetCurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
+        return "<DatasetCurveGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
 
 class FileCurveGrid(AbstractCurveGrid):
 
@@ -727,7 +726,7 @@ class FileCurveGrid(AbstractCurveGrid):
         self.parent = parent
 
     def __repr__(self):
-        return "<FileCurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
+        return "<FileCurveGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
 
 class TransientCurveGrid(AbstractCurveGrid):
 
@@ -742,7 +741,7 @@ class TransientCurveGrid(AbstractCurveGrid):
         AbstractCurveGrid.__init__(self, latAxis, lonAxis, id, maskvar, tempmask)
 
     def __repr__(self):
-        return "<TransientCurveGrid, id: %s, shape: %s>"%(self.id, repr(self.shape))
+        return "<TransientCurveGrid, id: %s, shape: %s>"%(self.id, `self.shape`)
 
     def toCurveGrid(self, gridid=None):
         if gridid is None:
@@ -759,17 +758,18 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
     whichType is the type of file, either "grid" or "mapping"
     if whichType is "mapping", whichGrid is the choice of grid, either "source" or "destination"
     """
-    from .coord import TransientAxis2D
+    import string
+    from coord import TransientAxis2D
 
     if 'S' in fileobj.variables.keys():
-        if whichType == "grid":
+        if whichType=="grid":
             gridCornerLatName = 'grid_corner_lat'
             gridCornerLonName = 'grid_corner_lon'
             gridMaskName = 'grid_imask'
             gridCenterLatName = 'grid_center_lat'
             gridCenterLonName = 'grid_center_lon'
             titleName = 'title'
-        elif whichGrid == "destination":
+        elif whichGrid=="destination":
             gridCornerLatName = 'yv_b'
             gridCornerLonName = 'xv_b'
             gridMaskName = 'mask_b'
@@ -784,14 +784,14 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
             gridCenterLonName = 'xc_a'
             titleName = 'source_grid'
     else:
-        if whichType == "grid":
+        if whichType=="grid":
             gridCornerLatName = 'grid_corner_lat'
             gridCornerLonName = 'grid_corner_lon'
             gridMaskName = 'grid_imask'
             gridCenterLatName = 'grid_center_lat'
             gridCenterLonName = 'grid_center_lon'
             titleName = 'title'
-        elif whichGrid == "destination":
+        elif whichGrid=="destination":
             gridCornerLatName = 'dst_grid_corner_lat'
             gridCornerLonName = 'dst_grid_corner_lon'
             gridMaskName = 'dst_grid_imask'
@@ -814,17 +814,17 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
     nj = dims[0]
     gridshape = (ni, nj)
     boundsshape = (ni, nj, ncorners)
-    if hasattr(cornerLat, 'units') and cornerLat.units.lower()[0:6] == 'radian':
+    if hasattr(cornerLat, 'units') and string.lower(cornerLat.units)[0:6]=='radian':
         cornerLat = (cornerLat*(180.0/numpy.pi)).reshape(boundsshape)
         cornerLon = (cornerLon*(180.0/numpy.pi)).reshape(boundsshape)
     else:
         cornerLat = cornerLat.reshape(boundsshape)
         cornerLon = cornerLon.reshape(boundsshape)
 
-    iaxis = TransientVirtualAxis("i", ni)
-    jaxis = TransientVirtualAxis("j", nj)
+    iaxis = TransientVirtualAxis("i",ni)
+    jaxis = TransientVirtualAxis("j",nj)
 
-    if gridMaskName in vardict:
+    if vardict.has_key(gridMaskName):
         # SCRIP convention: 0 for invalid data
         # numpy.ma convention: 1 for invalid data
         mask = 1 - fileobj(gridMaskName)
@@ -832,27 +832,27 @@ def readScripCurveGrid(fileobj, dims, whichType, whichGrid):
     else:
         mask = None
         
-    if gridCenterLatName in vardict:
+    if vardict.has_key(gridCenterLatName):
         centerLat = fileobj(gridCenterLatName).reshape(gridshape)
         gclat = fileobj[gridCenterLatName]
-        if hasattr(gclat, "units") and gclat.units.lower() == 'radians':
+        if hasattr(gclat, "units") and string.lower(gclat.units)=='radians':
             centerLat *= (180.0/numpy.pi)
     else:
-        centerLat = cornerLat[:,:, 0]
+        centerLat = cornerLat[:,:,0]
 
-    if gridCenterLonName in vardict:
+    if vardict.has_key(gridCenterLonName):
         centerLon = fileobj(gridCenterLonName).reshape(gridshape)
         gclon = fileobj[gridCenterLonName]
-        if hasattr(gclon, "units") and gclon.units.lower() == 'radians':
+        if hasattr(gclon, "units") and string.lower(gclon.units)=='radians':
             centerLon *= (180.0/numpy.pi)
     else:
-        centerLon = cornerLon[:,:, 0]
+        centerLon = cornerLon[:,:,0]
 
-    if hasattr(fileobj, titleName):
+    if hasattr(fileobj,titleName):
         gridid = getattr(fileobj, titleName)
-        gridid = gridid.strip().replace(' ', '_')
+        gridid = string.replace(string.strip(gridid), ' ','_')
     else:
-        gridid = "<None>"
+        gridid="<None>"
 
     lataxis = TransientAxis2D(centerLat, axes=(iaxis, jaxis), bounds=cornerLat,
                               attributes={'units':'degrees_north'}, id="latitude")
diff --git a/Packages/cdms2/Lib/internattr.py b/Packages/cdms2/Lib/internattr.py
new file mode 100644
index 000000000..ff6ebeb0c
--- /dev/null
+++ b/Packages/cdms2/Lib/internattr.py
@@ -0,0 +1,157 @@
+"InternalAttributes (implmentation class for CDMS)"
+import types
+import PropertiedClasses
+_PCLASS = PropertiedClasses.PropertiedClass
+class AttributeDict:
+    """An attribute dictionary."""
+    def __init__ (self, owner):
+        self._owner = owner
+    
+    def __getitem__ (self, name):
+        if self.has_key(name):
+            return self._owner.__dict__[name]
+        else:
+            raise KeyError, "%s instance has no external attribute %s" % \
+                   (self._owner.__class__.__name__, name)
+
+    def __setitem__ (self, name, value):
+        if self._owner.is_internal_attribute(name):
+            raise RuntimeError, 'Cannot set internal name in external attribute dictionary.'
+        self._owner.__dict__[name] = value
+
+    def clear (self):
+        self._owner.__dict__.clear()
+
+    def get (self, name, default=None):
+        if self.has_key(name):
+            return self._owner.__dict__[name]
+        else:
+            return default
+
+    def has_key(self, name):
+        d = self._owner.__dict__
+        if d.has_key(name) and not self._owner.is_internal_attribute(name):
+            return 1
+        else:
+            return 0
+
+    def items (self):
+        result = []
+        for name, value in self._owner.__dict__.items():
+            if self._owner.is_internal_attribute(name): continue
+            result.append((name, value))
+        return result
+    
+    def keys (self):
+        result = []
+        for name in self._owner.__dict__.keys():
+            if self._owner.is_internal_attribute(name): continue
+            result.append(name)
+        return result
+
+    def update(self, d):
+        for name, value in d.items():
+            if self._owner.is_internal_attribute(name):
+                raise RuntimeError, "Cannot update attribute dict with internal name"
+        self._owner.__dict__[name] = value
+
+    def values (self):
+        result = []
+        for name, value in self._owner.__dict__.items():
+            if self._owner.is_internal_attribute(name): continue
+            result.append(value)
+        return result
+
+    def __repr__(self):
+        return 'AttributeDict (' + \
+        repr(self._owner.__dict__) + \
+        ')'
+
+    def __str__(self):
+        return str(self._owner.__dict__)
+    
+class InternalAttributesClass (_PCLASS):
+    def _getattributes (self, name):
+        """Return a dictionary-like object of the non-internal attributes."""
+        return AttributeDict(self)
+
+    def is_internal_attribute (self, name):
+        """is_internal_attribute(name) is true if name is internal."""
+        if name[0] == '_' or name in self.__class__._internal:
+            return 1
+        return 0
+
+    def replace_external_attributes(self, newAttributes):
+        """replace_external_attributes(newAttributes)
+           Replace the external attributes with dictionary newAttributes.
+        """
+        if not isinstance(newAttributes, types.DictType) and \
+           not isinstance(newAttributes, AttributeDict):
+            raise ValueError, "Argument must be a dictionary"
+        for n in self.__dict__.keys():
+            if not self.is_internal_attribute(n):
+                del self.__dict__[n]
+        for n, v in newAttributes.items():
+            self.__dict__[n] = v
+
+def initialize_internal_attributes (C):
+    "Prepare a class for life as a child of InternalAttributesClass."
+    if C.__dict__.has_key('_internal'): return
+    if not issubclass(C, InternalAttributesClass):
+        raise ValueError, 'Must be subclass of InternalAttributesClass'
+    PropertiedClasses.initialize_property_class (C)
+    C._internal = []
+    for CP in C.__bases__:
+        if issubclass(CP, InternalAttributesClass):
+            initialize_internal_attributes(CP)
+            for name in CP._internal:
+                C._internal.append(name)
+    
+def add_internal_attribute (C, *aname):
+    """add_internal_attribute (C, name, ...)
+       Make attributes name, ... internal in class C.
+    """
+    initialize_internal_attributes(C)
+    for name in aname:
+        if not name in C._internal:
+            C._internal.append(name)
+
+PropertiedClasses.set_property(InternalAttributesClass, 'attributes', 
+                               InternalAttributesClass._getattributes, 
+                               nowrite=1, nodelete=1)
+
+if __name__ == '__main__':
+    class Test(InternalAttributesClass):
+        def __init__ (self):
+            self.node = None
+            self.parent = None
+            self.__dict__['ro'] = 1
+            self.__hide = 3
+            self._p = 4
+            self.value = 1
+
+    PropertiedClasses.set_property(Test, 'ro', nowrite=1, nodelete=1)
+    add_internal_attribute(Test, 'node', 'parent', 'ro')
+
+    t1 = Test()
+    assert t1.value == 1
+    assert not t1.attributes.has_key('__hide')
+    assert not t1.attributes.has_key('_p')
+    assert t1._p == 4
+    t1.value = 2
+    assert t1.value == 2
+    assert 'value' in t1.attributes.keys()
+    t1.b = t1.value + 1
+    assert t1.b == 3
+    assert t1.b == t1.attributes['b']
+    t1.node = 'me'
+    t1.parent = 'dad'
+    assert t1.node == 'me'
+    assert 'node' not in t1.attributes.keys()
+    assert t1.ro == 1
+    try:
+        t1.ro == 2
+    except AttributeError:
+        pass
+    assert t1.ro == 1
+    print "Test passed."
diff --git a/Packages/cdms2/Lib/mvBaseWriter.py b/Packages/cdms2/Lib/mvBaseWriter.py
index db80b843e..5f099c704 100644
--- a/Packages/cdms2/Lib/mvBaseWriter.py
+++ b/Packages/cdms2/Lib/mvBaseWriter.py
@@ -3,14 +3,13 @@
 """
 Abstract class for writing data into file
 
-This code is provided with the hope that it will be useful.
+This code is provided with the hope that it will be useful. 
 No guarantee is provided whatsoever. Use at your own risk.
 
 Alex Pletzer, Tech-X Corp. (2011)
 """
 
-from . import mvSphereMesh
-
+import mvSphereMesh
 
 class BaseWriter:
 
@@ -18,7 +17,7 @@ class BaseWriter:
         """
         Constructor
         @param var a cdms2 variable
-        @param sphereRadius radius of the sphere upon which the grid will
+        @param sphereRadius radius of the sphere upon which the grid will 
                             be projected
         @param maxElev max elevation/depth normalized to the sphere radius
         """
@@ -27,12 +26,12 @@ class BaseWriter:
 
         self.shape = sphere_mesh.shape
 
-        # there is currently a bug in vizSchema which causes
-        # visit to crash if the leading index is 1, this is
+        # there is currently a bug in vizSchema which causes 
+        # visit to crash if the leading index is 1, this is 
         # a workaround the problem
         if self.shape[0] == 1:
-            self.shape = list(sphere_mesh.shape[1:]) + [1, ]
-
+            self.shape = list(sphere_mesh.shape[1:]) + [1,]
+        
         self.mesh = sphere_mesh.getXYZCoords(sphereRadius)
 
     def write(self, filename):
@@ -40,5 +39,5 @@ class BaseWriter:
         Write data to file. This method is overloaded.
         @param filename file name
         """
-        raise NotImplementedError(
-            'write method not implemented in derived class')
+        raise NotImplementedError, \
+            'write method not implemented in derived class'
diff --git a/Packages/cdms2/Lib/mvCdmsRegrid.py b/Packages/cdms2/Lib/mvCdmsRegrid.py
index 24c169acf..6cc0adf8a 100644
--- a/Packages/cdms2/Lib/mvCdmsRegrid.py
+++ b/Packages/cdms2/Lib/mvCdmsRegrid.py
@@ -5,13 +5,13 @@ David Kindig and Alex Pletzer, Tech-X Corp. (2012)
 This code is provided with the hope that it will be useful.
 No guarantee is provided whatsoever. Use at your own risk.
 """
+import types
 import operator
 import re
 import numpy
 import cdms2
-from .error import CDMSError
+from error import CDMSError
 import regrid2
-from functools import reduce
 
 def _areCellsOk(cornerCoords, mask=None):
     """
@@ -44,8 +44,8 @@ def _areCellsOk(cornerCoords, mask=None):
     # compute area elements in Cartesian space
     lat0 = numpy.array(cornerCoords[0][ :-1,  :-1], numpy.float64)
     lat1 = numpy.array(cornerCoords[0][ :-1, 1:  ], numpy.float64)
-    lat2 = numpy.array(cornerCoords[0][1:, 1:  ], numpy.float64)
-    lat3 = numpy.array(cornerCoords[0][1:,  :-1], numpy.float64)
+    lat2 = numpy.array(cornerCoords[0][1:  , 1:  ], numpy.float64)
+    lat3 = numpy.array(cornerCoords[0][1:  ,  :-1], numpy.float64)
 
     the0 = lat0*numpy.pi/180.
     the1 = lat1*numpy.pi/180.
@@ -53,8 +53,8 @@ def _areCellsOk(cornerCoords, mask=None):
     the3 = lat3*numpy.pi/180.
     lam0 = numpy.array(cornerCoords[1][ :-1,  :-1], numpy.float64)*numpy.pi/180.
     lam1 = numpy.array(cornerCoords[1][ :-1, 1:  ], numpy.float64)*numpy.pi/180.
-    lam2 = numpy.array(cornerCoords[1][1:, 1:  ], numpy.float64)*numpy.pi/180.
-    lam3 = numpy.array(cornerCoords[1][1:,  :-1], numpy.float64)*numpy.pi/180.
+    lam2 = numpy.array(cornerCoords[1][1:  , 1:  ], numpy.float64)*numpy.pi/180.
+    lam3 = numpy.array(cornerCoords[1][1:  ,  :-1], numpy.float64)*numpy.pi/180.
     
     x0, y0, z0 = projectToSphere(the0, lam0)
     x1, y1, z1 = projectToSphere(the1, lam1)
@@ -129,7 +129,7 @@ def _areCellsOk(cornerCoords, mask=None):
     if len(inds[0]) > 0:
         # package the result
         badCellIndices = [(inds[0][i], inds[1][i]) for i in range(len(inds[0]))]
-        bcis1 = [(inds[0][i], inds[1][i]+1) for i in range(len(inds[0]))]
+        bcis1 = [(inds[0][i]  , inds[1][i]+1) for i in range(len(inds[0]))]
         bcis2 = [(inds[0][i]+1, inds[1][i]+1) for i in range(len(inds[0]))]
         bcis3 = [(inds[0][i]+1, inds[1][i]  ) for i in range(len(inds[0]))]
         badCellCoords = [[(cornerCoords[0][badCellIndices[i]], cornerCoords[1][badCellIndices[i]]),
@@ -160,10 +160,10 @@ def _buildBounds(bounds):
         bnd[:-1] = bounds[..., 0]
         bnd[ -1] = bounds[ -1, 1]
     elif len(bndShape) > 1:
-        bnd[:-1, :-1] = bounds[:,:, 0]
-        bnd[:-1,  -1] = bounds[:, -1, 1]
+        bnd[:-1, :-1] = bounds[  :,  :, 0]
+        bnd[:-1,  -1] = bounds[  :, -1, 1]
         bnd[ -1,  -1] = bounds[ -1, -1, 2]
-        bnd[ -1, :-1] = bounds[ -1,:, 3]
+        bnd[ -1, :-1] = bounds[ -1,  :, 3]
 
     return bnd
 
@@ -382,9 +382,8 @@ coordMin = %7.2f, boundMin = %7.2f, coordMax = %7.2f, boundMax = %7.2f
 
         # If LibCF handleCut is True, the bounds are needed to extend the grid
         # close the cut at the top
-        if re.search('LibCF', regridTool, re.I) and 'handleCut' in args:
-            if args['handleCut']:
-                srcBounds = getBoundList(srcCoords)
+        if re.search('LibCF', regridTool, re.I) and args.has_key('handleCut'):
+            if args['handleCut']: srcBounds = getBoundList(srcCoords)
 
         srcCoordsArrays = [numpy.array(sc) for sc in srcCoords]
         dstCoordsArrays = [numpy.array(dc) for dc in dstCoords]
@@ -441,7 +440,7 @@ coordMin = %7.2f, boundMin = %7.2f, coordMax = %7.2f, boundMax = %7.2f
                              **args)
 
         # fill in diagnostic data
-        if 'diag' in args:
+        if args.has_key('diag'):
             self.regridObj.fillInDiagnosticData(diag = args['diag'], rootPe = 0)
 
         # construct the axis list for dstVar
@@ -451,7 +450,7 @@ coordMin = %7.2f, boundMin = %7.2f, coordMax = %7.2f, boundMax = %7.2f
         attrs = {}
         for a in srcVar.attributes:
             v = srcVar.attributes[a]
-            if isinstance(v, basestring):
+            if type(v) is types.StringType:
                 attrs[a] = v
 
         # if the missing value is present in the destination data, set
diff --git a/Packages/cdms2/Lib/mvSphereMesh.py b/Packages/cdms2/Lib/mvSphereMesh.py
index 1a5469c9c..2d4b7abad 100644
--- a/Packages/cdms2/Lib/mvSphereMesh.py
+++ b/Packages/cdms2/Lib/mvSphereMesh.py
@@ -3,24 +3,23 @@
 """
 Class for representing grids on the sphere
 Alex Pletzer, Tech-X Corp. (2011)
-This code is provided with the hope that it will be useful.
+This code is provided with the hope that it will be useful. 
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
 import numpy
-from functools import reduce
-
+from types import NoneType
 
 class SphereMesh:
-
+    
     def __init__(self, var, sphereThickness=0.1):
         """
         Constructor
         @param var cdms2 variable
-        @param sphereThickness thickness of the shell in normalized
+        @param sphereThickness thickness of the shell in normalized 
                                sphere radius
         """
-
+       
         self.isRectilinear = True
         self.ndims = 0
         self.elvPositiveDown = False
@@ -35,17 +34,17 @@ class SphereMesh:
 
         # compute the min/max of elevation, needed
         # for normalization
-        if elvs is not None:
+        if type(elvs) != NoneType:
             self.minElv = min(elvs[:])
             self.maxElv = max(elvs[:])
             if hasattr(elvs, 'positive'):
                 if getattr(elvs, 'positive') == 'down':
                     self.elvPositiveDown = True
 
-        # determine the dimensionality and
+        # determine the dimensionality and 
         # whether the grid is rectilinear
         for axis in lons, lats, elvs:
-            if axis is not None:
+            if type(axis) != NoneType:
                 self.ndims += 1
                 if len(axis.shape) != 1:
                     self.isRectilinear = False
@@ -54,43 +53,37 @@ class SphereMesh:
         if self.isRectilinear:
             self.shape = []
             for axis in lons, lats, elvs:
-                if axis is not None:
-                    self.shape.append(len(axis))
+                if type(axis) != NoneType:
+                    self.shape.append( len(axis) )
             self.shape.reverse()
 
         while len(self.shape) < 3:
-            self.shape = [1, ] + list(self.shape)
+            self.shape = [1,] + list(self.shape)
 
         # store lon, lat, elv as a curvilinear grid
         if self.isRectilinear:
             # apply tensore product of axes to generat curvilinear coordinates
-            if elvs is not None:
-                self.elvs = numpy.outer(
-                    numpy.outer(
-                        numpy.ones(self.shape[:0],
-                                   numpy.float32),
-                        elvs),
-                                        numpy.ones(self.shape[0 + 1:], numpy.float32)).reshape(self.shape)
+            if type(elvs) != NoneType:
+                self.elvs = numpy.outer(numpy.outer( numpy.ones(self.shape[:0], numpy.float32), elvs),
+                                        numpy.ones(self.shape[0+1:], numpy.float32)).reshape(self.shape)
             else:
-                self.elvs = numpy.zeros(self.shape, numpy.float32)
-            self.lats = numpy.outer(
-                numpy.outer(numpy.ones(self.shape[:1], numpy.float32), lats),
-                                    numpy.ones(self.shape[1 + 1:], numpy.float32)).reshape(self.shape)
-            self.lons = numpy.outer(
-                numpy.outer(numpy.ones(self.shape[:2], numpy.float32), lons),
-                                    numpy.ones(self.shape[2 + 1:], numpy.float32)).reshape(self.shape)
-
+                self.elvs = numpy.zeros( self.shape, numpy.float32 )
+            self.lats = numpy.outer(numpy.outer( numpy.ones(self.shape[:1], numpy.float32), lats),
+                                    numpy.ones(self.shape[1+1:], numpy.float32)).reshape(self.shape)
+            self.lons = numpy.outer(numpy.outer( numpy.ones(self.shape[:2], numpy.float32), lons),
+                                    numpy.ones(self.shape[2+1:], numpy.float32)).reshape(self.shape)
+    
         else:
             # already in curvilinear form
             self.lons = lons[:]
             self.lats = lats[:]
-            if elvs is not None:
+            if type(elvs) != NoneType:
                 self.elvs = elvs[:]
             else:
-                self.elvs = numpy.zeros(self.shape, numpy.float32)
+                self.elvs = numpy.zeros( self.shape, numpy.float32 )
 
         # reshape as flat arrays
-        sz = reduce(lambda x, y: x * y, self.shape)
+        sz = reduce(lambda x, y: x*y, self.shape)
         self.lons = numpy.reshape(self.lons, (sz,))
         self.lats = numpy.reshape(self.lats, (sz,))
         self.elvs = numpy.reshape(self.elvs, (sz,))
@@ -98,33 +91,32 @@ class SphereMesh:
     def getXYZCoords(self, sphereRadius=1.0):
         """
         Get the curvilinear cartesian coordinates
-        @param sphereRadius radius of sphere
+        @param sphereRadius radius of sphere 
         @return mesh
         """
-        sz = reduce(lambda x, y: x * y, self.shape)
-        rr = sphereRadius * (1.0 + self.elvs)
+        sz = reduce(lambda x, y: x*y, self.shape)
+        rr = sphereRadius*(1.0 + self.elvs)
         diffElv = self.maxElv - self.minElv
-        rr = sphereRadius * numpy.ones(self.lons.shape, numpy.float32)
+        rr = sphereRadius*numpy.ones(self.lons.shape, numpy.float32 )
         if diffElv != 0:
-            coeff = sphereRadius * self.sphereThickness / diffElv
+            coeff = sphereRadius*self.sphereThickness/diffElv
             if self.elvPositiveDown:
                 # depth
-                rr += coeff * (self.maxElv - self.elvs)
+                rr += coeff*(self.maxElv - self.elvs)
             else:
                 # height
-                rr += coeff * (self.elvs - self.minElv)
+                rr += coeff*(self.elvs - self.minElv)
 
-        mesh = numpy.zeros((sz, 3), numpy.float32)
-        cosLats = numpy.cos(self.lats * numpy.pi / 180.)
-        mesh[:, 0] = rr * numpy.cos(self.lons * numpy.pi / 180.) * cosLats
-        mesh[:, 1] = rr * numpy.sin(self.lons * numpy.pi / 180.) * cosLats
-        mesh[:, 2] = rr * numpy.sin(self.lats * numpy.pi / 180.)
+        mesh = numpy.zeros( (sz, 3), numpy.float32 )
+        cosLats = numpy.cos( self.lats*numpy.pi/180. )
+        mesh[:, 0] = rr*numpy.cos(self.lons*numpy.pi/180.)*cosLats
+        mesh[:, 1] = rr*numpy.sin(self.lons*numpy.pi/180.)*cosLats
+        mesh[:, 2] = rr*numpy.sin(self.lats*numpy.pi/180.)
         return mesh
 
-#
+#####################################################################
 # Tests
 
-
 def test2DRect():
     """
     Test data on 2D rectilinear grid
@@ -132,18 +124,17 @@ def test2DRect():
     import cdms2
     from numpy import pi, cos, sin
     nlat, nlon = 12, 15
-    grid = cdms2.createUniformGrid(-0.0, nlat, 60. / (nlat - 1),
-                                   0., nlon, 30. / nlon)
+    grid = cdms2.createUniformGrid(-0.0, nlat, 60./(nlat-1), 
+                                    0., nlon, 30./nlon)
     lons = grid.getLongitude()
     lats = grid.getLatitude()
-    data = numpy.outer(cos(3 * pi * lats[:] / 180.0),
-                       sin(5 * pi * lons[:] / 180.0))
-    var = cdms2.createVariable(data, id='fake_data_2d_rect',
+    data = numpy.outer(cos(3*pi*lats[:]/180.0), 
+                       sin(5*pi*lons[:]/180.0))
+    var = cdms2.createVariable(data, id='fake_data_2d_rect', 
                                axes=(lats, lons))
     sphere_mesh = SphereMesh(var, 0.1)
     print sphere_mesh.getXYZCoords()
 
-
 def test2D():
     """
     Test data on 2D curvilinear grid
@@ -153,34 +144,33 @@ def test2D():
     from cdms2.hgrid import TransientCurveGrid
     from numpy import pi, cos, sin
     nlat, nlon = 3, 4
-    dlon, dlat = 60.0 / float(nlon - 1), 30.0 / float(nlat - 1)
-    lons1D = numpy.array([0.0 + i * dlon for i in range(nlon)])
-    lats1D = numpy.array([0.0 + j * dlat for j in range(nlat)])
+    dlon, dlat = 60.0/float(nlon - 1), 30.0/float(nlat - 1)
+    lons1D = numpy.array([0.0 + i*dlon for i in range(nlon)])
+    lats1D = numpy.array([0.0 + j*dlat for j in range(nlat)])
     lons = numpy.outer(numpy.ones((nlat,)), lons1D)
     lats = numpy.outer(lats1D, numpy.ones((nlon,)))
-    data = cos(3 * pi * lats / 180.0) * sin(5 * pi * lons / 180.0)
+    data = cos(3*pi*lats/180.0) * sin(5*pi*lons/180.0)
     # create grid
     iaxis = TransientVirtualAxis("i", nlon)
     jaxis = TransientVirtualAxis("j", nlat)
-    lataxis = TransientAxis2D(lats,
-                              axes=(jaxis, iaxis),
-                              attributes={'units': 'degree_north'},
-                              id='lats')
-    lonaxis = TransientAxis2D(lons,
-                              axes=(jaxis, iaxis),
-                              attributes={'units': 'degree_east'},
-                              id='lons')
-    grid = TransientCurveGrid(lataxis, lonaxis, id='lats_lons')
-
-    var = cdms2.createVariable(data, id='fake_data_2d',
-                               axes=grid.getAxisList(),
-                               grid=grid,
-                               attributes={'coordinates': 'lats lons'},
+    lataxis = TransientAxis2D(lats, 
+                       axes=(jaxis, iaxis), 
+                       attributes={'units': 'degree_north'}, 
+                       id='lats')
+    lonaxis = TransientAxis2D(lons, 
+                       axes=(jaxis, iaxis), 
+                       attributes={'units': 'degree_east'}, 
+                       id='lons')
+    grid =  TransientCurveGrid(lataxis, lonaxis, id='lats_lons')
+
+    var = cdms2.createVariable(data, id='fake_data_2d', 
+                               axes = grid.getAxisList(),
+                               grid = grid,
+                               attributes = {'coordinates': 'lats lons'},
                                )
     sphere_mesh = SphereMesh(var)
     print sphere_mesh.getXYZCoords()
 
-
 def test3DRect():
     """
     Test data on 3d rectilinear grid
@@ -188,30 +178,29 @@ def test3DRect():
     import cdms2
     from numpy import pi, cos, sin, exp
     nelv, nlat, nlon = 3, 4, 5
-    delv, dlon, dlat = 90000. / float(nelv - 1), \
-        60.0 / float(nlon - 1), 30.0 / float(nlat - 1)
-    elvs1D = numpy.array([100000 - i * delv for i in range(nelv)])
-    lons1D = numpy.array([0.0 + i * dlon for i in range(nlon)])
-    lats1D = numpy.array([0.0 + i * dlat for i in range(nlat)])
+    delv, dlon, dlat = 90000./float(nelv-1), \
+        60.0/float(nlon-1), 30.0/float(nlat-1)
+    elvs1D = numpy.array([100000 - i*delv for i in range(nelv)])
+    lons1D = numpy.array([0.0 + i*dlon for i in range(nlon)])
+    lats1D = numpy.array([0.0 + i*dlat for i in range(nlat)])
     # any order should work
-    lons = numpy.zeros((nlon, nlat, nelv), numpy.float32)
-    lats = numpy.zeros((nlon, nlat, nelv), numpy.float32)
-    elvs = numpy.zeros((nlon, nlat, nelv), numpy.float32)
-    data = numpy.zeros((nlon, nlat, nelv), numpy.float32)
+    lons = numpy.zeros( (nlon, nlat, nelv), numpy.float32 )
+    lats = numpy.zeros( (nlon, nlat, nelv), numpy.float32 )
+    elvs = numpy.zeros( (nlon, nlat, nelv), numpy.float32 )
+    data = numpy.zeros( (nlon, nlat, nelv), numpy.float32 )
     for i in range(nlon):
         for j in range(nlat):
             for k in range(nelv):
                 elvs[i, j, k] = elvs1D[k]
                 lats[i, j, k] = lats1D[j]
                 lons[i, j, k] = lons1D[i]
-                data[i, j, k] = cos(3 * pi * lats[i, j, k] / 180.) * \
-                    sin(5 * pi * lons[i, j, k] / 180.) * exp(-elvs[i, j, k])
-    var = cdms2.createVariable(data, id='fake_data_3d_rect',
+                data[i, j, k] = cos(3*pi*lats[i, j, k]/180.) * \
+                    sin(5*pi*lons[i, j, k]/180.) * exp(-elvs[i, j, k])
+    var = cdms2.createVariable(data, id='fake_data_3d_rect', 
                                axes=(elvs, lats, lons))
     sphere_mesh = SphereMesh(var)
     print sphereMesh.getXYZCoords()
 
-
 def test3DposDown():
     """
     Test 3d data with elev positive down. Need to work with 1D axes.
@@ -220,33 +209,31 @@ def test3DposDown():
     import cdms2
     import numpy
     nlev, nlat, nlon = 4, 5, 6
-    dlev, dlat, dlon = 5000. / \
-        float(nlev - 1), 180. / float(nlat - 1), 360. / float(nlon - 1)
+    dlev, dlat, dlon = 5000./float(nlev-1), 180./float(nlat-1), 360./float(nlon-1)
     levs1d = numpy.arange(0., 5001., dlev)
-    lats1d = numpy.array([0. - i * dlat for i in range(nlat)])
-    lons1d = numpy.array([0. - i * dlon for i in range(nlon)])
+    lats1d = numpy.array([0. - i*dlat for i in range(nlat)])
+    lons1d = numpy.array([0. - i*dlon for i in range(nlon)])
     data = numpy.zeros((nlev, nlat, nlon), numpy.float32)
 
     for k in range(nlev):
         for j in range(nlat):
             for i in range(nlon):
-                data[k, j, i] = numpy.cos(3 * numpy.pi * lats1d[j] / 180.) * \
-                    numpy.sin(5 * numpy.pi * lons1d[i] / 180.) * \
+                data[k, j, i] = numpy.cos(3*numpy.pi*lats1d[j]/180.) * \
+                                numpy.sin(5*numpy.pi*lons1d[i]/180.) * \
                                 numpy.exp(-levs1d[k])
 
-    a1 = cdms2.axis.TransientAxis(levs1d, id='levels',
-                                  attributes={'positive': 'down'})
-    a2 = cdms2.axis.TransientAxis(lats1d, id='latitude')
-    a3 = cdms2.axis.TransientAxis(lons1d, id='longitude')
-    var = cdms2.createVariable(data, id='pos_down_3d_data',
-                               axes=(a1, a2, a3))
+    a1 = cdms2.axis.TransientAxis(levs1d, id = 'levels', 
+                                  attributes = {'positive':'down'})
+    a2 = cdms2.axis.TransientAxis(lats1d, id = 'latitude')
+    a3 = cdms2.axis.TransientAxis(lons1d, id = 'longitude')
+    var = cdms2.createVariable(data, id = 'pos_down_3d_data',
+                               axes = (a1, a2, a3))
     sphereMesh = SphereMesh(var)
     aa = sphereMesh.getXYZCoords()
     bb = aa.reshape((4, 5, 6, 3))
-    for i in range(nlev):
-        print levs1d[i], bb[i, 0, 0, :]
+    for i in range(nlev): print levs1d[i], bb[i, 0, 0, :]
 
-if __name__ == '__main__':
+if __name__ == '__main__': 
 #    test2DRect()
 #    test2D()
 #    test3DRect()
diff --git a/Packages/cdms2/Lib/mvVTKSGWriter.py b/Packages/cdms2/Lib/mvVTKSGWriter.py
index fd2327245..353d8b19b 100644
--- a/Packages/cdms2/Lib/mvVTKSGWriter.py
+++ b/Packages/cdms2/Lib/mvVTKSGWriter.py
@@ -3,14 +3,13 @@
 """
 Write data to VTK file format using the structured grid format
 Alex Pletzer, Tech-X Corp. (2011)
-This code is provided with the hope that it will be useful.
+This code is provided with the hope that it will be useful. 
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
 import numpy
 import time
-from . import mvBaseWriter
-
+import mvBaseWriter
 
 class VTKSGWriter(mvBaseWriter.BaseWriter):
 
@@ -30,10 +29,10 @@ class VTKSGWriter(mvBaseWriter.BaseWriter):
         npts = self.mesh.shape[0]
         print >> f, 'POINTS %d float' % npts
         for i in range(npts):
-            print >> f, '%f %f %f' % tuple(self.mesh[i, :])
+            print >> f, '%f %f %f' % tuple(self.mesh[i,:])
         n0, n1, n2 = self.shape
         # nodal data
-        print >> f, 'POINT_DATA %d' % (n0 * n1 * n2)
+        print >> f, 'POINT_DATA %d' % (n0*n1*n2)
         print >> f, 'SCALARS %s float' % (self.var.id)
         print >> f, 'LOOKUP_TABLE default'
         if n0 > 1:
@@ -44,34 +43,34 @@ class VTKSGWriter(mvBaseWriter.BaseWriter):
         else:
             for j in range(n1):
                 for i in range(n2):
-                    print >> f, '%f' % self.var[j, i]
+                    print >> f, '%f' % self.var[j, i]            
         f.close()
 
 
-#
+######################################################################
 
 def test2DRect():
     import cdms2
     from numpy import pi, cos, sin
     nlat, nlon = 6, 10
-    grid = cdms2.createUniformGrid(-0.0, nlat, 60. / (nlat - 1),
-                                   0., nlon, 30. / nlon)
+    grid = cdms2.createUniformGrid(-0.0, nlat, 60./(nlat-1), 
+                                    0., nlon, 30./nlon)
     lons = grid.getLongitude()
     lats = grid.getLatitude()
-    data = numpy.outer(cos(3 * pi * lats[:] / 180.0),
-                       sin(5 * pi * lons[:] / 180.0))
-    var = cdms2.createVariable(data, id='fake_data_2d_rect',
+    data = numpy.outer(cos(3*pi*lats[:]/180.0), 
+                       sin(5*pi*lons[:]/180.0))
+    var = cdms2.createVariable(data, id='fake_data_2d_rect', 
                                axes=(lats, lons))
     vw = VTKSGWriter(var)
     vw.write('test2DRect_SG.vtk')
 
-
 def test3D():
     import cdms2
     var = cdms2.open('sample_data/ta_ncep_87-6-88-4.nc', 'r')('ta')
-    vw = VTKSGWriter(var[0, 0:10, 0:20, 0:30])
+    vw = VTKSGWriter(var[0,0:10,0:20,0:30])
     vw.write('test3D_SG.vtk')
 
-if __name__ == '__main__':
+if __name__ == '__main__': 
     test2DRect()
     test3D()
+    
diff --git a/Packages/cdms2/Lib/mvVTKUGWriter.py b/Packages/cdms2/Lib/mvVTKUGWriter.py
index e712efaf7..50dcb1195 100644
--- a/Packages/cdms2/Lib/mvVTKUGWriter.py
+++ b/Packages/cdms2/Lib/mvVTKUGWriter.py
@@ -3,14 +3,13 @@
 """
 Write data to VTK file format using the unstructured grid format
 Alex Pletzer, Tech-X Corp. (2011)
-This code is provided with the hope that it will be useful.
+This code is provided with the hope that it will be useful. 
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
 import numpy
 import time
-from . import mvBaseWriter
-
+import mvBaseWriter
 
 class VTKUGWriter(mvBaseWriter.BaseWriter):
 
@@ -27,9 +26,9 @@ class VTKUGWriter(mvBaseWriter.BaseWriter):
         npts = self.mesh.shape[0]
         print >> f, 'POINTS %d float' % npts
         for i in range(npts):
-            print >> f, '%f %f %f' % tuple(self.mesh[i, :])
+            print >> f, '%f %f %f' % tuple(self.mesh[i,:])
         n0, n1, n2 = self.shape
-        ncells = (n0 - 1) * (n1 - 1) * (n2 - 1)
+        ncells = (n0 - 1)*(n1 - 1)*(n2 - 1)
         if ncells != 0:
             # 3d
             ntot = ncells * (8 + 1)
@@ -37,17 +36,17 @@ class VTKUGWriter(mvBaseWriter.BaseWriter):
             for k in range(n0 - 1):
                 for j in range(n1 - 1):
                     for i in range(n2 - 1):
-                        index = i + n2 * (j + n1 * k)
+                        index = i + n2*(j + n1*k)
                         print >> f, '8 %d %d %d %d %d %d %d %d' % \
-                            (index, index + 1, index + 1 + n2, index + n2,
-                             index + n1 * n2, index + n1 * n2 + 1,
-                             index + n1 * n2 + 1 + n2, index + n1 * n2 + n2)
+                            (index, index+1, index+1+n2, index+n2, 
+                             index+n1*n2, index+n1*n2+1, 
+                             index+n1*n2+1+n2, index+n1*n2+n2)
             print >> f, 'CELL_TYPES %d' % ncells
             for i in range(ncells):
                 # hexahedron
                 print >> f, 12
             # nodal data
-            print >> f, 'POINT_DATA %d' % (n0 * n1 * n2)
+            print >> f, 'POINT_DATA %d' % (n0*n1*n2)
             print >> f, 'SCALARS %s float' % (self.var.id)
             print >> f, 'LOOKUP_TABLE default'
             for k in range(n0):
@@ -56,52 +55,52 @@ class VTKUGWriter(mvBaseWriter.BaseWriter):
                         print >> f, '%f' % self.var[k, j, i]
         else:
             # 2d
-            ncells = (n1 - 1) * (n2 - 1)
+            ncells = (n1 - 1)*(n2 - 1)
             ntot = ncells * (4 + 1)
             print >> f, 'CELLS %d %d' % (ncells, ntot)
             for j in range(n1 - 1):
                 for i in range(n2 - 1):
-                    index = i + n2 * j
+                    index = i + n2*j
                     print >> f, '4 %d %d %d %d' % \
-                        (index, index + 1, index + 1 + n2, index + n2)
+                        (index, index+1, index+1+n2, index+n2)
             print >> f, 'CELL_TYPES %d' % ncells
             for i in range(ncells):
                 # quad
                 print >> f, 9
             # nodal data
-            print >> f, 'POINT_DATA %d' % (n0 * n1 * n2)
+            print >> f, 'POINT_DATA %d' % (n0*n1*n2)
             print >> f, 'SCALARS %s float' % (self.var.id)
             print >> f, 'LOOKUP_TABLE default'
             for j in range(n1):
                 for i in range(n2):
-                    print >> f, '%f' % self.var[j, i]
+                    print >> f, '%f' % self.var[j, i]   
         f.close()
 
 
-#
+######################################################################
 
 def test2DRect():
     import cdms2
     from numpy import pi, cos, sin
     nlat, nlon = 6, 10
-    grid = cdms2.createUniformGrid(-0.0, nlat, 60. / (nlat - 1),
-                                   0., nlon, 30. / nlon)
+    grid = cdms2.createUniformGrid(-0.0, nlat, 60./(nlat-1), 
+                                    0., nlon, 30./nlon)
     lons = grid.getLongitude()
     lats = grid.getLatitude()
-    data = numpy.outer(cos(3 * pi * lats[:] / 180.0),
-                       sin(5 * pi * lons[:] / 180.0))
-    var = cdms2.createVariable(data, id='fake_data_2d_rect',
+    data = numpy.outer(cos(3*pi*lats[:]/180.0), 
+                       sin(5*pi*lons[:]/180.0))
+    var = cdms2.createVariable(data, id='fake_data_2d_rect', 
                                axes=(lats, lons))
     vw = VTKUGWriter(var)
     vw.write('test2DRect.vtk')
 
-
 def test3D():
     import cdms2
     var = cdms2.open('sample_data/ta_ncep_87-6-88-4.nc', 'r')('ta')
-    vw = VTKUGWriter(var[0, 0:10, 0:20, 0:30])
+    vw = VTKUGWriter(var[0,0:10,0:20,0:30])
     vw.write('test3D.vtk')
 
-if __name__ == '__main__':
+if __name__ == '__main__': 
     test2DRect()
     test3D()
+    
diff --git a/Packages/cdms2/Lib/mvVsWriter.py b/Packages/cdms2/Lib/mvVsWriter.py
index 72bb31526..aed81a79f 100644
--- a/Packages/cdms2/Lib/mvVsWriter.py
+++ b/Packages/cdms2/Lib/mvVsWriter.py
@@ -3,15 +3,14 @@
 """
 Write data to VizSchema compliant file
 Alex Pletzer, Tech-X Corp. (2011)
-This code is provided with the hope that it will be useful.
+This code is provided with the hope that it will be useful. 
 No guarantee is provided whatsoever. Use at your own risk.
 """
 
 import numpy
-from . import mvBaseWriter
+import mvBaseWriter
 import re
 
-
 class VsWriter(mvBaseWriter.BaseWriter):
 
     def write(self, filename):
@@ -22,59 +21,58 @@ class VsWriter(mvBaseWriter.BaseWriter):
         try:
             import tables
         except:
-            raise ImportError('You must have pytables installed')
-
+            raise ImportError, 'You must have pytables installed'
+        
         if filename.find('.vsh5') < 0 and filename.find('.h5') < 0:
-            filename += '.vsh5'  # VizSchema hdf5 format
+            filename += '.vsh5' # VizSchema hdf5 format
 
         # open file
         h5file = tables.openFile(filename, 'w')
         # put mesh
         meshid = 'mesh_' + self.var.id
-        mdata = numpy.reshape(self.mesh, self.shape + [3, ])
+        mdata = numpy.reshape(self.mesh, self.shape + [3,])
         mset = h5file.createArray("/", meshid, mdata)
         mset.attrs.vsType = "mesh"
         mset.attrs.vsKind = "structured"
         mset.attrs.vsIndexOrder = "compMinorC"
         # data
-        dset = h5file.createArray("/", self.var.id,
+        dset = h5file.createArray("/", self.var.id, 
                                   numpy.reshape(self.var, self.shape))
         dset.attrs.vsType = "variable"
         dset.attrs.vsMesh = meshid
         # additional attributes
         for a in self.var.attributes:
             # Skip mpi objects
-            if re.match('mpi', a.lower()):
+            if re.match('mpi',a.lower()):
                 continue
             setattr(dset.attrs, a, getattr(self.var, a))
         # close file
         h5file.close()
 
-#
-
+######################################################################
 
 def test2DRect():
     import cdms2
     from numpy import pi, cos, sin
     nlat, nlon = 3, 4
-    grid = cdms2.createUniformGrid(-0.0, nlat, 60. / (nlat - 1),
-                                   0., nlon, 30. / nlon)
+    grid = cdms2.createUniformGrid(-0.0, nlat, 60./(nlat-1), 
+                                    0., nlon, 30./nlon)
     lons = grid.getLongitude()
     lats = grid.getLatitude()
-    data = numpy.outer(cos(3 * pi * lats[:] / 180.0),
-                       sin(5 * pi * lons[:] / 180.0))
-    var = cdms2.createVariable(data, id='fake_data_2d_rect',
+    data = numpy.outer(cos(3*pi*lats[:]/180.0), 
+                       sin(5*pi*lons[:]/180.0))
+    var = cdms2.createVariable(data, id='fake_data_2d_rect', 
                                axes=(lats, lons))
     vw = VsWriter(var)
     vw.write('test2DRect.vsh5')
 
-
 def test3D():
     import cdms2
     var = cdms2.open('sample_data/ta_ncep_87-6-88-4.nc', 'r')('ta')
-    vw = VsWriter(var[0, 0:10, 0:20, 0:30])
+    vw = VsWriter(var[0,0:10,0:20,0:30])
     vw.write('test3D.vsh5')
 
-if __name__ == '__main__':
+if __name__ == '__main__': 
     test2DRect()
     test3D()
+    
diff --git a/Packages/cdms2/Lib/restApi.py b/Packages/cdms2/Lib/restApi.py
index 159d50112..2b5ef1059 100644
--- a/Packages/cdms2/Lib/restApi.py
+++ b/Packages/cdms2/Lib/restApi.py
@@ -1,3 +1,4 @@
+import cdms2
 import urllib2
 import xml.etree.ElementTree
 try:
@@ -5,136 +6,116 @@ try:
 except:
     pass
 import os
-# import bz2
-
+#import bz2
 
 class esgfConnectionException(Exception):
     pass
-
-
 class esgfDatasetException(Exception):
     pass
-
-
 class esgfFilesException(Exception):
     pass
-    # def __init__(self,value):
-    # self.value=value
-    # def __repr__(self):
-    # msg =  "rest API error: %s" % repr(value)
-    # print msg
-    # return msg
-
+    ## def __init__(self,value):
+    ##     self.value=value
+    ## def __repr__(self):
+    ##     msg =  "rest API error: %s" % repr(value)
+    ##     print msg
+    ##     return msg
 
 class FacetConnection(object):
-
-    def __init__(self, host='pcmdi9.llnl.gov'):
-        self.rqst = "http://%s/esg-search/search?facets=*&type=Dataset&limit=1&latest=true" % host
-        self.rqst_count = "http://%s/esg-search/search?facets=*&type=File&limit=0&latest=true" % host
+    def __init__(self,host='pcmdi9.llnl.gov'):
+        self.rqst="http://%s/esg-search/search?facets=*&type=Dataset&limit=1&latest=true" % host
+        self.rqst_count="http://%s/esg-search/search?facets=*&type=File&limit=0&latest=true" % host
         self.EsgfObjectException = esgfConnectionException
-
-    def get_xmlelement(self, facet_param=None):
+    def get_xmlelement(self,facet_param=None):
         try:
-            rqst = self.rqst
+            rqst=self.rqst
             if facet_param:
-                rqst = rqst + '&%s' % facet_param
-            # print rqst
+                rqst=rqst+'&%s'%facet_param        
+            #print rqst
             url = urllib2.urlopen(rqst)
-        except Exception as msg:
-            raise self.EsgfObjectException(msg)
+        except Exception,msg:
+             raise self.EsgfObjectException(msg)
         r = url.read()
         try:
             e = xml.etree.ElementTree.fromstring(r)
             return e
-        except Exception as err:
-            raise self.EsgfObjectException(
-                "Could not interpret server's results: %s" %
-                err)
-
-    def make_facet_dict(self, xmlelement):
-        facet_dict = {}
+        except Exception,err:
+            raise self.EsgfObjectException("Could not interpret server's results: %s" % err)
+    def make_facet_dict(self,xmlelement):
+        facet_dict={}
         for lst in xmlelement.findall('lst'):
-            if lst.get('name') == 'facet_counts':
-                myelement = lst
+            if lst.get('name')=='facet_counts':
+                myelement=lst
                 for node in myelement.findall('lst'):
-                    if node.get('name') == 'facet_fields':
+                    if node.get('name')=='facet_fields':
                         for child in node.getchildren():
-                            facet_name = child.get('name')
-                            facet_dict[facet_name] = []
+                            facet_name=child.get('name')
+                            facet_dict[facet_name]=[]
                             for grandchild in child.getchildren():
-                                facet_dict[facet_name].append(
-                                    "%s (%s)" %
-                                    (str(grandchild.get('name')), str(grandchild.text)))
+                                facet_dict[facet_name].append("%s (%s)"%(str(grandchild.get('name')),str(grandchild.text)))
         return facet_dict
-
-    def get_xmlelement_count(self, facet_param=None):
+    def get_xmlelement_count(self,facet_param=None):
         try:
-            rqst = self.rqst_count
+            rqst=self.rqst_count
             if facet_param:
-                rqst = rqst + '&%s' % facet_param
-                # print rqst
+                rqst=rqst+'&%s'%facet_param        
+                #print rqst
             url = urllib2.urlopen(rqst)
-        except Exception as msg:
-            raise self.EsgfObjectException(msg)
+        except Exception,msg:
+             raise self.EsgfObjectException(msg)
         r = url.read()
         try:
             e = xml.etree.ElementTree.fromstring(r)
             return e
-        except Exception as err:
-            raise self.EsgfObjectException(
-                "Could not interpret server's results: %s" %
-                err)
+        except Exception,err:
+            raise self.EsgfObjectException("Could not interpret server's results: %s" % err)
 
-    def make_facet_dict_count(self, xmlelement):
-        myelementlist = xmlelement.findall('result')
-        count = None
+    def make_facet_dict_count(self,xmlelement):
+        myelementlist=xmlelement.findall('result')
+        count=None
         if len(myelementlist) > 0:
-            myelement = myelementlist[0]
-            count = int(myelement.get('numFound'))
+            myelement=myelementlist[0]
+            count=int(myelement.get('numFound'))
         return count
 
-validSearchTypes = ["Dataset", "File"]  # "ById","ByTimeStamp"]
-
-
+validSearchTypes =  ["Dataset","File"]#"ById","ByTimeStamp"]
 class esgfConnection(object):
-
-    def __init__(self, host, port=80, timeout=15, limit=None, offset=0,
-                 mapping=None, datasetids=None, fileids=None, restPath=None):
-        self.port = port
-        url = str(host).replace("://", "^^^---^^^")
-        sp = url.split("/")
-        host = sp[0].replace("^^^---^^^", "://")
+    def __init__(self,host,port=80,timeout=15,limit=None,offset=0,mapping=None,datasetids=None,fileids=None,restPath=None):
+        self.port=port
+        url=str(host).replace("://","^^^---^^^")
+        sp= url.split("/")
+        host = sp[0].replace("^^^---^^^","://")
         if restPath is None:
             restPath = "/".join(sp[1:])
-            if len(restPath) == 0:
-                self.restPath = "/esg-search/search"
+            if len(restPath)==0:
+                self.restPath="/esg-search/search"
             else:
-                self.restPath = restPath
+                self.restPath=restPath
         else:
-            self.restPath = restPath
-        self.host = host
-        # self.host="esg-datanode.jpl.nasa.gov"
+            self.restPath=restPath
+        self.host=host
+        #self.host="esg-datanode.jpl.nasa.gov"
         self.defaultSearchType = "Dataset"
         self.EsgfObjectException = esgfConnectionException
-        self.validSearchTypes = validSearchTypes
-        self.validSearchTypes = ["Dataset", ]
-        all = self._search("facets=*", searchType=None)
-        # Now figure out the facet fields
+        self.validSearchTypes=validSearchTypes
+        self.validSearchTypes=["Dataset",]
+        all = self._search("facets=*",searchType=None)
+        ## Now figure out the facet fields
         self.serverOrder = []
         for e in all:
-            if e.tag == "lst" and "name" in e.keys() and e.get("name") == "responseHeader":
-                # ok found the Header
+            if e.tag=="lst" and "name" in e.keys() and e.get("name")=="responseHeader":
+                ## ok found the Header
                 for s in e:
-                    if s.get("name") == "params":
-                        params = s
+                    if s.get("name")=="params":
+                        params=s
                         break
-                self.params = {"text": None, "limit": limit, "offset": offset}
-                self.searchableKeys = set(["text", "limit", "offset"])
+                self.params={"text":None,"limit":limit,"offset":offset}
+                self.searchableKeys=set(["text","limit","offset"])
                 for p in params:
-                    if p.get("name") == "facet.field":
+                    if p.get("name")=="facet.field":
                         for f in p:
                             self.serverOrder.append(f.text)
-                            self.params[f.text] = None
+                            self.params[f.text]=None
                             self.searchableKeys.add(f.text)
 
         self.keys = self.params.keys
@@ -142,68 +123,61 @@ class esgfConnection(object):
         self.values = self.params.values
 
         if datasetids is not None:
-            self.datasetids = genutil.StringConstructor(datasetids)
+            self.datasetids=genutil.StringConstructor(datasetids)
         else:
-            self.datasetids = None
+            self.datasetids=None
         if fileids is not None:
-            self.fileids = genutil.StringConstructor(fileids)
+            self.fileids=genutil.StringConstructor(fileids)
             if datasetids is not None:
-                self.fileids.template = self.fileids.template.replace(
-                    "%(datasetid)", self.datasetids.template)
+                self.fileids.template=self.fileids.template.replace("%(datasetid)",self.datasetids.template)
         elif self.datasetids is not None:
-            self.fileids = genutil.StringConstructor(
-                "%s.%%(filename)" %
-                self.datasetids.template)
+            self.fileids=genutil.StringConstructor("%s.%%(filename)" % self.datasetids.template)
         else:
-            self.fileids = None
-        # self.setMapping(mapping)
-        self.mapping = mapping
-
-    # def setUserOrder(self,value):
-    # self.userOrder=value
-    # def getUserOrder(self):
-    # return self.userOrder
-    # order=property(getUserOrder,setUserOrder)
-    def __getitem__(self, key):
+            self.fileids=None
+        #self.setMapping(mapping)
+        self.mapping=mapping                    
+            
+    ## def setUserOrder(self,value):
+    ##     self.userOrder=value
+    ## def getUserOrder(self):
+    ##     return self.userOrder
+    ## order=property(getUserOrder,setUserOrder)
+    def __getitem__(self,key):
         try:
             val = self.params[key]
         except:
             raise self.EsgfObjectException("Invalid key: %s" % repr(key))
         return val
-
-    def __setitem__(self, key, value):
-        if key not in self.params.keys():
-            raise self.EsgfObjectException(
-                "Invalid key: %s, valid keys are: %s" %
-                (repr(key), repr(self.params.keys())))
-        self.params[key] = value
+    def __setitem__(self,key,value):
+        if not key in self.params.keys():
+            raise self.EsgfObjectException("Invalid key: %s, valid keys are: %s" % (repr(key),repr(self.params.keys())))
+        self.params[key]=value
         return
 
-    def _search(self, search="", searchType=None, stringType=False):
+                            
+    def _search(self,search="",searchType=None,stringType=False):
         if searchType is None:
-            searchType = self.defaultSearchType
-        if searchType not in self.validSearchTypes:
-            raise self.EsgfObjectException(
-                "Valid Search types are: %s" %
-                repr(self.validSearchTypes))
-        while search[0] == "&":
-            search = search[1:]
-        rqst = "%s/?type=%s&%s" % (self.restPath, searchType, search)
-        # print "REQUEST: %s%s" % (self.host,rqst)
-        myhost = str(self.host)
-        myport = str(self.port)
-        if myhost.find("://") > -1:
-            urltype = ""
+            searchType=self.defaultSearchType
+        if not searchType in self.validSearchTypes:
+            raise self.EsgfObjectException("Valid Search types are: %s" % repr(self.validSearchTypes))
+        while search[0]=="&":
+            search=search[1:]
+        rqst = "%s/?type=%s&%s" % (self.restPath,searchType,search)
+        #print "REQUEST: %s%s" % (self.host,rqst)
+        myhost=str(self.host)
+        myport=str(self.port)
+        if myhost.find("://")>-1:
+            urltype=""
         else:
-            urltype = "http://"
+            urltype="http://"
         try:
-            rqst = "%s%s:%s/%s" % (urltype, myhost, myport, rqst)
-            tmp = rqst[6:].replace("//", "/")
-            rqst = rqst[:6] + tmp
-            # print "Request:%s"%rqst
+            rqst="%s%s:%s/%s" % (urltype,myhost,myport,rqst)
+            tmp=rqst[6:].replace("//","/")
+            rqst=rqst[:6]+tmp
+            #print "Request:%s"%rqst
             url = urllib2.urlopen(rqst)
-        except Exception as msg:
-            raise self.EsgfObjectException(msg)
+        except Exception,msg:
+             raise self.EsgfObjectException(msg)
         r = url.read()
         if stringType:
             return r
@@ -211,496 +185,539 @@ class esgfConnection(object):
             try:
                 e = xml.etree.ElementTree.fromstring(r)
                 return e
-            except Exception as err:
-                raise self.EsgfObjectException(
-                    "Could not interpret server's results: %s" %
-                    err)
+            except Exception,err:
+                raise self.EsgfObjectException("Could not interpret server's results: %s" % err)
 
-    def generateRequest(self, stringType=False, **keys):
+    def generateRequest(self,stringType=False,**keys):
         search = ""
-        params = {"limit": self["limit"], "offset": self["offset"]}
+        params={"limit":self["limit"],"offset":self["offset"]}
 
-        # for k in self.keys():
-        # if self[k] is not None and k in self.searchableKeys and k!="type":
-        # params[k]=self[k]
+        ## for k in self.keys():
+        ##     if self[k] is not None and k in self.searchableKeys and k!="type":
+        ##         params[k]=self[k]
 
+        
         for k in keys.keys():
-            if k in ["stringType", "type"]:
+            if k == "stringType":
+                stringType=keys[k]
+                continue
+            elif k == "type":
                 continue
+            ## elif not k in self.searchableKeys:
+            ##     raise self.EsgfObjectException("Invalid key: %s, valid keys are: %s" % (repr(k),repr(self.params.keys())))
             if keys[k] is not None:
-                params[k] = keys[k]
+                params[k]=keys[k]
 
         search = ""
         for k in params.keys():
-            if isinstance(params[k], list):
+            if isinstance(params[k],list):
                 for v in params[k]:
-                    if isinstance(v, str):
-                        v = v.strip()
-                    search += "&%s=%s" % (k, v)
+                    if isinstance(v,str):
+                        v=v.strip()
+                    search+="&%s=%s" % (k,v)
             else:
                 v = params[k]
-                if isinstance(v, str):
-                    v = v.strip()
-                search += "&%s=%s" % (k, v)
+                if isinstance(v,str):
+                    v=v.strip()
+                search+="&%s=%s" % (k,v)
 
-# search = "&".join(map(lambda x : "%s=%s" % (x[0],x[1]), params.items()))
-        search = search.replace(" ", "%20")
+#        search = "&".join(map(lambda x : "%s=%s" % (x[0],x[1]), params.items()))
+        search=search.replace(" ","%20")
         return search
-
-    def request(self, **keys):
-        numFound = 0
+    
+    def request(self,**keys):
+        numFound=0
         cont = True
-        r = []
+        r=[]
         limit = self["limit"]
         while cont:
-            # print "Continuing",limit
-            self["offset"] = numFound
-            if limit is None or limit > 1000:
-                self["limit"] = 1000
+            #print "Continuing",limit
+            self["offset"]=numFound
+            if limit is None or limit>1000:
+                self["limit"]=1000
             search = self.generateRequest(**keys)
-            stringType = keys.get("stringType", False)
-            r.append(self._search(search, stringType=stringType))
-            if numFound == 0:
+            stringType=keys.get("stringType",False)
+            r.append(self._search(search,stringType=stringType))
+            if numFound==0:
                 for s in r[0][:]:
-                    if s.tag == "result":
+                    if s.tag=="result":
                         n = int(s.get("numFound"))
-            numFound += self["limit"]
+            numFound+=self["limit"]
             if limit is None:
-                if numFound >= n:
+                if numFound>=n:
                     cont = False
             else:
-                if numFound >= limit:
-                    cont = False
-            # print "N is:",numFound,n
-        self["limit"] = limit
-        self["offset"] = 0
+                if numFound>=limit:
+                    cont=False
+            #print "N is:",numFound,n
+        self["limit"]=limit
+        self["offset"]=0
         return r
-
-    def extractTag(self, f):
-        out = None
-        if f.tag == "str":
-            out = f.text
-        elif f.tag == "arr":
-            out = []
+    
+    def extractTag(self,f):
+        out=None
+        if f.tag=="str":
+            out=f.text
+        elif f.tag=="arr":
+            out=[]
             for sub in f[:]:
                 out.append(self.extractTag(sub))
-
-        elif f.tag == "float":
+            
+        elif f.tag=="float":
             out = float(f.text)
-        elif f.tag == "int":
+        elif f.tag=="int":
             out = int(f.text)
-        elif f.tag == "date":
-            # Convert to cdtime?
-            out = f.text
+        elif f.tag=="date":
+            ## Convert to cdtime?
+            out =f.text
         else:
-            out = f
-        if isinstance(out, list) and len(out) == 1:
-            out = out[0]
+            out=f
+        if isinstance(out,list) and len(out)==1:
+            out=out[0]
         return out
-
-    def searchDatasets(self, **keys):
+        
+    def searchDatasets(self,**keys):
         resps = self.request(**keys)
-        stringType = keys.get("stringType", False)
+        stringType=keys.get("stringType",False)
         if stringType:
-            return resps
+            return resps
         datasets = []
         for resp in resps:
             for r in resp[:]:
-                if r.tag == "result":
-                    # Ok let's go thru these datasets
+                if r.tag=="result":
+                    ##Ok let's go thru these datasets
                     for d in r[:]:
-                        tmpkeys = {}
+                        #print "************************************************"
+                        tmpkeys={}
                         for f in d[:]:
                             k = f.get("name")
-                            tmpkeys[k] = self.extractTag(f)
-                        if tmpkeys["type"] == "Dataset":
-                            datasets.append(
-                                esgfDataset(host=self.host,
-                                            port=self.port,
-                                            limit=1000,
-                                            offset=0,
-                                            mapping=self.mapping,
-                                            datasetids=self.datasetids,
-                                            fileids=self.fileids,
-                                            keys=tmpkeys,
-                                            originalKeys=keys,
-                                            restPath=self.restPath))
+                            tmpkeys[k]=self.extractTag(f)
+                        if tmpkeys["type"]=="Dataset":
+                            datasetid = tmpkeys["id"]
+                            #print datasetid,self.restPath
+                            #print "KEYS FOR DATASET",keys.keys()
+                            datasets.append(esgfDataset(host=self.host,port=self.port,limit=1000,offset=0,mapping=self.mapping,datasetids=self.datasetids,fileids=self.fileids,keys=tmpkeys,originalKeys=keys,restPath=self.restPath))
         return datasets
 
-
 class esgfDataset(esgfConnection):
-
-    def __init__(self, host=None, port=80, limit=1000, offset=0, mapping=None,
-                 datasetids=None, fileids=None, _http=None, restPath=None, keys={}, originalKeys={}):
+    def __init__(self,host=None,port=80,limit=1000,offset=0,mapping=None,datasetids=None,fileids=None,_http=None,restPath=None,keys={},originalKeys={}):
         if host is None:
             raise esgfDatasetException("You need to pass url")
-        self.host = host
-        # self.host="esg-datanode.jpl.nasa.gov"
-        self.port = port
-        self.defaultSearchType = "File"
+        self.host=host
+        #self.host="esg-datanode.jpl.nasa.gov"
+        self.port=port
+        self.defaultSearchType="File"
         if restPath is None:
-            self.restPath = "/esg-search/search"
+            self.restPath="/esg-search/search"
         else:
-            self.restPath = restPath
+            self.restPath=restPath
         if datasetids is None:
             if "dataset_id_template_" in keys:
-                tmp = keys["dataset_id_template_"]
-                if tmp[:5] == "cmip5":
-                    tmp = tmp.replace("valid_institute", "institute")
-                    tmp = "%(project)" + tmp[5:]
-                self.datasetids = genutil.StringConstructor(
-                    tmp.replace(")s", ")"))
-            elif "project" in keys and keys["project"] == "cmip5":
-                self.datasetids = genutil.StringConstructor(
-                    "%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)")  # noqa
+                tmp=keys["dataset_id_template_"]
+                if tmp[:5]=="cmip5":
+                    tmp = tmp.replace("valid_institute","institute")
+                    tmp="%(project)"+tmp[5:]
+                self.datasetids = genutil.StringConstructor(tmp.replace(")s",")"))
+            elif "project" in keys and keys["project"]=="cmip5":
+                self.datasetids = genutil.StringConstructor("%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)")
             else:
-                self.datasetids = None
-        if isinstance(datasetids, genutil.StringConstructor):
-            self.datasetids = datasetids
-        elif isinstance(datasetids, str):
-            self.datasetids = genutil.StringConstructor(datasetids)
+                self.datasetids=None
+        if isinstance(datasetids,genutil.StringConstructor):
+            self.datasetids=datasetids
+        elif isinstance(datasetids,str):
+            self.datasetids=genutil.StringConstructor(datasetids)
         if fileids is not None:
-            if isinstance(fileids, genutil.StringConstructor):
-                self.fileids = fileids
+            if isinstance(fileids,genutil.StringConstructor):
+                self.fileids=fileids
             else:
-                self.fileids = genutil.StringConstructor(fileids)
+                self.fileids=genutil.StringConstructor(fileids)
             if self.datasetids is not None:
-                self.fileids.template = self.fileids.template.replace(
-                    "%(datasetid)", self.datasetids.template)
+                self.fileids.template=self.fileids.template.replace("%(datasetid)",self.datasetids.template)
         elif self.datasetids is not None:
-            self.fileids = genutil.StringConstructor(
-                "%s.%%(filename)" %
-                self.datasetids.template)
+            self.fileids=genutil.StringConstructor("%s.%%(filename)" % self.datasetids.template)
         else:
-            self.fileids = None
-        self.originalKeys = originalKeys
-        self.validSearchTypes = validSearchTypes
-        self.validSearchTypes = ["File", ]
+            self.fileids=None
+        self.originalKeys=originalKeys
+        self.validSearchTypes=validSearchTypes
+        self.validSearchTypes=["File",]
         self.EsgfObjectException = esgfDatasetException
-        self.params = keys
+        self.params=keys
         self.keys = self.params.keys
         self.items = self.params.items
         self.values = self.params.values
-        self.params["limit"] = limit
-        self.params["offset"] = offset
-        self.mapping = mapping
-        self.resp = None
+        #self.id=self["id"]
+        self.params["limit"]=limit
+        self.params["offset"]=offset
+        self.mapping=mapping
+        #print "SEARCHING DS:",originalKeys
+        self.resp=None
         self.cacheTime = None
-
-    def _extractFiles(self, resp, **inKeys):
-        # We need to stick in there the bit from Luca to fill in the matching
-        # key from facet for now it's empty
-        files = []
+#        self.search()
+#        self.remap()
+        
+        ## Ok now we need to "map" this according to the user wishes
+
+           
+
+    ## def mappedItems():
+    ##     mapped=[]
+    ##     mapppoint=self.mapped
+    ##     for k in self.mapping.keys():
+    ##         keys=[]
+    ##         level=[k,mappoint.keys()]
+    ##         mappoint
+    def _extractFiles(self,resp,**inKeys):
+        ## We need to stick in there the bit from Luca to fill in the matching key from facet for now it's empty
+        files=[]
+        skipped = ["type","title","timestamp","service","id","score","file_url","service_type"]
         for r in resp[:]:
-            if r.tag == "result":
+            if r.tag=="result":
                 for d in r[:][:]:
-                    keys = {}
+                    keys={}
                     for f in d[:]:
                         k = f.get("name")
-                        keys[k] = self.extractTag(f)
-                    if keys["type"] == "File":
-                        files.append(esgfFile(**keys))
+                        keys[k]=self.extractTag(f)
+                    if keys["type"]=="File":
+                        ## if self["id"]=="obs4MIPs.NASA-JPL.AIRS.mon":
+                        ##     verbose=True
+                        ## else:
+                        ##     verbose=False
+                        ## #verbose=True
+                        ## if verbose: print "OK",keys["variable"],keys["file_id"],self["id"]
+                        ## if verbose: print "FILEIDS:",self.fileids
+                        ## if verbose: print "Fileids:",self.fileids.template
+                        ## if verbose: print "keys:",keys
+                        ## if self.fileids is not None:
+                        ##     try:
+                        ##         if verbose: print "file:",keys["file_id"],self.fileids.template
+                        ##         k2 = self.fileids.reverse(keys["file_id"])
+                        ##         if verbose: print "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@",k2
+                        ##         for k in k2.keys():
+                        ##             keys[k]=k2[k]
+                        ##     except:
+                        ##         if verbose: print "Failed:",ids[i].text,self.fileids.template
+                        ##         pass
+                        ## if verbose: print "KEYS FOR FILE:",keys.keys()
+                        ## if verbose: print "INKEYS:",inKeys.keys()
+                        ## matched = True
+                        ## matchWithKeys = {}
+                        ## for k in self.keys():
+                        ##     if k in self.originalKeys.keys():
+                        ##         matchWithKeys[k]=self.originalKeys[k]
+                        ##     else:
+                        ##         matchWithKeys[k]=self[k]
+                        ## for s in skipped:
+                        ##     try:
+                        ##         matchWithKeys.pop(s)
+                        ##     except:
+                        ##         pass
+                        ## for k in inKeys.keys():
+                        ##     matchWithKeys[k]=inKeys[k]
+                        ## if verbose: print "matching:",matchWithKeys.keys()
+                        ## for k in keys.keys():
+                        ##     if k in matchWithKeys.keys():
+                        ##         if verbose: print "Testing:",k,keys[k]
+                        ##         v = matchWithKeys[k]
+                        ##         if isinstance(v,(str,int,float)):
+                        ##             if verbose: print "\tComparing with:",v
+                        ##             if v != keys[k]:
+                        ##                 matched = False
+                        ##                 if verbose: print "\t\tNOPE"
+                        ##                 break
+                        ##         elif isinstance(v,list):
+                        ##             if verbose: print "\tComparing with (and %i more):%s"%(len(v),v[0]),v
+                        ##             if not keys[k] in v:
+                        ##                 matched = False
+                        ##                 if verbose: print "\t\tNOPE"
+                        ##                 break
+                        ##         else:
+                        ##             print "\twould compare %s with type: %s if I knew how to" % (str(v),type(v))
+                        ## if verbose: print keys["file_id"],matched
+                        ## if matched :
+                        ##     for k in self.keys():
+                        ##         if not k in keys.keys():
+                        ##             keys[k]=self[k]
+                        ##     print "KEYS:",keys
+                            files.append(esgfFile(**keys))
         return files
-
+            
     def info(self):
         print self
 
     def __str__(self):
         st = "Dataset Information\nid: %s\nKeys:\n" % self.id
         for k in self.keys():
-            st += "\t%s : %s\n" % (k, self[k])
+            st+="\t%s : %s\n" % (k,self[k])
         return st
-
+        
     def clearWebCache(self):
         self.resp = None
 
-    def saveCache(self, target="."):
+    def saveCache(self,target="."):
         if self.resp is None:
             return
         if os.path.isdir(target):
-            target = os.path.join(target, "esgfDatasetsCache.pckl")
+            target = os.path.join(target,"esgfDatasetsCache.pckl")
         if os.path.exists(target):
-            f = open(target)
-            # dict=eval(bz2.decompress(f.read()))
-            dict = eval(f.read())
+            f=open(target)
+            #dict=eval(bz2.decompress(f.read()))
+            dict=eval(f.read())
             f.close()
         else:
-            dict = {}
-        dict[self.id] = [self["timestamp"],
-                         xml.etree.ElementTree.tostring(
-                         self.resp),
-                         self.originalKeys]
-        f = open(target, "w")
-        # f.write(bz2.compress(repr(self.cache)))
+            dict={}
+        dict[self.id]=[self["timestamp"],xml.etree.ElementTree.tostring(self.resp),self.originalKeys]
+        f=open(target,"w")
+        #f.write(bz2.compress(repr(self.cache)))
         f.write(repr(self.cache))
         f.close()
-
-    def loadCache(self, source):
-        if isinstance(source, dict):
-            dict = source
+        
+    def loadCache(self,source):
+        if isinstance(source,dict):
+            dict=source
         else:
             if os.path.isdir(source):
-                source = os.path.join(source, "esgfDatasetsCache.pckl")
+                source = os.path.join(source,"esgfDatasetsCache.pckl")
             if os.path.exists(source):
-                f = open(source)
-                # dict=eval(bz2.decompress(f.read()))
-                dict = eval(f.read())
+                f=open(source)
+                #dict=eval(bz2.decompress(f.read()))
+                dict=eval(f.read())
                 f.close()
             else:
-                dict = {}
-        vals = dict.get(self.id, ["", None, {}])
+                dict={}
+        vals = dict.get(self.id,["",None,{}])
         if vals[1] is not None:
-            self.cacheTime = vals[0]
-            self.resp = xml.etree.ElementTree.fromstring(vals[0])
-            self.originalKeys = vals[1]
-
+            self.cacheTime=vals[0]
+            self.resp=xml.etree.ElementTree.fromstring(vals[0])
+            self.originalKeys=vals[1]
+        
     def clearOriginalQueryCache(self):
-        self.originalKeys = {}
+        self.originalKeys={}
 
     def clear(self):
         self.clearWebCache()
         self.clearOriginalQueryCache()
-
-    def search(self, **keys):
-        # search = self.generateRequest(**keys)
-        stringType = keys.get("stringType", False)
+    
+    def search(self,**keys):
+        #search = self.generateRequest(**keys)
+        stringType=keys.get("stringType",False)
         keys.update(self.originalKeys)
-        st = ""
-        if "limit" not in keys:
-            keys["limit"] = [self["limit"]]
-        if "offset" not in keys:
-            keys["offset"] = [self["offset"]]
+        st=""
+        if not "limit" in keys:
+            keys["limit"]=[self["limit"]]
+        if not "offset" in keys:
+            keys["offset"]=[self["offset"]]
         for k in keys:
-            if k in ["searchString", "stringType", ]:
+            if k in ["searchString","stringType",]:
                 continue
             for v in keys[k]:
-                st += "&%s=%s" % (k, v)
-            # st+="&%s=%s" % (k,keys[k])
-        # if self.resp is None:
-            # self.resp = self._search("dataset_id=%s%s" %
-            # (self["id"],st),stringType=stringType)
-        self.resp = self._search(st, stringType=stringType)
+                st+="&%s=%s"%(k,v)    
+            #st+="&%s=%s" % (k,keys[k])
+        #if self.resp is None:
+            #self.resp = self._search("dataset_id=%s%s" % (self["id"],st),stringType=stringType)
+        self.resp = self._search(st,stringType=stringType)
         if stringType:
             return self.resp
-        return esgfFiles(self._extractFiles(self.resp, **keys), self)
+        return esgfFiles(self._extractFiles(self.resp,**keys),self)
 
 
 class esgfFiles(object):
-
-    def __init__(self, files, parent,
-                 mapping=None, datasetids=None, fileids=None):
-        self._files = files
-        if not isinstance(parent, esgfDataset):
+    def __init__(self,files,parent,mapping=None,datasetids=None,fileids=None):
+        self._files=files
+        if not isinstance(parent,esgfDataset):
             raise esgfFilesException("parent must be an esgfDataset instance")
-        self.parent = parent
+        self.parent=parent
         self.EsgfObjectException = esgfFilesException
         if datasetids is None:
-            datasetids = parent.datasetids
-        if isinstance(datasetids, genutil.StringConstructor):
-            self.datasetids = datasetids
-        elif isinstance(datasetids, str):
-            self.datasetids = genutil.StringConstructor(datasetids)
+            datasetids=parent.datasetids
+        if isinstance(datasetids,genutil.StringConstructor):
+            self.datasetids=datasetids
+        elif isinstance(datasetids,str):
+            self.datasetids=genutil.StringConstructor(datasetids)
         else:
-            self.datasetids = None
+            self.datasetids=None
         if fileids is not None:
-            if isinstance(fileids, genutil.StringConstructor):
-                self.fileids = fileids
+            if isinstance(fileids,genutil.StringConstructor):
+                self.fileids=fileids
             else:
-                self.fileids = genutil.StringConstructor(fileids)
+                self.fileids=genutil.StringConstructor(fileids)
             if self.datasetids is not None:
-                self.fileids.template = self.fileids.template.replace(
-                    "%(datasetid)", self.datasetids.template)
+                self.fileids.template=self.fileids.template.replace("%(datasetid)",self.datasetids.template)
         elif self.datasetids is not None:
-            self.fileids = genutil.StringConstructor(
-                "%s.%%(filename)" %
-                self.datasetids.template)
+            self.fileids=genutil.StringConstructor("%s.%%(filename)" % self.datasetids.template)
         else:
-            self.fileids = parent.fileids
+            self.fileids=parent.fileids
         if mapping is None:
-            mapping = parent.mapping
+            mapping=parent.mapping
         self.setMapping(mapping)
         self.remap()
-        self.projects_dict = {
-            "CMIP5": "%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)"  # noqa
-            }
-
-    def __getitem__(self, item):
-        if isinstance(item, int):
+        self.projects_dict = {"CMIP5": "%(project).%(product).%(institute).%(model).%(experiment).%(time_frequency).%(realm).%(cmor_table).%(ensemble)" }
+        
+    def __getitem__(self,item):
+        if isinstance(item,int):
             return self._files[item]
-        elif isinstance(item, str):
+        elif isinstance(item,str):
             for f in self._files:
-                if f["id"] == item:
+                if f["id"]==item:
                     return f
-        elif isinstance(item, slice):
+        elif isinstance(item,slice):
             return self._files[item]
         else:
             raise esgfFilesException("unknown item type: %s" % type(item))
-
-    def __setitem__(self, item):
+    def __setitem__(self,item):
         raise esgfFilesException("You cannot set items")
-
     def __len__(self):
         return len(self._files)
-
     def getMapping(self):
-        if isinstance(self.mapping, genutil.StringConstructor):
+        if isinstance(self.mapping,genutil.StringConstructor):
             return self.mapping.template
         else:
             return self.mapping
-
     def getMappingKeys(self):
-        if isinstance(self.mapping, genutil.StringConstructor):
+        if isinstance(self.mapping,genutil.StringConstructor):
             return self.mapping.keys()
         else:
             return None
-
-    def setMapping(self, mapping):
+    def setMapping(self,mapping):
         if mapping is None:
-            self.mapping = ""
+            self.mapping=""
             if self.datasetids is not None:
-                self.mapping = self.datasetids
+                self.mapping=self.datasetids
             else:
                 for k in self.parent.keys():
-                    if k not in ["limit", "offset", "text"]:
-                        self.mapping += "%%(%s)" % k
+                    if not k in ["limit","offset","text"]:
+                        self.mapping+="%%(%s)" % k
         else:
-            self.mapping = mapping
-        # print "Stage 1 mapping:",self.mapping
-        if not isinstance(self.mapping, genutil.StringConstructor):
+            self.mapping=mapping
+        #print "Stage 1 mapping:",self.mapping
+        if not isinstance(self.mapping,genutil.StringConstructor):
             if self.datasetids is not None:
-                self.mapping = self.mapping.replace(
-                    "%(datasetid)",
-                    self.datasetids.template)
+                self.mapping=self.mapping.replace("%(datasetid)",self.datasetids.template)
             self.mapping = genutil.StringConstructor(self.mapping)
-        # print "Stage 2:",self.mapping.template
-
-        # vk = self.parent.keys()
-        # for k in self.mapping.keys():
-        # ok = False
-        # if self.datasetids is not None:
-        # vk += self.datasetids.keys()
-        # if k in self.datasetids.keys():
-        # ok = True
-        # if self.fileids is not None:
-        # vk+=self.fileids.keys()
-        # if k in self.fileids.keys():
-        # ok = True
-        # if k in self.parent.keys():
-        # ok=True
-        # Ok second to last hope... Matching to datasetids
-        # if isinstance(self.datasetids,genutil.StringConstructor) and ok is False:
-        # try:
-        # mapid = self.datasetids.reverse(self.parent.id)
-        # vk+=mapid.keys()
-        # if k in mapid.keys():
-        # ok = True
-
-        # except:
-        # print "Couldn't map: %s to %s" % (self.parent.id,self.datasetids.template)
-        # pass
-        # if ok is False:
-        # vk = set(vk)
-        # raise self.EsgfObjectException("Invalid mapping key: %s, valid keys
-        # are: %s" % (k,sorted(vk)))
-
-    def remap(self, mapping=None, verbose=False):
+        #print "Stage 2:",self.mapping.template
+
+        ## vk = self.parent.keys()
+        ## for k in self.mapping.keys():
+        ##     ok = False
+        ##     if self.datasetids is not None:
+        ##         vk += self.datasetids.keys()
+        ##         if k in self.datasetids.keys():
+        ##             ok = True
+        ##     if self.fileids is not None:
+        ##         vk+=self.fileids.keys()
+        ##         if k in self.fileids.keys():
+        ##             ok = True
+        ##     if k in self.parent.keys():
+        ##         ok=True
+        ##     ## Ok second to last hope... Matching to datasetids
+        ##     if isinstance(self.datasetids,genutil.StringConstructor) and ok is False:
+        ##         try:
+        ##             mapid = self.datasetids.reverse(self.parent.id)
+        ##             vk+=mapid.keys()
+        ##             if k in mapid.keys():
+        ##                 ok = True
+                        
+        ##         except:
+        ##             #print "Couldn't map: %s to %s" % (self.parent.id,self.datasetids.template)
+        ##             pass
+        ##     if ok is False:
+        ##         vk = set(vk)
+        ##         raise self.EsgfObjectException("Invalid mapping key: %s, valid keys are: %s" % (k,sorted(vk)))
+            
+    def remap(self,mapping=None,verbose=False):
         if mapping is None:
             thismapping = self.mapping
         else:
             thismapping = mapping
-        self.mapped = {}
+        self.mapped={}
         savedmapping = thismapping
-        # print "Remap:",self.mapping.template
-        # if verbose: print "################ REMAPPING: %s: %s
-        # #############################" %
-        # (thismapping.template,repr(thismapping.keys()))
+        #print "Remap:",self.mapping.template
+        ## if verbose: print "################ REMAPPING: %s: %s #############################" % (thismapping.template,repr(thismapping.keys()))
         for f in self._files:
-            mappoint = self.mapped
-            tabs = ""
-            nok = 0
+            mappoint=self.mapped
+            tabs=""
+            nok=0
             nlevels = len(thismapping.keys())
-            # print "This mapping",thismapping.template,nlevels
+            #print "This mapping",thismapping.template,nlevels
             if nlevels == 0:
-                # ok no mapping, let's try to figure this one out
+                ## ok no mapping, let's try to figure this one out
                 if 'dataset_id_template_' in f.keys():
-                    # print "We are good to go"
-                    ds = f['dataset_id_template_'].replace(")s", ")")
+                    #print "We are good to go"
+                    ds = f['dataset_id_template_'].replace(")s",")")
                     thismapping = genutil.StringConstructor(ds)
             for k in thismapping.keys():
-                # if verbose: print tabs,"keys:",k,"File keys:",f.keys()
-                # if k == self.mapping.keys()[0]:
-                # f.matched.keys()
-                # else:
-                # if verbose: print
+                ## if verbose: print tabs,"keys:",k,"File keys:",f.keys()
+                ## if k == self.mapping.keys()[0]:
+                ##     f.matched.keys()
+                ## else:
+                ##     ## if verbose: print
                 if k in f.keys():
-                    # if verbose: print tabs,k,f[k]
-                    nok += 1
+                    ## if verbose: print tabs,k,f[k]
+                    nok+=1
                     cont = f[k]
-                    if not isinstance(cont, (str, int, float)):
+                    if not isinstance(cont,(str,int,float)):
                         break
-                    if cont not in mappoint.keys():
-                        mappoint[cont] = {}
+                    if not cont in mappoint.keys():
+                        mappoint[cont]={}
                 elif k in self.parent.keys():
-                    # if verbose: print tabs,k,f[k]
-                    nok += 1
+                    ## if verbose: print tabs,k,f[k]
+                    nok+=1
                     cont = self[k]
-                    if cont not in mappoint.keys():
-                        mappoint[cont] = {}
-                elif isinstance(self.fileids, genutil.StringConstructor):
+                    if not cont in mappoint.keys():
+                        mappoint[cont]={}
+                elif isinstance(self.fileids,genutil.StringConstructor):
                     try:
                         mapid = self.fileids.reverse(self.parent.id)
+                        ## if verbose:
+                            ## print "MAPID:",k,mapid
                         if k in mapid.keys():
-                            nok += 1
+                            ## if verbose: print tabs,k,mapid[k]
+                            nok+=1
                             cont = mapid[k]
-                            if cont not in mappoint.keys():
-                                mappoint[cont] = {}
+                            if not cont in mappoint.keys():
+                                mappoint[cont]={}
                     except:
                         break
                 else:
                     break
-                mappoint = mappoint[cont]
-                tabs += "\t"
-            tmp = mappoint.get("files", [])
+                mappoint=mappoint[cont]
+                tabs+="\t"
+            tmp = mappoint.get("files",[])
             tmp.append(f)
             mappoint["files"] = tmp
             thismapping = savedmapping
-        # if verbose: print "################ REMAPPED: %s
-        # #############################" % (thismapping,)
-
+        ## if verbose: print "################ REMAPPED: %s #############################" % (thismapping,)
 
 class esgfFile(object):
-
-    def __init__(self, **keys):
-        self.__items__ = keys
+    def __init__(self,**keys):
+        self.__items__=keys
         self.keys = self.__items__.keys
         self.items = self.__items__.items
         self.values = self.__items__.values
 
-        services = []
-        # print "Keys:",self.keys()
-        # print self["url"]
-        S = self["url"]
-        if isinstance(S, str):
-            S = [S, ]
+        services=[]
+        #print "Keys:",self.keys()
+        #print self["url"]
+        S=self["url"]
+        if isinstance(S,str):
+            S=[S,]
         for service in S:
-            url, s2, s1 = service.split("|")
-            setattr(self, s1, url)
+            url,s2,s1 = service.split("|")
+            setattr(self,s1,url)
             services.append(s1)
-        self.services = services
-        self.id = self["id"]
+        self.services=services
+        self.id=self["id"]
 
-    def __getitem__(self, key):
+    def __getitem__(self,key):
         val = self.__items__[key]
         return val
-
-    def __setitem__(self, key, value):
-        self.__items__[key] = value
+    
+    def __setitem__(self,key,value):
+        self.__items__[key]=value
         return
 
     def __str__(self):
-        st = "File Information\nid: %s\nParent Dataset: %s" % (
-            self["id"], self["dataset_id"])
-        st += "Matched keys: %s\n" % (repr(self.__items__))
+        st = "File Information\nid: %s\nParent Dataset: %s" % (self["id"],self["dataset_id"])
+        st+="Matched keys: %s\n" % (repr(self.__items__))
         for service in self.services:
-            st += "service: %s @ %s\n" % (service, getattr(self, service))
+            st+="service: %s @ %s\n" % (service,getattr(self,service))
         return st[:-1]
diff --git a/Packages/cdms2/Lib/selectors.py b/Packages/cdms2/Lib/selectors.py
index 9ec6c6721..b2ee04bc6 100644
--- a/Packages/cdms2/Lib/selectors.py
+++ b/Packages/cdms2/Lib/selectors.py
@@ -1,47 +1,42 @@
 
 """Classes to support easy selection of climate data"""
-from .axis import axisMatches
-from .error import CDMSError
-from .grid import AbstractRectGrid, defaultRegion, setRegionSpecs, LongitudeType, LatitudeType, TimeType, VerticalType
+import string, types, cdtime
+from axis import axisMatches
+from error import CDMSError
+from grid import AbstractRectGrid, defaultRegion, setRegionSpecs, LongitudeType, LatitudeType, TimeType, VerticalType
 
 _debug = 0
-
-
 class SelectorError (CDMSError):
-
     "The exception type for errors in the selector packages"
-
-    def __init__(self, args):
+    def __init__ (self, args):
         self.args = args
 
-
 class Selector:
-
     """Selector class"""
-
-    def __init__(self, *args, **kwargs):
+    def __init__ (self, *args, **kwargs):
         """Positional args are SelectorComponents or Selectors
          Keyword args and their value are passed to kwselect to create
-         selectors. All the selector components are put into the
+         selectors. All the selector components are put into the 
          components list of this Selector, along with all the components
          of any Selector arguments.
         """
         self.__components = []
         self.refine(*args, **kwargs)
         for a in args:
-            if isinstance(a, SelectorComponent):
+            if isinstance(a,SelectorComponent):
                 try:
-                    self.__str__ = a.__str__
+                    self.__str__=a.__str__
                 except:
                     pass
 
-    def components(self):
+
+    def components (self):
         "List of selector components, each an instance of SelectorComponent."
         return self.__components[:]
-
-    def refine(self, *args, **kwargs):
-        """Add components to this selector using the same syntax as the
-         constructor. Ignores non-keyword arguments that are not
+    
+    def refine (self, *args, **kwargs):        
+        """Add components to this selector using the same syntax as the 
+         constructor. Ignores non-keyword arguments that are not 
          SelectorComponents or Selectors.
         """
         for a in args:
@@ -51,27 +46,27 @@ class Selector:
                 self.__components.append(a)
             elif isinstance(a, Selector):
                 for x in a.components():
-                    self.refine(x)
+                    self.refine(x)  
             else:
                 self.refine(positionalComponent(a))
 
         for k, v in kwargs.items():
-            self.refine(kwselect(k, v))
-
-    def __repr__(self):
+               self.refine(kwselect(k, v))
+ 
+    def __repr__ (self):
         s = 'Selector('
         sep = ''
         for c in self.__components:
             s = s + sep + repr(c)
             sep = ', '
         return s + ')'
-
-    def __and__(self, other):
-        """Implements the & operator, which returns
+    
+    def __and__ (self, other):
+        """Implements the & operator, which returns 
            self.clone() refined by other
         """
         if not isinstance(other, Selector):
-            raise SelectorError('Cannot combine Selector with non-selector')
+            raise SelectorError, 'Cannot combine Selector with non-selector'
         s = self.clone()
         s.refine(other)
         return s
@@ -80,7 +75,7 @@ class Selector:
         "Makes a copy of this Selector."
         return Selector(*self.__components)
 
-    def __call__(self, *args, **kwargs):
+    def __call__ (self, *args, **kwargs):
         """Return a new selector consisting of this one refined by the given arguments.
            Arguments are as per the constructor or method refine.
         """
@@ -97,31 +92,31 @@ class Selector:
            Options modify the result of the selection. The options and
            their default values are:
                -- raw = 0: if 1, return an numpy.ma only
-               -- squeeze = 0:  If 1, eliminate any dimensions of length 1
+               -- squeeze = 0:  If 1, eliminate any dimensions of length 1 
                                 from the result.
-               -- order = None: If given, is a string such as
+               -- order = None: If given, is a string such as 
                                 variable.getOrder()
                                 returns. Result is permuted into this order.
-               -- grid = None:  If given, is a grid object; result is
+               -- grid = None:  If given, is a grid object; result is 
                                 regridded onto this grid.
            Each of the components contributes arguments suitable for the
-           subRegion call in class cdms.AbstractVariable. If a component
+           subRegion call in class cdms.AbstractVariable. If a component 
            is to modify the same axis as a previous component, its application
            is postponed. subRegion is called and the result is then fed
-           to each of the components' "post" method. This returns a
+           to each of the components' "post" method. This returns a 
            possibly modified result, which becomes the input to the next
-           component's post method.
+           component's post method. 
 
            This procedure is repeated until no more components are postponed.
            Then the options are applied to the result in the order
-           listed above, and the result is returned.
+           listed above, and the result is returned. 
 
            Execption SelectorError is thrown if the selection is
            impossible.
 
            The result is a TransientVariable and id(variable) <> id(result)
            even if there are no components.
-        """
+        """  
         d = kwargs.copy()
         raw = d.setdefault('raw', 0)
         squeeze = d.setdefault('squeeze', 0)
@@ -130,14 +125,13 @@ class Selector:
         del d['squeeze'], d['grid'], d['order'], d['raw']
         # make the selector
         s = self(*args, **d)
-        return s.unmodified_select(variable,
-                                   squeeze=squeeze,
-                                   order=order,
-                                   grid=grid,
+        return s.unmodified_select(variable, 
+                                   squeeze=squeeze, 
+                                   order=order, 
+                                   grid=grid, 
                                    raw=raw)
 
-    def unmodified_select(
-            self, variable, raw=0, squeeze=0, order=None, grid=None):
+    def unmodified_select(self, variable, raw=0, squeeze=0, order=None, grid=None):
         "Select using this selector without further modification"
         result = variable
         components = self.components()
@@ -149,80 +143,71 @@ class Selector:
             newcomponents = []
             specs = defaultRegion()
             for c in components:
-                if c.specifyGrid(variable, vargrid, specs):  # specs is modified
+                if c.specifyGrid(variable, vargrid, specs): # specs is modified
                     newcomponents.append(c)
             components = newcomponents
             if specs != defaultRegion():
                 vgindices = result.getGridIndices()
                 mask, indexspecs = vargrid.intersect(specs)
                 result = result(**indexspecs)
-                result = result.setMaskFromGridMask(
-                    mask,
-                    vgindices)  # Propagate the grid mask to result
+                result = result.setMaskFromGridMask(mask, vgindices) # Propagate the grid mask to result
 
         # Now select on non-coordinate components.
         while(components):
             axes = result.getAxisList()
-            if _debug:
-                print "Axes:", axes
-            specifications = [':'] * len(axes)
-            confined_by = [None] * len(axes)
-            aux = {}  # for extra state
+            if _debug: print "Axes:", axes
+            specifications = [':']*len(axes)
+            confined_by = [None]*len(axes)
+            aux = {} # for extra state 
             overflow = []
-            if _debug:
-                print "Component list:", components
+            if _debug: print "Component list:", components
             for c in components:
                 if c.specify(result, axes, specifications, confined_by, aux):
-                    if _debug:
-                        print 'Defer ' + repr(c)
+                    if _debug: print 'Defer ' + repr(c)
                     overflow.append(c)
                 elif _debug:
                     print "After applying", c, ":"
-                    print "specifications=", specifications
+                    print  "specifications=", specifications
                     print "Confined_by", confined_by
                     print "aux", aux
                     print "-----------------"
-            if _debug:
+            if _debug: 
                 print 'About to call subRegion:', specifications
             fetched = result.subRegion(*specifications)
             axismap = range(len(axes))
             for c in components:
-                if c in overflow:
-                    continue
-                fetched = c.post(fetched, result, axes, specifications,
+                if c in overflow: continue
+                fetched = c.post(fetched, result, axes, specifications, 
                                  confined_by, aux, axismap)
             if not len(overflow) < len(components):
-                raise SelectorError(
-                    'Internal selector error, infinite loop detected.')
+                raise SelectorError, \
+                  'Internal selector error, infinite loop detected.'
             components = overflow
             result = fetched
 
         if squeeze != 0 or \
            order is not None or \
            grid is not None or \
-           raw != 0 or \
-           result is variable:
-            # result is variable when there are no components, for example.
+           raw !=0 or \
+           result is variable: 
+     # result is variable when there are no components, for example.
             return result.subRegion(squeeze=squeeze, order=order, grid=grid,
                                     raw=raw)
         else:
             return result
-
-
+             
 class SelectorComponent:
-
     """Base class representing selection for a given set of axes.
     """
-
-    def specify(self, slab, axes, specifications, confined_by, aux):
-        """Refine the specification suitable for slab.subRegion
+    def specify (self, slab, axes, specifications, confined_by, aux):
+        """Refine the specification suitable for slab.subRegion 
            Set confined_by to yourself for each axis you confine.
-           If you would normally confine an axis to ':', don't,
+           If you would normally confine an axis to ':', don't, 
            unless you *require* that axis not be confined by other
            components.
-
+           
            Returning:
-              Return 1 if you wish to skip your turn. You'll be called
+              Return 1 if you wish to skip your turn. You'll be called 
               later with the results of the other selectors.
 
               Raise a SelectorError exception if you can't do your job.
@@ -233,10 +218,10 @@ class SelectorComponent:
               Store any info you want in dictionary aux[id(self)]
         """
         return 0
-
+    
     def specifyGrid(self, var, grid, specs):
         """Refine the specification suitable for grid.intersect().
-
+        
         'var' is a variable.
         'grid' is the grid associated with the variable.
         'specs' is the result set of specifications, of the form defined in the grid module.
@@ -250,53 +235,47 @@ class SelectorComponent:
         """
         return 1
 
-    def post(self, fetched, slab, axes,
-             specifications, confined_by, aux, axismap):
+    def post (self, fetched, slab, axes, specifications, confined_by, aux, axismap):
         """Post-process fetched if desired, return new value.
-           Arguments slab, axes, specifications, confined_by, and aux are
-           pre-subRegion call.
-
-           axismap gives the indices of fetched's axes in axes and should
+           Arguments slab, axes, specifications, confined_by, and aux are 
+           pre-subRegion call. 
+           
+           axismap gives the indices of fetched's axes in axes and should 
            be modified as required by this method.  Set axismap[i] to None to
            indicate that you have eliminated an axis.
         """
         return fetched
-
-
+    
 class axisComponent (SelectorComponent):
-
     "A SelectorComponent that confines exactly one axis or coordinate dimension (e.g. latitude)."
-
-    def __init__(self, id, spec):
+    def __init__ (self, id, spec):
         self.id = id
         self.spec = spec
 
-    def specify(self, slab, axes, specifications, confined_by, aux):
+    def specify (self, slab, axes, specifications, confined_by, aux):
         "Do specification for axis self.id; skip if axis not present."
         for i in range(len(axes)):
             if axisMatches(axes[i], self.id):
-                if confined_by[i] is None:
-                    specifications[i] = self.spec
-                    confined_by[i] = self
-                    return 0
-                else:
-                    return 1
+               if confined_by[i] is None:
+                   specifications[i] = self.spec
+                   confined_by[i] = self
+                   return 0
+               else:
+                   return 1
         return 0
 
-    def __repr__(self):
-        s = repr(self.__class__) + \
-            '("' + self.id + '", ' + repr(self.spec) + ')'
+    def __repr__ (self):
+        s = repr(self.__class__)+'("'+self.id+'", '+repr(self.spec) + ')'
         return s
-
-
+        
 class coordinateComponent(axisComponent):
-
     "A SelectorComponent that confines exactly one coordinate dimension (e.g., latitude)"
 
     def __init__(self, id, spec):
         axisComponent.__init__(self, id, spec)
 
     def specifyGrid(self, var, grid, specs):
+
         "Determine if this component confines the grid, and if so set the specs and return 1"
         if grid.hasCoordType(self.id):
             setRegionSpecs(grid, self.spec, self.id, specs)
@@ -304,52 +283,42 @@ class coordinateComponent(axisComponent):
         else:
             return 1
 
-
 class requiredComponent (SelectorComponent):
-
     """Checks to see that a specific id axis must be present."""
-
-    def __init__(self, ids):
+    def __init__ (self, ids):
         """Checks to see that a specific axis or axes must be present.
            Initialize with a sequence of ids.
         """
         self.ids = ids
-
-    def specify(self, slab, axes, specifications, confined_by, aux):
+        
+    def specify (self, slab, axes, specifications, confined_by, aux):
         """Doesn't confine but checks for existance."""
         for id in self.ids:
             for i in range(len(axes)):
                 if axisMatches(axes[i], id):
                     break
             else:
-                raise SelectorError(
-                    'Required axis %s not present in this variable.' %
-                    (id,))
+                raise SelectorError, \
+                      'Required axis %s not present in this variable.' % (id,)
         return 0
 
-
 class indexComponent (axisComponent):
-
-    """An axisComponent that confines exactly one axis by
-       specifying indices.
+    """An axisComponent that confines exactly one axis by 
+       specifying indices. 
     """
-
-    def __init__(self, id, start=None, stop=None, stride=None):
+    def __init__ (self, id, start=None, stop=None, stride=None):
         self.id = id
-        self.spec = slice(start, stop, stride)
-
+        self.spec = slice(start,stop, stride)
 
 class indexedComponent (SelectorComponent):
-
-    """A SelectorComponent that confines exactly one axis
-       whose index is given.
+    """A SelectorComponent that confines exactly one axis  
+       whose index is given. 
     """
-
-    def __init__(self, index, value):
+    def __init__ (self, index, value):
         self.index = index
         self.spec = value
 
-    def specify(self, slab, axes, specifications, confined_by, aux):
+    def specify (self, slab, axes, specifications, confined_by, aux):
         "Do the specification for axis whose index is self.index."
         i = self.index
         if confined_by[i] is None:
@@ -359,17 +328,15 @@ class indexedComponent (SelectorComponent):
         else:
             return 1
 
-
 class positionalComponent (SelectorComponent):
-
     """A SelectorComponent that confines the next axis available.
     """
-
-    def __init__(self, v):
+    def __init__ (self, v):
         self.v = v
 
-    def specify(self, slab, axes, specifications, confined_by, aux):
+    def specify (self, slab, axes, specifications, confined_by, aux):
         "Find the next unconfined axis and confine it."
+        n = 0
         for i in range(len(axes)):
             if confined_by[i] is None:
                 specifications[i] = self.v
@@ -377,74 +344,65 @@ class positionalComponent (SelectorComponent):
                 aux[id(self)] = i
                 return 0
         else:
-            raise SelectorError('positional component cannot be applied, insufficent rank:' +
-                                repr(self))
+            raise SelectorError, \
+            'positional component cannot be applied, insufficent rank:' +\
+             repr(self)
 
-    def __repr__(self):
+    def __repr__ (self):
         s = repr(self.__class__) + '(' + repr(self.v) + ')'
         return s
-
-
-def longitude(*value):
+    
+def longitude (*value):
     "Creates default selector corresponding to keyword longitude = value"
     if not value:
         return all
     if len(value) == 1:
         value = value[0]
-    if value == ':':
-        return all
+    if value == ':': return all
     return Selector(coordinateComponent(LongitudeType, value))
-
-
-def latitude(*value):
+    
+def latitude (*value):
     "Creates default selector corresponding to keyword latitude = value"
     if not value:
         return all
     if len(value) == 1:
         value = value[0]
-    if value == ':':
-        return all
+    if value == ':': return all
     return Selector(coordinateComponent(LatitudeType, value))
-
-
-def time(*value):
+    
+def time (*value):
     """Creates a default selector corresponding to keyword time=value
     """
     if not value:
         return all
     if len(value) == 1:
         value = value[0]
-    if value == ':':
-        return all
+    if value == ':': return all
     return Selector(coordinateComponent(TimeType, value))
 
-
-def level(*value):
+def level (*value):
     "Creates default selector corresponding to keyword level = value"
     if not value:
         return all
     if len(value) == 1:
         value = value[0]
-    if value == ':':
-        return all
+    if value == ':': return all
     return Selector(coordinateComponent(VerticalType, value))
 
-
 def required(values):
     """Creates a selector that requires a certain axis to be present."""
     if values is None:
         return all
-    if isinstance(values, basestring):
+    if isinstance(values, types.StringType):
         values = (values,)
     return Selector(requiredComponent(values))
 
-
-def kwselect(k, value):
+def kwselect (k, value):
     """Turn a keyword/value pair into a SelectorComponent
        The words latitude, longitude, time, and level are
        used to pass value to the routine of the same name.
        Otherise, axis is called using k as the id.
-    """
+    """ 
     kx = k[0:3].lower()
     if kx == 'lat':
         return latitude(value)
@@ -458,25 +416,17 @@ def kwselect(k, value):
         return required(value)
     else:
         return Selector(requiredComponent((k,)), axisComponent(k, value))
-
+    
 all = Selector()
 
-
-def timeslice(start=None, stop=None, stride=None):
+def timeslice (start=None,stop=None,stride=None):
     return Selector(indexComponent('time', start, stop, stride))
-
-
-def latitudeslice(start=None, stop=None, stride=None):
+def latitudeslice (start=None,stop=None,stride=None):
     return Selector(indexComponent('latitude', start, stop, stride))
-
-
-def longitudeslice(start=None, stop=None, stride=None):
+def longitudeslice (start=None,stop=None,stride=None):
     return Selector(indexComponent('longitude', start, stop, stride))
-
-
-def levelslice(start=None, stop=None, stride=None):
+def levelslice (start=None,stop=None,stride=None):
     return Selector(indexComponent('level', start, stop, stride))
-
-
-def setslice(id, start=None, stop=None, stride=None):
+def setslice (id, start=None,stop=None,stride=None):
     return Selector(indexComponent(id, start, stop, stride))
+
diff --git a/Packages/cdms2/Lib/slabinterface.py b/Packages/cdms2/Lib/slabinterface.py
index a0fc8910e..e08b49ea5 100644
--- a/Packages/cdms2/Lib/slabinterface.py
+++ b/Packages/cdms2/Lib/slabinterface.py
@@ -1,67 +1,64 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
-# Further modified to be pure new numpy June 24th 2008
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
+## Further modified to be pure new numpy June 24th 2008
 
 "Read part of the old cu slab interface implemented over CDMS"
 import numpy
-import sys
-from .error import CDMSError
-from .axis import std_axis_attributes
+import string, types, sys
+from error import CDMSError
+from axis import std_axis_attributes
 import cdms2 as cdms
 
-
 class Slab:
-
     """Slab is the cu api
        This is an abstract class to inherit in AbstractVariable
        About axes:
        weight and bounds attributes always set but may be None
-       if bounds are None, getdimattribute returns result of querying the
+       if bounds are None, getdimattribute returns result of querying the 
        axis.
     """
     std_slab_atts = ['filename',
-                     'missing_value',
-                     'comments',
-                     'grid_name',
-                     'grid_type',
-                     'time_statistic',
-                     'long_name',
-                     'units']
-
-    def __init__(self):
+                'missing_value',
+                'comments',
+                'grid_name',
+                'grid_type',
+                'time_statistic',
+                'long_name',
+                'units']
+    def __init__ (self):
         pass
 
-    def getattribute(self, name):
+    def getattribute (self, name):
         "Get the attribute name."
-        defaultdict = {'filename': 'N/A',
-                       'comments': '',
-                       'grid_name': 'N/A',
-                       'grid_type': 'N/A',
-                       'time_statistic': '',
-                       'long_name': '',
-                       'units': ''}
+        defaultdict = {'filename':'N/A',
+                       'comments':'',
+                       'grid_name':'N/A',
+                       'grid_type':'N/A',
+                       'time_statistic':'',
+                       'long_name':'',
+                       'units':''}
         result = None
-        if name in defaultdict.keys() and not hasattr(self, name):
-            if name == 'filename':
-                if (not hasattr(self, 'parent')) or self.parent is None:
+        if name in defaultdict.keys() and not hasattr(self,name):
+            if name=='filename':
+                if (not hasattr(self,'parent')) or self.parent is None:
                     result = ''
                 else:
                     result = self.parent.id
 
-            elif name == 'grid_name':
+            elif name=='grid_name':
                 grid = self.getGrid()
                 if grid is None:
                     result = defaultdict[name]
                 else:
                     result = grid.id
-            elif name == 'grid_type':
+            elif name=='grid_type':
                 grid = self.getGrid()
                 if grid is None:
                     result = defaultdict[name]
-                elif isinstance(grid, cdms.grid.TransientRectGrid):
+                elif isinstance(grid,cdms.grid.TransientRectGrid):
                     result = grid.getType()
-                elif isinstance(grid, cdms.gengrid.AbstractGenericGrid):
+                elif isinstance(grid,cdms.gengrid.AbstractGenericGrid):
                     result = 'GenericGrid'
-                elif isinstance(grid, cdms.hgrid.AbstractCurveGrid):
+                elif isinstance(grid,cdms.hgrid.AbstractCurveGrid):
                     result = 'CurvilinearGrid'
             else:
                 result = defaultdict[name]
@@ -70,23 +67,23 @@ class Slab:
                 result = getattr(self, name)
             except AttributeError:
                 result = None
-
+            
         return result
 
-    def setattribute(self, name, value):
+    def setattribute (self, name, value):
         "Set the attribute name to value."
         setattr(self, name, value)
 
-    def createattribute(self, name, value):
+    def createattribute (self, name, value):
         "Create an attribute and set its name to value."
         setattr(self, name, value)
 
-    def deleteattribute(self, name):
+    def deleteattribute (self, name):
         "Delete the named attribute."
         if hasattr(self, name):
             delattr(self, name)
 
-    def listattributes(self):
+    def listattributes (self):
         "Return a list of attribute names."
         return self.attributes.keys()
 
@@ -95,11 +92,10 @@ class Slab:
         a = self.getAxis(dim)
         result = []
         for x in std_axis_attributes + a.attributes.keys():
-            if x not in result:
-                result.append(x)
+            if not x in result: result.append(x)
         return result
 
-    def getdimattribute(self, dim, field):
+    def getdimattribute (self, dim, field):
         """Get the attribute named field from the dim'th dimension.
          For bounds returns the old cu one-dimensional version.
         """
@@ -125,37 +121,38 @@ class Slab:
                 return g.getWeights()[0]
             elif d.isLongitude():
                 return g.getWeights()[1]
-            else:  # should be impossible, actually
+            else: #should be impossible, actually
                 return numpy.ones(len(d))
 
         elif field == "bounds":
             b = d.getBounds()
             n = b.shape[0]
-            result = numpy.zeros(n + 1, b.dtype.char)
-            result[0:-1] = b[:, 0]
-            result[-1] = b[-1, 1]
+            result = numpy.zeros(n+1, b.dtype.char)
+            result[0:-1] = b[:,0]
+            result[-1] = b[-1,1]
             return result
-        elif field in d.attributes:
+        elif d.attributes.has_key(field):
             return d.attributes[field]
         else:
-            raise CDMSError("No %s attribute on given axis." % field)
-
+            raise CDMSError, "No %s attribute on given axis." % field
+            
+          
     def showdim(self):
-        "Show the dimension attributes and values."
+        "Show the dimension attributes and values." 
         result = []
         for nd in range(self.rank()):
-            result.append('** Dimension ' + str(nd + 1) + ' **')
+            result.append('** Dimension ' + str(nd+1) + ' **')
             result = result + self.getAxis(nd).listall(1)
-        print '\n'.join(result)
+        print string.join(result, '\n')
 
     def listdimnames(self):
         "Return a list of the names of the dimensions."
-        result = []
+        result=[]
         for nd in range(self.rank()):
             result.append(self.getdimattribute(nd, 'name'))
         return result
 
-    def listall(self, all=None):
+    def listall (self, all=None):
         "Get list of info about this slab."
         vname = self.id
         result = []
@@ -165,10 +162,8 @@ class Slab:
         for x in Slab.std_slab_atts:
             result.append(x + ": " + str(self.getattribute(x)))
         for x in self.attributes.keys():
-            if x in Slab.std_slab_atts:
-                continue
-            if x == 'name':
-                continue
+            if x in Slab.std_slab_atts: continue
+            if x == 'name': continue
             result.append(x + ": " + str(self.attributes[x]))
         g = self.getGrid()
         if g is None:
@@ -176,29 +171,29 @@ class Slab:
         else:
             result = result + g.listall(all)
         for nd in range(self.rank()):
-            result.append('** Dimension ' + str(nd + 1) + ' **')
+            result.append('** Dimension ' + str(nd+1) + ' **')
             result = result + self.getAxis(nd).listall(all)
         result.append('*** End of description for %s ***' % vname)
         return result
 
     def info(self, flag=None, device=None):
         "Write info about slab; include dimension values and weights if flag"
-        if device is None:
-            device = sys.stdout
-        device.write('\n'.join(self.listall(all=flag)))
+        if device is None: device = sys.stdout
+        device.write(string.join(self.listall(all=flag), "\n"))
         device.write("\n")
 
-
-def cdms_bounds2cu_bounds(b):
+def cdms_bounds2cu_bounds (b):
     "Bounds are  len(v) by 2 in cdms but len(v)+1 in cu"
-    cub = numpy.ma.zeros(len(b) + 1, numpy.float32)
-    if len(b) > 1:
-        if (b[0, 0] < b[0, 1]) == (b[0, 0] < b[-1, 0]):
-            cub[0] = b[0, 0]
-            cub[1:] = b[:, 1]
+    cub = numpy.ma.zeros(len(b)+1, numpy.float32)
+    b1 = b.astype(numpy.float32)
+    if len(b)>1:
+        if (b[0,0]<b[0,1]) == (b[0,0]<b[-1,0]):
+            cub[0] = b[0,0]
+            cub[1:] = b[:,1]
         else:
-            cub[0] = b[0, 1]
-            cub[1:] = b[:, 0]
+            cub[0] = b[0,1]
+            cub[1:] = b[:,0]
     else:
         cub[:] = b[0]
-    return numpy.array(cub)
+    return numpy.array( cub )
+    
diff --git a/Packages/cdms2/Lib/sliceut.py b/Packages/cdms2/Lib/sliceut.py
index 4b8b55821..e69e83262 100644
--- a/Packages/cdms2/Lib/sliceut.py
+++ b/Packages/cdms2/Lib/sliceut.py
@@ -6,41 +6,41 @@
 # Returns a slice, or None if the intersection is empty.
 
 
-def sliceIntersect(aSlice, interval):
-    p0, p1 = interval
+def sliceIntersect(aSlice,interval):
+    p0,p1 = interval
     i = aSlice.start
     j = aSlice.stop
     k = aSlice.step
     if k is None:
-        k = 1
+        k=1
 
     # If the slice has a negative step, generate the
     # equivalent slice with positive step
-    irev = 0
-    if k < 0:
+    irev=0
+    if k<0:
         k = -k
-        pk = ((j - i + k) / k) * k + i
-        j = i + 1
+        pk = ((j-i+k)/k)*k+i
+        j = i+1
         i = pk
         irev = 1
 
     # Calculate the intersection for an increasing slice
-    px = ((p0 - i + k - 1) / k) * k + i
-    a = max(px, i)
-    b = min(j, p1)
-    if a < b:
-        if k == 1:
-            newSlice = slice(a, b)
+    px = ((p0-i+k-1)/k)*k+i
+    a = max(px,i)
+    b = min(j,p1)
+    if a<b:
+        if k==1:
+            newSlice = slice(a,b)
         else:
-            newSlice = slice(a, b, k)
+            newSlice = slice(a,b,k)
     else:
         newSlice = None
 
     # Reverse the slice if necessary
-    if irev == 1 and newSlice is not None:
-        px = -((-b + a + k) / k * k - a)
-        newSlice = slice(px, a - 1, -k)
-
+    if irev==1 and newSlice is not None:
+        px = -((-b+a+k)/k*k-a)
+        newSlice = slice(px,a-1,-k)
+    
     return newSlice
 
 # Intersect a slice with a partition. The partition is a list of
@@ -50,23 +50,21 @@ def sliceIntersect(aSlice, interval):
 # in the same order as in the partition. If the intersection is empty,
 # the result is an empty list.
 
-
-def slicePartition(aSlice, partition):
+def slicePartition(aSlice,partition):
     result = []
     for interval in partition:
-        intslice = sliceIntersect(aSlice, interval)
+        intslice = sliceIntersect(aSlice,interval)
         if intslice is not None:
-            result.append((interval, intslice))
+            result.append((interval,intslice))
     return result
 
-
 def lenSlice(aSlice):
     "Return the number of values associated with a slice"
 
     step = aSlice.step
     if step is None:
         step = 1
-    if step > 0:
+    if step>0:
         start = aSlice.start
         stop = aSlice.stop
     else:
@@ -74,131 +72,121 @@ def lenSlice(aSlice):
         stop = aSlice.start
         step = -step
 
-    return ((stop - start - 1) / step + 1)
-
+    return ((stop-start-1)/step + 1)
 
-def reverseSlice(s, size):
+def reverseSlice(s,size):
     """For 'reversed' slices (slices with negative stride),
     return an equivalent slice with positive step. For positive
     strides, just return the slice unchanged.
     """
-    if s.step > 0 or s.step is None:
+    if s.step>0 or s.step is None:
         return s
 
     i = s.start
     j = s.stop
     k = s.step
     if i is None:
-        i = size - 1
-    elif i < 0:
-        i = i % size
+        i=size-1
+    elif i<0:
+        i = i%size
     if j is None:
-        j = -1
-    elif -size - 1 < j < 0:
-        j = j % size
-
-    if i < -size or j < -size - 1:
-        raise 'Invalid slice', repr(s)
+        j=-1
+    elif -size-1<j<0:
+        j = j%size
+    
+    if i<-size or j<-size-1:
+        raise 'Invalid slice',`s`
 
     k = -k
-    pk = ((j - i + k) / k) * k + i
-    j = i + 1
-    i = pk % size
+    pk = ((j-i+k)/k)*k+i
+    j = i+1
+    i = pk%size
 
-# if j==size:
-# j = None
+##     if j==size:
+##         j = None
 
-    return slice(i, j, k)
+    return slice(i,j,k)
 
 
-def splitSlice(s, size):
+def splitSlice(s,size):
     """For a 'wraparound' slice, return two equivalent slices
     within the range 0..size-1."""
-    i, j, k = s.start, s.stop, s.step
-    if k > 0:
-        wrap1 = slice(i, size, k)
-        wrap2 = slice((i - size) % k, j - size, k)
+    i,j,k = s.start,s.stop,s.step
+    if k>0:
+        wrap1 = slice(i,size,k)
+        wrap2 = slice((i-size)%k, j-size, k)
     else:
-        wrap1 = slice(i - size, None, k)
-        wrap2 = slice(size + (i - size) % k, j, k)
-    return (wrap1, wrap2)
+        wrap1 = slice(i-size, None, k)
+        wrap2 = slice(size+(i-size)%k, j, k)
+    return (wrap1,wrap2)
 
 
-def splitSliceExt(s, size):
+def splitSliceExt(s,size):
     """
     mf 20010330 --
     For a 'wraparound' slice, return N equivalent slices
     within the range 0...(N*size) N = anything"""
-    i, j, k = s.start, s.stop, s.step
+    i,j,k = s.start,s.stop,s.step
 
     # slice of form [i:] sets j to large int
-    if j > 2000000000:
+    if j>2000000000L:
         j = size
 
-    _debug = 0
-    if(_debug):
-        print "SSSS0: ", i, j, k
-
-    wrap = []
-
-    if k > 0:
-
-        iter = 0
-        if(_debug):
-            print "SSSS1: iter ", iter, j, size, k
-        while(j > 0):
-            if(_debug):
-                print " "
-            if(_debug):
-                print "SSSS2: iter", iter, j, size, k
-            jo = size
-            if(iter > 0):
-                jo = size + 1
-            if(_debug):
-                print "SSSS3: iter", iter, j, jo
-            if(j < size):
-                jo = j
-            if(_debug):
-                print "SSSS4: iter", iter, j, jo
-            wrap.append(slice(i, jo, k))
-            j = j - size
-            i = 0
-            iter = iter + 1
-
+    _debug=0
+    if(_debug): print "SSSS0: ",i,j,k
+
+    wrap=[]
+
+    if k>0:
+
+        iter=0
+        if(_debug): print "SSSS1: iter ",iter,j,size,k
+        while(j>0):
+            if(_debug): print " "
+            if(_debug): print "SSSS2: iter",iter,j,size,k
+            jo=size
+            if(iter>0): jo=size+1
+            if(_debug): print "SSSS3: iter",iter,j,jo
+            if(j<size): jo=j
+            if(_debug): print "SSSS4: iter",iter,j,jo
+            wrap.append(slice(i,jo,k))
+            j=j-size
+            i=0
+            iter=iter+1
+            
     else:
 
-        wraprev = []
-        iter = 0
-        if(_debug):
-            print "SSSS1 neg: iter ", iter, i, j, size, k
-        while(i >= 0):
-            if(_debug):
-                print " "
-            if(_debug):
-                print "SSSS2 neg: iter", iter, i, j, size, k
-            io = size - 1
-            if(_debug):
-                print "SSSS3 neg: iter", iter, i, j, io
-            if(i < size):
-                io = i
-            if(_debug):
-                print "SSSS4 neg: iter", iter, i, j, io
-
-            # mf 20010405 python does not return nothing for
-            # slice(size-1,size-1,-1); force it
-            if(not (io == size - 1 and j == size - 1)):
-                wraprev.append(slice(io, j, k))
-
-            i = i - size
-            j = None
-            iter = iter + 1
+        wraprev=[]
+        iter=0
+        if(_debug): print "SSSS1 neg: iter ",iter,i,j,size,k
+        while(i>=0):
+            if(_debug): print " "
+            if(_debug): print "SSSS2 neg: iter",iter,i,j,size,k
+            io=size-1
+            if(_debug): print "SSSS3 neg: iter",iter,i,j,io
+            if(i<size): io=i
+            if(_debug): print "SSSS4 neg: iter",iter,i,j,io
+            
+            # mf 20010405 python does not return nothing for slice(size-1,size-1,-1); force it
+            if( not ( io==size-1 and j==size-1 ) ):
+                wraprev.append(slice(io,j,k))
+            
+            i=i-size
+            j=None
+            iter=iter+1
         #
         # reverse
         #
-        for k in range(0, len(wraprev)):
-            kk = len(wraprev) - k - 1
+        for k in range(0,len(wraprev)):
+            kk=len(wraprev)-k-1
             wrap.append(wraprev[kk])
-            if(_debug):
-                print "SSSS5 neg: ", kk, wraprev[kk]
+            if(_debug): print "SSSS5 neg: ",kk,wraprev[kk]
 
     return (wrap)
+
+
+
+
+
+
+
diff --git a/Packages/cdms2/Lib/tvariable.py b/Packages/cdms2/Lib/tvariable.py
index c78ab81ca..152875adf 100644
--- a/Packages/cdms2/Lib/tvariable.py
+++ b/Packages/cdms2/Lib/tvariable.py
@@ -1,4 +1,4 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
 # Further modified to be pure new numpy June 24th 2008
 
 """
@@ -8,18 +8,19 @@ Contains also the write part of the old cu interface.
 """
 import json
 import re
-from . import typeconv
+import types
+import typeconv
 import numpy
 from numpy import sctype2char
-from .error import CDMSError
-from .avariable import AbstractVariable
+from error import CDMSError
+from avariable import AbstractVariable
 
-from .axis import createAxis, AbstractAxis
-from .grid import createRectGrid, AbstractRectGrid
-from .hgrid import AbstractCurveGrid
-from .gengrid import AbstractGenericGrid
+from axis import createAxis, AbstractAxis
+from grid import createRectGrid, AbstractRectGrid
+from hgrid import AbstractCurveGrid
+from gengrid import AbstractGenericGrid
 
-# dist array support
+# dist array support 
 HAVE_MPI = False
 try:
     from mpi4py import MPI
@@ -30,43 +31,38 @@ except:
 
 id_builtin = id  # built_in gets clobbered by keyword
 
-
 def fromJSON(jsn):
     """ Recreate a TV from a dumped jsn object"""
     D = json.loads(jsn)
 
-    # First recreates the axes
-    axes = []
+    ## First recreates the axes
+    axes=[]
     for a in D["_axes"]:
-        ax = createAxis(
-            numpy.array(a["_values"],
-                        dtype=a["_dtype"]),
-            id=a["id"])
-        for k, v in a.iteritems():
-            if k not in ["_values", "id", "_dtype"]:
-                setattr(ax, k, v)
+        ax = createAxis(numpy.array(a["_values"],dtype=a["_dtype"]),id=a["id"])
+        for k,v in a.iteritems():
+            if not k in ["_values","id","_dtype"]:
+                setattr(ax,k,v)
         axes.append(ax)
-    # Now prep the variable
-    V = createVariable(D["_values"], id=D["id"], typecode=D["_dtype"])
+    ## Now prep the variable
+    V= createVariable(D["_values"],id=D["id"],typecode=D["_dtype"])
     V.setAxisList(axes)
-    for k, v in D.iteritems():
-        if k not in ["id", "_values", "_axes", "_grid", "_fill_value", "_dtype", ]:
-            setattr(V, k, v)
+    for k,v in D.iteritems():
+        if not k in ["id","_values","_axes","_grid","_fill_value","_dtype",]:
+            setattr(V,k,v)
     V.set_fill_value(D["_fill_value"])
     return V
 
 
-class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
-
+class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
     "An in-memory variable."
     variable_count = 0
     _missing = numpy.ma.MaskedArray.fill_value
 
+
     def _getShape(self):
         return self._data.shape
 
-    shape = property(_getShape, None)
-
+    shape = property(_getShape,None)
     def iscontiguous(self):
         return self.flags['CONTIGUOUS']
 
@@ -75,82 +71,79 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         out = numpy.ascontiguousarray(d)
         m = numpy.ma.getmask(self)
         if m is not numpy.ma.nomask:
-            m = numpy.ascontiguousarray(m)
-        out = TransientVariable(out, mask=m, attributes=self.attributes)
+            m= numpy.ascontiguousarray(m)
+        out = TransientVariable(out,mask=m,attributes=self.attributes)
         out.setAxisList(self.getAxisList())
         out.setMissing(self.getMissing())
         return out
-
+    
     ascontiguous = ascontiguousarray
-
+    
     def asma(self):
-        return numpy.ma.array(self._data, mask=self._mask)
-
-    def _update_from(self, obj):
-        numpy.ma.MaskedArray._update_from(self, obj)
-        if not hasattr(self, '___cdms_internals__'):
-            self.__dict__[
-                '___cdms_internals__'] = [
-                    '__cdms_internals__',
-                    '___cdms_internals__',
-                    '_node_',
-                    'parent',
-                    'attributes',
-                    'shape']
-        if not hasattr(self, 'attributes'):
-            self.attributes = {}
-        self._grid_ = getattr(obj, '_grid_', None)
+        return numpy.ma.array(self._data,mask=self._mask)
+    
+    def _update_from(self,obj):
+        numpy.ma.MaskedArray._update_from(self,obj)
+        if not hasattr(self,'___cdms_internals__'):
+            self.__dict__['___cdms_internals__']=['__cdms_internals__','___cdms_internals__','_node_','parent','attributes','shape']
+        if not hasattr(self,'attributes'):
+            self.attributes={}
+        self._grid_ = getattr(obj,'_grid_',None)
         try:
-            for nm, val in obj.__dict__.items():
-                if nm[0] == '_':
+            for nm,val in obj.__dict__.items():
+                if nm[0]=='_':
+##                     print nm
                     pass
+##                     self.__dict__[nm]=val
                 else:
-                    setattr(self, nm, val)
-        except Exception:
+                    setattr(self,nm,val)
+        except Exception,err:
             pass
-        id = getattr(self, 'id', None)
+        id = getattr(self,'id',None)
         if id is None:
-            TransientVariable.variable_count += 1
-            id = 'variable_' + str(TransientVariable.variable_count)
-            self.id = id
-        self.name = getattr(obj, 'name', id)
-        if not hasattr(self, '__domain'):
+            TransientVariable.variable_count+=1
+            id = 'variable_'+str(TransientVariable.variable_count)
+            self.id=id
+        self.name = getattr(obj,'name',id)
+        if not hasattr(self,'__domain'):
             self.initDomain(axes=None)
 
-    def __array_finalize__(self, obj):
-        numpy.ma.MaskedArray.__array_finalize__(self, obj)
+
+    def __array_finalize__(self,obj):
+        numpy.ma.MaskedArray.__array_finalize__(self,obj)
         return
 
-    __mul__ = AbstractVariable.__mul__
-    __rmul__ = AbstractVariable.__rmul__
-    __imul__ = AbstractVariable.__imul__
-    __abs__ = AbstractVariable.__abs__
-    __neg__ = AbstractVariable.__neg__
-    __add__ = AbstractVariable.__add__
-    __iadd__ = AbstractVariable.__iadd__
-    __radd__ = AbstractVariable.__radd__
+    
+    __mul__    = AbstractVariable.__mul__
+    __rmul__   = AbstractVariable.__rmul__
+    __imul__   = AbstractVariable.__imul__
+    __abs__    = AbstractVariable.__abs__
+    __neg__    = AbstractVariable.__neg__
+    __add__    = AbstractVariable.__add__
+    __iadd__   = AbstractVariable.__iadd__
+    __radd__   = AbstractVariable.__radd__
     __lshift__ = AbstractVariable.__lshift__
     __rshift__ = AbstractVariable.__rshift__
-    __sub__ = AbstractVariable.__sub__
-    __rsub__ = AbstractVariable.__rsub__
-    __isub__ = AbstractVariable.__isub__
-    __div__ = AbstractVariable.__div__
-    __rdiv__ = AbstractVariable.__rdiv__
-    __idiv__ = AbstractVariable.__idiv__
-    __pow__ = AbstractVariable.__pow__
-    __eq__ = AbstractVariable.__eq__
-    __ne__ = AbstractVariable.__ne__
-    __lt__ = AbstractVariable.__lt__
-    __le__ = AbstractVariable.__le__
-    __gt__ = AbstractVariable.__gt__
-    __ge__ = AbstractVariable.__ge__
-    __sqrt__ = AbstractVariable.__sqrt__
-
-    def __init__(self, data, typecode=None, copy=1, savespace=0,
+    __sub__    = AbstractVariable.__sub__
+    __rsub__   = AbstractVariable.__rsub__    
+    __isub__   = AbstractVariable.__isub__    
+    __div__    = AbstractVariable.__div__
+    __rdiv__   = AbstractVariable.__rdiv__
+    __idiv__   = AbstractVariable.__idiv__
+    __pow__   = AbstractVariable.__pow__
+    __eq__    = AbstractVariable.__eq__
+    __ne__    = AbstractVariable.__ne__
+    __lt__    = AbstractVariable.__lt__
+    __le__    = AbstractVariable.__le__
+    __gt__    = AbstractVariable.__gt__
+    __ge__    = AbstractVariable.__ge__
+    __sqrt__    = AbstractVariable.__sqrt__
+
+    def __init__(self,data, typecode=None, copy=1, savespace=0, 
                  mask=numpy.ma.nomask, fill_value=None, grid=None,
-                 axes=None, attributes=None, id=None, copyaxes=1, dtype=None,
-                 order=False, no_update_from=False, **kargs):
-        """createVariable (self, data, typecode=None, copy=0, savespace=0,
+                 axes=None, attributes=None, id=None, copyaxes=1, dtype=None, 
+                 order=False, no_update_from=False,**kargs):
+        """createVariable (self, data, typecode=None, copy=0, savespace=0, 
                  mask=None, fill_value=None, grid=None,
                  axes=None, attributes=None, id=None, dtype=None, order=False)
            The savespace argument is ignored, for backward compatibility only.
@@ -158,68 +151,69 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         try:
             if data.fill_value is not None:
                 self._setmissing(data.fill_value)
-                fill_value = data.fill_value
+                fill_value=data.fill_value
         except:
             pass
         if fill_value is not None:
-            self._setmissing(fill_value)
-        if attributes is not None and "_FillValue" in attributes.keys():
-            self._setmissing(attributes["_FillValue"])
+           self._setmissing(fill_value)
+        if attributes is not None  and "_FillValue" in attributes.keys():
+           self._setmissing(attributes["_FillValue"])
 
-        # tile index, None means no mosaic
+        # tile index, None means no mosaic 
         self.tileIndex = None
-
+        
         # Compatibility: assuming old typecode, map to new
         if dtype is None and typecode is not None:
             dtype = typeconv.convtypecode2(typecode)
         typecode = sctype2char(dtype)
-        if isinstance(data, tuple):
+        if type(data) is types.TupleType:
             data = list(data)
-
-        AbstractVariable.__init__(self)
+        
+        AbstractVariable.__init__ (self)
 
         if isinstance(data, AbstractVariable):
             if not isinstance(data, TransientVariable):
                 data = data.subSlice()
-# if attributes is None: attributes = data.attributes
+##             if attributes is None: attributes = data.attributes
             if axes is None and not no_update_from:
                 axes = map(lambda x: x[0], data.getDomain())
             if grid is None and not no_update_from:
                 grid = data.getGrid()
                 if (grid is not None) and (not isinstance(grid, AbstractRectGrid)) \
-                        and (not grid.checkAxes(axes)):
-                    grid = grid.reconcile(
-                        axes)  # Make sure grid and axes are consistent
+                                      and (not grid.checkAxes(axes)):
+                    grid = grid.reconcile(axes) # Make sure grid and axes are consistent
+
+        ncopy = (copy!=0)
+
 
         # Initialize the geometry
         if grid is not None:
-            copyaxes = 0                  # Otherwise grid axes won't match domain.
+            copyaxes=0                  # Otherwise grid axes won't match domain.
         if axes is not None:
-            self.initDomain(axes, copyaxes=copyaxes)
-        # Note: clobbers the grid, so set the grid after.
+            self.initDomain(axes, copyaxes=copyaxes)           # Note: clobbers the grid, so set the grid after.
         if grid is not None:
             self.setGrid(grid)
-
+ 
         # Initialize the attributes
         if attributes is not None:
             for key, value in attributes.items():
-                if (key in ['shape', 'flat', 'imaginary', 'real'] or key[0] == '_') and key not in ['_FillValue']:
-                    raise CDMSError('Bad key in attributes: ' + key)
+                if (key in ['shape','flat','imaginary','real'] or key[0]=='_') and key not in ['_FillValue']:
+                    raise CDMSError, 'Bad key in attributes: ' + key
                 elif key == 'missing_value':
-                    # ignore if fill value given explicitly
+                    #ignore if fill value given explicitly
                     if fill_value is None:
                         self._setmissing(value)
-                elif key not in ['scale_factor', 'add_offset']:
+                elif key not in ['scale_factor','add_offset']:
                     setattr(self, key, value)
 
         # Sync up missing_value attribute and the fill value.
         self.missing_value = self._getmissing()
         self._FillValue = self._getmissing()
         if id is not None:
-            if not isinstance(id, (unicode, str)):
-                raise CDMSError('id must be a string')
+            if not isinstance(id,(unicode,str)): 
+                raise CDMSError, 'id must be a string'
             self.id = id
-        elif hasattr(data, 'id'):
+        elif hasattr(data,'id'):
             self.id = data.id
 
         if self.id is None:
@@ -234,21 +228,22 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         self.__mpiWindows = {}
         self.__mpiType = self.__getMPIType()
 
+
     def _getmissing(self):
         return self._missing
 
-    def _setmissing(self, value):
-        self._missing = numpy.array(value).astype(self.dtype)
+    def _setmissing(self,value):
+        self._missing=numpy.array(value).astype(self.dtype)
 
-    missing = property(_getmissing, _setmissing)
-    fill_value = property(_getmissing, _setmissing)
-    _FillValue = property(_getmissing, _setmissing)
-    missing_value = property(_getmissing, _setmissing)
+    missing       = property(_getmissing,_setmissing)
+    fill_value    = property(_getmissing,_setmissing)
+    _FillValue    = property(_getmissing,_setmissing)
+    missing_value = property(_getmissing,_setmissing)
 
-    def __new__(cls, data, typecode=None, copy=0, savespace=0,
-                mask=numpy.ma.nomask, fill_value=None, grid=None,
-                axes=None, attributes=None, id=None, copyaxes=1, dtype=None, order=False, **kargs):
-        """createVariable (self, data, typecode=None, copy=0, savespace=0,
+    def __new__(cls, data, typecode=None, copy=0, savespace=0, 
+                 mask=numpy.ma.nomask, fill_value=None, grid=None,
+                 axes=None, attributes=None, id=None, copyaxes=1, dtype=None, order=False,**kargs):
+        """createVariable (self, data, typecode=None, copy=0, savespace=0, 
                  mask=None, fill_value=None, grid=None,
                  axes=None, attributes=None, id=None, dtype=None, order=False)
            The savespace argument is ignored, for backward compatibility only.
@@ -257,67 +252,68 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         if dtype is None and typecode is not None:
             dtype = typeconv.convtypecode2(typecode)
         typecode = sctype2char(dtype)
-        if isinstance(data, tuple):
+        if type(data) is types.TupleType:
             data = list(data)
         if isinstance(data, AbstractVariable):
             if not isinstance(data, TransientVariable):
                 data = data.subSlice()
         if isinstance(data, numpy.ma.MaskedArray):
             try:
-                if fill_value is None:
-                    fill_value = data.fill_value
+                if fill_value is None: fill_value = data.fill_value
             except:
                 pass
 
-        ncopy = (copy != 0)
+        ncopy = (copy!=0)
         if mask is None:
             try:
                 mask = data.mask
-            except Exception:
+            except Exception,err:
                 mask = numpy.ma.nomask
 
         # Handle the case where ar[i:j] returns a single masked value
         if data is numpy.ma.masked:
-            # shape = tuple(len(axes)*[1])
+            #shape = tuple(len(axes)*[1])
             data = numpy.ma.masked.data
-            # data.shape = shape
+            #data.shape = shape
             mask = numpy.ma.masked.mask
-            # mask.shape = shape
-# if data.getattr('mask',None) is not numpy.ma.nomask:
-# mask = data.mask
-# print 'passing:',mask.shape,data.shape,numpy.shape(cls)
+            #mask.shape = shape
+##         if data.getattr('mask',None) is not numpy.ma.nomask:
+##             mask = data.mask
+##         print 'passing:',mask.shape,data.shape,numpy.shape(cls)
         if fill_value is not None:
             fill_value = numpy.array(fill_value).astype(dtype)
         else:
             fill_value = numpy.ma.MaskedArray(1).astype(dtype).item()
 
-        self = numpy.ma.MaskedArray.__new__(cls, data, dtype=dtype,
-                                            copy=ncopy,
-                                            mask=mask,
-                                            fill_value=fill_value,
-                                            subok=False,
-                                            order=order)
 
+        self = numpy.ma.MaskedArray.__new__(cls, data, dtype = dtype,
+                                      copy = ncopy,
+                                      mask = mask,
+                                      fill_value = fill_value,
+                                      subok = False,
+                                      order = order)
+
+            
+        
         return self
 
     # typecode = numpy.ma.array.typecode
     def typecode(self):
         return self.dtype.char
 
-    def assignValue(self, data):
+    def assignValue(self,data):
         self[...] = data
 
     def getValue(self, squeeze=1):
         return self.filled()
 
-    def expertSlice(self, slicelist):
+    def expertSlice (self, slicelist):
         return numpy.ma.MaskedArray.__getitem__(self, slicelist)
 
-    def initDomain(self, axes, copyaxes=1):
-        # lazy evaluation via getAxis to avoid creating axes that aren't ever
-        # used.
+    def initDomain (self, axes, copyaxes=1):
+        # lazy evaluation via getAxis to avoid creating axes that aren't ever used.
         newgrid = None
-        self.__domain = [None] * self.rank()
+        self.__domain = [None]*self.rank()
         if axes is not None:
             flataxes = []
             try:
@@ -330,59 +326,50 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
                 elif isinstance(item, AbstractRectGrid) or isinstance(item, AbstractCurveGrid):
                     flataxes.append(item.getAxis(0))
                     flataxes.append(item.getAxis(1))
-                    copyaxes = 0
+                    copyaxes=0
                     newgrid = item
                 elif isinstance(item, AbstractGenericGrid):
                     flataxes.append(item.getAxis(0))
-                    copyaxes = 0
+                    copyaxes=0
                     newgrid = item
                 else:
-                    raise CDMSError(
-                        "Invalid item in axis list:\n" + repr(item))
+                    raise CDMSError, "Invalid item in axis list:\n"+`item`
             if len(flataxes) != self.rank():
-                raise CDMSError("Wrong number of axes to initialize domain.")
+                raise CDMSError, "Wrong number of axes to initialize domain."
             for i in range(len(flataxes)):
                 if flataxes[i] is not None:
-                    if (not flataxes[i].isVirtual()) and copyaxes == 1:
+                    if (not flataxes[i].isVirtual()) and copyaxes==1:
                         self.copyAxis(i, flataxes[i])
                     else:
-                        self.setAxis(
-                            i,
-                            flataxes[i])  # No sense copying a virtual axis.
+                        self.setAxis(i, flataxes[i]) # No sense copying a virtual axis.
             if newgrid is not None:     # Do this after setting the axes, so the grid is consistent
                 self.setGrid(newgrid)
 
     def getDomain(self):
         for i in range(self.rank()):
             if self.__domain[i] is None:
-                self.getAxis(i)  # will force a fill in
+                junk = self.getAxis(i)  # will force a fill in
         return self.__domain
 
-    def getAxis(self, n):
-        if n < 0:
-            n = n + self.rank()
+    def getAxis (self, n):
+        if n < 0: n = n + self.rank()
         if self.__domain[n] is None:
             length = numpy.ma.size(self, n)
-            # axis = createAxis(numpy.ma.arange(numpy.ma.size(self, n),
-            # typecode=numpy.Float))
-            axis = createAxis(
-                numpy.ma.arange(numpy.ma.size(self, n), dtype=numpy.float_))
+            # axis = createAxis(numpy.ma.arange(numpy.ma.size(self, n), typecode=numpy.Float))
+            axis = createAxis(numpy.ma.arange(numpy.ma.size(self, n), dtype=numpy.float_))
             axis.id = "axis_" + str(n)
             self.__domain[n] = (axis, 0, length, length)
         return self.__domain[n][0]
-
-    def setAxis(self, n, axis, savegrid=0):
+        
+    def setAxis (self, n, axis, savegrid=0):
         """Set n axis of self to a copy of axis. (0-based index)
         """
-        if n < 0:
-            n = n + self.rank()
+        if n < 0: n = n + self.rank()
         axislen = self.shape[n]
-        if len(axis) != axislen:
-            raise CDMSError(
-                "axis length %d does not match corresponding dimension %d" %
-                (len(axis), axislen))
+        if len(axis)!=axislen:
+            raise CDMSError,"axis length %d does not match corresponding dimension %d"%(len(axis),axislen)
         if not isinstance(axis, AbstractAxis):
-            raise CDMSError("copydimension, other not a slab.")
+            raise CDMSError,"copydimension, other not a slab."
         self.__domain[n] = (axis, 0, len(axis), len(axis))
 
     def setAxisList(self, axislist):
@@ -390,28 +377,27 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         for i in range(len(axislist)):
             self.setAxis(i, axislist[i])
 
-    def copyAxis(self, n, axis):
+    def copyAxis (self, n, axis):
         """Set n axis of self to a copy of axis. (0-based index)
            Invalidates grid.
         """
-        if n < 0:
-            n = n + self.rank()
+        if n < 0: n = n + self.rank()
         if not isinstance(axis, AbstractAxis):
-            raise CDMSError("copydimension, other not an axis.")
+            raise CDMSError,"copydimension, other not an axis."
         isGeneric = [False]
         b = axis.getBounds(isGeneric)
         mycopy = createAxis(axis[:], b, genericBounds=isGeneric[0])
         mycopy.id = axis.id
         for k, v in axis.attributes.items():
-            setattr(mycopy, k, v)
-        self.setAxis(n, mycopy)
-
-    def copyDomain(self, other):
+           setattr(mycopy, k, v)
+        self.setAxis (n, mycopy)
+   
+    def copyDomain (self, other):
         "Set the axes and grid by copying variable other."
         if not isinstance(other, AbstractVariable):
-            raise CDMSError("copyDomain, other not a variable.")
+            raise CDMSError,"copyDomain, other not a variable."
         if self.rank() != other.rank():
-            raise CDMSError("copyDomain, ranks do not match.")
+            raise CDMSError, "copyDomain, ranks do not match."
         for i in range(self.rank()):
             self.copyAxis(i, other.getAxis(i))
         self.setGrid(other.getGrid())
@@ -422,24 +408,22 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
             for i in range(self.rank()):
                 ax = self.getAxis(i)
                 if ax.isLatitude():
-                    order = order + 'y'
+                    order = order+'y'
                     lat = ax
                 elif ax.isLongitude():
-                    order = order + 'x'
+                    order = order+'x'
                     lon = ax
-                if len(order) == 2:
-                    break
+                if len(order)==2: break
 
-            if order in ['yx', 'xy']:
-                self._grid_ = createRectGrid(lat, lon, order)
+            if order in ['yx','xy']:
+                self._grid_ = createRectGrid(lat,lon,order)
         return self._grid_
 
-    def astype(self, tc):
+    def astype (self, tc):
         "return self as array of given type."
-        maresult = numpy.ma.MaskedArray.astype(self, tc)
-        return TransientVariable(
-            maresult, copy=0, axes=self.getAxisList(), fill_value=self.fill_value,
-            attributes=self.attributes, id=self.id, grid=self.getGrid())
+        maresult = numpy.ma.MaskedArray.astype(self,tc)
+        return TransientVariable(maresult, copy=0, axes=self.getAxisList(), fill_value=self.fill_value,
+                                 attributes=self.attributes, id=self.id, grid=self.getGrid())
 
     def setMaskFromGridMask(self, mask, gridindices):
         """Set the mask for self, given a grid mask and the variable domain
@@ -455,18 +439,18 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
                 shapeprep.append(self.shape[i])
 
         # Broadcast mask
-        if tprep != []:
+        if tprep!=[]:
             newshape = tuple(shapeprep + list(mask.shape))
             bigmask = numpy.resize(mask, newshape)
 
             # Generate the tranpose vector
             t = tuple(tprep + list(gridindices))
-            tinv = [0] * len(t)
+            tinv = [0]*len(t)
             for i in range(len(t)):
                 tinv[t[i]] = i
 
             # And reshape to fit the variable
-            if tinv != range(len(tinv)):
+            if tinv!=range(len(tinv)):
                 bigmask = numpy.transpose(bigmask, tuple(tinv))
 
         else:
@@ -481,25 +465,25 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         return result
 
 # Old cu interface
-    def copydimension(self, idim, other, jdim):
-        """Set idim dimension of self to variable other's jdim'th
+    def copydimension (self, idim, other, jdim):
+        """Set idim dimension of self to variable other's jdim'th 
            This is for old cu compatibility. Use copyAxis for new code.
         """
         if not isinstance(other, AbstractVariable):
-            raise CDMSError("copydimension, other not a variable.")
+            raise CDMSError,"copydimension, other not a variable."
         a = other.getAxis(jdim)
         self.copyAxis(idim, a)
 
     def setdimattribute(self, dim, field, value):
         "Set the attribute named field from the dim'th dimension."
         if dim < 0 or dim >= self.rank():
-            raise CDMSError("setdimattribute, dim out of bounds.")
+            raise CDMSError, "setdimattribute, dim out of bounds."
         d = self.getAxis(dim)
         if field == "name":
-            if not isinstance(value, basestring):
-                raise CDMSError("setdimattribute: name not a string")
+            if not type(value) == types.StringType:
+               raise CDMSError, "setdimattribute: name not a string"
             d.id = value
-
+            
         elif field == "values":
             # note -- invalidates grid, may break old code.
             a = createAxis(numpy.ma.filled(value[:]))
@@ -509,30 +493,29 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
             self.setAxis(dim, a)
 
         elif field == "units":
-            if not isinstance(value, basestring):
-                raise CDMSError("setdimattribute: units not a string")
+            if not type(value) == types.StringType:
+               raise CDMSError, "setdimattribute: units not a string"
             d.units = value
 
         elif field == "weights":
             # Well, you can't really do this without modifying the grid
-            raise CDMSError("setdimattribute weights not implemented.")
+            raise CDMSError, "setdimattribute weights not implemented."
 
         elif field == "bounds":
             if value is None:
-                d.setBounds(None)
+               d.setBounds(None)
             else:
-                b = numpy.ma.filled(value)
-                if numpy.ma.rank(b) == 2:
-                    d.setBounds(b)
-                elif numpy.ma.rank(b) == 1:
-                    b1 = numpy.zeros((len(b) - 1, 2), b.dtype.char)
-                    b1[:, 0] = b[:-1]
-                    b1[:, 1] = b[1:]
-                    d.setBounds(b1)
-                else:
-                    raise CDMSError(
-                        "setdimattribute, bounds improper shape: " +
-                        b.shape)
+               b = numpy.ma.filled(value)
+               if numpy.ma.rank(b) == 2:
+                   d.setBounds(b)
+               elif numpy.ma.rank(b) == 1:
+                   b1 = numpy.zeros((len(b)-1,2), b.dtype.char)
+                   b1[:,0] = b[:-1]
+                   b1[:,1] = b[1:]
+                   d.setBounds(b1)
+               else:
+                   raise CDMSError, \
+                   "setdimattribute, bounds improper shape: " + b.shape
         else:
             setattr(d, field, value)
 
@@ -543,57 +526,57 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         result = createVariable(self, copy=copyData)
         return result
 
-    def dumps(self, *args, **kargs):
-        # Probably need something for curv/gen grids
+    def dumps(self,*args,**kargs):
+        ## Probably need something for curv/gen grids
         """ Dumps Variable to a jason object, args are passed directly to json.dump"""
-        J = {}
-        for k, v in self.attributes.iteritems():
-            if k == "autoApiInfo":
+        J={}
+        for k,v in self.attributes.iteritems():
+            if k=="autoApiInfo":
                 continue
-            J[k] = v
-        J['id'] = self.id
-        axes = []
+            J[k]=v
+        J['id']=self.id
+        axes=[]
         for a in self.getAxisList():
-            ax = {}
-            for A, v in a.attributes.iteritems():
-                ax[A] = v
-            ax['id'] = a.id
-            ax["_values"] = a[:].tolist()
-            ax["_dtype"] = a[:].dtype.char
+            ax={}
+            for A,v in a.attributes.iteritems():
+                ax[A]=v
+            ax['id']=a.id
+            ax["_values"]=a[:].tolist()
+            ax["_dtype"]=a[:].dtype.char
             axes.append(ax)
-        J["_axes"] = axes
-        J["_values"] = self[:].filled(self.fill_value).tolist()
-        J["_fill_value"] = float(self.fill_value)
-        J["_dtype"] = self.typecode()
-        J["_grid"] = None  # self.getGrid()
-        return json.dumps(J, *args, **kargs)
+        J["_axes"]=axes
+        J["_values"]=self[:].filled(self.fill_value).tolist()
+        J["_fill_value"]=float(self.fill_value)
+        J["_dtype"]=self.typecode()
+        J["_grid"]=None #self.getGrid()
+        return json.dumps(J,*args,**kargs)
 
     def isEncoded(self):
         "Transient variables are not encoded"
         return 0
 
-    def __len__(self):
+    def __len__ (self):
         "Length of first dimension"
-        if self.rank() > 0:
-            (axis, start, length, true_length) = self.getDomain()[0]
+        if self.rank()>0:
+            (axis,start,length,true_length) = self.getDomain()[0]
         else:
             length = 0
         return length
 
-    def __str__(self):
+    def __str__ (self):
         return numpy.ma.MaskedArray.__str__(self)
 
-    def __repr__(self):
+    def __repr__ (self):
         return self.id + '\n' + numpy.ma.MaskedArray.__repr__(self) + '\n'
 
     def set_fill_value(self, value):
         "Set missing value attribute and fill value"
         AbstractVariable.setMissing(self, value)
-        # self.__dict__['_fill_value'] = self.missing_value
-        # Fix submitted by Ghislain Picard, this was broken with numpy 1.5
-        numpy.ma.MaskedArray.set_fill_value(self, value)
+        #self.__dict__['_fill_value'] = self.missing_value
+        ## Fix submitted by Ghislain Picard, this was broken with numpy 1.5
+        numpy.ma.MaskedArray.set_fill_value(self,value)
 
-    def setMissing(self, value):
+    def setMissing (self, value):
         "Set missing value attribute and fill value"
         self.set_fill_value(value)
 
@@ -614,7 +597,7 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         """
         return self.tileIndex
 
-    def toVisit(self, filename, format='Vs', sphereRadius=1.0,
+    def toVisit(self, filename, format='Vs', sphereRadius=1.0, 
                 maxElev=0.1):
         """
         Save data to file for postprocessing by the VisIt visualization tool
@@ -623,20 +606,21 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
         sphereRadius: radius of the earth
         maxElev: maximum elevation for representation on the sphere
         """
-        from . import mvVTKSGWriter
-        from . import mvVsWriter
+        import mvSphereMesh
+        import mvVTKSGWriter
+        import mvVsWriter
         try:
             # required by mvVsWriter
-            import tables  # noqa
+            import tables
         except:
-            # fall back
+            # fall back 
             format = 'VTK'
 
         def generateTimeFileName(filename, tIndex, tIndexMax, suffix):
-            ndigits = len('%d' % tIndexMax)
-            itdigits = len('%d' % tIndex)
-            tiStr = '0' * (ndigits - itdigits) + ('%d' % tIndex)
-            return re.sub(r'\.' + suffix, '_%s.%s' % (tiStr, suffix),
+            ndigits = len('%d'%tIndexMax)
+            itdigits = len('%d'%tIndex)
+            tiStr = '0'*(ndigits-itdigits) + ('%d'%tIndex)
+            return re.sub(r'\.' + suffix, '_%s.%s' % (tiStr, suffix), 
                           filename)
 
         # determine whether data are time dependent
@@ -650,48 +634,46 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
                 counter += 1
                 if axis == 'time':
                     timeIndex = counter
-
-        if timeAxis is None or timeIndex == -1:
+        
+        if timeAxis == None or timeIndex == -1:
             # static data
             if format == 'VTK':
                 vw = mvVTKSGWriter.VTKSGWriter(self, maxElev)
-                if filename.find('.vtk') == -1:
+                if filename.find('.vtk') == -1: 
                     filename += '.vtk'
                 vw.write(filename)
             else:
                 vw = mvVsWriter.VsWriter(self, maxElev)
-                if filename.find('.vsh5') == -1:
+                if filename.find('.vsh5') == -1: 
                     filename += '.vsh5'
                 vw.write(filename)
         else:
             # time dependent data
             tIndexMax = len(timeAxis)
             for tIndex in range(tIndexMax):
-                sliceOp = 'self[' + (
-                    ':,' * timeIndex) + ('%d,' %
-                                         tIndex) + '...]'
+                sliceOp = 'self[' + (':,'*timeIndex) + ('%d,'%tIndex) + '...]'
                 var = eval(sliceOp)
                 if format == 'VTK':
                     if filename.find('.vtk') == -1:
                         filename += '.vtk'
-                    tFilename = generateTimeFileName(filename,
+                    tFilename = generateTimeFileName(filename, 
                                                      tIndex, tIndexMax, 'vtk')
                     vw = mvVTKSGWriter.VTKSGWriter(var, maxElev)
                     vw.write(tFilename)
                 else:
                     if filename.find('.h5') == -1:
                         filename += '.h5'
-                    tFilename = generateTimeFileName(filename,
+                    tFilename = generateTimeFileName(filename, 
                                                      tIndex, tIndexMax, 'h5')
                     vw = mvVsWriter.VsWriter(var, maxElev)
                     vw.write(tFilename)
-
-    # Following are distributed array methods, they require mpi4py
+       
+    # Following are distributed array methods, they require mpi4py 
     # to be installed
 
     def setMPIComm(self, comm):
         """
-        Set the MPI communicator. This is a no-op if MPI
+        Set the MPI communicator. This is a no-op if MPI 
         is not available.
         """
         if HAVE_MPI:
@@ -718,7 +700,7 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
     def exposeHalo(self, ghostWidth=1):
         """
         Expose the halo to other processors. The halo is the region
-        within the local MPI data domain that is accessible to other
+        within the local MPI data domain that is accessible to other 
         processors. The halo encompasses the edge of the data region
         and has thickness ghostWidth.
 
@@ -735,8 +717,9 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
                     # given direction, a 1 represents a layer of
                     # thickness ghostWidth on the high index side,
                     # -1 on the low index side.
-                    winId = tuple([0 for i in range(dim)] + [drect] +
-                                  [0 for i in range(dim + 1, ndims)])
+                    winId = tuple( [0 for i in range(dim) ] \
+                                   + [drect] + \
+                                   [0 for i in range(dim+1, ndims) ] )
 
                     slce = slice(0, ghostWidth)
                     if drect == 1:
@@ -745,19 +728,19 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
                     slab = self.__getSlab(dim, slce)
 
                     # create the MPI window
-                    dataSrc = numpy.zeros(self[slab].shape, self.dtype)
-                    dataDst = numpy.zeros(self[slab].shape, self.dtype)
+                    dataSrc = numpy.zeros(self[slab].shape, self.dtype) 
+                    dataDst = numpy.zeros(self[slab].shape, self.dtype) 
                     self.__mpiWindows[winId] = {
                         'slab': slab,
                         'dataSrc': dataSrc,
                         'dataDst': dataDst,
                         'window': MPI.Win.Create(dataSrc, comm=self.__mpiComm),
-                    }
-
+                        }
+                
     def getHaloEllipsis(self, side):
         """
-        Get the ellipsis for a given halo side.
-
+        Get the ellipsis for a given halo side. 
+        
         side - a tuple of zeros and one +1 or -1.  To access
                the "north" side for instance, set side=(1, 0),
                (-1, 0) to access the south side, (0, 1) the east
@@ -765,7 +748,7 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
 
         Return none if halo was not exposed (see exposeHalo)
         """
-        if HAVE_MPI and side in self.__mpiWindows:
+        if HAVE_MPI and self.__mpiWindows.has_key(side):
             return self.__mpiWindows[side]['slab']
         else:
             return None
@@ -773,20 +756,20 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
     def fetchHaloData(self, pe, side):
         """
         Fetch the halo data from another processor. The halo side
-        is a subdomain of the halo that is exposed to other
+        is a subdomain of the halo that is exposed to other 
         processors. It is an error to call this method when
         MPI is not enabled. This is a collective method (must
         be called by all processes), which involves synchronization
         of data among all processors.
 
         pe       -  processor owning the halo data. This is a no
-                    operation when pe is None.
+                    operation when pe is None. 
         side     -  a tuple of zeros and one +1 or -1.  To access
                     the "north" side for instance, set side=(1, 0),
                     (-1, 0) to access the south side, (0, 1) the east
-                    side, etc.
+                    side, etc. 
 
-        Note: collective, all procs must invoke this method. If some
+        Note: collective, all procs must invoke this method. If some 
         processors should not fetch then pass None for pe.
         """
         if HAVE_MPI:
@@ -799,37 +782,37 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
             dataSrc[...] = self[slab]
 
             win = iw['window']
-            win.Fence()  # get the data ready
+            win.Fence() # get the data ready
             if pe is not None:
-                win.Get([dataDst, self.__mpiType], pe)
-            win.Fence()  # make sure the communication completed
+                win.Get( [dataDst, self.__mpiType], pe )
+            win.Fence() # make sure the communication completed
             return dataDst
         else:
-            raise CDMSError('Must have MPI to invoke fetchHaloData')
+            raise CDMSError, 'Must have MPI to invoke fetchHaloData'
 
     def freeHalo(self):
         """
-        Free the MPI windows attached to the halo. This must be
+        Free the MPI windows attached to the halo. This must be 
         called before MPI_Finalize.
         """
         for iw in self.__mpiWindows:
-            self.__mpiWindows[iw]['window'].Free()
+            self.__mpiWindows[iw]['window'].Free()        
 
     def __getSlab(self, dim, slce):
         """
         Get slab. A slab is a multi-dimensional slice extending in
         all directions except along dim where slce applies
-
+        
         dim      - dimension (0=first index, 1=2nd index...)
         slce     - python slice object along dimension dim
-
+        
         return slab
         """
         ndims = len(self.shape)
-
-        slab = [slice(0, None) for i in range(dim)] \
-            + [slce] + \
-            [slice(0, None) for i in range(dim + 1, ndims)]
+        
+        slab = [ slice(0, None) for i in range(dim) ] \
+                    + [slce] + \
+                  [ slice(0, None) for i in range(dim+1, ndims) ]
         return tuple(slab)
 
     def __getMPIType(self):
@@ -853,54 +836,50 @@ class TransientVariable(AbstractVariable, numpy.ma.MaskedArray):
             elif dtyp == numpy.int8:
                 typ = MPI.INT8_T
             else:
-                return None
+                return None          
         else:
             return typ
 
-# PropertiedClasses.set_property(TransientVariable, 'shape',
-# nowrite=1, nodelete=1)
-
+## PropertiedClasses.set_property(TransientVariable, 'shape', 
+##                                nowrite=1, nodelete=1)
 
-def createVariable(*args, **kargs):
-    if kargs.get("fromJSON", False):
+def createVariable(*args,**kargs):
+    if kargs.get("fromJSON",False):
         return fromJSON(*args)
     else:
-        return TransientVariable(*args, **kargs)
+        return TransientVariable(*args,**kargs)
 
-
-def isVariable(s):
+def isVariable (s):
     "Is s a variable?"
     return isinstance(s, AbstractVariable)
 
-
 def asVariable(s, writeable=1):
-    """Returns s if s is a Variable; if writeable is 1, return
-       s if s is a TransientVariable. If s is not a variable of
+    """Returns s if s is a Variable; if writeable is 1, return 
+       s if s is a TransientVariable. If s is not a variable of 
        the desired type, attempt to make it so and return that.
        If we fail raise CDMSError
     """
     target_class = AbstractVariable
-    if writeable:
-        target_class = TransientVariable
+    if writeable: target_class = TransientVariable
     if isinstance(s, target_class):
         return s
     elif isinstance(s, AbstractVariable):
         return s.subSlice()
-
+    
     try:
         result = createVariable(s)
     except CDMSError:
-        result = None
-
+        result =  None
+    
     # if result.dtype.char == numpy.ma.PyObject:
     if issubclass(result.dtype.type, numpy.object_):
         result = None
     if result is None:
-        raise CDMSError("asVariable could not make a Variable from the input.")
+        raise CDMSError, "asVariable could not make a Variable from the input."
     return result
 
 if __name__ == '__main__':
-    for s in [(20,), (4, 5)]:
+    for s in [(20,), (4,5)]:
         x = numpy.arange(20)
         x.shape = s
         t = createVariable(x)
@@ -909,24 +888,21 @@ if __name__ == '__main__':
         assert numpy.ma.allclose(x, t)
         assert t.dtype.char == numpy.int
         assert numpy.ma.size(t) == numpy.ma.size(x)
-        assert numpy.ma.size(t, 0) == len(t)
-        assert numpy.ma.allclose(
-            t.getAxis(0)[:], numpy.ma.arange(numpy.ma.size(t, 0)))
+        assert numpy.ma.size(t,0) == len(t)
+        assert numpy.ma.allclose(t.getAxis(0)[:], numpy.ma.arange(numpy.ma.size(t,0)))
         t.missing_value = -99
         assert t.missing_value == -99
         assert t.fill_value == -99
-    t = createVariable(numpy.ma.arange(5), mask=[0, 0, 0, 1, 0])
-    t.set_fill_value(1000)
+    t = createVariable(numpy.ma.arange(5), mask=[0,0,0,1,0])
+    t.set_fill_value (1000)
     assert t.fill_value == 1000
     assert t.missing_value == 1000
     t.missing_value = -99
     assert t[2] == 2
     t[3] = numpy.ma.masked
     assert t[3] is numpy.ma.masked
-    f = createVariable(
-        numpy.ma.arange(5, typecode=numpy.float32), mask=[0, 0, 0, 1, 0])
-    f2 = createVariable(
-        numpy.ma.arange(5, typecode=numpy.float32), mask=[0, 0, 0, 1, 0])
+    f = createVariable(numpy.ma.arange(5, typecode=numpy.float32), mask=[0,0,0,1,0])
+    f2 = createVariable(numpy.ma.arange(5, typecode=numpy.float32), mask=[0,0,0,1,0])
     f[3] = numpy.ma.masked
     assert f[3] is numpy.ma.masked
     assert numpy.ma.allclose(2.0, f[2])
@@ -936,6 +912,6 @@ if __name__ == '__main__':
     assert t.getdimattribute(0, 'name') == 'fudge'
     f2b = f2.getdimattribute(0, 'bounds')
     t.setdimattribute(0, 'bounds', f2b)
-    assert numpy.ma.allclose(
-        f.getdimattribute(0, 'bounds'), f2.getdimattribute(0, 'bounds'))
+    assert numpy.ma.allclose(f.getdimattribute(0,'bounds'), f2.getdimattribute(0,'bounds'))
     print "Transient Variable test passed ok."
+
diff --git a/Packages/cdms2/Lib/typeconv.py b/Packages/cdms2/Lib/typeconv.py
index e63ad64d3..0f2bd2c18 100644
--- a/Packages/cdms2/Lib/typeconv.py
+++ b/Packages/cdms2/Lib/typeconv.py
@@ -1,26 +1,24 @@
-import numpy as np
 __all__ = ['oldtype2dtype', 'convtypecode', 'convtypecode2', 'oldtypecodes']
 
+import numpy as np
 
 oldtype2dtype = {'1': np.dtype(np.byte),
                  's': np.dtype(np.short),
-                 #                 'i': np.dtype(np.intc),
-                 #                 'l': np.dtype(int),
-                 #                 'b': np.dtype(np.ubyte),
+#                 'i': np.dtype(np.intc),
+#                 'l': np.dtype(int),
+#                 'b': np.dtype(np.ubyte),
                  'w': np.dtype(np.ushort),
                  'u': np.dtype(np.uintc),
-                 #                 'f': np.dtype(np.single),
-                 #                 'd': np.dtype(float),
-                 #                 'F': np.dtype(np.csingle),
-                 #                 'D': np.dtype(complex),
-                 #                 'O': np.dtype(object),
-                 #                 'c': np.dtype('c'),
+#                 'f': np.dtype(np.single),
+#                 'd': np.dtype(float),
+#                 'F': np.dtype(np.csingle),
+#                 'D': np.dtype(complex),
+#                 'O': np.dtype(object),
+#                 'c': np.dtype('c'),
                  None: np.dtype(int)
-                 }
+    }
 
 # converts typecode=None to int
-
-
 def convtypecode(typecode, dtype=None):
     if dtype is None:
         try:
@@ -30,10 +28,8 @@ def convtypecode(typecode, dtype=None):
     else:
         return dtype
 
-# if both typecode and dtype are None
+#if both typecode and dtype are None
 #  return None
-
-
 def convtypecode2(typecode, dtype=None):
     if dtype is None:
         if typecode is None:
@@ -49,12 +45,10 @@ def convtypecode2(typecode, dtype=None):
 _changedtypes = {'B': 'b',
                  'b': '1',
                  'h': 's',
-                 # 'H': 'w',
+##                  'H': 'w',
                  'I': 'u'}
 
-
 class _oldtypecodes(dict):
-
     def __getitem__(self, obj):
         char = np.dtype(obj).char
         try:
diff --git a/Packages/cdms2/Lib/variable.py b/Packages/cdms2/Lib/variable.py
index c96dd5e21..a1f3f7f7d 100644
--- a/Packages/cdms2/Lib/variable.py
+++ b/Packages/cdms2/Lib/variable.py
@@ -1,16 +1,22 @@
-# Automatically adapted for numpy.oldnumeric Aug 01, 2007 by
+## Automatically adapted for numpy.oldnumeric Aug 01, 2007 by 
 
 """
 DatasetVariable: Dataset-based variables
 """
+from cdms2 import Cdunif
 import numpy
-from . import cdmsNode
+import cdmsNode
 import cdtime
 import copy
-from .cdmsobj import getPathFromTemplate, Max32int
-from .avariable import AbstractVariable
-from .sliceut import *  # noqa
-from .error import CDMSError
+import os
+import string
+import sys
+import types
+import cdmsobj
+from cdmsobj import CdmsObj, getPathFromTemplate, Max32int
+from avariable import AbstractVariable
+from sliceut import *
+from error import CDMSError
 
 InvalidGridElement = "Grid domain elements are not yet implemented: "
 InvalidRegion = "Invalid region: "
@@ -20,47 +26,44 @@ TooManyPartitions = "Variable has too many partitioned axes, max is two: "
 WriteNotImplemented = "Dataset write operation not implemented"
 FileClosed = "Cannot read from closed file or dataset, variable: "
 
-
 def timeindex(value, units, basetime, delta, delunits, calendar):
     """ Calculate (t - basetime)/delu
     where t = reltime(value, units)
     and delu is the time interval (delta, delunits) (e.g., 1 month).
     """
     tval = cdtime.reltime(value, units)
-    tounits = "%s since %s" % (delunits, basetime)
+    tounits = "%s since %s"%(delunits, basetime)
     newval = tval.torel(tounits, calendar)
-    return int(newval.value / delta)
-
+    return int(newval.value/delta)
 
 class DatasetVariable(AbstractVariable):
 
-    def __init__(self, parent, id, variableNode=None):
+    def __init__(self,parent,id, variableNode=None):
         """ "Variable (parent, variableNode=None)"
            variableNode is the variable tree node, if any.
            parent is the containing dataset instance.
         """
-        AbstractVariable.__init__(self, parent, variableNode)
-        val = self.__cdms_internals__ + ['domain', 'name_in_file']
+        AbstractVariable.__init__ (self, parent, variableNode)
+        val = self.__cdms_internals__ + ['domain','name_in_file']
         self.___cdms_internals__ = val
         self.id = id
         self.domain = []
         # Get self.name_in_file from the .xml file if present
         if not hasattr(self, 'name_in_file'):
             self.name_in_file = id
-
+            
         # if self.attributes.has_key('name_in_file'):
         #     self.name_in_file = self.attributes['name_in_file']
-        if variableNode is not None:
-            self._numericType_ = cdmsNode.CdToNumericType.get(
-                variableNode.datatype)
+        if variableNode is not None:          
+            self._numericType_ = cdmsNode.CdToNumericType.get(variableNode.datatype)
         else:
             self._numericType_ = numpy.float
         assert self.id is not None
-
-    def __len__(self):
+        
+    def __len__ (self):
         "Length of first dimension"
-        if len(self.domain) > 0:
-            (axis, start, length, true_length) = self.domain[0]
+        if len(self.domain)>0:
+            (axis,start,length,true_length) = self.domain[0]
         else:
             length = 0
 
@@ -71,34 +74,33 @@ class DatasetVariable(AbstractVariable):
 #            parentid = self.parent.id
 #        else:
 #            parentid = "**CLOSED**"
-# return "<Variable: %s, dataset: %s, shape: %s>"%(self.id, parentid,
-# `self.shape`)
+#        return "<Variable: %s, dataset: %s, shape: %s>"%(self.id, parentid, `self.shape`)
 
     def __getitem__(self, key):
         if self.parent is None:
-            raise CDMSError(FileClosed + str(self.id))
+            raise CDMSError, FileClosed+str(self.id)
         return AbstractVariable.__getitem__(self, key)
-
+        
     def getValue(self, squeeze=1):
         """Return the entire set of values."""
         if self.parent is None:
-            raise CDMSError(FileClosed + self.id)
+            raise CDMSError, FileClosed+self.id
         return self.getSlice(Ellipsis, squeeze=squeeze)
-
+    
     def __getslice__(self, low, high):
         if self.parent is None:
-            raise CDMSError(FileClosed + self.id)
+            raise CDMSError, FileClosed+self.id
 
         # Hack to prevent netCDF overflow error on 64-bit architectures
         high = min(Max32int, high)
-
+        
         return AbstractVariable.__getslice__(self, low, high)
 
     def __setitem__(self, index, value):
-        raise CDMSError(WriteNotImplemented)
+        raise CDMSError, WriteNotImplemented
 
     def __setslice__(self, low, high, value):
-        raise CDMSError(WriteNotImplemented)
+        raise CDMSError, WriteNotImplemented
 
     def _getShape(self):
         return self.getShape()
@@ -108,19 +110,19 @@ class DatasetVariable(AbstractVariable):
         return numpy.dtype(tc)
 
     def getShape(self):
-        shape = []
-        for (axis, start, length, true_length) in self.domain:
+        shape=[]
+        for (axis,start,length,true_length) in self.domain:
             shape.append(length)
         return tuple(shape)
 
-    def typecode(self):
-        return numpy.dtype(self._numericType_).char
+    def typecode (self):
+      return numpy.dtype(self._numericType_).char
 
     def size(self):
         "Number of elements."
         n = 1
         for k in self.shape:
-            n = k * n
+            n = k*n
         return n
 
     def initDomain(self, axisdict, griddict):
@@ -133,39 +135,38 @@ class DatasetVariable(AbstractVariable):
             if domelem is None:
                 domelem = griddict.get(dename)
                 if grid is None:
-                    raise CDMSError(NoSuchAxisOrGrid + dename)
+                    raise CDMSError, NoSuchAxisOrGrid + dename
                 else:
-                    raise CDMSError(InvalidGridElement + dename)
+                    raise CDMSError, InvalidGridElement + dename
             partlenstr = denode.getExternalAttr('partition_length')
             if partlenstr is not None:
-                truelen = int(partlenstr)
+                truelen = string.atoi(partlenstr)
             else:
                 truelen = denode.length
             self.domain.append((domelem, denode.start, denode.length, truelen))
 
     # Get the template
     def getTemplate(self):
-        if hasattr(self, 'template'):
+        if hasattr(self,'template'):
             template = self.template
-        elif hasattr(self.parent, 'template'):
+        elif hasattr(self.parent,'template'): 
             template = self.parent.template
         else:
             template = None
         return template
 
-    def getAxis(self, n):
-        if n < 0:
-            n = n + self.rank()
+    def getAxis (self, n):
+        if n < 0: n = n + self.rank()
         return self.domain[n][0]
 
-    def getDomain(self):
+    def getDomain (self):
         return self.domain
 
     # Get the paths associated with the interval region specified
     # by 'intervals'. This incorporates most of the logic of __getitem__,
     # without actually reading the data.
-    #
-    # 'specs' is a list of interval range specifications as defined
+    # 
+    # 'specs' is a list of interval range specifications as defined 
     # for getSlice.
     #
     # The function returns a list of tuples of the form (path,slicelist),
@@ -177,11 +178,11 @@ class DatasetVariable(AbstractVariable):
     #   f = Cdunif.CdunifFile(path,'r')
     #   var = f.variables[self.name_in_file]
     #   data = apply(var.getitem,slicelist)
-    #
+    #   
     def getPaths(self, *specs, **keys):
 
         # Create an equivalent list of slices
-        speclist = self._process_specs(specs, keys)
+        speclist = self._process_specs (specs, keys)
         slicelist = self.specs2slices(speclist)
 
         # Generate the filelist
@@ -191,20 +192,20 @@ class DatasetVariable(AbstractVariable):
         result = []
         if partitionSlices is None:
             pass
-        elif npart == 0:
+        elif npart==0:
             filename, slicelist = partitionSlices
             if filename is not None:
                 result.append((filename, tuple(slicelist)))
-        elif npart == 1:
+        elif npart==1:
             for filename, slicelist in partitionSlices:
                 if filename is not None:
                     result.append((filename, tuple(slicelist)))
-        elif npart == 2:
+        elif npart==2:
             for filelist in partitionSlices:
                 for filename, slicelist in filelist:
                     if filename is not None:
                         result.append((filename, tuple(slicelist)))
-
+                
         return result
 
     def genMatch(self, axis, interval, matchnames):
@@ -218,34 +219,34 @@ class DatasetVariable(AbstractVariable):
         returns the modified matchnames tuple.
         """
         if axis.isTime():
-            if hasattr(self.parent, 'cdms_filemap'):
+            if hasattr(self.parent,'cdms_filemap'):
                 start = interval[0]
                 end = interval[1]
             else:                       # Use template method
                 time0 = axis[interval[0]]
-                time1 = axis[interval[1] - 1]
-                isabs = (axis.units.find(" as ") != -1)
+                time1 = axis[interval[1]-1]
+                isabs = (string.find(axis.units," as ")!=-1)
                 if isabs:
-                    start = cdtime.abstime(time0, axis.units)
-                    end = cdtime.abstime(time1, axis.units)
+                    start = cdtime.abstime(time0,axis.units)
+                    end = cdtime.abstime(time1,axis.units)
                 else:
                     cal = axis.getCalendar()
-                    start = cdtime.reltime(time0, axis.units).tocomp(cal)
-                    end = cdtime.reltime(time1, axis.units).tocomp(cal)
+                    start = cdtime.reltime(time0,axis.units).tocomp(cal)
+                    end = cdtime.reltime(time1,axis.units).tocomp(cal)
             matchnames[1] = start
             matchnames[2] = end
         elif axis.isForecast():
             start = axis.getValue()[interval[0]]
-            end = axis.getValue()[interval[1] - 1]
+            end   = axis.getValue()[interval[1]-1]
             matchnames[5] = start
             matchnames[6] = end
         else:
-            if hasattr(self.parent, 'cdms_filemap'):
+            if hasattr(self.parent,'cdms_filemap'):
                 start = interval[0]
                 end = interval[1]
             else:
                 start = int(axis[interval[0]])
-                end = int(axis[interval[1] - 1])
+                end = int(axis[interval[1]-1])
             matchnames[3] = start
             matchnames[4] = end
 
@@ -255,13 +256,12 @@ class DatasetVariable(AbstractVariable):
         """Lookup or generate the file path, depending on whether a filemap
         or template is present.
         """
-        if hasattr(self.parent, 'cdms_filemap'):
+        if hasattr(self.parent,'cdms_filemap'):
             id, tstart, tend, levstart, levend, fcstart, fcend = matchnames
-            filename = self.parent._filemap_[
-                (self.id, tstart, levstart, fcstart)]
+            filename = self.parent._filemap_[(self.id, tstart, levstart, fcstart)]
             # ... filemap uses dataset IDs
         else:
-            filename = getPathFromTemplate(template, matchnames)
+            filename = getPathFromTemplate(template,matchnames)
         return filename
 
     def getPartition(self, axis):
@@ -270,7 +270,7 @@ class DatasetVariable(AbstractVariable):
         get the partition from the _varpart_ attribute, otherwise (for templating) use
         axis.partition.
         """
-        if hasattr(self.parent, 'cdms_filemap'):
+        if hasattr(self.parent,'cdms_filemap'):
             if axis.isTime():
                 partition = self._varpart_[0]
             elif axis.isForecast():
@@ -281,7 +281,7 @@ class DatasetVariable(AbstractVariable):
             partition = axis.partition
         return partition
 
-    def expertPaths(self, slist):
+    def expertPaths (self, slist):
         """ expertPaths(self, slicelist)
         takes a list of slices,
         returns a 3-tuple: (npart, dimensionlist, partitionSlices) where:
@@ -318,7 +318,7 @@ class DatasetVariable(AbstractVariable):
 
         # Handle rank-0 variables separately
         if self.rank() == 0:
-            matchnames = [realid, None, None, None, None, None, None]
+            matchnames = [realid,None,None,None,None,None,None]
             filename = self.getFilePath(matchnames, template)
 
             result = (0, (), (filename, []))
@@ -327,162 +327,162 @@ class DatasetVariable(AbstractVariable):
         # Find the number of partitioned axes
         npart = 0
         ndim = 0
-        for (axis, start, length, true_length) in self.domain:
-            if hasattr(axis, 'partition'):
-                npart = npart + 1
-                if npart == 1:
+        for (axis,start,length,true_length) in self.domain:
+            if hasattr(axis,'partition'):
+                npart = npart+1
+                if npart==1:
+                    part1 = axis
                     npart1 = ndim
-                elif npart == 2:
+                elif npart==2:
+                    part2 = axis
                     npart2 = ndim
                 else:
-                    raise CDMSError(TooManyPartitions + variable.id)
-            ndim = ndim + 1
+                    raise CDMSError,  TooManyPartitions + variable.id
+            ndim = ndim+1
 
         # If no partitioned axes, just read the data
-        if npart == 0:
-            matchnames = [realid, None, None, None, None, None, None]
+        if npart==0:
+            matchnames = [realid,None,None,None,None,None,None]
             filename = self.getFilePath(matchnames, template)
             result = (0, (), (filename, slicelist))
 
         # If one partitioned axes:
-        elif npart == 1:
+        elif npart==1:
 
             # intersect the slice and partition for that axis
             slice1 = slicelist[npart1]
-            (axis, startelem, length, true_length) = self.domain[npart1]
+            (axis,startelem,length,true_length) = self.domain[npart1]
             partition = slicePartition(slice1, self.getPartition(axis))
-            if partition == []:
+            if partition==[]:
                 return (1, (npart1,), None)
 
             # For each (interval, partslice) in the partition:
             resultlist = []
             (firstinterval, firstslice) = partition[0]
             prevhigh = firstinterval[0]
-            for (interval, partslice) in partition:
+            for (interval,partslice) in partition:
 
                 # If the previous interval high is less than
                 # the current interval low value, interpose
                 # missing data.
                 low = interval[0]
-                if prevhigh < low:
-                    missing_interval = (prevhigh, low)
+                if prevhigh<low:
+                    missing_interval = (prevhigh,low)
                     missing_slice = sliceIntersect(slice1, missing_interval)
 
                     # Note: if the slice has a stride>1, it might not intersect,
                     # so don't interpose missing data in this case.
                     if missing_slice is not None:
                         slicelist[npart1] = missing_slice
-                        resultlist.append((None, copy.copy(slicelist)))
+                        resultlist.append((None,copy.copy(slicelist)))
                 prevhigh = interval[1]
 
                 # generate the filename
-                matchnames = [realid, None, None, None, None, None, None]
+                matchnames = [realid, None, None, None, None,None,None]
                 matchnames = self.genMatch(axis, interval, matchnames)
                 filename = self.getFilePath(matchnames, template)
 
                 # adjust the partslice for the interval offset
                 # and replace in the slice list
-                filestart = partslice.start - interval[0]
-                filestop = partslice.stop - interval[0]
-                fileslice = slice(filestart, filestop, partslice.step)
+                filestart = partslice.start-interval[0]
+                filestop = partslice.stop-interval[0]
+                fileslice = slice(filestart,filestop,partslice.step)
                 slicelist[npart1] = fileslice
 
-                resultlist.append((filename, copy.copy(slicelist)))
+                resultlist.append((filename,copy.copy(slicelist)))
 
-            result = (1, (npart1,), resultlist)
+            result = (1,(npart1,),resultlist)
 
         # If two partitioned axes, 2-D version of previous case
-        if npart == 2:
+        if npart==2:
             slice1 = slicelist[npart1]
             slice2 = slicelist[npart2]
-            (axis1, startelem1, length1, true_length1) = self.domain[npart1]
-            (axis2, startelem2, length2, true_length2) = self.domain[npart2]
+            (axis1,startelem1,length1,true_length1) = self.domain[npart1]
+            (axis2,startelem2,length2,true_length2) = self.domain[npart2]
             partition1 = slicePartition(slice1, self.getPartition(axis1))
             partition2 = slicePartition(slice2, self.getPartition(axis2))
-            if partition1 == [] or partition2 == []:
-                return (2, (npart1, npart2), None)
+            if partition1==[] or partition2==[]:
+                return (2, (npart1,npart2), None)
 
             # For each (interval, partslice) in the partition:
             resultlist = []
             (firstinterval1, firstslice1) = partition1[0]
             prevhigh1 = firstinterval1[0]
-            for (interval1, partslice1) in partition1:
+            for (interval1,partslice1) in partition1:
 
                 # If the previous interval high is less than
                 # the current interval low value, interpose
                 # missing data.
                 low = interval1[0]
-                if prevhigh1 < low:
-                    missing_interval = (prevhigh1, low)
+                if prevhigh1<low:
+                    missing_interval = (prevhigh1,low)
                     missing_slice = sliceIntersect(slice1, missing_interval)
                     if missing_slice is not None:
                         slicelist[npart1] = missing_slice
-                        resultlist.append([(None, copy.copy(slicelist))])
+                        resultlist.append( [(None,copy.copy(slicelist))] )
                 prevhigh1 = interval1[1]
 
                 # generate matchnames
-                matchnames = [realid, None, None, None, None, None, None]
+                matchnames = [realid, None, None, None, None,None,None]
                 matchnames = self.genMatch(axis1, interval1, matchnames)
 
                 # adjust the partslice for the interval offset
                 # and replace in the slice list
-                filestart = partslice1.start - interval1[0]
-                filestop = partslice1.stop - interval1[0]
-                fileslice = slice(filestart, filestop, partslice1.step)
+                filestart = partslice1.start-interval1[0]
+                filestop = partslice1.stop-interval1[0]
+                fileslice = slice(filestart,filestop,partslice1.step)
                 slicelist[npart1] = fileslice
 
                 chunklist = []
                 (firstinterval2, firstslice2) = partition2[0]
                 prevhigh2 = firstinterval2[0]
-                for (interval2, partslice2) in partition2:
+                for (interval2,partslice2) in partition2:
 
                     # If the previous interval high is less than
                     # the current interval low value, interpose
                     # missing data.
                     low = interval2[0]
-                    if prevhigh2 < low:
-                        missing_interval = (prevhigh2, low)
-                        missing_slice = sliceIntersect(
-                            slice1, missing_interval)
+                    if prevhigh2<low:
+                        missing_interval = (prevhigh2,low)
+                        missing_slice = sliceIntersect(slice1, missing_interval)
                         if missing_slice is not None:
                             slicelist[npart2] = missing_slice
-                            chunklist.append((None, copy.copy(slicelist)))
+                            chunklist.append((None,copy.copy(slicelist)))
                     prevhigh2 = interval2[1]
 
                     # generate the filename
                     matchnames = self.genMatch(axis2, interval2, matchnames)
                     filename = self.getFilePath(matchnames, template)
 
-                    filestart = partslice2.start - interval2[0]
-                    filestop = partslice2.stop - interval2[0]
-                    fileslice = slice(filestart, filestop, partslice2.step)
+                    filestart = partslice2.start-interval2[0]
+                    filestop = partslice2.stop-interval2[0]
+                    fileslice = slice(filestart,filestop,partslice2.step)
                     slicelist[npart2] = fileslice
 
-                    chunklist.append((filename, copy.copy(slicelist)))
+                    chunklist.append((filename,copy.copy(slicelist)))
 
                 resultlist.append(chunklist)
 
-            result = (2, (npart1, npart2), resultlist)
+            result = (2,(npart1,npart2),resultlist)
 
         return result
 
-    def expertSlice(self, initslist):
+    def expertSlice (self, initslist):
 
         # Handle negative slices
-        revlist = []
-        # Slices to apply to result if reversals needed
+        revlist = []                    # Slices to apply to result if reversals needed
         slist = []                      # Slices with positive strides
         haveReversals = 0               # True iff result array needs reversing
-        i = 0
+        i=0
         for s in initslist:
-            if s.step < 0:
+            if s.step<0:
                 axislen = self.shape[i]
-                slist.append(reverseSlice(s, axislen))
-                revlist.append(slice(None, None, -1))
+                slist.append(reverseSlice(s,axislen))
+                revlist.append(slice(None,None,-1))
                 haveReversals = 1
             else:
                 slist.append(s)
-                revlist.append(slice(None, None, 1))
+                revlist.append(slice(None,None,1))
             i += 1
 
         # This does most of the work
@@ -494,17 +494,18 @@ class DatasetVariable(AbstractVariable):
         for i in range(len(self.domain)):
             if self.domain[i][0].isForecast():
                 fci = i
+                fcv = initslist[i].start
                 break
 
         # If no intersection, return an 'empty' array.
         if partitionSlices is None:
-            return numpy.ma.zeros((0,), self._numericType_)
+            return numpy.ma.zeros((0,),self._numericType_)
 
         # Handle rank-0 variables separately
         if self.rank() == 0:
             filename, dumlist = partitionSlices
 
-            f = self.parent.openFile(filename, 'r')
+            f = self.parent.openFile(filename,'r')
             try:
                 var = f.variables[self.name_in_file]
                 result = var.getValue()
@@ -513,36 +514,32 @@ class DatasetVariable(AbstractVariable):
             return result
 
         # If no partitioned axes, just read the data
-        if npart == 0:
+        if npart==0:
             filename, slicelist = partitionSlices
 
-            f = self.parent.openFile(filename, 'r')
+            f = self.parent.openFile(filename,'r')
             try:
                 var = f.variables[self.name_in_file]
-                if fci is None:
-                    result = self._returnArray(
-                        var.getitem(*tuple(slicelist)),
-                        0)
+                if fci==None:
+                    result = self._returnArray(apply(var.getitem,tuple(slicelist)),0)
                 else:
                     # If there's a forecast axis, the file doesn't know about it so
                     # don't use it in slicing data out of the file.
-                    result = self._returnArray(
-                        var.getitem(
-                            *tuple(slicelist[0:fci] + slicelist[fci + 1:])),
-                        0)
+                    result = self._returnArray( apply( var.getitem, \
+                                   tuple( slicelist[0:fci]+slicelist[fci+1:] ) ), \
+                                                0 )
                     # But the result still needs an index in the forecast direction,
-                    # which is simple to do because there is only one forecast
-                    # per file:
-                    result.resize(map(lenSlice, slicelist))
+                    # which is simple to do because there is only one forecast per file:
+                    result.resize( map(lenSlice,slicelist) )
 
             finally:
                 f.close()
             sh = result.shape
             if 0 in sh:
-                raise CDMSError(IndexError + 'Coordinates out of Domain')
+                raise CDMSError, IndexError + 'Coordinates out of Domain'
 
         # If one partitioned axes:
-        elif npart == 1:
+        elif npart==1:
 
             npart1 = idims[0]
             resultlist = []
@@ -551,47 +548,44 @@ class DatasetVariable(AbstractVariable):
                 # If the slice is missing, interpose missing data
                 if filename is None:
                     shapelist = map(lenSlice, slicelist)
-                    chunk = numpy.ma.zeros(
-                        tuple(shapelist),
-                        self._numericType_)
+                    chunk = numpy.ma.zeros(tuple(shapelist),self._numericType_)
                     chunk[...] = numpy.ma.masked
 
                 # else read the data and close the file
                 else:
-                    f = self.parent.openFile(filename, 'r')
+                    f = self.parent.openFile(filename,'r')
                     try:
                         var = f.variables[self.name_in_file]
-                        if fci is None:
-                            chunk = var.getitem(*tuple(slicelist))
+                        if fci==None:
+                            chunk = apply(var.getitem,tuple(slicelist))
                         else:
                             # If there's a forecast axis, the file doesn't know about it so
                             # don't use it in slicing data out of the file.
-                            chunk = var.getitem(
-                                *tuple(slicelist[0:fci] + slicelist[fci + 1:]))
+                            chunk = apply( var.getitem, \
+                                           tuple( slicelist[0:fci]+slicelist[fci+1:] ) )
                             # But the chunk still needs an index in the forecast direction,
-                            # which is simple to do because there is only one
-                            # forecast per file:
-                            chunk.resize(map(lenSlice, slicelist))
+                            # which is simple to do because there is only one forecast per file:
+                            chunk.resize( map(lenSlice,slicelist) )
 
                     finally:
                         f.close()
                     sh = chunk.shape
                     if 0 in sh:
-                        raise CDMSError('Coordinates out of Domain')
+                        raise CDMSError, 'Coordinates out of Domain'
 
-                resultlist.append(self._returnArray(chunk, 0))
+                resultlist.append(self._returnArray(chunk,0))
 
             # Combine the chunks into a single array
             # Note: This works because slicelist is the same length
             # as the domain, and var.getitem returns a chunk
             # with singleton dimensions included. This means that
             # npart1 corresponds to the correct dimension of chunk.
-            result = numpy.ma.concatenate(resultlist, axis=npart1)
+            result = numpy.ma.concatenate(resultlist,axis=npart1)
             for chunk in resultlist:
                 del(chunk)
 
         # If two partitioned axes, 2-D version of previous case
-        if npart == 2:
+        if npart==2:
             npart1, npart2 = idims
 
             resultlist = []
@@ -602,45 +596,42 @@ class DatasetVariable(AbstractVariable):
                     # If the slice is missing, interpose missing data
                     if filename is None:
                         shapelist = map(lenSlice, slicelist)
-                        chunk = numpy.ma.zeros(
-                            tuple(shapelist),
-                            self._numericType_)
+                        chunk = numpy.ma.zeros(tuple(shapelist),self._numericType_)
                         chunk[...] = numpy.ma.masked
 
                     # else read the data and close the file
                     else:
-                        f = self.parent.openFile(filename, 'r')
+                        f = self.parent.openFile(filename,'r')
                         try:
                             var = f.variables[self.name_in_file]
-                            if fci is None:
-                                chunk = var.getitem(*tuple(slicelist))
+                            if fci==None:
+                                chunk = apply(var.getitem,tuple(slicelist))
                             else:
                                 # If there's a forecast axis, the file doesn't know about it so
                                 # don't use it in slicing data out of the file.
-                                chunk = var.getitem(
-                                    *tuple(slicelist[0:fci] + slicelist[fci + 1:]))
+                                chunk = apply( var.getitem, \
+                                               tuple( slicelist[0:fci]+slicelist[fci+1:] ) )
                                 # But the chunk still needs an index in the forecast direction,
-                                # which is simple to do because there is only
-                                # one forecast per file:
-                                chunk.resize(map(lenSlice, slicelist))
+                                # which is simple to do because there is only one forecast per file:
+                                chunk.resize( map(lenSlice,slicelist) )
 
                         finally:
                             f.close()
                         sh = chunk.shape
                         if 0 in sh:
-                            raise CDMSError('Coordinates out of Domain')
-                    chunklist.append(self._returnArray(chunk, 0))
+                            raise CDMSError, 'Coordinates out of Domain'
+                    chunklist.append(self._returnArray(chunk,0))
 
                 # Note: This works because slicelist is the same length
                 # as the domain, and var.getitem returns a chunk
                 # with singleton dimensions included. This means that
                 # npart1 corresponds to the correct dimension of chunk.
-                bigchunk = numpy.ma.concatenate(chunklist, axis=npart2)
+                bigchunk = numpy.ma.concatenate(chunklist,axis=npart2)
                 for chunk in chunklist:
                     del(chunk)
                 resultlist.append(bigchunk)
 
-            result = numpy.ma.concatenate(resultlist, axis=npart1)
+            result = numpy.ma.concatenate(resultlist,axis=npart1)
             for bigchunk in resultlist:
                 del(bigchunk)
 
@@ -651,15 +642,16 @@ class DatasetVariable(AbstractVariable):
 
         return result
 
-    shape = property(_getShape, None)
-# shape = _getShape
-    dtype = property(_getdtype, None)
 
-# PropertiedClasses.set_property (DatasetVariable, 'shape',
-# DatasetVariable._getShape, nowrite=1,
-# nodelete=1)
-# PropertiedClasses.set_property (DatasetVariable, 'dtype',
-# DatasetVariable._getdtype, nowrite=1,
-# nodelete=1)
+    shape = property(_getShape,None)
+##     shape = _getShape
+    dtype = property(_getdtype,None)
+    
+## PropertiedClasses.set_property (DatasetVariable, 'shape', 
+##                                   DatasetVariable._getShape, nowrite=1,
+##                                   nodelete=1)
+## PropertiedClasses.set_property (DatasetVariable, 'dtype', 
+##                                   DatasetVariable._getdtype, nowrite=1,
+##                                   nodelete=1)
 
-# internattr.add_internal_attribute(DatasetVariable, 'domain')
+## internattr.add_internal_attribute(DatasetVariable, 'domain')
diff --git a/Packages/cdms2/Lib/xlink.py b/Packages/cdms2/Lib/xlink.py
new file mode 100644
index 000000000..870b5fafd
--- /dev/null
+++ b/Packages/cdms2/Lib/xlink.py
@@ -0,0 +1,9 @@
+"""
+CDMS Xlink objects - pointers to other objects
+"""
+class Xlink(CdmsObj):
+    def __init__(self,xlinkNode=None):
+        assert xlinkNode is None or xlinkNode.tag=='xlink',\
+               'Node is not a link node'
+        CdmsObj.__init__(self,xlinkNode)
+
-- 
GitLab


From 8424d38c99fa8c2aad8a788e42ef457ea32e8ad3 Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Mon, 11 Jul 2016 14:10:13 -0700
Subject: [PATCH 192/196] Implemented gettextextents()

---
 Packages/vcs/vcs/VTKPlots.py | 33 ++++++++++++++++++++++++++++++++-
 1 file changed, 32 insertions(+), 1 deletion(-)

diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 7c24f56cb..2082199fa 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -1243,7 +1243,38 @@ class VTKVCSBackend(object):
         return VTKAnimate.VTKAnimate(*args, **kargs)
 
     def gettextextent(self, textorientation, texttable):
-        warnings.warn("Please implement gettextextent for VTK Backend")
+        # Ensure renwin exists
+        self.createRenWin()
+
+        if isinstance(textorientation, (str, unicode)):
+            textorientation = vcs.gettextorientation(textorientation)
+        if isinstance(texttable, (str, unicode)):
+            texttable = vcs.gettexttable(texttable)
+
+        from vtk_ui.text import text_dimensions
+        
+        text_property = vtk.vtkTextProperty()
+        win_size = self.renWin.GetSize()
+        vcs2vtk.prepTextProperty(text_property, win_size, to=textorientation, tt=texttable)
+        
+        dpi = self.renWin.GetDPI()
+        
+        length = max(len(texttable.string), len(texttable.x), len(texttable.y))
+        
+        strings = texttable.string + [texttable.string[-1]] * (length - len(texttable.string))
+        xs = texttable.x + [texttable.x[-1]] * (length - len(texttable.x))
+        ys = texttable.y + [texttable.y[-1]] * (length - len(texttable.y))
+
+        labels = zip(strings, xs, ys)
+
+        extents = []
+
+        for s, x, y in labels:
+            width, height = text_dimensions(s, text_property, dpi)
+            extents.append([x, x + float(width) / win_size[0], y, y + float(height) / win_size[1]])
+
+        return extents
+
 
     def getantialiasing(self):
         if self.renWin is None:
-- 
GitLab


From e6d5def8c5bd811c0a610248c104867f83fe351b Mon Sep 17 00:00:00 2001
From: Sam Fries <fries2@llnl.gov>
Date: Mon, 11 Jul 2016 15:28:41 -0700
Subject: [PATCH 193/196] Added test, fixed some bugginess

---
 Packages/vcs/vcs/VTKPlots.py        |  3 ++-
 testing/vcs/CMakeLists.txt          |  7 +++++++
 testing/vcs/test_vcs_textextents.py | 21 +++++++++++++++++++++
 3 files changed, 30 insertions(+), 1 deletion(-)
 create mode 100644 testing/vcs/test_vcs_textextents.py

diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 2082199fa..cc48e7c51 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -1254,7 +1254,8 @@ class VTKVCSBackend(object):
         from vtk_ui.text import text_dimensions
         
         text_property = vtk.vtkTextProperty()
-        win_size = self.renWin.GetSize()
+        info = self.canvasinfo()
+        win_size = info["width"], info["height"]
         vcs2vtk.prepTextProperty(text_property, win_size, to=textorientation, tt=texttable)
         
         dpi = self.renWin.GetDPI()
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index 828e88f72..88f5b9c60 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -1088,6 +1088,13 @@ cdat_add_test(test_vcs_no_continents
   ${BASELINE_DIR}/test_vcs_no_continents.png
 )
 
+cdat_add_test(test_vcs_textextents
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_textextents.py
+  ${BASELINE_DIR}/test_textextents.png
+)
+
+
 
 add_subdirectory(vtk_ui)
 add_subdirectory(editors)
diff --git a/testing/vcs/test_vcs_textextents.py b/testing/vcs/test_vcs_textextents.py
new file mode 100644
index 000000000..a26aa722b
--- /dev/null
+++ b/testing/vcs/test_vcs_textextents.py
@@ -0,0 +1,21 @@
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
+
+# We have to specify the geometry to make sure that the size of the canvas doesn't change between the init and the plot functions
+x = regression.init(bg=True, geometry=(1200,1091))
+text = x.createtext()
+text.string = ["A very very very very long string", "A\nmult-line\nstring", "Short"]
+# Use any value for initial; then we'll manually "right align" using the text extents
+text.x = [.1]
+text.y = [.1, .5, .9]
+
+# This function only gets the extents for the *current* size
+extents = x.gettextextent(text)
+# Now we'll manually populate this with the desired values
+text.x = []
+for min_x, max_x, min_y, max_y in extents:
+    w = max_x - min_x
+    #h = max_y - min_y
+    text.x.append(1 - w)
+
+x.plot(text, bg=1)
+regression.run(x, "test_textextents.png")
-- 
GitLab


From dedace8e2959446451bfb898e88af474e5c3e893 Mon Sep 17 00:00:00 2001
From: Sam Fries <samuelbfries@gmail.com>
Date: Wed, 13 Jul 2016 08:30:23 -0700
Subject: [PATCH 194/196] Docstring cleanup (#2056)

* Fixed up module level and init() docstrings

* Cleaned up xmldocs, and fixed a bunch of strings

* Migrating changes from main repo to my fork. create* mostly done.

* Cleaning up docstrings. Including actual object return types.

* Cleaning up documentation. Testing example functions and fixing as necessary.

* Too many things. Formatting fixed on lots of stuff.

* More Canvas.py docstrings cleaned up.

* Formatting fixes.

* Canvas/manageElements TODOs through G (mostly)

* Mostly finished with most of Canvas. Have minor questions on a bunch of functions.

* Canvas formatting done. Missing some info.

* Migrating changes from main repo to my fork. create* mostly done.

* Cleaning up docstrings. Including actual object return types.

* Cleaning up documentation. Testing example functions and fixing as necessary.

* Too many things. Formatting fixed on lots of stuff.

* More Canvas.py docstrings cleaned up.

* Formatting fixes.

* Canvas/manageElements TODOs through G (mostly)

* Mostly finished with most of Canvas. Have minor questions on a bunch of functions.

* Canvas formatting done. Missing some info.

* Fixed some flake8 issues

* Trimmed docstrings to fit 120char standard.

* manageElements done. Working on boxfill.

* Fixed colormap typo

* Revert "manageElements done. Working on boxfill."

This reverts commit 433de3f4c9fb26b3bd50bfb4653cfa49aa73b729.

* Flake8

* Fixed canvasraised name to make @doutriaux1 happy
---
 Packages/vcs/vcs/Canvas.py         | 2466 +++++++++++++---------------
 Packages/vcs/vcs/VTKPlots.py       |    6 +
 Packages/vcs/vcs/__init__.py       |  125 +-
 Packages/vcs/vcs/manageElements.py | 2248 ++++++++++++++++---------
 Packages/vcs/vcs/utils.py          |   12 +
 Packages/vcs/vcs/xmldocs.py        |  223 ++-
 6 files changed, 2793 insertions(+), 2287 deletions(-)

diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 903e22a7a..ca1564d54 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -1,33 +1,4 @@
-#!/usr/bin/env python
-# Adapted for numpy/ma/cdms2 by convertcdms.py
-#
-# The VCS Canvas API controls -  canvas module
-#
-###############################################################################
-#                                                                             #
-# Module:       canvas module                                                 #
-#                                                                             #
-# Copyright:    "See file Legal.htm for copyright information."               #
-#                                                                             #
-# Authors:      PCMDI Software Team                                           #
-#               Lawrence Livermore National Laboratory:                       #
-#               support@pcmdi.llnl.gov                                        #
-#                                                                             #
-# Description:  PCMDI's VCS Canvas is used to display plots and to create and #
-#               run animations.  It is always visible on the screen in a      #
-#               landscape (width exceeding height), portrait (height exceeding#
-#               width), or full-screen mode.                                  #
-#                                                                             #
-# Version: 2.4                                                                #
-#                                                                             #
-###############################################################################
-
-"""Canvas: the class representing a vcs drawing window
-Normally, created by vcs.init()
-Contains the method plot.
-"""
 import warnings
-from pauser import pause
 import numpy.ma
 import MV2
 import numpy
@@ -68,6 +39,7 @@ from xmldocs import plot_keywords_doc, graphics_method_core, axesconvert, xaxisc
     plot_2_1D_options
 gui_canvas_closed = 0
 canvas_closed = 0
+import vcsaddons  # noqa
 import vcs.manageElements  # noqa
 import configurator  # noqa
 from projection import no_deformation_projections  # noqa
@@ -126,7 +98,7 @@ def dictionarytovcslist(dictionary, name):
 
 def _determine_arg_list(g_name, actual_args):
     "Determine what is in the argument list for plotting graphics methods"
-    import vcsaddons
+
     itemplate_name = 2
     igraphics_method = 3
     igraphics_option = 4
@@ -309,22 +281,12 @@ def _process_keyword(obj, target, source, keyargs, default=None):
 
 
 class Canvas(object):
-
     """
- Function: Canvas                     # Construct a VCS Canvas class Object
-
- Description of Function:
-    Construct the VCS Canas object. There can only be at most 8 VCS
-    Canvases open at any given time.
+    The object onto which all plots are drawn.
 
- Example of Use:
-    a=vcs.Canvas()                    # This examples constructs a VCS Canvas
-"""
-    ##########################################################################
-    #                                                                        #
-    # Set attributes for VCS Canvas Class (i.e., set VCS Canvas Mode).       #
-    #                                                                        #
-    ##########################################################################
+    Usually created using `vcs.init`, this object provides easy access
+    to the functionality of the entire VCS module.
+    """
     __slots__ = [
         '_mode',
         '_pause_time',
@@ -332,9 +294,7 @@ class Canvas(object):
         '_worldcoordinate',
         '_winfo_id',
         '_varglist',
-        '_canvas_gui',
         '_animate_info',
-        '_canvas_template_editor',
         '_isplottinggridded',
         '_user_actions_names',
         '_user_actions',
@@ -347,7 +307,6 @@ class Canvas(object):
         'worldcoordinate',
         'winfo_id',
         'varglist',
-        'canvas_gui'
         'animate_info',
         'canvas_template_editor',
         'isplottinggridded',
@@ -357,7 +316,6 @@ class Canvas(object):
         'user_actions_names',
         'user_actions',
         'size',
-        'canvas_guianimate_info',
         'ParameterChanged',
         'colormap',
         'backgroundcolor',
@@ -444,13 +402,6 @@ class Canvas(object):
         return self._varglist
     varglist = property(_getvarglist, _setvarglist)
 
-    def _setcanvas_gui(self, value):
-        self._canvas_gui = value
-
-    def _getcanvas_gui(self):
-        return self._canvas_gui
-    canvas_gui = property(_getcanvas_gui, _setcanvas_gui)
-
     def _setcanvas(self, value):
         raise vcsError("Error, canvas is not an attribute you can set")
 
@@ -497,15 +448,6 @@ class Canvas(object):
         return self._worldcoordinate
     worldcoordinate = property(_getworldcoordinate, _setworldcoordinate)
 
-    def _setcanvas_template_editor(self, value):
-        self._canvas_template_editor = value  # No check on this!
-
-    def _getcanvas_template_editor(self):
-        return self._canvas_template_editor
-    canvas_template_editor = property(
-        _getcanvas_template_editor,
-        _setcanvas_template_editor)
-
     def _setisplottinggridded(self, value):
         if not isinstance(value, bool):
             raise vcsError("isplottinggridded must be boolean")
@@ -833,34 +775,23 @@ class Canvas(object):
 #        tv = self._datawc_tv( tv, arglist )
         return tv
 
-    ##########################################################################
-    #                                                                        #
-    # Print out the object's doc string.                                     #
-    #                                                                        #
-    ##########################################################################
     def objecthelp(self, *arg):
         """
- Function: objecthelp               # Print out the object's doc string
-
- Description of Function:
     Print out information on the VCS object. See example below on its use.
 
- Example of Use:
-    a=vcs.init()
+    :Example:
 
-    ln=a.getline('red')                 # Get a VCS line object
-    a.objecthelp(ln)                    # This will print out information on how to use ln
+::
+
+    a=vcs.init()
+    # Get a VCS line object
+    ln=a.getline('red')
+    # This will print out information on how to use ln
+    a.objecthelp(ln)
     """
         for x in arg:
             print getattr(x, "__doc__", "")
 
-    ############################################################################
-    #                                                                          #
-    # Initialize the VCS Canvas and set the Canvas mode to 0. Because the mode #
-    # is set to 0, the user will have to manually update the VCS Canvas by     #
-    # using the "update" function.                                             #
-    #                                                                          #
-    ############################################################################
     def __init__(self, mode=1, pause_time=0,
                  call_from_gui=0, size=None, backend="vtk", geometry=None, bg=None):
         self._canvas_id = vcs.next_canvas_id
@@ -885,9 +816,7 @@ class Canvas(object):
 
         self.winfo_id = -99
         self.varglist = []
-        self.canvas_gui = None
         self.isplottinggridded = False
-        self.canvas_guianimate_info = None
 
         if size is None:
             psize = 1.2941176470588236
@@ -963,12 +892,10 @@ class Canvas(object):
 
 # Initial.attributes is being called in main.c, so it is not needed here!
 # Actually it is for taylordiagram graphic methods....
-###########################################################################################
-#  Okay, then this is redundant since it is done in main.c. When time perments, put the   #
-#  taylordiagram graphic methods attributes in main.c Because this is here we must check  #
-#  to make sure that the initial attributes file is called only once for normalization    #
-#  purposes....                                                                           #
-###########################################################################################
+#  Okay, then this is redundant since it is done in main.c. When time permits, put the
+#  taylordiagram graphic methods attributes in main.c Because this is here we must check
+#  to make sure that the initial attributes file is called only once for normalization
+#  purposes....
 
         self.canvas_template_editor = None
         self.ratio = '0'
@@ -998,26 +925,25 @@ class Canvas(object):
 
     # Functions to set/querie drawing of UV-CDAT logo
     def drawlogoon(self):
-        """Turn on drawing of logo on pix"""
+        """Show UV-CDAT logo on the canvas"""
         self.enableLogo = True
 
     def drawlogooff(self):
-        """Turn off drawing of logo on pix"""
+        """Hide UV-CDAT logo on the canvas"""
         self.enableLogo = False
 
     def getdrawlogo(self):
-        """Return value of draw logo"""
+        """
+        Returns value of draw logo
+
+        :returns: Boolean value of system variable which indicates whether logo will be drawn
+        :rtype: bool
+        """
         return self.enableLogo
 
     def initLogoDrawing(self):
         self.drawLogo = self.enableLogo
 
-    #############################################################################
-    #                                                                           #
-    # Update wrapper function for VCS.                                          #
-    #                                                                           #
-    #############################################################################
-
     def update(self, *args, **kargs):
         """
  Function: update                   # Update the VCS Canvas.
@@ -1027,7 +953,7 @@ class Canvas(object):
     set to manual, then use this function to update the plot(s)
     manually.
 
- Example of Use:
+ :Example:
     ...
 
     a=vcs.init()
@@ -1045,25 +971,6 @@ class Canvas(object):
 
         return self.backend.update(*args, **kargs)
 
-    #############################################################################
-    #                                                                           #
-    # Update wrapper function for VCS with a check to update the continents.    #
-    #                                                                           #
-    #############################################################################
-    def _update_continents_check(self, *args):
-
-        a = self.canvas.updatecanvas_continents(*args)
-        self.flush()  # update the canvas by processing all the X events
-        self.backing_store()
-        pause(self.pause_time)
-
-        return a
-
-    #############################################################################
-    #                                                                           #
-    # Script VCS primary or secondary elements wrapper functions for VCS.       #
-    #                                                                           #
-    #############################################################################
     def scriptobject(self, obj, script_filename=None, mode=None):
         """
  Function: scriptobject       # Script a single primary or secondary class object
@@ -1085,7 +992,7 @@ class Canvas(object):
           However, a `default' attribute set that has been copied under a
           different name can be saved as a script file.
 
- Example of Use:
+ :Example:
     a=vcs.init()
     l=a.getline('red')         # To Modify an existing line object
     i=x.createisoline('dean')  # Create an instance of default isoline object
@@ -1138,13 +1045,14 @@ class Canvas(object):
         else:
             print 'This is not a template, graphics method or secondary method object.'
 
-    #############################################################################
-    #                                                                           #
-    # Remove VCS primary and secondary methods wrapper functions for VCS.       #
-    #                                                                           #
-    #############################################################################
-
     def removeobject(self, obj):
+        """
+        Removes a VCS object from the cache of available objects that can be referred to by name.
+
+        :param obj: The VCS object to be removed.
+        :type obj: a VCS object
+        :returns: ???
+        """
         __doc__ = vcs.removeobject.__doc__  # noqa
         return vcs.removeobject(obj)
 
@@ -1152,18 +1060,23 @@ class Canvas(object):
         return vcs.removeP(*args)
 
     def clean_auto_generated_objects(self, type=None):
-        """ cleans all self/auto genrated objects in vcs, only if they're not in use
-        Example:
-        import vcs
-        x=vcs.init()
-        x.clean_auto_generated_objects() # cleans everything
-        x.clean_auto_generated_objects('template') # cleans template objects
+        """
+        Cleans up all automatically generated VCS objects.
+
+        This function will delete all references to objects that
+        VCS created automatically in response to user actions but are
+        no longer in use. This shouldn't be necessary most of the time,
+        but if you're running into performance/memory issues, calling it
+        periodically may help.
+
+        :param type: Type of objects to remove. By default, will remove everything.
+        :type type: None, str, list/tuple (of str)
         """
 
         if type is None:
             type = self.listelements()
             type.remove("fontNumber")
-        elif isinstance(type, str):
+        elif isinstance(type, (str, unicode)):
             type = [type, ]
         elif not isinstance(type, (list, tuple)):
             return
@@ -1191,12 +1104,8 @@ class Canvas(object):
 
     def check_name_source(self, name, source, typ):
         return vcs.check_name_source(name, source, typ)
+    check_name_source.__doc__ = vcs.manageElements.check_name_source.__doc__
 
-    #############################################################################
-    #                                                                           #
-    # Template functions for VCS.                                               #
-    #                                                                           #
-    #############################################################################
     def createtemplate(self, name=None, source='default'):
         return vcs.createtemplate(name, source)
     createtemplate.__doc__ = vcs.manageElements.createtemplate.__doc__
@@ -1205,11 +1114,6 @@ class Canvas(object):
         return vcs.gettemplate(Pt_name_src)
     gettemplate.__doc__ = vcs.manageElements.gettemplate.__doc__
 
-    #############################################################################
-    #                                                                           #
-    # Projection functions for VCS.                                             #
-    #                                                                           #
-    #############################################################################
     def createprojection(self, name=None, source='default'):
         return vcs.createprojection(name, source)
     createprojection.__doc__ = vcs.manageElements.createprojection.__doc__
@@ -1218,11 +1122,6 @@ class Canvas(object):
         return vcs.getprojection(Proj_name_src)
     getprojection.__doc__ = vcs.manageElements.getprojection.__doc__
 
-    #############################################################################
-    #                                                                           #
-    # Boxfill functions for VCS.                                                #
-    #                                                                           #
-    #############################################################################
     def createboxfill(self, name=None, source='default'):
         return vcs.createboxfill(name, source)
     createboxfill.__doc__ = vcs.manageElements.createboxfill.__doc__
@@ -1233,59 +1132,54 @@ class Canvas(object):
 
     def boxfill(self, *args, **parms):
         """
-Options:::
-%s
-%s
-%s
-:::
- Input:::
-%s
-    :::
- Output:::
-%s
-    :::
+        Plot a boxfill.
 
- Function: boxfill                        # Generate a boxfill plot
+        Generate a boxfill plot given the data, boxfill graphics method, and
+        template. If no boxfill class object is given, then the 'default' boxfill
+        graphics method is used. Similarly, if no template class object is given,
+        then the 'default' template is used.
 
- Description of Function:
-    Generate a boxfill plot given the data, boxfill graphics method, and
-    template. If no boxfill class object is given, then the 'default' boxfill
-    graphics method is used. Similarly, if no template class object is given,
-    then the 'default' template is used.
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
-    a.show('boxfill')                        # Show all the existing boxfill graphics methods
-    box=a.getboxfill('quick')                # Create instance of 'quick'
-    a.boxfill(array,box)                # Plot array using specified box and default
-                                        #         template
-    templt=a.gettemplate('AMIP')        # Create an instance of template 'AMIP'
-    a.clear()                           # Clear VCS canvas
-    a.boxfill(array,box,template)       # Plot array using specified box and template
-    a.boxfill(box,array,template)       # Plot array using specified box and template
-    a.boxfill(template,array,box)       # Plot array using specified box and template
-    a.boxfill(template,array,box)       # Plot array using specified box and template
-    a.boxfill(array,'AMIP','quick')     # Use 'AMIP' template and 'quick' boxfill
-    a.boxfill('AMIP',array,'quick')     # Use 'AMIP' template and 'quick' boxfill
-    a.boxfill('AMIP','quick',array)     # Use 'AMIP' template and 'quick' boxfill
-
-###################################################################################################################
-###########################################                         ###############################################
-########################################## End boxfill Description ################################################
-#########################################                         #################################################
-###################################################################################################################
+    # Show all the existing boxfill graphics methods
+    a.show('boxfill')
+    # Create instance of 'quick'
+    box=a.getboxfill('quick')
+    # Plot array using specified box and default template
+    a.boxfill(array,box)
+    # Create an instance of template 'AMIP'
+    templt=a.gettemplate('AMIP')
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified box and template
+    a.boxfill(array,box,template)
+    # Plot array using specified box and template
+    a.boxfill(box,array,template)
+    # Plot array using specified box and template
+    a.boxfill(template,array,box)
+    # Plot array using specified box and template
+    a.boxfill(template,array,box)
+    # Use 'AMIP' template and 'quick' boxfill
+    a.boxfill(array,'AMIP','quick')
+    # Use 'AMIP' template and 'quick' boxfill
+    a.boxfill('AMIP',array,'quick')
+    # Use 'AMIP' template and 'quick' boxfill
+    a.boxfill('AMIP','quick',array)
 
+%s
+%s
+%s
+%s
+%s
 """
         arglist = _determine_arg_list('boxfill', args)
         return self.__plot(arglist, parms)
     boxfill.__doc__ = boxfill.__doc__ % (
         plot_keywords_doc, graphics_method_core, axesconvert, plot_2D_input, plot_output)
 
-    #############################################################################
-    #                                                                           #
-    # Taylordiagram functions for VCS.                                          #
-    #                                                                           #
-    #############################################################################
     def createtaylordiagram(self, name=None, source='default'):
         return vcs.createtaylordiagram(name, source)
     createtaylordiagram.__doc__ = vcs.manageElements.createtaylordiagram.__doc__
@@ -1296,32 +1190,32 @@ Options:::
 
     def taylordiagram(self, *args, **parms):
         """
- Function: taylordiagram                        # Generate an taylordiagram plot
+        Generate a taylor diagram plot.
 
- Description of Function:
-    Generate a taylordiagram plot given the data, taylordiagram graphics method, and
-    template. If no taylordiagram class object is given, then the 'default' taylordiagram
-    graphics method is used. Similarly, if no template class object is given,
-    then the 'default' template is used.
+        Generate a taylordiagram plot given the data, taylordiagram graphics method, and
+        template. If no taylordiagram class object is given, then the 'default' taylordiagram
+        graphics method is used. Similarly, if no template class object is given,
+        then the 'default' template is used.
+
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
-    a.show('taylordiagram')                   # Show all the existing taylordiagram graphics methods
-    td=a.gettaylordiagram()                   # Create instance of 'default'
-    a.taylordiagram(array,td)                 # Plot array using specified iso and default
-                                              #       template
-    a.clear()                                 # Clear VCS canvas
-    a.taylordiagram(array,td,template)        # Plot array using specified iso and template
+    # Show all the existing taylordiagram graphics methods
+    a.show('taylordiagram')
+    # Create instance of 'default'
+    td=a.gettaylordiagram()
+    # Plot array using specified iso and default template
+    a.taylordiagram(array,td)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified iso and template
+    a.taylordiagram(array,td,template)
 """
         arglist = _determine_arg_list('taylordiagram', args)
         return self.__plot(arglist, parms)
 
-    #############################################################################
-    #                                                                           #
-    # Meshfill functions for VCS.                                               #
-    #                                                                           #
-    #############################################################################
-
     def createmeshfill(self, name=None, source='default'):
         return vcs.createmeshfill(name, source)
     createmeshfill.__doc__ = vcs.manageElements.createmeshfill.__doc__
@@ -1332,9 +1226,6 @@ Options:::
 
     def meshfill(self, *args, **parms):  # noqa
         """
- Function: meshfill               # Generate an meshfill plot
-
- Description of Function:
     Generate a meshfill plot given the data, the mesh, a meshfill graphics method, and
     a template. If no meshfill class object is given, then the 'default' meshfill
     graphics method is used. Similarly, if no template class object is given,
@@ -1354,25 +1245,28 @@ Options:::
     data.shape=(10000,)
     mesh.shape=(10000,2,4)
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('meshfill')                   # Show all the existing meshfill graphics methods
-    mesh=a.getmeshfill()                 # Create instance of 'default'
-    a.meshfill(array,mesh)               # Plot array using specified mesh and default
-                                         #       template
-    a.clear()                            # Clear VCS canvas
+    # Show all the existing meshfill graphics methods
+    a.show('meshfill')
+    # Create instance of 'default'
+    mesh=a.getmeshfill()
+    # Plot array using specified mesh and default template
+    a.meshfill(array,mesh)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified mesh, mesh graphic method, and template
     a.meshfill(array,mesh,mesh_graphic_method,template)
-                           # Plot array using specified mesh mesh graphic method and template
+
+:returns: ???
+:rtype: ???
 """
         arglist = _determine_arg_list('meshfill', args)
         return self.__plot(arglist, parms)
 
-    #############################################################################
-    #                                                                           #
-    # DV3D functions for VCS.                                                   #
-    #                                                                           #
-    #############################################################################
-
     def create3d_scalar(self, name=None, source='default'):
         return vcs.create3d_scalar(name, source)
 
@@ -1412,11 +1306,6 @@ Options:::
         arglist = _determine_arg_list('3d_dual_scalar', args)
         return self.__plot(arglist, parms)
 
-    #############################################################################
-    #                                                                           #
-    # Isofill functions for VCS.                                                #
-    #                                                                           #
-    #############################################################################
     def createisofill(self, name=None, source='default'):
         return vcs.createisofill(name, source)
     createisofill.__doc__ = vcs.manageElements.createisofill.__doc__
@@ -1439,28 +1328,29 @@ Options:::
 %s
     :::
 
- Function: isofill                        # Generate an isofill plot
-
- Description of Function:
     Generate a isofill plot given the data, isofill graphics method, and
     template. If no isofill class object is given, then the 'default' isofill
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('isofill')                   # Show all the existing isofill graphics methods
-    iso=a.getisofill('quick')           # Create instance of 'quick'
-    a.isofill(array,iso)                # Plot array using specified iso and default
-                                        #       template
-    a.clear()                           # Clear VCS canvas
-    a.isofill(array,iso,template)       # Plot array using specified iso and template
+    # Show all the existing isofill graphics methods
+    a.show('isofill')
+    # Create instance of 'quick'
+    iso=a.getisofill('quick')
+    # Plot array using specified iso and default template
+    a.isofill(array,iso)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified iso and template
+    a.isofill(array,iso,template)
 
-###################################################################################################################
-###########################################                         ###############################################
-########################################## End isofill Description ################################################
-#########################################                         #################################################
-###################################################################################################################
+:returns: ???
+:rtype: ???
 
 """
         arglist = _determine_arg_list('isofill', args)
@@ -1468,11 +1358,6 @@ Options:::
     isofill.__doc__ = isofill.__doc__ % (
         plot_keywords_doc, graphics_method_core, axesconvert, plot_2D_input, plot_output)
 
-    #############################################################################
-    #                                                                           #
-    # Isoline functions for VCS.                                                #
-    #                                                                           #
-    #############################################################################
     def createisoline(self, name=None, source='default'):
         return vcs.createisoline(name, source)
     createisoline.__doc__ = vcs.manageElements.createisoline.__doc__
@@ -1495,15 +1380,15 @@ Options:::
 %s
     :::
 
- Function: isoline                        # Generate an isoline plot
-
- Description of Function:
     Generate a isoline plot given the data, isoline graphics method, and
     template. If no isoline class object is given, then the 'default' isoline
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.show('isoline')                   # Show all the existing isoline graphics methods
     iso=a.getisoline('quick')           # Create instance of 'quick'
@@ -1512,11 +1397,8 @@ Options:::
     a.clear()                           # Clear VCS canvas
     a.isoline(array,iso,template)       # Plot array using specified iso and template
 
-###################################################################################################################
-###########################################                         ###############################################
-########################################## End isoline Description ################################################
-#########################################                         #################################################
-###################################################################################################################
+:returns: ???
+:rtype: ???
 
 """
         arglist = _determine_arg_list('isoline', args)
@@ -1532,11 +1414,6 @@ Options:::
         return vcs.get1d(name)
     create1d.__doc__ = vcs.manageElements.create1d.__doc__
 
-    #############################################################################
-    #                                                                           #
-    # Xyvsy functions for VCS.                                                  #
-    #                                                                           #
-    #############################################################################
     def createxyvsy(self, name=None, source='default'):
         return vcs.createxyvsy(name, source)
     createxyvsy.__doc__ = vcs.manageElements.createxyvsy.__doc__
@@ -1559,40 +1436,35 @@ Options:::
 %s
     :::
 
- Function: xyvsy                        # Generate a Xyvsy plot
-
- Description of Function:
     Generate a Xyvsy plot given the data, Xyvsy graphics method, and
     template. If no Xyvsy class object is given, then the 'default' Xyvsy
     graphics method is used. Simerly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
-    a=vcs.init()
-    a.show('xyvsy')                   # Show all the existing Xyvsy graphics methods
-    xyy=a.getxyvsy('quick')           # Create instance of 'quick'
-    a.xyvsy(array,xyy)                # Plot array using specified xyy and default
-                                        #       template
-    a.clear()                           # Clear VCS canvas
-    a.xyvsy(array,xyy,template)       # Plot array using specified xyy and template
+    :Example:
+
+::
 
-#################################################################################################################
-###########################################                       ###############################################
-########################################## End xyvsy Description ################################################
-#########################################                       #################################################
-#################################################################################################################
+    a=vcs.init()
+    # Show all the existing Xyvsy graphics methods
+    a.show('xyvsy')
+    # Create instance of 'quick'
+    xyy=a.getxyvsy('quick')
+    # Plot array using specified xyy and default template
+    a.xyvsy(array,xyy)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified xyy and template
+    a.xyvsy(array,xyy,template)
 
+:returns:
+:rtype:
 """
         arglist = _determine_arg_list('xyvsy', args)
         return self.__plot(arglist, parms)
     xyvsy.__doc__ = xyvsy.__doc__ % (
         plot_keywords_doc, graphics_method_core, xaxisconvert, plot_1D_input, plot_output)
 
-    #############################################################################
-    #                                                                           #
-    # Yxvsx functions for VCS.                                                  #
-    #                                                                           #
-    #############################################################################
     def createyxvsx(self, name=None, source='default'):
         return vcs.createyxvsx(name, source)
     createyxvsx.__doc__ = vcs.manageElements.createyxvsx.__doc__
@@ -1615,40 +1487,35 @@ Options:::
 %s
     :::
 
- Function: yxvsx                        # Generate a Yxvsx plot
-
- Description of Function:
     Generate a Yxvsx plot given the data, Yxvsx graphics method, and
     template. If no Yxvsx class object is given, then the 'default' Yxvsx
     graphics method is used. Simerly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('yxvsx')                   # Show all the existing Yxvsx graphics methods
-    yxx=a.getyxvsx('quick')           # Create instance of 'quick'
-    a.yxvsx(array,yxx)                # Plot array using specified yxx and default
-                                      #       template
-    a.clear()                         # Clear VCS canvas
-    a.yxvsx(array,yxx,template)       # Plot array using specified yxx and template
-
-#################################################################################################################
-###########################################                       ###############################################
-########################################## End yxvsx Description ################################################
-#########################################                       #################################################
-#################################################################################################################
+    # Show all the existing Yxvsx graphics methods
+    a.show('yxvsx')
+    # Create instance of 'quick'
+    yxx=a.getyxvsx('quick')
+    # Plot array using specified yxx and default template
+    a.yxvsx(array,yxx)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified yxx and template
+    a.yxvsx(array,yxx,template)
 
+:returns:
+:rtype:
 """
         arglist = _determine_arg_list('yxvsx', args)
         return self.__plot(arglist, parms)
     yxvsx.__doc__ = yxvsx.__doc__ % (
         plot_keywords_doc, graphics_method_core, xaxisconvert, plot_1D_input, plot_output)
 
-    #############################################################################
-    #                                                                           #
-    # XvsY functions for VCS.                                                   #
-    #                                                                           #
-    #############################################################################
     def createxvsy(self, name=None, source='default'):
         return vcs.createxvsy(name, source)
     createxvsy.__doc__ = vcs.manageElements.createxvsy.__doc__
@@ -1669,29 +1536,29 @@ Options:::
 %s
     :::
 
- Function: xvsy                      # Generate a XvsY plot
-
- Description of Function:
     Generate a XvsY plot given the data, XvsY graphics method, and
     template. If no XvsY class object is given, then the 'default' XvsY
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('xvsy')                   # Show all the existing XvsY graphics methods
-    xy=a.getxvsy('quick')            # Create instance of 'quick'
-    a.xvsy(array,xy)                 # Plot array using specified xy and default
-                                     #       template
-    a.clear()                        # Clear VCS canvas
-    a.xvsy(array,xy,template)        # Plot array using specified xy and template
-
-#################################################################################################################
-###########################################                       ###############################################
-########################################## End xvsy Description ################################################
-#########################################                       #################################################
-#################################################################################################################
+    # Show all the existing XvsY graphics methods
+    a.show('xvsy')
+    # Create instance of 'quick'
+    xy=a.getxvsy('quick')
+    # Plot array using specified xy and default template
+    a.xvsy(array,xy)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified xy and template
+    a.xvsy(array,xy,template)
 
+:returns:
+:rtype:
 """
         arglist = _determine_arg_list('xvsy', args)
         return self.__plot(arglist, parms)
@@ -1701,11 +1568,6 @@ Options:::
                                    plot_2_1D_input,
                                    plot_output)
 
-    #############################################################################
-    #                                                                           #
-    # Vector functions for VCS.                                                 #
-    #                                                                           #
-    #############################################################################
     def createvector(self, name=None, source='default'):
         return vcs.createvector(name, source)
     createvector.__doc__ = vcs.manageElements.createvector.__doc__
@@ -1716,31 +1578,30 @@ Options:::
 
     def vector(self, *args, **parms):
         """
- Function: vector                      # Generate a vector plot
-
- Description of Function:
     Generate a vector plot given the data, vector graphics method, and
     template. If no vector class object is given, then the 'default' vector
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('vector')                   # Show all the existing vector graphics methods
-    vec=a.getvector('quick')           # Create instance of 'quick'
-    a.vector(array,vec)                # Plot array using specified vec and default
-                                        #       template
-    a.clear()                           # Clear VCS canvas
-    a.vector(array,vec,template)       # Plot array using specified vec and template
+    # Show all the existing vector graphics methods
+    a.show('vector')
+    # Create instance of 'quick'
+    vec=a.getvector('quick')
+    # Plot array using specified vec and default template
+    a.vector(array,vec)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified vec and template
+    a.vector(array,vec,template)
 """
         arglist = _determine_arg_list('vector', args)
         return self.__plot(arglist, parms)
 
-    #############################################################################
-    #                                                                           #
-    # Scatter functions for VCS.                                                #
-    #                                                                           #
-    #############################################################################
     def createscatter(self, name=None, source='default'):
         return vcs.createscatter(name, source)
     createscatter.__doc__ = vcs.manageElements.createscatter.__doc__
@@ -1760,29 +1621,27 @@ Options:::
  Output:::
 %s
     :::
-
- Function: scatter                      # Generate a scatter plot
-
- Description of Function:
     Generate a scatter plot given the data, scatter graphics method, and
     template. If no scatter class object is given, then the 'default' scatter
     graphics method is used. Similarly, if no template class object is given,
     then the 'default' template is used.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('scatter')                   # Show all the existing scatter graphics methods
-    sct=a.getscatter('quick')           # Create instance of 'quick'
-    a.scatter(array,sct)                # Plot array using specified sct and default
-                                        #       template
-    a.clear()                           # Clear VCS canvas
-    a.scatter(array,sct,template)       # Plot array using specified sct and template
+    # Show all the existing scatter graphics methods
+    a.show('scatter')
+    # Create instance of 'quick'
+    sct=a.getscatter('quick')
+    # Plot array using specified sct and default template
+    a.scatter(array,sct)
+    # Clear VCS canvas
+    a.clear()
+    # Plot array using specified sct and template
+    a.scatter(array,sct,template)
 
-###################################################################################################################
-###########################################                         ###############################################
-########################################## End scatter Description ################################################
-#########################################                         #################################################
-###################################################################################################################
 
 """
 
@@ -1791,11 +1650,6 @@ Options:::
     scatter.__doc__ = scatter.__doc__ % (
         plot_keywords_doc, graphics_method_core, axesconvert, plot_2_1D_input, plot_output)
 
-    #############################################################################
-    #                                                                           #
-    # Line  functions for VCS.                                                  #
-    #                                                                           #
-    #############################################################################
     def createline(self, name=None, source='default', ltype=None,  # noqa
                    width=None, color=None, priority=None,
                    viewport=None, worldcoordinate=None,
@@ -1814,22 +1668,31 @@ Options:::
 
     def line(self, *args, **parms):
         """
- Function: line                           # Generate a line plot
-
- Description of Function:
     Plot a line segment on the Vcs Canvas. If no line class
     object is given, then an error will be returned.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('line')                      # Show all the existing line objects
-    ln=a.getline('red')                 # Create instance of 'red'
-    ln.width=4                          # Set the line width
-    ln.color = 242                      # Set the line color
-    ln.type = 4                         # Set the line type
-    ln.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]] # Set the x value points
-    ln.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]] # Set the y value points
-    a.line(ln)                          # Plot using specified line object
+    # Show all the existing line objects
+    a.show('line')
+    # Create instance of 'red'
+    ln=a.getline('red')
+    # Set the line width
+    ln.width=4
+    # Set the line color
+    ln.color = 242
+    # Set the line type
+    ln.type = 4
+    # Set the x value points
+    ln.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]]
+    # Set the y value points
+    ln.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]]
+    # Plot using specified line object
+    a.line(ln)
+
 """
         arglist = _determine_arg_list('line', args)
         return self.__plot(arglist, parms)
@@ -1839,20 +1702,58 @@ Options:::
                  worldcoordinate=[0.0, 1.0, 0.0, 1.0],
                  x=None, y=None, projection='default', bg=0):
         """
- Function: drawline                           # Generate and draw a line plot
-
- Description of Function:
     Generate and draw a line object on the VCS Canvas.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('line')                      # Show all the existing line objects
+    # Show all the existing line objects
+    a.show('line')
+    # Create instance of line object 'red'
     ln=a.drawline(name='red', ltype='dash', width=2,
-                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
                   worldcoordinate=[0,100, 0,50]
                   x=[0,20,40,60,80,100],
-                  y=[0,10,20,30,40,50] )      # Create instance of line object 'red'
-    a.line(ln)                          # Plot using specified line object
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified line object
+    a.line(ln)
+
+
+:param name: Name of created object
+:type name: str
+
+:param ltype: One of "dash", "dash-dot", "solid", "dot", or "long-dash".
+:type ltype: str
+
+:param width: Thickness of the line to be drawn
+:type width: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the line will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param projection: Specify a geographic projection used to convert x/y from spherical coordinates into 2D coordinates.
+:type projection: str or projection object
+
+:returns: ???
+:rtype: ???
 """
         if (name is None) or (not isinstance(name, str)):
             raise vcsError('Must provide string name for the line.')
@@ -1875,11 +1776,6 @@ Options:::
 
         return ln
 
-    #############################################################################
-    #                                                                           #
-    # Marker  functions for VCS.                                                #
-    #                                                                           #
-    #############################################################################
     def createmarker(self, name=None, source='default', mtype=None,  # noqa
                      size=None, color=None, priority=1,
                      viewport=None, worldcoordinate=None,
@@ -1898,23 +1794,31 @@ Options:::
 
     def marker(self, *args, **parms):
         """
- Function: marker                           # Generate a marker plot
-
- Description of Function:
-    Plot a marker segment on the Vcs Canvas. If no marker class
-    object is given, then an error will be returned.
+        Plot a marker segment on the Vcs Canvas. If no marker class
+        object is given, then an error will be returned.
 
- Example of Use:
-    a=vcs.init()
-    a.show('marker')                     # Show all the existing marker objects
-    mrk=a.getmarker('red')               # Create instance of 'red'
-    mrk.size=4                           # Set the marker size
-    mrk.color = 242                      # Set the marker color
-    mrk.type = 4                         # Set the marker type
-    mrk.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]] # Set the x value points
-    mrk.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]] # Set the y value points
-    a.marker(mrk)                          # Plot using specified marker object
-"""
+         :Example:
+            a=vcs.init()
+            # Show all the existing marker objects
+            a.show('marker')
+            # Create instance of 'red'
+            mrk=a.getmarker('red')
+            # Set the marker size
+            mrk.size=4
+            # Set the marker color
+            mrk.color = 242
+            # Set the marker type
+            mrk.type = 4
+            # Set the x value points
+            mrk.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]]
+            # Set the y value points
+            mrk.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]]
+            # Plot using specified marker object
+            a.marker(mrk)
+
+:returns: a VCS displayplot object
+:rtype: vcs.displayplot.Dp
+        """
         arglist = _determine_arg_list('marker', args)
         return self.__plot(arglist, parms)
 
@@ -1923,20 +1827,55 @@ Options:::
                    worldcoordinate=[0.0, 1.0, 0.0, 1.0],
                    x=None, y=None, bg=0):
         """
- Function: drawmarker                           # Generate and draw a marker plot
+        Generate and draw a marker object on the VCS Canvas.
 
- Description of Function:
-    Generate and draw a marker object on the VCS Canvas.
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
-    a.show('marker')                      # Show all the existing marker objects
-    mrk=a.drawmarker(name='red', mtype='dash', size=2,
-                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+    # Show all the existing marker objects
+    a.show('marker')
+    # Create instance of marker object 'red'
+    mrk=a.drawmarker(name='red', mtype='dot', size=2,
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
                   worldcoordinate=[0,100, 0,50]
                   x=[0,20,40,60,80,100],
-                  y=[0,10,20,30,40,50] )      # Create instance of marker object 'red'
-    a.marker(mrk)                          # Plot using specified marker object
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified marker object
+    a.marker(mrk)
+
+
+:param name: Name of created object
+:type name: str
+
+:param mtype: Marker type, i.e. 'dot', 'plus', 'star', etc.
+:type mtype: str
+
+:param size: Size of the marker to draw
+:type size: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the marker will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A marker object
+:rtype: vcs.marker.Tm
 """
         if (name is None) or (not isinstance(name, str)):
             raise vcsError('Must provide string name for the marker.')
@@ -1958,11 +1897,6 @@ Options:::
 
         return mrk
 
-    #############################################################################
-    #                                                                           #
-    # Fillarea  functions for VCS.                                              #
-    #                                                                           #
-    #############################################################################
     def createfillarea(self, name=None, source='default', style=None,
                        index=None, color=None, priority=1,
                        viewport=None, worldcoordinate=None,
@@ -1982,23 +1916,35 @@ Options:::
 
     def fillarea(self, *args, **parms):
         """
- Function: fillarea                           # Generate a fillarea plot
+    Generate a fillarea plot
 
- Description of Function:
     Plot a fillarea segment on the Vcs Canvas. If no fillarea class
     object is given, then an error will be returned.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('fillarea')                  # Show all the existing fillarea objects
-    fa=a.getfillarea('red')             # Create instance of 'red'
-    fa.style=1                          # Set the fillarea style
-    fa.index=4                          # Set the fillarea index
-    fa.color = 242                      # Set the fillarea color
-    fa.type = 4                         # Set the fillarea type
-    fa.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]] # Set the x value points
-    fa.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]] # Set the y value points
-    a.fillarea(fa)                          # Plot using specified fillarea object
+    # Show all the existing fillarea objects
+    a.show('fillarea')
+    # Create instance of 'red'
+    fa=a.createfillarea('red')
+    # Set the fillarea style
+    fa.style=1
+    # Set the fillarea index
+    fa.index=4
+    # Set the fillarea color
+    fa.color = 'black'
+    # Set the x value points
+    fa.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]]
+    # Set the y value points
+    fa.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]]
+    # Plot using specified fillarea object
+    a.fillarea(fa)
+
+:returns: A VCS displayplot object
+:rtype: vcs.displayplot.Dp
 """
         arglist = _determine_arg_list('fillarea', args)
         return self.__plot(arglist, parms)
@@ -2008,20 +1954,58 @@ Options:::
                      worldcoordinate=[0.0, 1.0, 0.0, 1.0],
                      x=None, y=None, bg=0):
         """
- Function: drawfillarea                           # Generate and draw a fillarea plot
+        Generate and draw a fillarea object on the VCS Canvas.
 
- Description of Function:
-    Generate and draw a fillarea object on the VCS Canvas.
+        :Example:
+::
 
- Example of Use:
     a=vcs.init()
-    a.show('fillarea')                      # Show all the existing fillarea objects
-    fa=a.drawfillarea(name='red', mtype='dash', size=2,
-                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+    # Show all the existing fillarea objects
+    a.show('fillarea')
+    # Create instance of fillarea object 'red'
+    fa=a.drawfillarea(name='red', style=1, color=242,
+                  priority=1, viewport=[0, 1.0, 0, 1.0],
                   worldcoordinate=[0,100, 0,50]
                   x=[0,20,40,60,80,100],
-                  y=[0,10,20,30,40,50] )      # Create instance of fillarea object 'red'
-    a.fillarea(fa)                          # Plot using specified fillarea object
+                  y=[0,10,20,30,40,50], bg=0 )
+    # Plot using specified fillarea object
+    a.fillarea(fa)
+
+
+:param name: Name of created object
+:type name: str
+
+:param style: One of "hatch", "solid", or "pattern".
+:type style: str
+
+:param index: Specifies which `pattern <http://uvcdat.llnl.gov/gallery/fullsize/pattern_chart.png>`_
+              to fill the fillarea with. Accepts ints from 1-20.
+:type index: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the fillarea will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param bg: Boolean value. True => object drawn in background (not shown on canvas). False => object shown on canvas.
+:type bg: bool
+
+:returns: A fillarea object
+:rtype: vcs.fillarea.Tf
 """
         if (name is None) or (not isinstance(name, str)):
             raise vcsError('Must provide string name for the fillarea.')
@@ -2043,11 +2027,6 @@ Options:::
 
         return fa
 
-    #############################################################################
-    #                                                                           #
-    # Text Table  functions for VCS.                                            #
-    #                                                                           #
-    #############################################################################
     def createtexttable(self, name=None, source='default', font=None,
                         spacing=None, expansion=None, color=None, priority=None,
                         viewport=None, worldcoordinate=None,
@@ -2065,11 +2044,6 @@ Options:::
                                 viewport, worldcoordinate, x, y)
     gettexttable.__doc__ = vcs.manageElements.gettexttable.__doc__
 
-    #############################################################################
-    #                                                                           #
-    # Text Orientation  functions for VCS.                                      #
-    #                                                                           #
-    #############################################################################
     def createtextorientation(self, name=None, source='default'):
         return vcs.createtextorientation(name, source)
     createtextorientation.__doc__ = vcs.manageElements.createtextorientation.__doc__
@@ -2078,11 +2052,6 @@ Options:::
         return vcs.gettextorientation(To_name_src)
     gettextorientation.__doc__ = vcs.manageElements.gettextorientation.__doc__
 
-    #############################################################################
-    #                                                                           #
-    # Text Combined  functions for VCS.                                         #
-    #                                                                           #
-    #############################################################################
     def createtextcombined(self, Tt_name=None, Tt_source='default', To_name=None, To_source='default',  # noqa
                            font=None, spacing=None, expansion=None, color=None,
                            priority=None, viewport=None, worldcoordinate=None, x=None, y=None,
@@ -2110,28 +2079,42 @@ Options:::
 
     def textcombined(self, *args, **parms):
         """
-        Function: text or textcombined         # Generate a textcombined plot
+        Generate a textcombined plot
 
-        Description of Function:
         Plot a textcombined segment on the Vcs Canvas. If no textcombined class
         object is given, then an error will be returned.
+        This function can also be called using the alias ``text(self, \*args, \**parms)``.
+
+        :Example:
+
+::
 
-        Example of Use:
         a=vcs.init()
-        a.show('texttable')                 # Show all the existing texttable objects
-        a.show('textorientation')           # Show all the existing textorientation objects
-        tt=a.gettext('std','7left')         # Create instance of 'std' and '7left'
-        tt.string = 'Text1'                 # Show the string "Text1" on the VCS Canvas
-        tt.font=2                           # Set the text size
-        tt.color = 242                      # Set the text color
-        tt.angle = 45                       # Set the text angle
-        tt.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]] # Set the x value points
-        tt.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]] # Set the y value points
-        a.text(tt)                          # Plot using specified text object
-
-        Optionally you can pass a string, the coordinates and any keyword
-        Example:
-        x.plot('Hi',.5,.5,color=241,angle=45)
+        # Show all the existing texttable objects
+        a.show('texttable')
+        # Show all the existing textorientation objects
+        a.show('textorientation')
+        # Create instance of 'std' and '7left'
+        tt=a.gettext('std','7left')
+        # Show the string "Text1" on the VCS Canvas
+        tt.string = 'Text1'
+        # Set the text size
+        tt.font=2
+        # Set the text color
+        tt.color = 242
+        # Set the text angle
+        tt.angle = 45
+        # Set the x value points
+        tt.x=[[0.0,2.0,2.0,0.0,0.0], [0.5,1.5]]
+        # Set the y value points
+        tt.y=[[0.0,0.0,2.0,2.0,0.0], [1.0,1.0]]
+        # Plot using specified text object
+        a.text(tt)
+        #Optionally you can pass a string, the coordinates, and any keyword
+        a.plot('Hi',.5,.5,color=241,angle=45)
+
+:returns: ???
+:rtype: ???
         """
         # First check if color is a string
         if 'color' in parms.keys():
@@ -2166,7 +2149,11 @@ Options:::
 
     def gettextextent(self, textobject):
         """Returns the coordinate of the box surrounding a text object once printed
-        Example:
+
+        :Example:
+
+::
+
         x=vcs.init()
         t=x.createtext()
         t.x=[.5]
@@ -2174,6 +2161,13 @@ Options:::
         t.string=['Hello World']
         extent = x.gettextextent(t)
         print extent
+
+:param textobject: A VCS text object
+:type textobject: ???
+
+:returns: ???
+:rtype: ???
+
         """
         if not vcs.istext(textobject):
             raise vcsError('You must pass a text object')
@@ -2192,20 +2186,59 @@ Options:::
                          worldcoordinate=[0.0, 1.0, 0.0, 1.0],
                          x=None, y=None, bg=0):
         """
- Function: drawtexttable                           # Generate and draw a texttable plot
+    Generate and draw a textcombined object on the VCS Canvas.
 
- Description of Function:
-    Generate and draw a texttable object on the VCS Canvas.
+     :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
-    a.show('texttable')                      # Show all the existing texttable objects
-    tt=a.drawtexttable(Tt_name = 'red', To_name='7left', mtype='dash', size=2,
-                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+    # Show all the existing texttable objects
+    a.show('texttable')
+    # Create instance of texttable object 'red'
+    tc=a.drawtextcombined(Tt_name = 'red', To_name='7left', mtype='dash', size=2,
+                  color=242, priority=1, viewport=[0, 1.0, 0, 1.0],
                   worldcoordinate=[0,100, 0,50]
                   x=[0,20,40,60,80,100],
-                  y=[0,10,20,30,40,50] )      # Create instance of texttable object 'red'
-    a.texttable(tt)                          # Plot using specified texttable object
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified textcombined object
+    a.textcombined(tc)
+
+
+:param name: Name of created object
+:type name: str
+
+:param style: One of "hatch", "solid", or "pattern".
+:type style: str
+
+:param index: Specifies which `pattern <http://uvcdat.llnl.gov/gallery/fullsize/pattern_chart.png>`_
+              to fill the fillarea with. Accepts ints from 1-20.
+:type index: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the text will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param bg: Boolean value. True => object drawn in background (not shown on canvas). False => object shown on canvas.
+:type bg: bool
+
+:returns: A textcombined object
+:rtype:
 """
         if (Tt_name is None) or (not isinstance(Tt_name, str)):
             raise vcsError('Must provide string name for the texttable.')
@@ -2259,11 +2292,6 @@ Options:::
     #    self.clear()
     #    self.plot(*self.__last_plot_actual_args, **self.__last_plot_keyargs)
 
-    ###########################################################################
-    #                                                                         #
-    # Plot wrapper for VCS.                                                   #
-    #                                                                         #
-    ###########################################################################
     def plot(self, *actual_args, **keyargs):
         """
 Options:::
@@ -2277,9 +2305,6 @@ Options:::
 %s
     :::
 
- Function: plot
-
- Description of plot:
     Plot an array(s) of data given a template and graphics method. The VCS template is
     used to define where the data and variable attributes will be displayed on the VCS
     Canvas. The VCS graphics method is used to define how the array(s) will be shown
@@ -2355,24 +2380,31 @@ Options:::
     if both 'xaxis' and 'grid' keywords are specified, the value of 'xaxis' takes precedence
     over the x-axis of grid.
 
- Example of Use:
-    x=vcs.init()        # x is an instance of the VCS class object (constructor)
-    x.plot(array)       # this call will use default settings for template and boxfill
-    x.plot(array, 'AMIP', 'isofill','AMIP_psl') # this is specifying the template and
-                                                  graphics method
-    t=x.gettemplate('AMIP')        # get a predefined the template 'AMIP'
-    vec=x.getvector('quick')       # get a predefined the vector graphics method 'quick'
-    x.plot(array1, array2, t, vec) # plot the data as a vector using the 'AMIP' template
-    x.clear()                      # clear the VCS Canvas of all plots
-    box=x.createboxfill('new')     # create boxfill graphics method 'new'
-    x.plot(box,t,array)            # plot array data using box 'new' and template 't'
-
-###############################################################################################################
-###########################################                      ##############################################
-########################################## End plot Description ###############################################
-#########################################                      ################################################
-###############################################################################################################
-
+     :Example:
+
+::
+
+    # x is an instance of the VCS class object (constructor)
+    x=vcs.init()
+    # this call will use default settings for template and boxfill
+    x.plot(array)
+    # this is specifying the template and graphics method
+    x.plot(array, 'AMIP', 'isofill','AMIP_psl')
+    # get a predefined the template 'AMIP'
+    t=x.gettemplate('AMIP')
+    # get a predefined the vector graphics method 'quick'
+    vec=x.getvector('quick')
+    # plot the data as a vector using the 'AMIP' template
+    x.plot(array1, array2, t, vec)
+    # clear the VCS Canvas of all plots
+    x.clear()
+    # create boxfill graphics method 'new'
+    box=x.createboxfill('new')
+    # plot array data using box 'new' and template 't'
+    x.plot(box,t,array)
+
+:returns: ???
+:rtype: ???
 """
         self.__last_plot_actual_args = actual_args
         self.__last_plot_keyargs = keyargs
@@ -2448,15 +2480,14 @@ Options:::
         return new
 
     def __plot(self, arglist, keyargs):
-        import vcsaddons
 
-        # This routine has five arguments in arglist from _determine_arg_list
-        # It adds one for bg and passes those on to Canvas.plot as its sixth
-        # arguments.
+            # This routine has five arguments in arglist from _determine_arg_list
+            # It adds one for bg and passes those on to Canvas.plot as its sixth
+            # arguments.
 
-        # First of all let's remember which elets we have before comin in here
-        # so that anything added (temp objects) can be removed at clear
-        # time
+            # First of all let's remember which elets we have before comin in here
+            # so that anything added (temp objects) can be removed at clear
+            # time
         original_elts = {}
         new_elts = {}
         for k in vcs.elements.keys():
@@ -3493,14 +3524,9 @@ Options:::
                     tp = "boxfill"
                 elif tp in ("xvsy", "xyvsy", "yxvsx", "scatter"):
                     tp = "1d"
-                if tp in vcsaddons.gms:
-                    gm = vcsaddons.gms[tp][arglist[4]]
-                    arglist[3] = gm
-                else:
-                    gm = vcs.elements[tp][arglist[4]]
+                gm = vcs.elements[tp][arglist[4]]
                 if hasattr(gm, "priority") and gm.priority == 0:
                     return
-
             p = self.getprojection(gm.projection)
             if p.type in no_deformation_projections and (
                     doratio == "0" or doratio[:4] == "auto"):
@@ -3715,22 +3741,20 @@ Options:::
                 del(keyargs["bg"])
             if isinstance(arglist[3], vcsaddons.core.VCSaddon):
                 if arglist[1] is None:
-                    dn = arglist[3].plot_internal(
+                    dn = arglist[3].plot(
                         arglist[0],
                         template=arglist[2],
                         bg=bg,
                         x=self,
                         **keyargs)
                 else:
-                    dn = arglist[3].plot_internal(
+                    dn = arglist[3].plot(
                         arglist[0],
                         arglist[1],
                         template=arglist[2],
                         bg=bg,
                         x=self,
                         **keyargs)
-                self.display_names.append(dn.name)
-                return dn
             else:
                 returned_kargs = self.backend.plot(*arglist, **keyargs)
                 if not keyargs.get("donotstoredisplay", False):
@@ -3808,86 +3832,48 @@ Options:::
     def setAnimationStepper(self, stepper):
         self.backend.setAnimationStepper(stepper)
 
-    ##########################################################################
-    #                                                                        #
-    # VCS utility wrapper to return the number of displays that are "ON".    #
-    #                                                                        #
-    ##########################################################################
-    def return_display_ON_num(self, *args):
-        return self.canvas.return_display_ON_num(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # VCS utility wrapper to return the current display names.               #
-    #                                                                        #
-    ##########################################################################
     def return_display_names(self, *args):
         return self.display_names
 
-    ##########################################################################
-    #                                                                        #
-    # VCS utility wrapper to remove the display names.                       #
-    #                                                                        #
-    ##########################################################################
     def remove_display_name(self, *args):
-        return self.canvas.remove_display_name(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # CGM  wrapper for VCS.                                                  #
-    #                                                                        #
-    ##########################################################################
-    def cgm(self, file, mode='w'):
         """
- Function: cgm
+        Removes a plotted item from the canvas.
 
- Description of Function:
-    To save a graphics plot in CDAT the user can call CGM along with the name of
-    the output. This routine will save the displayed image on the VCS canvas as
-    a binary vector graphics that can be imported into MSWord or Framemaker. CGM
-    files are in ISO standards output format.
+        :param args: Any number of display names to remove.
+        :type args: str list
+        """
+        for a in args:
+            if a in self.display_names:
+                self.display_names.remove(a)
+        self.update()
 
-    The CGM command is used to create or append to a cgm file. There are two modes
-    for saving a cgm file: `Append' mode (a) appends cgm output to an existing cgm
-    file; `Replace' (r) mode overwrites an existing cgm file with new cgm output.
-    The default mode is to overwrite an existing cgm file (i.e. mode (r)).
+    def cgm(self, file, mode='w'):
+        """
+        Export an image in CGM format.
 
- Example of Use:
-    a=vcs.init()
-    a.plot(array,'default','isofill','quick')
-    a.cgm(o)
-    a.cgm('example')           # by default a cgm file will overwrite an existing file
-    a.cgm('example','w')  # 'r' will instruct cgm to overwrite an existing file
-    a.cgm('example',mode='w')  # 'r' will instruct cgm to overwrite an existing file
+        :param file: Filename to save
+        :param mode: Ignored.
+        """
 
-"""
         if mode != 'w':
             warnings.warn(
                 "cgm only supports 'w' mode ignoring your mode ('%s')" %
                 mode)
         return self.backend.cgm(file)
 
-    ##########################################################################
-    #                                                                        #
-    # Clear VCS Canvas wrapper for VCS.                                      #
-    #                                                                        #
-    ##########################################################################
     def clear(self, *args, **kargs):
         """
- Function: clear
+        Clears all the VCS displays on a page (i.e., the VCS Canvas object).
 
- Description of Function:
-    In VCS it is necessary to clear all the plots from a page. This routine
-    will clear all the VCS displays on a page (i.e., the VCS Canvas object).
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
+    #clear VCS displays from the page
     a.clear()
 
-  Internally, update() calls clear() to assist in removing plots. The preserve_display argument is used to
-  make sure that the display plots that are associated with the current canvas are not eliminated, and
-  are still able to be used to regenerate the plots.
 """
         if self.animate.created():
             self.animate.close()
@@ -3918,22 +3904,18 @@ Options:::
         self.display_names = []
         return
 
-    ##########################################################################
-    #                                                                        #
-    # Close VCS Canvas wrapper for VCS.                                      #
-    #                                                                        #
-    ##########################################################################
     def close(self, *args, **kargs):
         """
- Function: close
+        Close the VCS Canvas. It will not deallocate the VCS Canvas object.
+        To deallocate the VCS Canvas, use the destroy method.
 
- Description of Function:
-    Close the VCS Canvas. It will not deallocate the VCS Canvas object.
-    To deallocate the VCS Canvas, use the destroy method.
+        :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
+    #close the vcs canvas
     a.close()
 
 """
@@ -3944,19 +3926,14 @@ Options:::
 
         return a
 
-    ##########################################################################
-    #                                                                        #
-    # Destroy VCS Canvas Object (i.e., call the Dealloc C code).             #
-    #                                                                        #
-    ##########################################################################
     def destroy(self):
         """
- Function: destroy
-
- Description of Function:
     Destroy the VCS Canvas. It will deallocate the VCS Canvas object.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
     a.destory()
@@ -3968,136 +3945,42 @@ Options:::
         gc.garbage
         gc.collect()
 
-    ##########################################################################
-    #                                                                        #
-    # Graphics Method Change display.                                        #
-    #                                                                        #
-    ##########################################################################
     def change_display_graphic_method(self, display, type, name):
         '''
- Function: change_display_graphic_method
-
- Description of Function:
-    Changes the type and graphic metohd of a display.
+        Changes the type and graphic method of a plot.
+
+        :param display: Display to change.
+        :param type: New graphics method type.
+        :param name: Name of new graphics method.
+        :type display: str or vcs.displayplot.Dp
+        :type name: str
+        :type type: str
+        '''
 
-'''
-        return self.canvas.change_display_graphic_method(
-            *(display, type, name))
-    ##########################################################################
-    #                                                                        #
-    # Figures out which display is selected in graphic method editor mode    #
-    #                                                                        #
-    ##########################################################################
+        if isinstance(display, (str, unicode)):
+            display = vcs.elements["display"][display]
+        display.g_type = type
+        display.g_name = name
+        self.update()
 
     def get_selected_display(self):
         """
- Function: get_selected_display
+        Deprecated. Does not work.
 
     """
         return self.canvas.get_selected_display(*())
 
-    ##########################################################################
-    #                                                                        #
-    # Send a request to turn on a picture template object in the VCS Canvas. #
-    #                                                                        #
-    ##########################################################################
-    def _select_one(self, template_name, attr_name, X1, X2, Y1, Y2):
-        # flush and block the X main loop
-
-        self.canvas._select_one(template_name, attr_name, X1, X2, Y1, Y2)
-
-    ##########################################################################
-    #                                                                        #
-    # Send a request to turn off a picture template object in the VCS Canvas.#
-    #                                                                        #
-    ##########################################################################
-    def _unselect_one(self, template_name, attr_name, X1, X2, Y1, Y2):
-
-        self.canvas._unselect_one(template_name, attr_name, X1, X2, Y1, Y2)
-
-    ##########################################################################
-    #                                                                        #
-    # Set the template editor event flag to select all template objects on   #
-    # the VCS Canvas.                                                        #
-    #                                                                        #
-    ##########################################################################
-    def _select_all(self):
-        # flush and block the X main loop
-
-        self.canvas._select_all()
-
-    ##########################################################################
-    #                                                                        #
-    # Set the template editor event flag to unselect all the template        #
-    # objects on the VCS Canvas.                                             #
-    #                                                                        #
-    ##########################################################################
-    def _unselect_all(self):
-        # flush and block the X main loop
-
-        self.canvas._unselect_all()
-
-    ##########################################################################
-    #                                                                        #
-    # Set the template editor mode for the VCS Canvas screen.                #
-    #                                                                        #
-    ##########################################################################
-    def _SCREEN_TEMPLATE_FLAG(self):
-        self.canvas.SCREEN_TEMPLATE_FLAG()
-
-    ##########################################################################
-    #                                                                        #
-    # Set the graphic method editor mode for the VCS Canvas screen.          #
-    #                                                                        #
-    ##########################################################################
-    def _SCREEN_GM_FLAG(self):
-        self.canvas.SCREEN_GM_FLAG()
-
-    ##########################################################################
-    #                                                                        #
-    # Set the data mode for the VCS Canvas screen.                           #
-    #                                                                        #
-    ##########################################################################
-    def _SCREEN_DATA_FLAG(self):
-        self.canvas.SCREEN_DATA_FLAG()
-
-    ##########################################################################
-    #                                                                        #
-    # Set the screen check mode to DATA for the VCS Canvas.                  #
-    #                                                                        #
-    ##########################################################################
-    def _SCREEN_CHECKMODE_DATA_FLAG(self):
-        self.canvas.SCREEN_CHECKMODE_DATA_FLAG()
-
-    ##########################################################################
-    #                                                                        #
-    # Return the Screen mode, either data mode or template editor mode.      #
-    #                                                                        #
-    ##########################################################################
-    def SCREEN_MODE(self, *args):
-        return self.canvas.SCREEN_MODE(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # Return the Screen mode, either data mode or template editor mode.      #
-    #                                                                        #
-    ##########################################################################
     def plot_annotation(self, *args):
         self.canvas.plot_annotation(*args)
 
-    ##########################################################################
-    #                                                                        #
-    # Flush X event que wrapper for VCS.                                     #
-    #                                                                        #
-    ##########################################################################
     def flush(self, *args):
         """
- Function: flush
+    The flush command executes all buffered X events in the queue.
 
- Description of Function:
-    The flush command executes all buffered X events in the que.
+    :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
     a.flush()
@@ -4105,19 +3988,14 @@ Options:::
 """
         return self.backend.flush(*args)
 
-    ##########################################################################
-    #                                                                        #
-    # Geometry wrapper for VCS.                                              #
-    #                                                                        #
-    ##########################################################################
     def geometry(self, *args):
         """
- Function: geometry
-
- Description of Function:
     The geometry command is used to set the size and position of the VCS canvas.
 
- Example of Use:
+     :Example:
+
+::
+
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
     a.geometry(450,337)
@@ -4135,67 +4013,61 @@ Options:::
 
         return a
 
-    ##########################################################################
-    #                                                                        #
-    # VCS Canvas Information wrapper.                                        #
-    #                                                                        #
-    ##########################################################################
     def canvasinfo(self, *args, **kargs):
         """
- Function: canvasinfo
-
- Description of Function:
-    Obtain the current attributes of the VCS Canvas window.
+        Obtain the current attributes of the VCS Canvas window.
 
- Example of Use:
-    a=vcs.init()
-    a.plot(array,'default','isofill','quick')
-    a.canvasinfo()
-
-"""
+        :returns: Dictionary with keys: "mapstate" (whether the canvas is opened), "height", "width", "depth", "x", "y"
+        """
         return self.backend.canvasinfo(*args, **kargs)
 
-    ##########################################################################
-    #                                                                        #
-    # Get continents type wrapper for VCS.                                   #
-    #                                                                        #
-    ##########################################################################
     def getcontinentstype(self, *args):
         """
- Function: getcontinentstype
-
- Description of Function:
     Retrieve continents type from VCS; either an integer between 0 and 11 or the
     path to a custom continentstype.
 
- Example of Use:
+     :Example:
+
+::
+
      a=vcs.init()
-     cont_type = a.getcontinentstype() # Get the continents type
+     # Get the continents type
+     cont_type = a.getcontinentstype()
+
+:returns: An int between 0 and 11, or the path to a custom continentstype
+:rtype: int or system filepath
 """
         try:
             return self._continents
         except:
             return None
 
-    ###########################################################################
-    #                                                                         #
-    # Postscript to GIF wrapper for VCS.                                      #
-    #                                                                         #
-    ###########################################################################
     def pstogif(self, filename, *opt):
         """
-  Function: pstogif
-
-  Description of Function:
      In some cases, the user may want to save the plot out as a gif image. This
      routine allows the user to convert a postscript file to a gif file.
 
-  Example of Use:
+    :Example:
+
+::
+
      a=vcs.init()
      a.plot(array)
-     a.pstogif('filename.ps')       # convert the postscript file to a gif file (l=landscape)
-     a.pstogif('filename.ps','l')   # convert the postscript file to a gif file (l=landscape)
-     a.pstogif('filename.ps','p')   # convert the postscript file to a gif file (p=portrait)
+     # convert the postscript file to a gif file (l=landscape)
+     a.pstogif('filename.ps')
+     # convert the postscript file to a gif file (l=landscape)
+     a.pstogif('filename.ps','l')
+     # convert the postscript file to a gif file (p=portrait)
+     a.pstogif('filename.ps','p')
+
+:param filename: String name of the desired output file
+:type filename: str
+
+:param opt: One of 'l' or 'p', indicating landscape or portrait mode, respectively.
+:type opt: str
+
+:returns: ???
+:rtype: ???
  """
         from os import popen
 
@@ -4225,22 +4097,17 @@ Options:::
         f.close()
         return
 
-    ##########################################################################
-    #                                                                        #
-    # Grid wrapper for VCS.                                                  #
-    #                                                                        #
-    ##########################################################################
     def grid(self, *args):
         """
- Function: grid
-
- Description of Function:
     Set the default plotting region for variables that have more dimension values
     than the graphics method. This will also be used for animating plots over the
     third and fourth dimensions.
 
 
- Example of Use:
+ :Example:
+
+::
+
     a=vcs.init()
     a.grid(12,12,0,71,0,45)
 """
@@ -4249,16 +4116,8 @@ Options:::
 
         return p
 
-    ##########################################################################
-    #                                                                        #
-    # Landscape VCS Canvas orientation wrapper for VCS.                      #
-    #                                                                        #
-    ##########################################################################
     def landscape(self, width=-99, height=-99, x=-99, y=-99, clear=0):
         """
- Function: landscape
-
- Description of Function:
     Change the VCS Canvas orientation to Landscape.
 
      Note: the (width, height) and (x, y) arguments work in pairs. That is, you must
@@ -4272,20 +4131,43 @@ Options:::
                 the screen with the point. Some X servers are not handling the threads properly
                 to keep up with the demands of the X client.
 
- Example of Use:
+     :Example:
+::
+
     a=vcs.init()
     a.plot(array)
-    a.landscape() # Change the VCS Canvas orientation and set object flag to landscape
-    a.landscape(clear=1) # Change the VCS Canvas to landscape and clear the page
-    a.landscape(width = 400, height = 337) # Change to landscape and set the window size
-    a.landscape(x=100, y = 200) # Change to landscape and set the x and y screen position
-    a.landscape(width = 400, height = 337, x=100, y = 200, clear=1) # Chagne to landscape and give specifications
-"""
-        if (self.orientation() == 'landscape'):
-            return
+    # Change the VCS Canvas orientation and set object flag to landscape
+    a.landscape()
+    # Change the VCS Canvas to landscape and clear the page
+    a.landscape(clear=1)
+    # Change to landscape and set the window size
+    a.landscape(width = 400, height = 337)
+    # Change to landscape and set the x and y screen position
+    a.landscape(x=100, y = 200)
+    # Change to landscape and give specifications
+    a.landscape(width = 400, height = 337, x=100, y = 200, clear=1)
 
-        if (((not isinstance(width, int))) or ((not isinstance(height, int))) or
-                ((not isinstance(x, int))) or ((not isinstance(y, int))) or
+:param width: Width of the canvas, in pixels
+:type width: int
+
+:param height: Height of the canvas, in pixels
+:type height: int
+
+:param x: Unused
+:type x: int
+
+:param y: Unused
+:type y: int
+
+:param clear: Indicates the canvas should be cleared (1), or should not be cleared (0), when orientation is changed.
+:type clear: int
+
+"""
+        if (self.orientation() == 'landscape'):
+            return
+
+        if (((not isinstance(width, int))) or ((not isinstance(height, int))) or
+                ((not isinstance(x, int))) or ((not isinstance(y, int))) or
                 ((width != -99) and (width < 0)) or ((height != -99) and (height < 0)) or
                 ((x != -99) and (x < 0)) or ((y != -99) and (y < 0))):
             raise ValueError(
@@ -4311,157 +4193,119 @@ Options:::
 
         return l
 
-    ##########################################################################
-    #                                                                        #
-    # List Primary and Secondary elements wrapper for VCS.                   #
-    #                                                                        #
-    ##########################################################################
     def listelements(self, *args):
         """
- Function: listelements
+        Returns a Python list of all the VCS class objects.
 
- Description of Function:
-    Returns a Python list of all the VCS class objects.
+        The list that will be returned:
+        ['1d', '3d_dual_scalar', '3d_scalar', '3d_vector', 'boxfill', 'colormap', 'display', 'fillarea',
+         'font', 'fontNumber', 'isofill', 'isoline', 'line', 'list', 'marker', 'meshfill', 'projection',
+         'scatter', 'taylordiagram', 'template', 'textcombined', 'textorientation', 'texttable',
+         'vector', 'xvsy', 'xyvsy', 'yxvsx']
 
-   The list that will be returned:
-   ['template', 'boxfill', 'isofill', 'isoline',
-    'scatter', 'vector', 'xvsy', 'xyvsy', 'yxvsx', 'colormap', 'fillarea', 'format',
-    'line', 'list', 'marker', 'text']
+         :Example:
+
+::
+
+        a=vcs.init()
+        a.listelements()
+
+:returns: A list of string names of all VCS class objects
+:rtype: list
 
- Example of Use:
-    a=vcs.init()
-    a.listelements()
 """
         f = vcs.listelements
         L = sorted(f(*args))
 
         return L
 
-    ##########################################################################
-    #                                                                        #
-    # update VCS's Canvas orientation wrapper for VCS.                       #
-    #                                                                        #
-    ##########################################################################
     def updateorientation(self, *args):
         """
- Example of Use:
+     :Example:
+
+::
+
     a=vcs.init()
-    x.updateorientation()
+    a.updateorientation()
 """
 
         a = self.canvas.updateorientation(*args)
 
         return a
 
-    ##########################################################################
-    #                                                                        #
-    # Open VCS Canvas wrapper for VCS.                                       #
-    #                                                                        #
-    ##########################################################################
     def open(self, width=None, height=None, **kargs):
         """
- Function: open
-
- Description of Function:
     Open VCS Canvas object. This routine really just manages the VCS canvas. It will
     popup the VCS Canvas for viewing. It can be used to display the VCS Canvas.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.open()
     a.open(800,600)
+
+:param width: Integer representing the desired width of the opened window in pixels
+:type width: int
+
+:param height: Integer representing the desired height of the opened window in pixels
+:type height: int
+
 """
 
         a = self.backend.open(width, height, **kargs)
 
         return a
 
-    ##########################################################################
-    #                                                                        #
-    # Return VCS Canvas ID.                                                  #
-    #                                                                        #
-    ##########################################################################
     def canvasid(self, *args):
         '''
- Function: canvasid
-
- Description of Function:
-    Return VCS Canvas object ID. This ID number is found at the top of the VCS Canvas
-    as part of its title.
+        Get the ID of this canvas.
 
- Example of Use:
-    a=vcs.init()
-    a.open()
-    id = a.canvasid()
-'''
+        This ID number is found at the top of the VCS Canvas, as part of its title.
+        '''
         return self._canvas_id
 
-    ##########################################################################
-    #                                                                        #
-    # Connect the VCS Canvas to the GUI.                                     #
-    #                                                                        #
-    ##########################################################################
-    def _connect_gui_and_canvas(self, *args):
-        return self.canvas.connect_gui_and_canvas(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # Page VCS Canvas orientation ('portrait' or 'landscape') wrapper for    #
-    # VCS.                                                                   #
-    #                                                                        #
-    ##########################################################################
-    def page(self, *args):
-        """
- Function: page
-
- Description of Function:
-    Change the VCS Canvas orientation to either 'portrait' or 'landscape'.
-
-    The orientation of the VCS Canvas and of cgm and raster images is controlled by
-    the PAGE command. Only portrait (y > x) or landscape (x > y) orientations are
-    permitted.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array)
-    a.page()      # Change the VCS Canvas orientation and set object flag to portrait
-"""
-
-        l = self.canvas.page(*args)
-
-        return l
-
-    ##########################################################################
-    #                                                                        #
-    # Portrait VCS Canvas orientation wrapper for VCS.                       #
-    #                                                                        #
-    ##########################################################################
     def portrait(self, width=-99, height=-99, x=-99, y=-99, clear=0):
         """
- Function: portrait
+        Change the VCS Canvas orientation to Portrait.
 
- Description of Function:
-    Change the VCS Canvas orientation to Portrait.
+        If the current orientation of the canvas is already portrait, nothing happens.
 
-     Note: the (width, height) and (x, y) arguments work in pairs. That is, you must
-           set (width, height) or (x, y) together to see any change in the VCS Canvas.
+        :Example:
 
-           If the portrait method is called  with arguments before displaying a VCS Canvas,
-           then the arguments (width, height, x, y, and clear) will have no effect on the
-           canvas.
+::
 
-     Known Bug: If the visible plot on the VCS Canvas is not adjusted properly, then resize
-                the screen with the point. Some X servers are not handling the threads properly
-                to keep up with the demands of the X client.
-
- Example of Use:
     a=vcs.init()
     a.plot(array)
-    a.portrait()      # Change the VCS Canvas orientation and set object flag to portrait
-    a.portrait(clear=1) # Change the VCS Canvas to portrait and clear the page
-    a.portrait(width = 337, height = 400) # Change to portrait and set the window size
-    a.portrait(x=100, y = 200) # Change to portrait and set the x and y screen position
-    a.portrait(width = 337, height = 400, x=100, y = 200, clear=1) # Chagne to portrait and give specifications
+    # Change the VCS Canvas orientation and set object flag to portrait
+    a.portrait()
+    # Change the VCS Canvas to portrait and clear the page
+    a.portrait(clear=1)
+    # Change to portrait and set the window size
+    a.portrait(width = 337, height = 400)
+    # Change to portrait and set the x and y screen position
+    a.portrait(x=100, y = 200)
+    # Change to portrait and give specifications
+    a.portrait(width = 337, height = 400, x=100, y = 200, clear=1)
+
+:param width: Width to set the canvas to (in pixels)
+:type width: int
+
+:param height: Height to set the canvas to (in pixels)
+:type height: int
+
+:param x: Unused.
+:type x: None
+
+:param y: Unused.
+:type y: None
+
+:param clear: 0: Do not clear the canvas when orientation is changed. 1: clear the canvas when orientation is changed.
+:type clear: int
+
+:returns: ???
+:rtype: ???
 """
         if (self.orientation() == 'portrait'):
             return
@@ -4493,37 +4337,52 @@ Options:::
 
         return p
 
-    ##########################################################################
-    #                                                                        #
-    # png wrapper for VCS.                                                   #
-    #                                                                        #
-    ##########################################################################
     def ffmpeg(self, movie, files, bitrate=1024, rate=None, options=None):
         """
- Function: ffmpeg
-
- Description of Function:
     MPEG output from a list of valid files.
-    Note that ffmpeg is smart enough to output to more than just mpeg format
+    Can output to more than just mpeg format.
+
+    Note: ffmpeg ALWAYS overwrites the output file
+
+    :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
     #... code to generate png files ...
-    # here is dummy example
+    # here is a dummy example
     files =[]
     for i in range(10):
-      x.png('my_png__%i' % i)
+      a.png('my_png__%i' % i)
       files.append('my_png__%i.png' % i)
-    x.ffmpeg('mymovie.mpeg','my_png_%d.png') # generates mpeg from pattern
-    x.ffmpeg('mymovie.mpeg',files) # generates from list of files
-    x.ffmpeg('mymovie.mpeg','my_png_%d.png',bitrate=512) # generates mpeg at 512kbit
-                                 bitrate (bitrate is important to movie quality)
-    x.ffmpeg('mymovie.mpeg','my_png_%d.png',rate=50) # generates movie with 50 frame per second
-    x.ffmpeg('mymovie.mpeg','my_png_%d.png',options='-r 50 -b 1024k')
-    # genrats movie at 50 frame per sec and 1024k bitrate
-    NOTE : via the optins arg you can add audio file to your movie (see ffmpeg help)
-    returns the output string generated by ffmpeg program
-    ALWAYS overwrite output file
+    # generates mpeg from pattern
+    a.ffmpeg('mymovie.mpeg','my_png_%d.png')
+    # generates from list of files
+    a.ffmpeg('mymovie.mpeg',files)
+    # generates mpeg at 512kbit
+    a.ffmpeg('mymovie.mpeg','my_png_%d.png',bitrate=512)
+    # generates movie with 50 frame per second
+    a.ffmpeg('mymovie.mpeg','my_png_%d.png',rate=50)
+    # generates movie at 50 frame per sec and 1024k bitrate
+    a.ffmpeg('mymovie.mpeg','my_png_%d.png',options='-r 50 -b 1024k')
+
+    NOTE: via the options arg you can add an audio file to your movie (see ffmpeg help)
+
+:param movie: ???
+:type movie: ???
+
+:param files: String file name
+:type files: str, list, or tuple
+
+:param rate: Desired output framerate
+:type rate: str
+
+:param options: ???
+:type options: ???
+
+:returns: The output string generated by ffmpeg program
+:rtype: str
+
 """
         args = ["ffmpeg", "-y"]
 
@@ -4607,26 +4466,37 @@ Options:::
         return self.backend.getantialiasing()
 
     def setantialiasing(self, antialiasing):
-        """ Turn ON/OFF antialiasing"""
+        """ Set antialiasing rate.
+
+:param antialiasing: Integer from 0-64, representing the antialiasing rate (0 means no antialiasing).
+:type antialiasing: int
+        """
         self.backend.setantialiasing(antialiasing)
 
-    ##########################################################################
-    #                                                                        #
-    # bg dims wrapper for VCS.                                               #
-    #                                                                        #
-    ##########################################################################
     def setbgoutputdimensions(self, width=None, height=None, units='inches'):
         """
- Function: setbgoutputdimensions
+        Sets dimensions for output in bg mode.
 
- Description of Function:
-    Sets dimensions for output in bg mode.
+         :Example:
 
- Example of Use:
-    a=vcs.init()
-    a.setbgoutputdimensions(width=11.5, height= 8.5)  # US Legal
-    a.setbgoutputdimensions(width=21, height=29.7, units='cm')  # A4
-"""
+::
+
+            a=vcs.init()
+            # US Legal
+            a.setbgoutputdimensions(width=11.5, height= 8.5)
+            # A4
+            a.setbgoutputdimensions(width=21, height=29.7, units='cm')
+
+:param width: Float representing the desired width of the output, using the specified unit of measurement
+:type width: float
+
+:param height: Float representing the desired height of the output, using the specified unit of measurement.
+:type height: float
+
+:param units: One of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Defaults to 'inches'.
+:type units: str
+
+        """
         if units not in [
                 'inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']:
             raise Exception(
@@ -4651,23 +4521,35 @@ Options:::
             *args,
             **kargs)
 
-    ##########################################################################
-    #                                                                        #
-    # png wrapper for VCS.                                                   #
-    #                                                                        #
-    ##########################################################################
     def png(self, file, width=None, height=None,
             units=None, draw_white_background=True, **args):
         """
- Function: png
-
- Description of Function:
     PNG output, dimensions set via setbgoutputdimensions
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array)
-    a.png('example')       # Overwrite a png file
+    # Overwrite a png file
+    a.png('example')
+
+:param file: A string containing the path to ???
+:type file: str
+
+:param width: Float representing the desired width of the output png, using the specified unit of measurement
+:type width: float
+
+:param height: Float representing the desired height of the output png, using the specified unit of measurement.
+               Measured in the unit specified by the ``units`` parameter.
+:type height: float
+
+:param units: One of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Defaults to 'inches'.
+:type units: str
+
+:param draw_white_background: Boolean value indicating whether or not the background should be white. Defaults to True.
+:type draw_white_background: bool
 """
         base = os.path.dirname(file)
         if base != "" and not os.path.exists(base):
@@ -4683,25 +4565,41 @@ Options:::
         return self.backend.png(
             file, W, H, units, draw_white_background, **args)
 
-    ##########################################################################
-    #                                                                        #
-    # pdf wrapper for VCS.                                                   #
-    #                                                                        #
-    ##########################################################################
     def pdf(self, file, width=None, height=None, units='inches',
             textAsPaths=True):
         """
- Function: postscript
+    PDF output is another form of vector graphics.
 
- Description of Function:
-    SVG output is another form of vector graphics.
+    :Example:
+
+::
 
- Example of Use:
     a=vcs.init()
     a.plot(array)
-    a.pdf('example')       # Overwrite a postscript file
-    a.pdf('example', width=11.5, height= 8.5)  # US Legal
-    a.pdf('example', width=21, height=29.7, units='cm')  # A4
+    # Overwrite a pdf file
+    a.pdf('example')
+    # US Legal
+    a.pdf('example', width=11.5, height= 8.5)
+    # A4
+    a.pdf('example', width=21, height=29.7, units='cm')
+
+:param file: Desired string name of the output file
+:type file: str
+
+:param width: Integer specifying the desired width of the output, measured in the chosen units
+:type width: int
+
+:param height: Integer specifying the desired height of the output, measured in the chosen units
+:type height: int
+
+:param units: Must be one of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Default is 'inches'.
+:type units: str
+
+:param textAsPaths: ???
+:type textAsPaths: bool
+
+:returns: ???
+:rtype: ???
 """
         if units not in [
                 'inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']:
@@ -4714,26 +4612,42 @@ Options:::
         if not file.split('.')[-1].lower() in ['pdf']:
             file += '.pdf'
         return self.backend.pdf(file, W, H, textAsPaths)
-    ##########################################################################
-    #                                                                        #
-    # SVG wrapper for VCS.                                                   #
-    #                                                                        #
-    ##########################################################################
 
     def svg(self, file, width=None, height=None, units='inches',
             textAsPaths=True):
         """
- Function: postscript
-
- Description of Function:
     SVG output is another form of vector graphics.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array)
-    a.svg('example')       # Overwrite a postscript file
-    a.svg('example', width=11.5, height= 8.5)  # US Legal
-    a.svg('example', width=21, height=29.7, units='cm')  # A4
+    # Overwrite an svg file
+    a.svg('example')
+    # US Legal
+    a.svg('example', width=11.5, height= 8.5)
+    # A4
+    a.svg('example', width=21, height=29.7, units='cm')
+
+:param file: String name of the desired output svg file
+:type file: str
+
+:param width: Float to set width of output SVG, in specified unit of measurement
+:type width: float
+
+:param height: Float to set height of output SVG, in specified unit of measurement
+:type height: float
+
+:param units: One of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Defaults to 'inches'.
+:type units: str
+
+:param textAsPaths: ???
+:type textAsPaths: bool
+
+:returns: ???
+:rtype: ???
 """
         if units not in [
                 'inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']:
@@ -4845,7 +4759,13 @@ Options:::
         return top_margin, bottom_margin, right_margin, left_margin
 
     def isopened(self):
-        """Is the Canvas opened?"""
+        """
+        Is the Canvas opened?
+
+:returns: A boolean value indicating whether the Canvas is opened (1), or closed (0)
+:rtype: bool
+        """
+
         return self.backend.isopened()
 
     def _compute_width_height(self, width, height, units, ps=False):
@@ -4923,9 +4843,6 @@ Options:::
     def postscript(self, file, mode='r', orientation=None, width=None, height=None,
                    units='inches', textAsPaths=True):
         """
- Function: postscript
-
- Description of Function:
     Postscript output is another form of vector graphics. It is larger than its CGM output
     counter part, because it is stored out in ASCII format.
 
@@ -4935,17 +4852,50 @@ Options:::
     postscript file (i.e. mode (r)).
 
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array)
-    a.postscript('example')       # Overwrite a postscript file
-    a.postscript('example', 'a')  # Append postscript to an existing file
-    a.postscript('example', 'r')  # Overwrite an existing file
-    a.postscript('example', mode='a')  # Append postscript to an existing file
-    a.postscript('example', width=11.5, height= 8.5)  # US Legal (default)
-    a.postscript('example', width=21, height=29.7, units='cm')  # A4
-    a.postscript('example', right_margin=.2,left_margin=.2,top_margin=.2,bottom_margin=.2)
+    # Overwrite a postscript file
+    a.postscript('example')
+    # Append postscript to an existing file
+    a.postscript('example', 'a')
+    # Overwrite an existing file
+    a.postscript('example', 'r')
+    # Append postscript to an existing file
+    a.postscript('example', mode='a')
+    # US Legal (default)
+    a.postscript('example', width=11.5, height= 8.5)
+    # A4
+    a.postscript('example', width=21, height=29.7, units='cm')
     # US Legal output and control of margins (for printer friendly output), default units 'inches'
+    a.postscript('example', right_margin=.2,left_margin=.2,top_margin=.2,bottom_margin=.2)
+
+:param file: String name of the desired output file
+:type file: str
+
+:param mode: The mode in which to open the file. One of 'r' or 'a'.
+:type mode: str
+
+:param orientation: Deprecated.
+:type orientation: None
+
+:param width: Desired width of the postscript output, in the specified unit of measurement
+:type width: int
+
+:param height: Desired height of the postscript output, in the specified unit of measurement
+:type height: int
+
+:param units: One of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Defaults to 'inches'.
+:type units: str
+
+:param textAsPaths: ???
+:type textAsPaths: ???
+
+:returns: ???
+:rtype: ???
 """
         if units not in [
                 'inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']:
@@ -4976,167 +4926,33 @@ Options:::
             else:
                 shutil.move(psnm, file)
 
-    ##########################################################################
-    #                                                                        #
-    # Showbg wrapper for VCS.                                                #
-    #                                                                        #
-    ##########################################################################
-    def showbg(self, *args):
-        """
- Function: showbg
-
- Description of Function:
-    This function displays graphics segments, which are currently stored in the frame buffer,
-    on the VCS Canvas. That is, if the plot function was called with the option bg = 1 (i.e.,
-    background mode), then the plot is produced in the frame buffer and not visible to the
-    user. In order to view  the graphics segments, this function will copy the contents of
-    the frame buffer to the VCS Canvas, where the graphics can be viewed by the user.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array, bg=1)
-    x.showbg()
-"""
-        a = self.canvas.showbg(*args)
-
-        return a
-
-    ##########################################################################
-    #                                                                        #
-    # Backing Store wrapper for VCS.                                         #
-    #                                                                        #
-    ##########################################################################
-    def backing_store(self, *args):
-        """
- Function: backing_store
-
- Description of Function:
-    This function creates a backing store pixmap for the VCS Canvas.
-
- Example of Use:
-    a=vcs.init()
-    a.backing_store()
-"""
-        return self.canvas.backing_store(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # Update the animation slab. Used only for the VCS Canvas GUI.           #
-    #                                                                        #
-    ##########################################################################
-    def update_animation_data(self, *args):
-        return self.canvas.update_animation_data(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # Return the dimension information. Used only for the VCS Canvas GUI.    #
-    #                                                                        #
-    ##########################################################################
-    def return_dimension_info(self, *args):
-        return self.canvas.return_dimension_info(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # Raster wrapper for VCS.                                                #
-    #                                                                        #
-    ##########################################################################
-    def raster(self, file, mode='a'):
-        """
- Function: raster
-
- Description of Function:
-    In some cases, the user may want to save the plot out as an raster image. This
-    routine allows the user to save the VCS canvas output as a SUN raster file.
-    This file can be converted to other raster formats with the aid of xv and other
-    such imaging tools found freely on the web.
-
-    If no path/file name is given and no previously created raster file has been
-    designated, then file
-
-    /$HOME/%s/default.ras
-
-    will be used for storing raster images. However, if a previously created raster
-    file is designated, that file will be used for raster output.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array)
-    a.raster('example','a')   # append raster image to existing file
-    a.raster('example','r')   # overwrite existing raster file
-    a.raster(file='example',mode='r')   # overwrite existing raster file
-""" % (self._dotdir)
-        return self.canvas.raster(*(file, mode))
-
-    ##########################################################################
-    #                                                                        #
-    # Reset grid wrapper for VCS.                                            #
-    #                                                                        #
-    ##########################################################################
-    def resetgrid(self, *args):
-        """
- Function: resetgrid
-
- Description of Function:
-    Set the plotting region to default values.
-
- Example of Use:
-    Not Working!
-"""
-        return self.canvas.resetgrid(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # Script wrapper for VCS.                                                #
-    #                                                                        #
-    ##########################################################################
     def _scriptrun(self, *args):
         return vcs._scriptrun(*args)
 
     def scriptrun(self, aFile, *args, **kargs):
         vcs.scriptrun(aFile, *args, **kargs)
 
-    ##########################################################################
-    #                                                                        #
-    # Set default graphics method and template wrapper for VCS.              #
-    #                                                                        #
-    ##########################################################################
-    def set(self, *args):
-        """
- Function: set
-
- Description of Function:
-    Set the default VCS primary class objects: template and graphics methods.
-    Keep in mind the template, determines the appearance of each graphics segment;
-    the graphic method specifies the display technique; and the data defines what
-    is to be displayed. Note, the data cannot be set with this function.
-
- Example of Use:
-    a=vcs.init()
-    a.set('isofill','quick') # Changes the default graphics method to Isofill: 'quick'
-    a.plot(array)
-"""
-        return self.canvas.set(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # Set VCS color map wrapper for VCS.                                     #
-    #                                                                        #
-    ##########################################################################
     def setcolormap(self, name):
         """
- Function: setcolormap
-
- Description of Function:
     It is necessary to change the colormap. This routine will change the VCS
     color map.
 
     If the the visul display is 16-bit, 24-bit, or 32-bit TrueColor, then a redrawing
     of the VCS Canvas is made evertime the colormap is changed.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
     a.setcolormap("AMIP")
+
+:param name: String name of the colormap to make active on the canvas
+:type name: str
+
+:returns: ???
+:rtype: ???
 """
         # Don't update the VCS segment if there is no Canvas. This condition
         # happens in the initalize function for VCDAT only. This will cause a
@@ -5149,16 +4965,8 @@ Options:::
         self.update()
         return
 
-    ##########################################################################
-    #                                                                        #
-    # Set VCS color map cell wrapper for VCS.                                #
-    #                                                                        #
-    ##########################################################################
     def setcolorcell(self, *args):
         """
- Function: setcolorcell
-
- Description of Function:
     Set a individual color cell in the active colormap. If default is
     the active colormap, then return an error string.
 
@@ -5169,7 +4977,10 @@ Options:::
     value must range from 0 to 100. Where 0 represents no color intensity
     and 100 is the greatest color intensity.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
     a.plot(array,'default','isofill','quick')
     a.setcolormap("AMIP")
@@ -5185,20 +4996,15 @@ Options:::
         a = vcs.setcolorcell(self.colormap, *args)
         return a
 
-    ##########################################################################
-    #                                                                        #
-    # Set continents line wrapper for VCS.                                   #
-    #                                                                        #
-    ##########################################################################
     def setcontinentsline(self, line="default"):
         """
-    Function: setcontinentsline
-
-    Description of Function:
         One has the option of configuring the appearance of the lines used to
         draw continents by providing a VCS Line object.
 
-    Example of Use:
+        :Example:
+
+::
+
         a = vcs.init()
         line = vcs.createline()
         line.width = 5
@@ -5206,6 +5012,12 @@ Options:::
         a.setcontinentsline(line)
         # Use default line
         a.setcontinentsline("default")
+
+:param line: String name of a line, or a VCS Line object, to use when drawing continents. Defaults to "default".
+:type line: str
+
+:returns: ???
+:rtype: ???
         """
         linename = VCS_validation_functions.checkLine(self, "continentsline", line)
         line = vcs.getline(linename)
@@ -5217,16 +5029,8 @@ Options:::
         else:
             return self._continents_line
 
-    ##########################################################################
-    #                                                                        #
-    # Set continents type wrapper for VCS.                                   #
-    #                                                                        #
-    ##########################################################################
     def setcontinentstype(self, value):
         """
-   Function: setcontinentstype
-
-   Description of Function:
       One has the option of using continental maps that are predefined or that
       are user-defined. Predefined continental maps are either internal to VCS
       or are specified by external files. User-defined continental maps are
@@ -5244,10 +5048,19 @@ Options:::
 
       You can also pass a file by path.
 
-   Example of Use:
+      :Example:
+
+::
+
       a=vcs.init()
       a.setcontinentstype(3)
       a.plot(array,'default','isofill','quick')
+
+:param value: Integer representing continent type, as specified in function description
+:type value: int
+
+:returns: ???
+:rtype: ???
   """
         continent_path = VCS_validation_functions.checkContinents(self, value)
         self._continents = value
@@ -5269,11 +5082,6 @@ Options:::
         except:
             return VCS_validation_functions.checkContinents(self, 1)
 
-    ##########################################################################
-    #                                                                           #
-    # Screen GIF wrapper for VCS.                                               #
-    #                                                                           #
-    ##########################################################################
     def gif(self, filename='noname.gif', merge='r', orientation=None,
             geometry='1600x1200'):
         """
@@ -5301,7 +5109,7 @@ Options:::
     file; `Replace' (r) mode overwrites an existing gif file with new gif output.
     The default mode is to overwrite an existing gif file (i.e. mode (r)).
 
- Example of Use:
+ :Example:
     a=vcs.init()
     a.plot(array)
     a.gif(filename='example.gif', merge='a', orientation='l', geometry='800x600')
@@ -5321,46 +5129,63 @@ Options:::
         nargs = ('gif', filename, merge, orientation, geometry)
         return self.backend.gif(nargs)
 
-    ##########################################################################
-    #                                                                        #
-    # Screen GhostScript (gs) wrapper for VCS.                               #
-    #                                                                        #
-    ##########################################################################
     def gs(self, filename='noname.gs', device='png256',
            orientation=None, resolution='792x612'):
 
         warnings.warn("Export to GhostScript is no longer supported", DeprecationWarning)
 
-    ##########################################################################
-    #                                                                        #
-    # Screen Encapsulated PostScript wrapper for VCS.                        #
-    #                                                                        #
-    ##########################################################################
     def eps(self, file, mode='r', orientation=None, width=None, height=None,
             units='inches', textAsPaths=True):
         """
-        Function: Encapsulated PostScript
-
-        Description of Function:
         In some cases, the user may want to save the plot out as an Encapsulated
         PostScript image. This routine allows the user to save the VCS canvas output
         as an Encapsulated PostScript file.
         This file can be converted to other image formats with the aid of xv and other
         such imaging tools found freely on the web.
 
+        :Example:
+
+::
 
-        Example of Use:
         a=vcs.init()
         a.plot(array)
-        a.postscript('example')       # Overwrite a postscript file
-        a.postscript('example', 'a')  # Append postscript to an existing file
-        a.postscript('example', 'r')  # Overwrite an existing file
-        a.postscript('example', mode='a')  # Append postscript to an existing file
-        a.postscript('example', width=11.5, height= 8.5)  # US Legal (default)
-        a.postscript('example', width=21, height=29.7, units='cm')  # A4
+        # Overwrite a postscript file
+        a.postscript('example')
+        # Append postscript to an existing file
+        a.postscript('example', 'a')
+        # Overwrite an existing file
+        a.postscript('example', 'r')
+        # Append postscript to an existing file
+        a.postscript('example', mode='a')
+        # US Legal (default)
+        a.postscript('example', width=11.5, height= 8.5)
+        # A4
+        a.postscript('example', width=21, height=29.7, units='cm')
         a.postscript('example', right_margin=.2,left_margin=.2,top_margin=.2,bottom_margin=.2)
         # US Legal output and control of margins (for printer friendly output), default units 'inches'
-        """
+
+
+:param file: String name of the desired output file
+:type file: str
+
+:param mode: The mode in which to open the file. One of 'r' or 'a'.
+:type mode: str
+
+:param orientation: Deprecated.
+:type orientation: None
+
+:param width: Width of the output image, in the unit of measurement specified
+:type width: float
+
+:param height: Height of the output image, in the unit of measurement specified
+:type height: float
+
+:param units: One of ['inches', 'in', 'cm', 'mm', 'pixel', 'pixels', 'dot', 'dots']. Defaults to 'inches'.
+:type units: str
+
+:returns: ???
+:rtype: ???
+"""
         ext = file.split(".")[-1]
         if ext.lower() != 'eps':
             file = file + '.eps'
@@ -5380,23 +5205,25 @@ Options:::
         os.popen("ps2epsi %s %s" % (tmpfile, file)).readlines()
         os.remove(tmpfile)
 
-    ##########################################################################
-    #                                                                        #
-    # Show VCS primary and secondary elements wrapper for VCS.               #
-    #                                                                        #
-    ##########################################################################
     def show(self, *args):
         return vcs.show(*args)
     show.__doc__ = vcs.__doc__
 
-    ##########################################################################
-    #                                                                        #
-    # Look if a graphic method is in a file           .                      #
-    #                                                                        #
-    ##########################################################################
     def isinfile(self, GM, file=None):
-        """ Checks if a graphic method is stored in a file
-        if no file name is passed then looks into the initial.attributes file"""
+        """
+        Checks if a graphic method is stored in a file
+        if no file name is passed then looks into the initial.attributes file
+
+:param GM: The graphics method to search for
+:type GM: ???
+
+:param file: String name of the file to search
+:type file: str
+
+:returns: ???
+:rtype: ???
+
+        """
         nm = GM.name
         gm = GM.g_name
         key = gm + '_' + nm + '('
@@ -5410,12 +5237,7 @@ Options:::
             if ln.find(key) > -1:
                 f.close()
                 return 1
-        return 0
-    ##########################################################################
-    #                                                                        #
-    # Save VCS initial.attribute file  wrapper for VCS.                      #
-    #                                                                        #
-    ##########################################################################
+        return 0
 
     def saveinitialfile(self):
         """
@@ -5433,7 +5255,7 @@ Options:::
     The contents of the initial.attributes file can be customized by
     the user.
 
- Example of Use:
+ :Example:
     a=vcs.init()
     ...
 
@@ -5450,123 +5272,85 @@ Options:::
         self.clean_auto_generated_objects()
         return vcs.saveinitialfile()
 
-    ##########################################################################
-    #                                                                        #
-    # Raise VCS Canvas to the top of all its siblings.                       #
-    #                                                                        #
-    ##########################################################################
-    def canvasraised(self, *args):
+    def raisecanvas(self, *args):
         """
- Function: canvasraised                         # Raise the VCS Canvas to the top
-
- Description of Function:
-    This function marks a VCS Canvas as eligible to be displayed and
-    positions the window at the top of the stack of its siblings.
-
- Example of Use:
-    a=vcs.init()
-    ...
-
-    a.canvasraised()
-"""
-
-        return self.backend.canvasraised(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # Returns 1 if a VCS Canvas is displayed on the screen. Returns a 0 if no#
-    # VCS Canvas is displayed on the screen.                                 #
-    #                                                                        #
-    ##########################################################################
-    def iscanvasdisplayed(self, *args):
+        Raise the VCS Canvas to the top of all open windows.
         """
- Function: iscanvasdisplayed          # Return 1 if a VCS Canvas is displayed
+        return self.backend.raisecanvas(*args)
 
- Description of Function:
-    This function returns a 1 if a VCS Canvas is displayed or a 0 if
-    no VCS Canvas is displayed on the screen.
-
- Example of Use:
-    a=vcs.init()
-    ...
-
-    a.iscanvasdisplayed()
-"""
-
-        return self.canvas.iscanvasdisplayed(*args)
-
-    ##########################################################################
-    #                                                                        #
-    # Is VCS's orientation landscape?                                        #
-    #                                                                        #
-    ##########################################################################
     def islandscape(self):
         """
- Function: islandscape
-
- Description of Function:
     Indicates if VCS's orientation is landscape.
 
     Returns a 1 if orientation is landscape.
     Otherwise, it will return a 0, indicating false (not in landscape mode).
 
- Example of Use:
-    a=vcs.init()
-    ...
+    :Example:
 
+::
+
+    a=vcs.init()
+    # ...
     if a.islandscape():
-       a.portrait()               # Set VCS's orientation to portrait mode
+        # Set VCS's orientation to portrait mode
+       a.portrait()
+
+:returns: Boolean indicating VCS is in landscape mode (1), or not (0)
+:rtype: bool
 """
         if (self.orientation() == 'landscape'):
             return 1
         else:
             return 0
 
-    ##########################################################################
-    #                                                                        #
-    # Is VCS's orientation portrait?                                         #
-    #                                                                        #
-    ##########################################################################
     def isportrait(self):
         """
- Function: isportrait
-
- Description of Function:
     Indicates if VCS's orientation is portrait.
 
-    Returns a 1 if orientation is portrait.
-    Otherwise, it will return a 0, indicating false (not in portrait mode).
 
- Example of Use:
-    a=vcs.init()
-    ...
+    :Example:
 
+::
+
+    a=vcs.init()
+    #...
     if a.isportrait():
-       a.landscape()               # Set VCS's orientation to landscape mode
+        # Set VCS's orientation to landscape mode
+        a.landscape()
+
+:returns: Returns a 1 if orientation is portrait, or 0 if not in portrait mode
+:rtype: bool
+
 """
         if (self.orientation() == 'portrait'):
             return 1
         else:
             return 0
-    ##########################################################################
-    #                                                                        #
-    # Dislplay plot functions for VCS.                                       #
-    #                                                                        #
-    ##########################################################################
 
     def getplot(self, Dp_name_src='default', template=None):
         """
- Function: getplot                  # Get existing display plot
-
- Description of Function:
     This function will create a display plot object from an existing display
     plot object from an existing VCS plot. If no display plot name
     is given, then None is returned.
 
- Example of Use:
+    :Example:
+
+::
+
     a=vcs.init()
-    a.show('template')                  # Show all the existing templates
-    plot1=a.getplot('dpy_plot_1')       # plot1 instance of 'dpy_plot_1' display plot
+    # Show all the existing templates
+    a.show('template')
+    # plot1 instance of 'dpy_plot_1' display plot
+    plot1=a.getplot('dpy_plot_1')
+
+:param Dp_name_src: String name of an existing display plot object
+:type Dp_name_src: str
+
+:param template: ???
+:type template: ???
+
+:returns: ???
+:rtype: ???
 """
         if not isinstance(Dp_name_src, str):
             raise ValueError('Error -  The argument must be a string.')
@@ -5577,11 +5361,6 @@ Options:::
             display._template_origin = template
         return display
 
-    ##########################################################################
-    #                                                                        #
-    # Colormap functions for VCS.                                            #
-    #                                                                        #
-    ##########################################################################
     def createcolormap(self, Cp_name=None, Cp_name_src='default'):
         return vcs.createcolormap(Cp_name, Cp_name_src)
     createcolormap.__doc__ = vcs.manageElements.createcolormap.__doc__
@@ -5590,15 +5369,16 @@ Options:::
         return vcs.getcolormap(Cp_name_src)
     getcolormap.__doc__ = vcs.manageElements.getcolormap.__doc__
 
-    ##########################################################################
-    #                                                                        #
-    # Font functions.                                                        #
-    #                                                                        #
-    ##########################################################################
     def addfont(self, path, name=""):
         """
-        Add a font to VCS, path then a name you'd like to associate it with
-        """
+        Add a font to VCS.
+
+    :param path: Path to the font file you wish to add (must be .ttf)
+    :type path: str
+
+    :param name: Name to use to represent the font.
+    :type name: str
+"""
         if not os.path.exists(path):
             raise ValueError('Error -  The font path does not exists')
         if os.path.isdir(path):
@@ -5632,20 +5412,19 @@ Options:::
             return nms[0]
 
     def getfontnumber(self, name):
-        """
-        get the font number associated with a font name
-        """
         return vcs.getfontnumber(name)
+    getfontnumber.__doc__ = vcs.utils.getfontnumber.__doc__
 
     def getfontname(self, number):
-        """
-        get the font name associated with a font number
-        """
         return vcs.getfontname(number)
+    getfontname.__doc__ = vcs.utils.getfontname.__doc__
 
     def getfont(self, font):
         """
-        get the font name/number associated with a font number/name
+        Get the font name/number associated with a font number/name
+
+        :param font: The font name/number
+        :type font: int or str
         """
         if isinstance(font, int):
             return self.getfontname(font)
@@ -5655,7 +5434,14 @@ Options:::
             raise vcsError("Error you must pass a string or int")
 
     def switchfonts(self, font1, font2):
-        """ Switch 2 font indexes, you can pass either the font names or indexes """
+        """
+        Switch the font numbers of two fonts.
+
+        :param font1: The first font
+        :type font1: int or str
+        :param font2: The second font
+        :type font2: int or str
+        """
         if isinstance(font1, str):
             index1 = self.getfont(font1)
         elif isinstance(font1, (int, float)):
@@ -5678,7 +5464,15 @@ Options:::
         return self.canvas.switchfontnumbers(*(index1, index2))
 
     def copyfontto(self, font1, font2):
-        """ copy name and path of font 1 into font 2, you can pass either the font names or indexes """
+        """
+        Copy `font1` into `font2`.
+
+:param font1: Name/number of font to copy
+:type font1: str or int
+
+:param font2: Name/number of destination
+:type font2: str or int
+"""
         if isinstance(font1, str):
             index1 = self.getfont(font1)
         elif isinstance(font1, (int, float)):
@@ -5700,81 +5494,49 @@ Options:::
         return self.canvas.copyfontto(*(index1, index2))
 
     def setdefaultfont(self, font):
-        """Sets the passed font as the default font for vcs"""
+        """
+        Sets the passed font as the default font for vcs
+
+:param font: Font name or index to use as default
+:type font: str or int
+
+
+        """
         if isinstance(font, str):
             font = self.getfont(font)
         return self.copyfontto(font, 1)
 
-    ##########################################################################
-    #                                                                        #
-    # Orientation VCS Canvas orientation wrapper for VCS.                    #
-    #                                                                        #
-    ##########################################################################
     def orientation(self, *args, **kargs):
         """
- Function: orientation
+        Return canvas orientation.
 
- Description of Function:
-    Return VCS's orientation. Will return either Portrait or Landscape.
+        The current implementation does not use any args or kargs.
 
- Example of Use:
-    a=vcs.init()
-    a.orientation()      # Return either "landscape" or "portrait"
-"""
+        :Example:
+
+::
+        a = vcs.init()
+        # Show current orientation of the canvas
+        a.orientation()
+
+:returns: A string indicating the orientation of the canvas, i.e. 'landscape' or 'portrait'
+:rtype: str
+        """
         return self.backend.orientation(*args, **kargs)
 
-    ##########################################################################
-    #                                                                        #
-    # Get VCS color map cell wrapper for VCS.                                #
-    #                                                                        #
-    ##########################################################################
     def getcolorcell(self, *args):
         """
- Function: getcolorcell
-
- Description of Function:
-    Get an individual color cell in the active colormap. If default is
-    the active colormap, then return an error string.
-
-    If the the visul display is 16-bit, 24-bit, or 32-bit TrueColor, then a redrawing
-    of the VCS Canvas is made evertime the color cell is changed.
 
-    Note, the user can only change color cells 0 through 239 and R,G,B
-    value must range from 0 to 100. Where 0 represents no color intensity
-    and 100 is the greatest color intensity.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array,'default','isofill','quick')
-    a.setcolormap("AMIP")
-    a.getcolorcell(11,0,0,0)
-    a.getcolorcell(21,100,0,0)
-    a.getcolorcell(31,0,100,0)
-    a.getcolorcell(41,0,0,100)
-    a.getcolorcell(51,100,100,100)
-    a.getcolorcell(61,70,70,70)
-
-"""
+        """
+        """%s""" % vcs.getcolorcell.__doc__
         return vcs.getcolorcell(args[0], self)
 
-    ##########################################################################
-    #                                                                        #
-    # Get VCS color map name wrapper for VCS.                                #
-    #                                                                        #
-    ##########################################################################
-    def getcolormapname(self, *args):
+    def getcolormapname(self):
         """
- Function: getcolormapcell
-
- Description of Function:
-    Get colormap name of the active colormap.
+        Returns the name of the colormap this canvas is set to use by default.
 
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array,'default','isofill','quick')
-    a.getcolormapname()
-"""
+        To set that colormap, use :ref:`vcs.Canvas.Canvas.setcolormap`.
+        """
         if self.colormap is None:
             return vcs._colorMap
         return self.colormap
@@ -5784,12 +5546,6 @@ Options:::
         print 'Keywords:', kargs
         return None
 
-#############################################################################
-#                                                                           #
-# Primarily used for reseting the animation date and time string.           #
-#                                                                           #
-#############################################################################
-
 
 def change_date_time(tv, number):
     timeaxis = tv.getTime()
diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 7c24f56cb..2216184e0 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -1553,3 +1553,9 @@ class VTKVCSBackend(object):
         img = reader.GetOutput()
         size = img.GetDimensions()
         return size[0], size[1]
+
+    def raisecanvas(self):
+        if self.renWin is None:
+            warnings.warn("Cannot raise if you did not open the canvas yet.")
+            return
+        self.renWin.MakeCurrent()
diff --git a/Packages/vcs/vcs/__init__.py b/Packages/vcs/vcs/__init__.py
index 26e87d84a..e450c23fb 100755
--- a/Packages/vcs/vcs/__init__.py
+++ b/Packages/vcs/vcs/__init__.py
@@ -1,27 +1,48 @@
 """
-# VCS Visualization and Control System - (VCS) module
-#
-#################################################################################
-#                                                                               #
-# Module:       vcs module                                                      #
-#                                                                               #
-# Authors:      PCMDI Software Team                                             #
-#               support@pcmdi.llnl.gov                                          #
-#               http://cdat.sf.net/cdat                                         #
-#                                                                               #
-# Description:  Python command wrapper for VCS's functionality. VCS is computer #
-#               software for the selection, manipulation, and display of        #
-#               scientific data. By specification of the desired data, the      #
-#               graphics method, and the display template, the VCS user gains   #
-#               virtually complete control of the appearance of the data        #
-#               display and associated text and animation.                      #
-#                                                                               #
-# Upgrade to VTK:                                                               #
-# Author: Charles Doutriaux                                                     #
-# Description: Took out all C code and used VTK's python bindings instead       #
-#                                                                               #
-#################################################################################
+=====================================
+VCS: Visualization and Control System
+=====================================
+
+-------
+Authors
+-------
+
+Creator: Dean Williams (LLNL, AIMS Team)
+
+Lead Developer: Charles Doutriaux (LLNL, AIMS Team)
+
+Contributors: https://github.com/UV-CDAT/uvcdat/graphs/contributors
+
+Support Email: uvcdat-support@llnl.gov
+
+Project Site: http://uvcdat.llnl.gov/
+
+Project Repo: https://github.com/UV-CDAT/uvcdat
+
+-----------
+Description
+-----------
+VCS is a visualization library for scientific data. It has a simple
+model for defining a plot, that is decomposed into three parts:
+
+1. **Data**: If it's iterable, we'll plot it... or at least try!
+   Currently we support numpy arrays, lists (nested and not),
+   and CDMS2 variables (there's some special support for metadata
+   from CDMS2 that gives some niceties in your plot, but it's not
+   mandatory).
+2. **Graphics Method**: We have a variety of plot types that we
+   support out-of-the box; you can easily customize every aspect
+   of them to create the effect that you're looking for. If you can't,
+   we also support defining your own graphics methods, which you can
+   share with other users using standard python infrastructure (conda, pip).
+3. **Template**: Templates control the appearance of everything that
+   *isn't* your data. They position labels, control fonts, adjust borders,
+   place legends, and more. They're very flexible, and give the fine-grained
+   control of your plot that is needed for the truly perfect plot. Once you've
+   customized them, you can also save them out for later use, and distribute
+   them to other users.
 """
+
 _doValidation = True
 next_canvas_id = 1
 import cdat_info  # noqa
@@ -223,37 +244,45 @@ if os.path.exists(user_init):
     vcs.scriptrun(user_init)
 
 canvaslist = []
-#
-#
-# Construct a VCS Canvas Object.                                                #
-#
-#
 
 
 def init(mode=1, pause_time=0, call_from_gui=0, size=None,
          backend="vtk", geometry=None, bg=None):
     '''
- Function: init   # Initialize, Construct a VCS Canvas Object
-
- Description of Function:
-    Construct the VCS Canas object.
-
- Example of Use:
-    import vcs,cdms2
-
-    file=cdms2.open('filename.nc')
-    slab=file.getslab('variable')
-    a=vcs.init()                        # This examples constructs 4 VCS Canvas
-    a.plot(slab)                        # Plot slab using default settings
-    b=vcs.init()                        # Construct VCS object
-    template=b.gettemplate('AMIP')      # Get 'example' template object
-    b.plot(slab,template)               # Plot slab using template 'AMIP'
-    c=vcs.init()                        # Construct new VCS object
-    isofill=c.getisofill('quick')       # Get 'quick' isofill graphics method
-    c.plot(slab,template,isofill)       # Plot slab using template and isofill objects
-    d=vcs.init()                        # Construct new VCS object
-    isoline=c.getisoline('quick')       # Get 'quick' isoline graphics method
-    c.plot(isoline,slab,template)       # Plot slab using isoline and template objects
+    Initialize and construct a VCS Canvas object.
+
+    :Example:
+
+::
+
+    import vcs
+
+    # Portrait orientation of 1 width per 2 height
+    portrait = vcs.init(size=.5)
+    # also accepts "usletter"
+    letter = vcs.init(size="letter")
+    a4 = vcs.init(size="a4")
+
+    import vtk
+    # Useful for embedding VCS inside another application
+    my_win = vtk.vtkRenderWindow()
+    embedded = vcs.init(backend=my_win)
+
+    dict_init = vcs.init(geometry={"width": 1200, "height": 600})
+    tuple_init = vcs.init(geometry=(1200, 600))
+
+    bg_canvas = vcs.init(bg=True)
+
+:param size: Aspect ratio for canvas (width / height)
+:param backend: Which VCS backend to use
+:param geometry: Size (in pixels) you want the canvas to be.
+:param bg: Initialize a canvas to render in "background" mode (without displaying a window)
+:type size: float or case-insensitive str
+:type backend: str, `vtk.vtkRenderWindow`
+:type geometry: dict or tuple
+:type bg: bool
+:return: an initialized canvas
+:rtype: `vcs.Canvas.Canvas`
 '''
     canvas = Canvas.Canvas(
         mode=mode,
diff --git a/Packages/vcs/vcs/manageElements.py b/Packages/vcs/vcs/manageElements.py
index c61f762b5..751986a5e 100644
--- a/Packages/vcs/vcs/manageElements.py
+++ b/Packages/vcs/vcs/manageElements.py
@@ -67,21 +67,35 @@ def check_name_source(name, source, typ):
 
 def createtemplate(name=None, source='default'):
     """
-Function: createtemplate                  # Construct a new template
+    Create a new template given the name and the existing template to copy
+    the attributes from. If no existing template name is given, then the default
+    template will be used as the template to which the attributes will be copied
+    from.
+
+    If the name provided already exists, then an error will be returned. Template
+    names must be unique.
+
+    :Example:
+
+::
+
+    # create 'example1' template from 'default' template
+    con=vcs.createtemplate('example1')
+    # Show all the existing templates
+    vcs.listelements('template')
+    # create 'example2' from 'quick' template
+    con=vcs.createtemplate('example2','quick')
+
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a template or a string name of a template
+
+:returns: A template
+:rtype: vcs.template.P
 
-Description of Function:
-Create a new template given the the name and the existing template to copy
-the attributes from. If no existing template name is given, then the default
-template will be used as the template to which the attributes will be copied
-from.
-
-If the name provided already exists, then a error will be returned. Template
-names must be unique.
-
-Example of Use:
-con=vcs.createtemplate('example1') # create 'example1' template from 'default' template
-vcs.listelements('template')                       # Show all the existing templates
-con=vcs.createtemplate('example2','quick') # create 'example2' from 'quick' template
 """
     name, source = check_name_source(name, source, 'template')
 
@@ -90,22 +104,31 @@ con=vcs.createtemplate('example2','quick') # create 'example2' from 'quick' temp
 
 def gettemplate(Pt_name_src='default'):
     """
-Function: gettemplate                       # Construct a new template
+    VCS contains a list of predefined templates. This function will create a
+    template class object from an existing VCS template. If no template name
+    is given, then template 'default' will be used.
 
-Description of Function:
-VCS contains a list of predefined templates. This function will create a
-template class object from an existing VCS template. If no template name
-is given, then template 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createtemplate function.)
-
-Example of Use:
-vcs.listelements('template')                  # Show all the existing templates
-templt=vcs.gettemplate()              # templt instance of 'default' template
-templt2=vcs.gettemplate('quick')      # templt2 contains 'quick' template
-"""
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createtemplate function.)
+
+    :Example:
+
+::
+
+    # Show all the existing templates
+    vcs.listelements('template')
+    # templt instance of 'default' template
+    templt=vcs.gettemplate()
+    # templt2 contains 'quick' template
+    templt2=vcs.gettemplate('quick')
+
+:param Pt_name_src: String name of an existing template VCS object
+:type Pt_name_src: str
+
+:returns: A VCS template object
+:rtype: vcs.template.P
+    """
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Pt_name_src, str):
         raise vcsError('The argument must be a string.')
@@ -117,24 +140,34 @@ templt2=vcs.gettemplate('quick')      # templt2 contains 'quick' template
 
 def createprojection(name=None, source='default'):
     """
-Function: createprojection                # Construct a new projection method
+    Create a new projection method given the name and the existing
+    projection method to copy the attributes from. If no existing
+    projection method name is given, then the default projection
+    method will be used as the projection method to which the attributes will
+    be copied from.
+
+    If the name provided already exists, then an error will be returned. Projection
+    method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('projection')
+    p=vcs.createprojection('example1',)
+    vcs.show('projection')
+    p=vcs.createprojection('example2','quick')
+    vcs.show('projection')
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a projection or a string name of a projection
+
+:returns: A projection graphics method object
+:rtype: vcs.projection.Proj
 
-Description of Function:
-Create a new projection method given the the name and the existing
-projection method to copy the attributes from. If no existing
-projection method name is given, then the default projection
-method will be used as the projection method to which the attributes will
-be copied from.
-
-If the name provided already exists, then a error will be returned. Projection
-method names must be unique.
-
-Example of Use:
-vcs.show('projection')
-p=vcs.createprojection('example1',)
-vcs.show('projection')
-p=vcs.createprojection('example2','quick')
-vcs.show('projection')
 """
 
     name, source = check_name_source(name, source, 'projection')
@@ -143,24 +176,31 @@ vcs.show('projection')
 
 def getprojection(Proj_name_src='default'):
     """
-Function: getprojection                    # Construct a new projection method
+    VCS contains a list of graphics methods. This function will create a
+    projection class object from an existing VCS projection method. If
+    no projection name is given, then projection 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-projection class object from an existing VCS projection method. If
-no projection name is given, then projection 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createprojection function.)
-
-Example of Use:
-vcs.show('projection')                   # Show all the existing projection methods
-p=vcs.getprojection()                  # box instance of 'default' projection
-                                    # method
-p2=vcs.getprojection('quick')          # box2 instance of existing 'quick' projection
-                                    #         graphics method
-"""
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createprojection function.)
+
+    :Example:
+
+::
+
+    # Show all the existing projection methods
+    vcs.show('projection')
+    # box instance of 'default' projection method
+    p=vcs.getprojection()
+    # box2 instance of existing 'quick' projection graphics method
+    p2=vcs.getprojection('quick')
+
+:param Proj_name_src: String name of an existing VCS projection object
+:type Proj_name_src: str
+
+:returns: A VCS projection object
+:rtype: vcs.projection.Proj
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Proj_name_src, str):
@@ -185,25 +225,33 @@ Output:::
 %s
 :::
 
-Function: createboxfill                # Construct a new boxfill graphics method
+    Create a new boxfill graphics method given the name and the existing
+    boxfill graphics method to copy the attributes from. If no existing
+    boxfill graphics method name is given, then the default boxfill graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
 
-Description of Function:
-Create a new boxfill graphics method given the the name and the existing
-boxfill graphics method to copy the attributes from. If no existing
-boxfill graphics method name is given, then the default boxfill graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
-
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
-
-Example of Use:
-vcs.show('boxfill')
-box=vcs.createboxfill('example1',)
-vcs.show('boxfill')
-box=vcs.createboxfill('example2','quick')
-vcs.show('boxfill')
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('boxfill')
+    box=vcs.createboxfill('example1',)
+    vcs.show('boxfill')
+    box=vcs.createboxfill('example2','quick')
+    vcs.show('boxfill')
 
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a boxfill or a string name of a boxfill
+
+:return: A boxfill graphics method object
+:rtype: vcs.boxfill.Gfb
 """
 
     name, source = check_name_source(name, source, 'boxfill')
@@ -225,31 +273,28 @@ Input:::
 Output:::
 %s
 :::
-Function: getboxfill                        # Construct a new boxfill graphics method
-
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-boxfill class object from an existing VCS boxfill graphics method. If
-no boxfill name is given, then boxfill 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createboxfill function.)
-
-Example of Use:
-vcs.show('boxfill')                   # Show all the existing boxfill graphics methods
-box=vcs.getboxfill()                  # box instance of 'default' boxfill graphics
-                                    # method
-box2=vcs.getboxfill('quick')          # box2 instance of existing 'quick' boxfill
-                                    #         graphics method
-######################################################################################################################
-###########################################                            ###############################################
-########################################## End getboxfill Description ################################################
-#########################################                            #################################################
-######################################################################################################################
-
+    VCS contains a list of graphics methods. This function will create a
+    boxfill class object from an existing VCS boxfill graphics method. If
+    no boxfill name is given, then boxfill 'default' will be used.
+
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createboxfill function.)
+
+    :Example:
+    # Show all the existing boxfill graphics methods
+    vcs.show('boxfill')
+    # box instance of 'default' boxfill graphics method
+    box=vcs.getboxfill()
+    # box2 instance of existing 'quick' boxfill graphics method
+    box2=vcs.getboxfill('quick')
+
+:param Gfb_name_src: String name of an existing boxfill VCS object
+:type Gfb_name_src: str
+
+:return: A pre-existing boxfill graphics method
+:rtype: vcs.boxfill.Gfb
 """
-
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Gfb_name_src, str):
         raise vcsError('The argument must be a string.')
@@ -263,24 +308,34 @@ getboxfill.__doc__ = getboxfill.__doc__ % (
 
 def createtaylordiagram(name=None, source='default'):
     """
-Function: createtaylordiagram  # Construct a new taylordiagram graphics method
+    Create a new taylordiagram graphics method given the name and the existing
+    taylordiagram graphics method to copy the attributes from. If no existing
+    taylordiagram graphics method name is given, then the default taylordiagram graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
+
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('taylordiagram')
+    td=vcs.createtaylordiagram('example1',)
+    vcs.show('taylordiagram')
+    td=vcs.createtaylordiagram('example2','quick')
+    vcs.show('taylordiagram')
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a taylordiagram or a string name of a taylordiagram
+
+:returns: A taylordiagram graphics method object
+:rtype: vcs.taylor.Gtd
 
-Description of Function:
-Create a new taylordiagram graphics method given the the name and the existing
-taylordiagram graphics method to copy the attributes from. If no existing
-taylordiagram graphics method name is given, then the default taylordiagram graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
-
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
-
-Example of Use:
-vcs.show('taylordiagram')
-td=vcs.createtaylordiagram('example1',)
-vcs.show('taylordiagram')
-td=vcs.createtaylordiagram('example2','quick')
-vcs.show('taylordiagram')
 """
 
     name, source = check_name_source(name, source, 'taylordiagram')
@@ -300,24 +355,31 @@ vcs.show('taylordiagram')
 
 def gettaylordiagram(Gtd_name_src='default'):
     """
-Function: gettaylordiagram                     # Construct a new taylordiagram graphics method
+    VCS contains a list of graphics methods. This function will create a
+    taylordiagram class object from an existing VCS taylordiagram graphics method. If
+    no taylordiagram name is given, then taylordiagram 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-taylordiagram class object from an existing VCS taylordiagram graphics method. If
-no taylordiagram name is given, then taylordiagram 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createboxfill function.)
-
-Example of Use:
-vcs.show('taylordiagram')                    # Show all the existing taylordiagram graphics methods
-td=vcs.gettaylordiagram()                    # td instance of 'default' taylordiagram graphics
-                                           # method
-td2=vcs.gettaylordiagram('default')          # td2 instance of existing 'default' taylordiagram
-                                           #         graphics method
-                                    """
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createboxfill function.)
+
+    :Example:
+
+::
+
+    # Show all the existing taylordiagram graphics methods
+    vcs.show('taylordiagram')
+    # td instance of 'default' taylordiagram graphics method
+    td=vcs.gettaylordiagram()
+    # td2 instance of existing 'default' taylordiagram graphics method
+    td2=vcs.gettaylordiagram('default')
+
+:param Gtd_name_src: String name of an existing taylordiagram VCS object
+:type Gtd_name_src: str
+
+:returns: A taylordiagram VCS object
+:rtype: vcs.taylor.Gtd
+                                        """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Gtd_name_src, str):
@@ -333,24 +395,34 @@ td2=vcs.gettaylordiagram('default')          # td2 instance of existing 'default
 
 def createmeshfill(name=None, source='default'):
     """
-Function: createmeshfill                # Construct a new meshfill graphics method
+    Create a new meshfill graphics method given the name and the existing
+    meshfill graphics method to copy the attributes from. If no existing
+    meshfill graphics method name is given, then the default meshfill graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
+
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('meshfill')
+    mesh=vcs.createmeshfill('example1')
+    vcs.show('meshfill')
+    mesh=vcs.createmeshfill('example2','quick')
+    vcs.show('meshfill')
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a meshfill or a string name of a meshfill
+
+:returns: A meshfill graphics method object
+:rtype: vcs.meshfill.Gfm
 
-Description of Function:
-Create a new meshfill graphics method given the the name and the existing
-meshfill graphics method to copy the attributes from. If no existing
-meshfill graphics method name is given, then the default meshfill graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
-
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
-
-Example of Use:
-vcs.show('meshfill')
-mesh=vcs.createmeshfill('example1',)
-vcs.show('meshfill')
-mesh=vcs.createmeshfill('example2','quick')
-vcs.show('meshfill')
 """
     name, source = check_name_source(name, source, 'meshfill')
     return meshfill.Gfm(name, source)
@@ -358,25 +430,32 @@ vcs.show('meshfill')
 
 def getmeshfill(Gfm_name_src='default'):
     """
-Function: getmeshfill                        # Construct a new meshfill graphics method
+    VCS contains a list of graphics methods. This function will create a
+    meshfill class object from an existing VCS meshfill graphics method. If
+    no meshfill name is given, then meshfill 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-meshfill class object from an existing VCS meshfill graphics method. If
-no meshfill name is given, then meshfill 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createmeshfill function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createmeshfill function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-a.show('meshfill')                   # Show all the existing meshfill graphics methods
-mesh=a.getmeshfill()                  # mesh instance of 'default' meshfill graphics
-                                    # method
-mesh2=a.getmeshfill('quick')          # mesh2 instance of existing 'quick' meshfill
-                                    #         graphics method
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing meshfill graphics methods
+    a.show('meshfill')
+    # mesh instance of 'default' meshfill graphics method
+    mesh=a.getmeshfill()
+    # mesh2 instance of existing 'quick' meshfill graphics method
+    mesh2=a.getmeshfill('quick')
+
+:param Gfm_name_src: String name of an existing meshfill VCS object
+:type Gfm_name_src: str
+
+:returns: A meshfill VCS object
+:rtype: vcs.meshfill.Gfm
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Gfm_name_src, str):
@@ -402,24 +481,33 @@ Output:::
 %s
 :::
 
-Function: createisofill  # Construct a new isofill graphics method
+    Create a new isofill graphics method given the name and the existing
+    isofill graphics method to copy the attributes from. If no existing
+    isofill graphics method name is given, then the default isofill graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
 
-Description of Function:
-Create a new isofill graphics method given the the name and the existing
-isofill graphics method to copy the attributes from. If no existing
-isofill graphics method name is given, then the default isofill graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
-
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
-
-Example of Use:
-vcs.show('isofill')
-iso=vcs.createisofill('example1',)
-vcs.show('isofill')
-iso=vcs.createisofill('example2','quick')
-vcs.show('isofill')
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('isofill')
+    iso=vcs.createisofill('example1')
+    vcs.show('isofill')
+    iso=vcs.createisofill('example2','quick')
+    vcs.show('isofill')
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: an isofill object, or string name of an isofill object
+
+:returns: An isofill graphics method
+:rtype: vcs.isofill.Gfi
 
 """
 
@@ -443,24 +531,30 @@ Output:::
 %s
 :::
 
-Function: getisofill          Construct a new isofill graphics method
+    VCS contains a list of graphics methods. This function will create a
+    isofill class object from an existing VCS isofill graphics method. If
+    no isofill name is given, then isofill 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-isofill class object from an existing VCS isofill graphics method. If
-no isofill name is given, then isofill 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createisofill function.)
+
+    :Example:
+
+::
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createisofill function.)
+    # Show all the existing isofill graphics methods
+    vcs.show('isofill')
+    # iso instance of 'default' isofill graphics method
+    iso=vcs.getisofill()
+    # iso2 instance of existing 'quick' isofill graphics method
+    iso2=vcs.getisofill('quick')
 
-Example of Use:
-vcs.show('isofill')                   # Show all the existing isofill graphics methods
-iso=vcs.getisofill()                  # iso instance of 'default' isofill graphics
-                                    #       method
-iso2=vcs.getisofill('quick')          # iso2 instance of existing 'quick' isofill
-                                    #       graphics method
+:param Gfi_name_src: String name of an existing isofill VCS object
+:type Gfi_name_src: str
 
+:returns: The specified isofill VCS object
+:rtype: vcs.isofill.Gfi
 """
 
     # Check to make sure the argument passed in is a STRING
@@ -488,26 +582,31 @@ Output:::
 %s
 :::
 
-Function: createisoline                # Construct a new isoline graphics method
+    Create a new isoline graphics method given the name and the existing
+    isoline graphics method to copy the attributes from. If no existing
+    isoline graphics method name is given, then the default isoline graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
 
-Description of Function:
-Create a new isoline graphics method given the the name and the existing
-isoline graphics method to copy the attributes from. If no existing
-isoline graphics method name is given, then the default isoline graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
+
+    :Example:
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    vcs.show('isoline')
+    iso=vcs.createisoline('example1')
+    vcs.show('isoline')
+    iso=vcs.createisoline('example2','quick')
+    vcs.show('isoline')
 
-Example of Use:
+:param name: The name of the created object
+:type name: str
 
-vcs.show('isoline')
-iso=vcs.createisoline('example1',)
-vcs.show('isoline')
-iso=vcs.createisoline('example2','quick')
-vcs.show('isoline')
+:param source: The object to inherit from
+:type source: an isoline object, or string name of an isoline object
 
+:returns: An isoline graphics method object
+:rtype: vcs.isoline.Gi
 """
 
     name, source = check_name_source(name, source, 'isoline')
@@ -529,30 +628,31 @@ Input:::
 Output:::
 %s
 :::
+    VCS contains a list of graphics methods. This function will create a
+    isoline class object from an existing VCS isoline graphics method. If
+    no isoline name is given, then isoline 'default' will be used.
 
-Function: getisoline                        # Construct a new isoline graphics method
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createisoline function.)
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-isoline class object from an existing VCS isoline graphics method. If
-no isoline name is given, then isoline 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createisoline function.)
-
-Example of Use:
-vcs.show('isoline')                   # Show all the existing isoline graphics methods
-iso=vcs.getisoline()                  # iso instance of 'default' isoline graphics
-                                    #       method
-iso2=vcs.getisoline('quick')          # iso2 instance of existing 'quick' isoline
-gm.linewidth=0
-                                    #       graphics method
-######################################################################################################################
-###########################################                            ###############################################
-########################################## End getisoline Description ################################################
-#########################################                            #################################################
-######################################################################################################################
+    :Example:
+
+::
+
+    # Show all the existing isoline graphics methods
+    vcs.show('isoline')
+    # iso instance of 'default' isoline graphics method
+    iso=vcs.getisoline()
+    # iso2 instance of existing 'quick' isoline graphics method
+    iso2=vcs.getisoline('quick')
+    gm.linewidth=0
+
+:param Gi_name_src: String name of an existing isoline VCS object
+:type Gi_name_src: str
+
+:returns: The requested isoline VCS object
+:rtype: vcs.isoline.Gi
 """
 
     # Check to make sure the argument passed in is a STRING
@@ -593,33 +693,35 @@ Input:::
 Output:::
 %s
 :::
+    Create a new Xyvsy graphics method given the name and the existing
+    Xyvsy graphics method to copy the attributes from. If no existing
+    Xyvsy graphics method name is given, then the default Xyvsy graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
 
-Function: createxyvsy                  # Construct a new Xyvsy graphics method
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-Description of Function:
-Create a new Xyvsy graphics method given the the name and the existing
-Xyvsy graphics method to copy the attributes from. If no existing
-Xyvsy graphics method name is given, then the default Xyvsy graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    :Example:
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+::
 
-Example of Use:
+    a=vcs.init()
+    vcs.show('xyvsy')
+    xyy=vcs.createxyvsy('example1',)
+    vcs.show('xyvsy')
+    xyy=vcs.createxyvsy('example2','quick')
+    vcs.show('xyvsy')
 
-a=vcs.init()
-vcs.show('xyvsy')
-xyy=vcs.createxyvsy('example1',)
-vcs.show('xyvsy')
-xyy=vcs.createxyvsy('example2','quick')
-vcs.show('xyvsy')
-
-#######################################################################################################################
-###########################################                             ###############################################
-########################################## End createxyvsy Description ################################################
-#########################################                             #################################################
-#######################################################################################################################
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a xyvsy or a string name of a xyvsy
+
+:returns: A XYvsY graphics method object
+:rtype: vcs.unified1D.G1d
 
 """
     try:
@@ -652,29 +754,31 @@ Output:::
 %s
 :::
 
-Function: getxyvsy        # Construct a new Xyvsy graphics method
+    VCS contains a list of graphics methods. This function will create a
+    Xyvsy class object from an existing VCS Xyvsy graphics method. If
+    no Xyvsy name is given, then Xyvsy 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-Xyvsy class object from an existing VCS Xyvsy graphics method. If
-no Xyvsy name is given, then Xyvsy 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createxyvsy function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createxyvsy function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('xyvsy')                     # Show all the existing Xyvsy graphics methods
-xyy=vcs.getxyvsy()                    # xyy instance of 'default' Xyvsy graphics
-                                    #       method
-xyy2=vcs.getxyvsy('quick')            # xyy2 instance of existing 'quick' Xyvsy
-                                    #       graphics method
-####################################################################################################################
-###########################################                          ###############################################
-########################################## End getxyvsy Description ################################################
-#########################################                          #################################################
-####################################################################################################################
+::
+
+    a=vcs.init()
+    # Show all the existing Xyvsy graphics methods
+    vcs.show('xyvsy')
+    # xyy instance of 'default' Xyvsy graphics method
+    xyy=vcs.getxyvsy('default_xyvsy_')
+    # xyy2 instance of existing 'quick' Xyvsy graphics method
+    xyy2=vcs.getxyvsy('quick')
+
+:param GXy_name_src: String name of an existing Xyvsy graphics method
+:type GXy_name_src: str
+
+:returns: An Xyvsy graphics method object
+:rtype: vcs.unified1D.G1d
 
 """
     gm = vcs.get1d(GXy_name_src)
@@ -699,33 +803,35 @@ Input:::
 Output:::
 %s
 :::
+    Create a new Yxvsx graphics method given the name and the existing
+    Yxvsx graphics method to copy the attributes from. If no existing
+    Yxvsx graphics method name is given, then the default Yxvsx graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
 
-Function: createyxvsx                  # Construct a new Yxvsx graphics method
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-Description of Function:
-Create a new Yxvsx graphics method given the the name and the existing
-Yxvsx graphics method to copy the attributes from. If no existing
-Yxvsx graphics method name is given, then the default Yxvsx graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    :Example:
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+::
 
-Example of Use:
+    a=vcs.init()
+    vcs.show('yxvsx')
+    yxx=vcs.createyxvsx('example1',)
+    vcs.show('yxvsx')
+    yxx=vcs.createyxvsx('example2','quick')
+    vcs.show('yxvsx')
 
-a=vcs.init()
-vcs.show('yxvsx')
-yxx=vcs.createyxvsx('example1',)
-vcs.show('yxvsx')
-yxx=vcs.createyxvsx('example2','quick')
-vcs.show('yxvsx')
-
-#######################################################################################################################
-###########################################                             ###############################################
-########################################## End createyxvsx Description ################################################
-#########################################                             #################################################
-#######################################################################################################################
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a yxvsx or a string name of a yxvsx
+
+:returns: A YXvsX graphics method object
+:rtype: vcs.unified1D.G1d
 
 """
     try:
@@ -757,30 +863,31 @@ Output:::
 %s
 :::
 
-Function: getyxvsx                     # Construct a new Yxvsx graphics method
+    VCS contains a list of graphics methods. This function will create a
+    Yxvsx class object from an existing VCS Yxvsx graphics method. If
+    no Yxvsx name is given, then Yxvsx 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-Yxvsx class object from an existing VCS Yxvsx graphics method. If
-no Yxvsx name is given, then Yxvsx 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createyxvsx function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createyxvsx function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('yxvsx')                     # Show all the existing Yxvsx graphics methods
-yxx=vcs.getyxvsx()                    # yxx instance of 'default' Yxvsx graphics
-                                    #       method
-yxx2=vcs.getyxvsx('quick')            # yxx2 instance of existing 'quick' Yxvsx
-                                    #       graphics method
-####################################################################################################################
-###########################################                          ###############################################
-########################################## End getyxvsx Description ################################################
-#########################################                          #################################################
-####################################################################################################################
+::
+
+    a=vcs.init()
+    # Show all the existing Yxvsx graphics methods
+    vcs.show('yxvsx')
+    # yxx instance of 'default' Yxvsx graphics method
+    yxx=vcs.getyxvsx()
+    # yxx2 instance of existing 'quick' Yxvsx graphics method
+    yxx2=vcs.getyxvsx('quick')
 
+:param GYx_name_src: String name of an existing Yxvsx graphics method
+:type GYx_name_src: str
+
+:return: A Yxvsx graphics method object
+:rtype: vcs.unified1D.G1d
 """
     gm = vcs.get1d(GYx_name_src)
     if gm.g_type != "yxvsx":
@@ -804,31 +911,35 @@ Output:::
 %s
 :::
 
-Function: createxvsy                      # Construct a new XvsY graphics method
+    Create a new XvsY graphics method given the name and the existing
+    XvsY graphics method to copy the attributes from. If no existing
+    XvsY graphics method name is given, then the default XvsY graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
 
-Description of Function:
-Create a new XvsY graphics method given the the name and the existing
-XvsY graphics method to copy the attributes from. If no existing
-XvsY graphics method name is given, then the default XvsY graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('xvsy')
-xy=vcs.createxvsy('example1',)
-vcs.show('xvsy')
-xy=vcs.createxvsy('example2','quick')
-vcs.show('xvsy')
-
-######################################################################################################################
-###########################################                            ###############################################
-########################################## End createxvsy Description ################################################
-#########################################                            #################################################
-######################################################################################################################
+::
+
+    a=vcs.init()
+    vcs.show('xvsy')
+    xy=vcs.createxvsy('example1',)
+    vcs.show('xvsy')
+    xy=vcs.createxvsy('example2','quick')
+    vcs.show('xvsy')
+
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a xvsy or a string name of a xvsy
+
+:returns: A XvsY graphics method object
+:rtype: vcs.unified1D.G1d
 
 """
     try:
@@ -860,31 +971,31 @@ Output:::
 %s
 :::
 
-Function: getxvsy                   # Construct a new XvsY graphics method
+    VCS contains a list of graphics methods. This function will create a
+    XvsY class object from an existing VCS XvsY graphics method. If
+    no XvsY name is given, then XvsY 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-XvsY class object from an existing VCS XvsY graphics method. If
-no XvsY name is given, then XvsY 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createxvsy function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createxvsy function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('xvsy')                      # Show all the existing XvsY graphics methods
-xy=vcs.getxvsy()                      # xy instance of 'default' XvsY graphics
-                                    #       method
-xy2=vcs.getxvsy('quick')              # xy2 instance of existing 'quick' XvsY
-                                    #       graphics method
-
-###################################################################################################################
-###########################################                         ###############################################
-########################################## End getxvsy Description ################################################
-#########################################                         #################################################
-###################################################################################################################
+::
+
+    a=vcs.init()
+    # Show all the existing XvsY graphics methods
+    vcs.show('xvsy')
+    # xy instance of 'default' XvsY graphics method
+    xy=vcs.getxvsy()
+    # xy2 instance of existing 'quick' XvsY graphics method
+    xy2=vcs.getxvsy('quick')
+
+:param GXY_name_src: String name of a 1d graphics method
+:type GXY_name_src: str
 
+:returns: A XvsY graphics method object
+:rtype: vcs.unified1D.G1d
 """
     gm = vcs.get1d(GXY_name_src)
     # Deliberately yxvsx here; xvsy is just an alias
@@ -897,51 +1008,69 @@ getxvsy.__doc__ = getxvsy.__doc__ % (
 
 def createvector(name=None, source='default'):
     """
-Function: createvector                # Construct a new vector graphics method
+        Create a new vector graphics method given the name and the existing
+        vector graphics method to copy the attributes from. If no existing
+        vector graphics method name is given, then the default vector graphics
+        method will be used as the graphics method to which the attributes will
+        be copied from.
 
-Description of Function:
-Create a new vector graphics method given the the name and the existing
-vector graphics method to copy the attributes from. If no existing
-vector graphics method name is given, then the default vector graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+        If the name provided already exists, then an error will be returned. Graphics
+        method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+        :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('vector')
-vec=vcs.createvector('example1',)
-vcs.show('vector')
-vec=vcs.createvector('example2','quick')
-vcs.show('vector')
-"""
+::
+
+        a=vcs.init()
+        vcs.show('vector')
+        vec=vcs.createvector('example1',)
+        vcs.show('vector')
+        vec=vcs.createvector('example2','quick')
+        vcs.show('vector')
+
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a vector or a string name of a vector
+
+:returns: A vector graphics method object
+:rtype: vcs.vector.Gv
+
+    """
     name, source = check_name_source(name, source, 'vector')
     return vector.Gv(name, source)
 
 
 def getvector(Gv_name_src='default'):
     """
-Function: getvector                   # Construct a new vector graphics method
+    VCS contains a list of graphics methods. This function will create a
+    vector class object from an existing VCS vector graphics method. If
+    no vector name is given, then vector 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-vector class object from an existing VCS vector graphics method. If
-no vector name is given, then vector 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createvector function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createvector function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('vector')                   # Show all the existing vector graphics methods
-vec=vcs.getvector()                  # vec instance of 'default' vector graphics
-                                    #       method
-vec2=vcs.getvector('quick')          # vec2 instance of existing 'quick' vector
-                                    #       graphics method
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing vector graphics methods
+    vcs.show('vector')
+    # vec instance of 'default' vector graphics method
+    vec=vcs.getvector()
+    # vec2 instance of existing 'quick' vector graphics method
+    vec2=vcs.getvector('quick')
+
+:param Gv_name_src: String name of an existing vector VCS object
+:type Gv_name_src: str
+
+:returns: A vector graphics method object
+:rtype: vcs.vector.Gv
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Gv_name_src, str):
@@ -965,25 +1094,34 @@ Output:::
 %s
 :::
 
-Function: createscatter                # Construct a new scatter graphics method
+    Create a new scatter graphics method given the name and the existing
+    scatter graphics method to copy the attributes from. If no existing
+    scatter graphics method name is given, then the default scatter graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
 
-Description of Function:
-Create a new scatter graphics method given the the name and the existing
-scatter graphics method to copy the attributes from. If no existing
-scatter graphics method name is given, then the default scatter graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('scatter')
-sct=vcs.createscatter('example1',)
-vcs.show('scatter')
-sct=vcs.createscatter('example2','quick')
-vcs.show('scatter')
+::
+
+    a=vcs.init()
+    vcs.show('scatter')
+    sct=vcs.createscatter('example1',)
+    vcs.show('scatter')
+    sct=vcs.createscatter('example2','quick')
+    vcs.show('scatter')
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a scatter or a string name of a scatter
+
+:return: A scatter graphics method
+:rtype: vcs.unified1D.G1d
 
 """
     try:
@@ -1016,31 +1154,31 @@ Output:::
 %s
 :::
 
-Function: getscatter                   # Construct a new scatter graphics method
+    VCS contains a list of graphics methods. This function will create a
+    scatter class object from an existing VCS scatter graphics method. If
+    no scatter name is given, then scatter 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-scatter class object from an existing VCS scatter graphics method. If
-no scatter name is given, then scatter 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createscatter function.)
 
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createscatter function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('scatter')                   # Show all the existing scatter graphics methods
-sct=vcs.getscatter()                  # sct instance of 'default' scatter graphics
-                                    #       method
-sct2=vcs.getscatter('quick')          # sct2 instance of existing 'quick' scatter
-                                    #       graphics method
-
-######################################################################################################################
-###########################################                            ###############################################
-########################################## End getscatter Description ################################################
-#########################################                            #################################################
-######################################################################################################################
+::
+
+    a=vcs.init()
+    # Show all the existing scatter graphics methods
+    vcs.show('scatter')
+    # sct instance of 'default' scatter graphics method
+    sct=vcs.getscatter('default_scatter_')
+    # sct2 instance of existing 'quick' scatter graphics method
+    sct2=vcs.getscatter('quick')
+
+:param GSp_name_src: String name of an existing scatter VCS object.
+:type GSp_name_src: str
 
+:returns: A scatter graphics method object
+:rtype: vcs.unified1D.G1d
 """
     gm = vcs.get1d(GSp_name_src)
     if gm.g_type != "scatter":
@@ -1055,31 +1193,70 @@ def createline(name=None, source='default', ltype=None,
                viewport=None, worldcoordinate=None,
                x=None, y=None, projection=None):
     """
-Function: createline                       # Construct a new line secondary method
+    Create a new line secondary method given the name and the existing
+    line secondary method to copy the attributes from. If no existing line
+    secondary method name is given, then the default line secondary method
+    will be used as the secondary method to which the attributes will be
+    copied from.
 
-Description of Function:
-Create a new line secondary method given the the name and the existing
-line secondary method to copy the attributes from. If no existing line
-secondary method name is given, then the default line secondary method
-will be used as the secondary method to which the attributes will be
-copied from.
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
 
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('line')
-ln=vcs.createline('example1',)
-vcs.show('line')
-ln=vcs.createline('example2','black')
-vcs.show('line')
-ln2=vcs.createline(name='new', name_src='red',ltype='dash', width=2,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of line object 'red'
-vcs.line(ln2)                      # Plot using specified line object
+::
+
+    a=vcs.init()
+    vcs.show('line')
+    ln=vcs.createline('example1')
+    vcs.show('line')
+    ln=vcs.createline('example2','black')
+    vcs.show('line')
+    # Create instance of line object 'red'
+    ln2=vcs.createline(name='new', source='red', ltype='dash', width=2,
+                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified line object
+    vcs.line(ln2)
+
+:param name: Name of created object
+:type name: str
+
+:param source: a line, or string name of a line
+:type source: str
+
+:param ltype: One of "dash", "dash-dot", "solid", "dot", or "long-dash".
+:type ltype: str
+
+:param width: Thickness of the line to be created
+:type width: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the line will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param projection: Specify a geographic projection used to convert x/y from spherical coordinates into 2D coordinates.
+:type projection: str or projection object
+
+:returns: A VCS line secondary method object
+:rtype: vcs.line.Tl
 """
 
     name, source = check_name_source(name, source, 'line')
@@ -1111,38 +1288,72 @@ def getline(name='default', ltype=None, width=None, color=None,
             worldcoordinate=None,
             x=None, y=None):
     """
-Function: getline        # Construct a new line secondary method
+    VCS contains a list of secondary methods. This function will create a
+    line class object from an existing VCS line secondary method. If
+    no line name is given, then line 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create a
-line class object from an existing VCS line secondary method. If
-no line name is given, then line 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createline function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createline function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('line')                   # Show all the existing line secondary methods
-ln=vcs.getline()                   # ln instance of 'default' line secondary
-                                 #       method
-ln2=vcs.getline('quick')           # ln2 instance of existing 'quick' line
-                                 #       secondary method
-ln3=vcs.getline(name='red', ltype='dash', width=2,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of line object 'red'
-vcs.line(ln3)                      # Plot using specified line object
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing line secondary methods
+    vcs.show('line')
+    # ln instance of 'default' line secondary method
+    ln=vcs.getline()
+    # ln2 instance of existing 'quick' line secondary method
+    ln2=vcs.getline('quick')
+    # Create instance of line object 'red'
+    ln3=vcs.getline(name='red', ltype='dash', width=2,
+                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified line object
+    vcs.line(ln3)
+
+:param name: Name of created object
+:type name: str
+
+:param ltype: One of "dash", "dash-dot", "solid", "dot", or "long-dash".
+:type ltype: str
+
+:param width: Thickness of the line to be created
+:type width: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the line will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A VCS line object
+:rtype: vcs.line.Tl
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(name, str):
         raise vcsError('The argument must be a string.')
 
     if name not in vcs.elements["line"]:
-        raise ValueError("The line '%s' does not exists" % name)
+        raise ValueError("The line '%s' does not exist" % name)
     ln = vcs.elements["line"][name]
     if ltype is not None and ln.name != 'default':
         ln.type = ltype
@@ -1170,32 +1381,70 @@ def createmarker(name=None, source='default', mtype=None,
                  viewport=None, worldcoordinate=None,
                  x=None, y=None, projection=None):
     """
-Function: createmarker                   # Construct a new marker secondary method
+    Create a new marker secondary method given the name and the existing
+    marker secondary method to copy the attributes from. If no existing marker
+    secondary method name is given, then the default marker secondary method
+    will be used as the secondary method to which the attributes will be
+    copied from.
 
-Description of Function:
-Create a new marker secondary method given the the name and the existing
-marker secondary method to copy the attributes from. If no existing marker
-secondary method name is given, then the default marker secondary method
-will be used as the secondary method to which the attributes will be
-copied from.
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
 
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('marker')
-mrk=vcs.createmarker('example1',)
-vcs.show('marker')
-mrk=vcs.createmarker('example2','black')
-vcs.show('boxfill')
-mrk2=vcs.createmarker(name='new', name_src='red',mtype='dash', size=2,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of marker object 'red'
-vcs.marker(mrk2)                      # Plot using specified marker object
-"""
+::
+
+    a=vcs.init()
+    vcs.show('marker')
+    mrk=vcs.createmarker('example1',)
+    vcs.show('marker')
+    mrk=vcs.createmarker('example2','black')
+    vcs.show('boxfill')
+    # Create instance of marker object 'red'
+    mrk2=vcs.createmarker(name='new', source='red', mtype='dot', size=2,
+                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified marker object
+    vcs.marker(mrk2)
+
+
+:param name: Name of created object
+:type name: str
+
+:param source: A marker, or string name of a marker
+:type source: str
+
+:param mtype: Specifies the type of marker, i.e. "dot", "circle"
+:type mtype: str
+
+:param size: Size of the marker
+:type size: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the marker will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A secondary marker method
+:rtype: vcs.marker.Tm
+
+    """
 
     name, source = check_name_source(name, source, 'marker')
 
@@ -1226,31 +1475,69 @@ def getmarker(name='default', mtype=None, size=None, color=None,
               worldcoordinate=None,
               x=None, y=None):
     """
-Function: getmarker                      # Construct a new marker secondary method
+    VCS contains a list of secondary methods. This function will create a
+    marker class object from an existing VCS marker secondary method. If
+    no marker name is given, then marker 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create a
-marker class object from an existing VCS marker secondary method. If
-no marker name is given, then marker 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createmarker function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createmarker function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('marker')                    # Show all the existing marker secondary methods
-mrk=vcs.getmarker()                   # mrk instance of 'default' marker secondary
-                                    #       method
-mrk2=vcs.getmarker('quick')           # mrk2 instance of existing 'quick' marker
-                                    #       secondary method
-mrk3=vcs.getmarker(name='red', mtype='dash', size=2,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of marker object 'red'
-vcs.marker(mrk3)                      # Plot using specified marker object
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing marker secondary methods
+    vcs.show('marker')
+    # mrk instance of 'default' marker secondary method
+    mrk=vcs.getmarker()
+    # mrk2 instance of existing 'quick' marker secondary method
+    mrk2=vcs.getmarker('quick')
+    # Create instance of marker object 'red'
+    mrk3=vcs.getmarker(name='red', mtype='dot', size=2,
+                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified marker object
+    vcs.marker(mrk3)
+
+:param name: Name of created object
+:type name: str
+
+:param source: A marker, or string name of a marker
+:type source: str
+
+:param mtype: Specifies the type of marker, i.e. "dot", "circle"
+:type mtype: str
+
+:param size: Size of the marker
+:type size: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the marker will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A marker graphics method object
+:rtype: vcs.marker.Tm
+
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(name, str):
@@ -1283,30 +1570,67 @@ def createfillarea(name=None, source='default', style=None,
                    viewport=None, worldcoordinate=None,
                    x=None, y=None):
     """
-Function: createfillarea     # Construct a new fillarea secondary method
+    Create a new fillarea secondary method given the name and the existing
+    fillarea secondary method to copy the attributes from. If no existing fillarea
+    secondary method name is given, then the default fillarea secondary method
+    will be used as the secondary method to which the attributes will be
+    copied from.
 
-Description of Function:
-Create a new fillarea secondary method given the the name and the existing
-fillarea secondary method to copy the attributes from. If no existing fillarea
-secondary method name is given, then the default fillarea secondary method
-will be used as the secondary method to which the attributes will be
-copied from.
-
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
-
-Example of Use:
-vcs.show('fillarea')
-fa=vcs.createfillarea('example1',)
-vcs.show('fillarea')
-fa=vcs.createfillarea('example2','black')
-vcs.show('fillarea')
-fa2=vcs.createmarker(name='new', name_src='red',style=1, index=1,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of fill area object 'red'
-vcs.fillarea(fa2)                      # Plot using specified fill area object
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('fillarea')
+    fa=vcs.createfillarea('example1',)
+    vcs.show('fillarea')
+    fa=vcs.createfillarea('example2','black')
+    vcs.show('fillarea')
+    # Create instance of fill area object 'red'
+    fa2=vcs.createfillarea(name='new', source='red', style=1, index=1,
+                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified fill area object
+    vcs.fillarea(fa2)
+
+:param name: Name of created object
+:type name: str
+
+:param source: a fillarea, or string name of a fillarea
+:type source: str
+
+:param style: One of "hatch", "solid", or "pattern".
+:type style: str
+
+:param index: Specifies which `pattern <http://uvcdat.llnl.gov/gallery/fullsize/pattern_chart.png>`_ to fill with.
+              Accepts ints from 1-20.
+:type index: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the fillarea will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A fillarea object
+:rtype: vcs.fillarea.Tf
 """
 
     name, source = check_name_source(name, source, 'fillarea')
@@ -1337,36 +1661,72 @@ def getfillarea(name='default', style=None,
                 worldcoordinate=None,
                 x=None, y=None):
     """
-Function: getfillarea              # Construct a new fillarea secondary method
+    VCS contains a list of secondary methods. This function will create a
+    fillarea class object from an existing VCS fillarea secondary method. If
+    no fillarea name is given, then fillarea 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create a
-fillarea class object from an existing VCS fillarea secondary method. If
-no fillarea name is given, then fillarea 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createfillarea function.)
-
-Example of Use:
-vcs.show('fillarea')                 # Show all the existing fillarea secondary methods
-fa=vcs.getfillarea()                 # fa instance of 'default' fillarea secondary
-                                   #       method
-fa2=vcs.getfillarea('quick')         # fa2 instance of existing 'quick' fillarea
-                                    #       secondary method
-fa3=vcs.createmarker(name='new', name='red',style=1, index=1,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of fill area object 'red'
-vcs.fillarea(fa3)                      # Plot using specified fill area object
-"""
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createfillarea function.)
+
+    :Example:
+
+::
+
+    # Show all the existing fillarea secondary methods
+    vcs.show('fillarea')
+    # fa instance of 'default' fillarea secondary method
+    fa=vcs.getfillarea()
+    # fa2 instance of existing 'quick' fillarea secondary method
+    fa2=vcs.getfillarea('quick')
+    # Create instance of fill area object 'red'
+    fa3=vcs.getfillarea(name='red', style=1, index=1,
+                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified fill area object
+    vcs.fillarea(fa3)
+
+:param name: String name of an existing fillarea VCS object
+:type name: str
+
+:param style: One of "hatch", "solid", or "pattern".
+:type style: str
+
+:param index: Specifies which `pattern <http://uvcdat.llnl.gov/gallery/fullsize/pattern_chart.png>`_ to fill with.
+              Accepts ints from 1-20.
+:type index: int
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the fillarea will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A fillarea secondary object
+:rtype: vcs.fillarea.Tf
+
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(name, str):
         raise vcsError('The argument must be a string.')
     if name not in vcs.elements["fillarea"].keys():
-        raise vcsError("Fillarea '%s' doe not exists" % (name))
+        raise vcsError("Fillarea '%s' does not exist" % (name))
 
     fa = vcs.elements["fillarea"][name]
     if (style is not None) and (fa.name != "default"):
@@ -1393,31 +1753,69 @@ def createtexttable(name=None, source='default', font=None,
                     viewport=None, worldcoordinate=None,
                     x=None, y=None):
     """
-Function: createtexttable            # Construct a new texttable secondary method
+    Create a new texttable secondary method given the name and the existing
+    texttable secondary method to copy the attributes from. If no existing texttable
+    secondary method name is given, then the default texttable secondary method
+    will be used as the secondary method to which the attributes will be
+    copied from.
 
-Description of Function:
-Create a new texttable secondary method given the the name and the existing
-texttable secondary method to copy the attributes from. If no existing texttable
-secondary method name is given, then the default texttable secondary method
-will be used as the secondary method to which the attributes will be
-copied from.
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
 
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
+    :Example:
+
+::
+
+    a=vcs.init()
+    vcs.show('texttable')
+    tt=vcs.createtexttable('example1',)
+    vcs.show('texttable')
+    tt=vcs.createtexttable('example2','black')
+    vcs.show('texttable')
+    # Create instance of texttable object 'new'
+    tt=vcs.createtexttable(name='new', source='red', font=1, spacing=1, expansion=1,
+                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified texttable object
+    vcs.texttable(tt)
+
+
+:param name: Name of created object
+:type name: str
+
+:param source: a texttable, or string name of a texttable
+:type source: str
+
+:param font: ???
+:type font: ???
+
+:param expansion: ???
+:type expansion: ???
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the texttable will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A texttable graphics method object
+:rtype: vcs.texttable.Tt
 
-Example of Use:
-a=vcs.init()
-vcs.show('texttable')
-tt=vcs.createtexttable('example1',)
-vcs.show('texttable')
-tt=vcs.createtexttable('example2','black')
-vcs.show('texttable')
-tt=vcs.createtexttable(name='new',name_src='red',font=1,spacing=1,expansion=1,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of texttable object 'new'
-vcs.texttable(tt)                      # Plot using specified texttable object
 """
 
     name, source = check_name_source(name, source, 'texttable')
@@ -1453,31 +1851,66 @@ def gettexttable(name='default', font=None,
                  worldcoordinate=None,
                  x=None, y=None):
     """
-Function: gettexttable           # Construct a new texttable secondary method
+    VCS contains a list of secondary methods. This function will create a
+    texttable class object from an existing VCS texttable secondary method. If
+    no texttable name is given, then texttable 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create a
-texttable class object from an existing VCS texttable secondary method. If
-no texttable name is given, then texttable 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createtexttable function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createtexttable function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('texttable')              # Show all the existing texttable secondary methods
-tt=vcs.gettexttable()              # tt instance of 'default' texttable secondary
-                                 #       method
-tt2=vcs.gettexttable('quick')      # tt2 instance of existing 'quick' texttable
-                                 #       secondary method
-tt3=vcs.gettexttable(name='red', font=1, spacing=1,expansion=1,
-              color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
-              worldcoordinate=[0,100, 0,50]
-              x=[0,20,40,60,80,100],
-              y=[0,10,20,30,40,50] )      # Create instance of texttable object 'red'
-vcs.texttable(tt3)                      # Plot using specified texttable object
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing texttable secondary methods
+    vcs.show('texttable')
+    # tt instance of 'default' texttable secondary method
+    tt=vcs.gettexttable()
+    # tt2 instance of existing 'quick' texttable secondary method
+    tt2=vcs.gettexttable('quick')
+    # Create instance of texttable object 'red'
+    tt3=vcs.gettexttable(name='red', font=1, spacing=1, expansion=1,
+                  color=242, priority=1, viewport=[0, 2.0, 0, 2.0],
+                  worldcoordinate=[0,100, 0,50],
+                  x=[0,20,40,60,80,100],
+                  y=[0,10,20,30,40,50] )
+    # Plot using specified texttable object
+    vcs.texttable(tt3)
+
+:param name: String name of an existing VCS texttable object
+:type name: str
+
+:param font: ???
+:type font: ???
+
+:param expansion: ???
+:type expansion: ???
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the texttable will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:returns: A texttable graphics method object
+:rtype: vcs.texttable.Tt
+
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(name, str):
@@ -1490,24 +1923,34 @@ vcs.texttable(tt3)                      # Plot using specified texttable object
 
 def createtextorientation(name=None, source='default'):
     """
-Function: createtextorientation   # Construct a new textorientation secondary method
+    Create a new textorientation secondary method given the name and
+    the existing textorientation secondary method to copy the attributes
+    from. If no existing textorientation secondary method name is given,
+    then the default textorientation secondary method will be used as the
+    secondary method to which the attributes will be copied from.
 
-Description of Function:
-Create a new textorientation secondary method given the the name and
-the existing textorientation secondary method to copy the attributes
-from. If no existing textorientation secondary method name is given,
-then the default textorientation secondary method will be used as the
-secondary method to which the attributes will be copied from.
-
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
-
-Example of Use:
-vcs.show('textorientation')
-to=vcs.createtextorientation('example1',)
-vcs.show('textorientation')
-to=vcs.createtextorientation('example2','black')
-vcs.show('textorientation')
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('textorientation')
+    to=vcs.createtextorientation('example1',)
+    vcs.show('textorientation')
+    to=vcs.createtextorientation('example2','black')
+    vcs.show('textorientation')
+
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a textorientation or a string name of a textorientation
+
+:returns: A textorientation secondary method
+:rtype: vcs.textorientation.To
 """
 
     name, source = check_name_source(name, source, 'textorientation')
@@ -1517,26 +1960,33 @@ vcs.show('textorientation')
 
 def gettextorientation(To_name_src='default'):
     """
-Function: gettextorientation       # Construct a new textorientation secondary method
+    VCS contains a list of secondary methods. This function will create
+    a textorientation class object from an existing VCS textorientation
+    secondary method. If no textorientation name is given, then
+    textorientation 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create
-a textorientation class object from an existing VCS textorientation
-secondary method. If no textorientation name is given, then
-textorientation 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createtextorientation function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createtextorientation function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('textorientation')    # Show all the existing textorientation secondary methods
-to=vcs.gettextorientation()    # to instance of 'default' textorientation secondary
-                             #       method
-to2=vcs.gettextorientation('quick')  # to2 instance of existing 'quick' textorientation
-                                   #       secondary method
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing textorientation secondary methods
+    vcs.show('textorientation')
+    # to instance of 'default' textorientation secondary method
+    to=vcs.gettextorientation()
+    # to2 instance of existing 'quick' textorientation secondary method
+    to2=vcs.gettextorientation('quick')
+
+:param To_name_src: String name of an existing textorientation VCS object
+:type To_name_src: str
+
+:returns: A textorientation VCS object
+:rtype: vcs.textorientation.To
+    """
 
     # Check to make sure the argument passed in is a STRING
     if not isinstance(To_name_src, str):
@@ -1554,25 +2004,91 @@ def createtextcombined(Tt_name=None, Tt_source='default', To_name=None, To_sourc
                        priority=None, viewport=None, worldcoordinate=None, x=None, y=None,
                        height=None, angle=None, path=None, halign=None, valign=None, projection=None):
     """
-Function: createtext or createtextcombined  # Construct a new text combined secondary method
+    Create a new textcombined secondary method given the names and
+    the existing texttable and textorientation secondary methods to copy
+    the attributes from. If no existing texttable and textorientation
+    secondary method names are given, then the default texttable and
+    textorientation secondary methods will be used as the secondary method
+    to which the attributes will be copied from.
+
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
+
+    :Example:
+
+::
+
+    vcs.show('texttable')
+    vcs.show('textorientation')
+    tc=vcs.createtextcombined('example1','std','example1','7left')
+    vcs.show('texttable')
+    vcs.show('textorientation')
+
+:param Tt_name: Name of created object
+:type Tt_name: str
+
+:param Tt_source: ???
+:type Tt_source: ???
+
+:param To_name:
+:type To_name: str
+
+:param To_source: ???
+:type To_source: ???
+
+:param font: ???
+:type font: ???
+
+:param spacing: ???
+:type spacing: ???
+
+:param expansion: ???
+:type expansion: ???
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the object will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param height: ???
+:type height: ???
+
+:param angle: ???
+:type angle: ???
+
+:param path: ???
+:type path: ???
+
+:param halign: ???
+:type halign: ???
+
+:param valign: ???
+:type valign: ???
+
+:param projection: Specify a geographic projection used to convert x/y from spherical coordinates into 2D coordinates.
+:type projection: str or projection object
+
+
+:returns: A VCS text object
+:rtype: vcs.textcombined.Tc
 
-Description of Function:
-Create a new textcombined secondary method given the the names and
-the existing texttable and textorientation secondary methods to copy
-the attributes from. If no existing texttable and textorientation
-secondary method names are given, then the default texttable and
-textorientation secondary methods will be used as the secondary method
-to which the attributes will be copied from.
-
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
-
-Example of Use:
-vcs.show('texttable')
-vcs.show('textorientation')
-tc=vcs.createtextcombined('example1','std','example1','7left')
-vcs.show('texttable')
-vcs.show('textorientation')
 """
     # Check if to is defined
     if To_name is None:
@@ -1623,28 +2139,90 @@ def gettextcombined(Tt_name_src='default', To_name_src=None, string=None, font=N
                     priority=None, viewport=None, worldcoordinate=None, x=None, y=None,
                     height=None, angle=None, path=None, halign=None, valign=None):
     """
-Function: gettext or gettextcombined   # Construct a new textcombined secondary method
+    VCS contains a list of secondary methods. This function will create
+    a textcombined class object from an existing VCS texttable secondary
+    method and an existing VCS textorientation secondary method. If no
+    texttable or textorientation names are given, then the 'default' names
+    will be used in both cases.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create
-a textcombined class object from an existing VCS texttable secondary
-method and an existing VCS textorientation secondary method. If no
-texttable or textorientation names are given, then the 'default' names
-will be used in both cases.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createtextcombined function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createtextcombined function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-vcs.show('texttable')                  # Show all the existing texttable secondary methods
-vcs.show('textorientation')            # Show all the existing textorientation secondary methods
-tc=vcs.gettextcombined()               # Use 'default' for texttable and textorientation
-tc2=vcs.gettextcombined('std','7left') # Use 'std' texttable and '7left' textorientation
-if istextcombined(tc):               # Check to see if tc is a textcombined
-   tc.list()                         # Print out all its attriubtes
-"""
+::
+
+    a=vcs.init()
+    # Show all the existing texttable secondary methods
+    vcs.show('texttable')
+    # Show all the existing textorientation secondary methods
+    vcs.show('textorientation')
+    # Use 'default' for texttable and textorientation
+    tc=vcs.gettextcombined()
+    # Use 'std' texttable and '7left' textorientation
+    tc2=vcs.gettextcombined('std','7left')
+    # Check to see if tc is a textcombined
+    if istextcombined(tc):
+       # Print out all its attributes
+       tc.list()
+
+
+:param Tt_name_src: Name of created object
+:type Tt_name_src: str
+
+:param To_name_src: ???
+:type To_name_src: ???
+
+:param string: ???
+:type string: ???
+
+:param font: ???
+:type font: ???
+
+:param spacing: ???
+:type spacing: ???
+
+:param expansion: ???
+:type expansion: ???
+
+:param color: A color name from the `X11 Color Names list <https://en.wikipedia.org/wiki/X11_color_names>`_,
+              or an integer value from 0-255, or an RGB/RGBA tuple/list (e.g. (0,100,0), (100,100,0,50))
+:type color: str or int
+
+:param priority: The layer on which the object will be drawn.
+:type priority: int
+
+:param viewport: 4 floats between 0 and 1. These specify the area that the X/Y values are mapped to inside of the canvas
+:type viewport: list of floats
+
+:param worldcoordinate: List of 4 floats (xmin, xmax, ymin, ymax)
+:type worldcoordinate: list of floats
+
+:param x: List of lists of x coordinates. Values must be between worldcoordinate[0] and worldcoordinate[1].
+:type x: list of floats
+
+:param y: List of lists of y coordinates. Values must be between worldcoordinate[2] and worldcoordinate[3].
+:type y: list of floats
+
+:param height: ???
+:type height: ???
+
+:param angle: ???
+:type angle: ???
+
+:param path: ???
+:type path: ???
+
+:param halign: ???
+:type halign: ???
+
+:param valign: ???
+:type valign: ???
+
+:returns: A textcombined object
+:rtype: vcs.textcombined.Tc
+    """
 
     # Check to make sure the arguments passed in are a STRINGS
     if not isinstance(Tt_name_src, str):
@@ -1703,21 +2281,28 @@ gettext = gettextcombined
 
 def get3d_scalar(Gfdv3d_name_src='default'):
     """
-Function: get3d_scalar                        # Construct a new 3Dscalar graphics method
+    VCS contains a list of graphics methods. This function will create a
+    dv3d class object from an existing VCS dv3d graphics method. If
+    no dv3d name is given, then dv3d 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-dv3d class object from an existing VCS dv3d graphics method. If
-no dv3d name is given, then dv3d 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the create3Dscalar function.)
-
-Example of Use:
-a.show('3d_scalar')                      # Show all the existing 3Dscalar graphics methods
-plot=vcs.get3d_scalar()                  # plot instance of 'default' dv3d graphics
-                                        # method
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the create3Dscalar function.)
+
+    :Example:
+
+::
+
+    # Show all the existing 3Dscalar graphics methods
+    a.show('3d_scalar')
+    # plot instance of 'default' dv3d graphics method
+    plot=vcs.get3d_scalar()
+
+:param Gfdv3d_name_src: String name of an existing 3d_scalar VCS object.
+:type Gfdv3d_name_src: str
+
+:returns: A pre-existing 3d_scalar VCS object
+:rtype: vcs.dv3d.Gf3Dscalar
 """
 
     # Check to make sure the argument passed in is a STRING
@@ -1732,47 +2317,60 @@ plot=vcs.get3d_scalar()                  # plot instance of 'default' dv3d graph
 
 def create3d_scalar(name=None, source='default'):
     """
-Function: create3d_scalar                # Construct a new dv3d graphics method
+    Create a new dv3d graphics method given the name and the existing
+    dv3d graphics method to copy the attributes from. If no existing
+    dv3d graphics method name is given, then the default dv3d graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
 
-Description of Function:
-Create a new dv3d graphics method given the the name and the existing
-dv3d graphics method to copy the attributes from. If no existing
-dv3d graphics method name is given, then the default dv3d graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-a.show('3d_scalar')
-plot=a.create3d_scalar()
+::
+
+    a=vcs.init()
+    a.show('3d_scalar')
+    plot=a.create3d_scalar()
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a 3d_scalar or a string name of a 3d_scalar
+
+:returns: A 3d_scalar graphics method object
+:rtype: vcs.dv3d.Gf3Dscalar
 """
-# print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-# create3d_scalar
-# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
     name, source = check_name_source(name, source, '3d_scalar')
     return dv3d.Gf3Dscalar(name, source)
 
 
 def get3d_dual_scalar(Gfdv3d_name_src='default'):
     """
-Function: get3d_dual_scalar                        # Construct a new 3DDualScalar graphics method
+    VCS contains a list of graphics methods. This function will create a
+    dv3d class object from an existing VCS dv3d graphics method. If
+    no dv3d name is given, then dv3d 'default' will be used.
+
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the create3Dscalar function.)
+
+    :Example:
+
+::
+
+    # Show all the existing 3d_dual_scalar graphics methods
+    a.show('3d_dual_scalar')
+    # plot instance of 'default' dv3d graphics method
+    plot=vcs.get3d_dual_scalar()
+
+:param Gfdv3d_name_src: String name of an existing 3d_dual_scalar VCS object
+:type Gfdv3d_name_src: str
+
+:returns: A pre-existing 3d_dual_scalar VCS object
+:rtype: vcs.dv3d.Gf3DDualScalar
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-dv3d class object from an existing VCS dv3d graphics method. If
-no dv3d name is given, then dv3d 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the create3Dscalar function.)
-
-Example of Use:
-a.show('3d_dual_scalar')                      # Show all the existing 3Dscalar graphics methods
-plot=vcs.get3d_dual_scalar()                  # plot instance of 'default' dv3d graphics
-                                        # method
 """
 
     # Check to make sure the argument passed in is a STRING
@@ -1787,47 +2385,60 @@ plot=vcs.get3d_dual_scalar()                  # plot instance of 'default' dv3d
 
 def create3d_dual_scalar(name=None, source='default'):
     """
-Function: create3d_dual_scalar                # Construct a new dv3d graphics method
+    Create a new dv3d graphics method given the name and the existing
+    dv3d graphics method to copy the attributes from. If no existing
+    dv3d graphics method name is given, then the default dv3d graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
 
-Description of Function:
-Create a new dv3d graphics method given the the name and the existing
-dv3d graphics method to copy the attributes from. If no existing
-dv3d graphics method name is given, then the default dv3d graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-a.show('3d_dual_scalar')
-plot=a.create3d_dual_scalar()
+::
+
+    a=vcs.init()
+    a.show('3d_dual_scalar')
+    plot=a.create3d_dual_scalar()
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a 3d_dual_scalar or a string name of a 3d_dual_scalar
+
+:returns: A 3d_dual_scalar graphics method object
+:rtype: vcs.dv3d.Gf3DDualScalar
 """
-# print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-# create3d_scalar
-# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
     name, source = check_name_source(name, source, '3d_dual_scalar')
     return dv3d.Gf3DDualScalar(name, source)
 
 
 def get3d_vector(Gfdv3d_name_src='default'):
     """
-Function: get3d_vector                        # Construct a new 3Dvector graphics method
+    VCS contains a list of graphics methods. This function will create a
+    dv3d class object from an existing VCS dv3d graphics method. If
+    no dv3d name is given, then dv3d 'default' will be used.
 
-Description of Function:
-VCS contains a list of graphics methods. This function will create a
-dv3d class object from an existing VCS dv3d graphics method. If
-no dv3d name is given, then dv3d 'default' will be used.
-
-Note, VCS does not allow the modification of `default' attribute
-sets. However, a `default' attribute set that has been copied under a
-different name can be modified. (See the create3Dvector function.)
-
-Example of Use:
-a.show('3d_vector')                      # Show all the existing 3Dvector graphics methods
-plot=vcs.get3d_vector()                  # plot instance of 'default' dv3d graphics
-                                        # method
+    Note, VCS does not allow the modification of `default' attribute
+    sets. However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the create3Dvector function.)
+
+    :Example:
+
+::
+
+    # Show all the existing 3Dvector graphics methods
+    a.show('3d_vector')
+    # plot instance of 'default' dv3d graphics method
+    plot=vcs.get3d_vector()
+
+:param Gfdv3d_name_src: String name of an existing 3d_vector VCS object
+:type Gfdv3d_name_src: str
+
+:returns: A pre-existing 3d_vector VCS object
+:rtype: vcs.dv3d.Gf3Dvector
 """
 
     # Check to make sure the argument passed in is a STRING
@@ -1842,23 +2453,33 @@ plot=vcs.get3d_vector()                  # plot instance of 'default' dv3d graph
 
 def create3d_vector(name=None, source='default'):
     """
-Function: createdv3d                # Construct a new dv3d graphics method
+    Create a new dv3d graphics method given the name and the existing
+    dv3d graphics method to copy the attributes from. If no existing
+    dv3d graphics method name is given, then the default dv3d graphics
+    method will be used as the graphics method to which the attributes will
+    be copied from.
 
-Description of Function:
-Create a new dv3d graphics method given the the name and the existing
-dv3d graphics method to copy the attributes from. If no existing
-dv3d graphics method name is given, then the default dv3d graphics
-method will be used as the graphics method to which the attributes will
-be copied from.
+    If the name provided already exists, then an error will be returned. Graphics
+    method names must be unique.
 
-If the name provided already exists, then a error will be returned. Graphics
-method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-a.show('3Dvector')
-plot=a.create3d_vector()
+::
+
+    a=vcs.init()
+    a.show('3d_vector')
+    plot=a.create3d_vector()
+
+:param name: The name of the created object
+:type name: str
+
+:param source: The object to inherit from
+:type source: a 3d_vector or a string name of a 3d_vector
+
+:returns: A 3d_vector graphics method object
+:rtype: vcs.dv3d.Gf3Dvector
 """
+
     name, source = check_name_source(name, source, '3d_vector')
     return dv3d.Gf3Dvector(name, source)
 
@@ -1871,25 +2492,33 @@ plot=a.create3d_vector()
 
 def createcolormap(Cp_name=None, Cp_name_src='default'):
     """
-Function: createcolormap               # Construct a new colormap secondary method
+    Create a new colormap secondary method given the name and the existing
+    colormap secondary method to copy the attributes from. If no existing colormap
+    secondary method name is given, then the default colormap secondary method
+    will be used as the secondary method to which the attributes will be
+    copied from.
 
-Description of Function:
-Create a new colormap secondary method given the the name and the existing
-colormap secondary method to copy the attributes from. If no existing colormap
-secondary method name is given, then the default colormap secondary method
-will be used as the secondary method to which the attributes will be
-copied from.
+    If the name provided already exists, then an error will be returned.
+    Secondary method names must be unique.
 
-If the name provided already exists, then a error will be returned.
-Secondary method names must be unique.
+    :Example:
 
-Example of Use:
-a=vcs.init()
-cp=a.createcolormap('example1',)
-a.show('colormap')
-cp=a.createcolormap('example2','AMIP')
-a.show('colormap')
+::
+
+    a=vcs.init()
+    cp=a.createcolormap('example1',)
+    a.show('colormap')
+    cp=a.createcolormap('example2','AMIP')
+    a.show('colormap')
+
+:param Cp_name: The name of the created object
+:type Cp_name: str
 
+:param Cp_name_src: The object to inherit
+:type Cp_name_src: a colormap or a string name of a colormap
+
+:returns: A VCS colormap object
+:rtype: vcs.colormap.Cp
 """
     Cp_name, Cp_name_src = check_name_source(Cp_name, Cp_name_src, 'colormap')
     return colormap.Cp(Cp_name, Cp_name_src)
@@ -1897,24 +2526,31 @@ a.show('colormap')
 
 def getcolormap(Cp_name_src='default'):
     """
-Function: getcolormap                      # Construct a new colormap secondary method
+    VCS contains a list of secondary methods. This function will create a
+    colormap class object from an existing VCS colormap secondary method. If
+    no colormap name is given, then colormap 'default' will be used.
 
-Description of Function:
-VCS contains a list of secondary methods. This function will create a
-colormap class object from an existing VCS colormap secondary method. If
-no colormap name is given, then colormap 'default' will be used.
+    Note, VCS does not allow the modification of `default' attribute sets.
+    However, a `default' attribute set that has been copied under a
+    different name can be modified. (See the createcolormap function.)
 
-Note, VCS does not allow the modification of `default' attribute sets.
-However, a `default' attribute set that has been copied under a
-different name can be modified. (See the createcolormap function.)
+    :Example:
 
-Example of Use:
-a=vcs.init()
-a.show('colormap')                      # Show all the existing colormap secondary methods
-cp=a.getcolormap()                      # cp instance of 'default' colormap secondary
-                                        #       method
-cp2=a.getcolormap('quick')              # cp2 instance of existing 'quick' colormap
-                                        #       secondary method
+::
+
+    a=vcs.init()
+    # Show all the existing colormap secondary methods
+    a.show('colormap')
+    # cp instance of 'default' colormap secondary method
+    cp=a.getcolormap()
+    # cp2 instance of existing 'quick' colormap secondary method
+    cp2=a.getcolormap('quick')
+
+:param Cp_name_src: String name of an existing colormap VCS object
+:type Cp_name_src: str
+
+:returns: A pre-existing VCS colormap object
+:rtype: vcs.colormap.Cp
 """
     # Check to make sure the argument passed in is a STRING
     if not isinstance(Cp_name_src, str):
@@ -2043,7 +2679,7 @@ use the "del" function.
 Also note, The user is not allowed to remove a "default" class
 object.
 
-Example of Use:
+:Example:
 a=vcs.init()
 line=a.getline('red')       # To Modify an existing line object
 iso=x.createisoline('dean') # Create an instance of an isoline object
diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py
index 734dc1a7d..6307caa18 100644
--- a/Packages/vcs/vcs/utils.py
+++ b/Packages/vcs/vcs/utils.py
@@ -154,12 +154,24 @@ def dumpToJson(obj, fileout, skipped=[
 
 
 def getfontname(number):
+    """
+    Retrieve a font name for a given font index.
+
+    :param number: Index of the font to get the name of.
+    :type number: int
+    """
     if number not in vcs.elements["fontNumber"]:
         raise Exception("Error font number not existing %i" % number)
     return vcs.elements["fontNumber"][number]
 
 
 def getfontnumber(name):
+    """
+    Retrieve a font index for a given font name.
+
+    :param name: Name of the font to get the index of.
+    :type name: str
+    """
     for i in vcs.elements["fontNumber"]:
         if vcs.elements["fontNumber"][i] == name:
             return i
diff --git a/Packages/vcs/vcs/xmldocs.py b/Packages/vcs/vcs/xmldocs.py
index d1754e0f3..5993178ac 100644
--- a/Packages/vcs/vcs/xmldocs.py
+++ b/Packages/vcs/vcs/xmldocs.py
@@ -1,71 +1,128 @@
 plot_keywords_doc = """
-xaxis :: (cdms2.axis.TransientAxis) () Axis object to replace the slab -1 dim axis
-yaxis :: (cdms2.axis.TransientAxis) () Axis object to replace the slab -2 dim axis, only if slab has more than 1D
-zaxis :: (cdms2.axis.TransientAxis) () Axis object to replace the slab -3 dim axis, only if slab has more than 2D
-taxis :: (cdms2.axis.TransientAxis) () Axis object to replace the slab -4 dim axis, only if slab has more than 3D
-waxis :: (cdms2.axis.TransientAxis) () Axis object to replace the slab -5 dim axis, only if slab has more than 4D
-xrev :: (bool) () reverse x axis
-yrev :: (bool) () reverse y axis, only if slab has more than 1D
-xarray :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of x axis
-yarray :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of y axis, only if var has more than 1D
-zarray :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of z axis, only if var has more than 2D
-tarray :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of t axis, only if var has more than 3D
-warray :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of w axis, only if var has more than 4D
-continents :: (int) () continents type number
-name :: (str) () replaces variable name on plot
-time "" (cdtime.comptime/cdtime.reltime/cdtime.abstime) () replaces time name on plot
-units :: (str) () replaces units value on plot
-ymd :: (str) () replaces year/month/day on plot
-hms :: (str) () replaces hh/mm/ss on plot
-file_comment :: (str) () replaces file_comment on plot
-xbounds :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of x axis bounds values
-ybounds :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () Values to use instead of y axis bounds values (if exist)
-xname :: (str) () replace xaxis name on plot
-yname :: (str) () replace yaxis name on plot (if exists)
-zname :: (str) () replace zaxis name on plot (if exists)
-tname :: (str) () replace taxis name on plot (if exists)
-wname :: (str) () replace waxis name on plot (if exists)
-xunits :: (str) () replace xaxis units on plot
-yunits :: (str) () replace yaxis units on plot (if exists)
-zunits :: (str) () replace zaxis units on plot (if exists)
-tunits :: (str) () replace taxis units on plot (if exists)
-wunits :: (str) () replace waxis units on plot (if exists)
-xweights :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () replace xaxis weights used for computing mean
-yweights :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) () replace xaxis weights used for computing mean
-comment1 :: (str) () replaces comment1 on plot
-comment2 :: (str) () replaces comment2 on plot
-comment3 :: (str) () replaces comment3 on plot
-comment4 :: (str) () replaces comment4 on plot
-long_name :: (str) () replaces long_name on plot
-grid :: (cdms2.grid.TransientRectGrid) () replaces array grid (if exists)
-bg :: (bool/int) () plots in background mode
-ratio :: (int/str) () sets the y/x ratio ,if passed as a string with 't' at the end, will aslo moves the ticks
+:param xaxis: Axis object to replace the slab -1 dim axis
+:param yaxis: Axis object to replace the slab -2 dim axis, only if slab has more than 1D
+:param zaxis: Axis object to replace the slab -3 dim axis, only if slab has more than 2D
+:param taxis: Axis object to replace the slab -4 dim axis, only if slab has more than 3D
+:param waxis: Axis object to replace the slab -5 dim axis, only if slab has more than 4D
+:param xrev: reverse x axis
+:param yrev: reverse y axis, only if slab has more than 1D
+:param xarray: Values to use instead of x axis
+:param yarray: Values to use instead of y axis, only if var has more than 1D
+:param zarray: Values to use instead of z axis, only if var has more than 2D
+:param tarray: Values to use instead of t axis, only if var has more than 3D
+:param warray: Values to use instead of w axis, only if var has more than 4D
+:param continents: continents type number
+:param name: replaces variable name on plot
+:param time: replaces time name on plot
+:param units: replaces units value on plot
+:param ymd: replaces year/month/day on plot
+:param hms: replaces hh/mm/ss on plot
+:param file_comment: replaces file_comment on plot
+:param xbounds: Values to use instead of x axis bounds values
+:param ybounds: Values to use instead of y axis bounds values (if exist)
+:param xname: replace xaxis name on plot
+:param yname: replace yaxis name on plot (if exists)
+:param zname: replace zaxis name on plot (if exists)
+:param tname: replace taxis name on plot (if exists)
+:param wname: replace waxis name on plot (if exists)
+:param xunits: replace xaxis units on plot
+:param yunits: replace yaxis units on plot (if exists)
+:param zunits: replace zaxis units on plot (if exists)
+:param tunits: replace taxis units on plot (if exists)
+:param wunits: replace waxis units on plot (if exists)
+:param xweights: replace xaxis weights used for computing mean
+:param yweights: replace xaxis weights used for computing mean
+:param comment1: replaces comment1 on plot
+:param comment2: replaces comment2 on plot
+:param comment3: replaces comment3 on plot
+:param comment4: replaces comment4 on plot
+:param long_name: replaces long_name on plot
+:param grid: replaces array grid (if exists)
+:param bg: plots in background mode
+:param ratio: sets the y/x ratio ,if passed as a string with 't' at the end, will aslo moves the ticks
+:type xaxis: cdms2.axis.TransientAxis
+:type yaxis: cdms2.axis.TransientAxis
+:type zaxis: cdms2.axis.TransientAxis
+:type taxis: cdms2.axis.TransientAxis
+:type waxis: cdms2.axis.TransientAxis
+:type xrev: bool
+:type yrev: bool
+:type xarray: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type yarray: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type zarray: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type tarray: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type warray: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type continents: int
+:type name: str
+:type time: cdtime.comptime/cdtime.reltime/cdtime.abstime
+:type units: str
+:type ymd: str
+:type hms: str
+:type file_comment: str
+:type xbounds: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type ybounds: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type xname: str
+:type yname: str
+:type zname: str
+:type tname: str
+:type wname: str
+:type xunits: str
+:type yunits: str
+:type zunits: str
+:type tunits: str
+:type wunits: str
+:type xweights: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type yweights: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:type comment1: str
+:type comment2: str
+:type comment3: str
+:type comment4: str
+:type long_name: str
+:type grid: cdms2.grid.TransientRectGrid
+:type bg: bool/int
+:type ratio: int/str
 """  # noqa
 
 data_time = """
-    datawc_timeunits :: (str) ('days since 2000') units to use when disaplaying time dimension auto tick
-    datawc_calendar:: (int) (135441) calendar to use when displaying time dimension auto tick, default is proleptic gregorian calendar
+:param datawc_timeunits: (Ex: 'days since 2000') units to use when displaying time dimension auto tick
+:type datawc_timeunits: str
+:param datawc_calendar: (Ex: 135441) calendar to use when displaying time dimension auto tick, default is proleptic gregorian calendar
+:type datawc_calendar: int
 """  # noqa
 graphics_method_core_notime = """
-    xmtics1 :: (str/{float:str}) ('') dictionary with location of intermediate tics as keys for 1st side of y axis
-    xmtics2 :: (str/{float:str}) ('') dictionary with location of intermediate tics as keys for 2nd side of y axis
-    ymtics1 :: (str/{float:str}) ('') dictionary with location of intermediate tics as keys for 1st side of y axis
-    ymtics2 :: (str/{float:str}) ('') dictionary with location of intermediate tics as keys for 2nd side of y axis
-    xticlabels1 :: (str/{float:str}) ('*') values for labels on 1st side of x axis
-    xticlabels2 :: (str/{float:str}) ('*') values for labels on 2nd side of x axis
-    yticlabels1 :: (str/{float:str}) ('*') values for labels on 1st side of y axis
-    yticlabels2 :: (str/{float:str}) ('*') values for labels on 2nd side of y axis
-    projection :: (str/vcs.projection.Proj) ('default') projection to use, name or object
-    datawc_x1 :: (float) (1.E20) first value of xaxis on plot
-    datawc_x2 :: (float) (1.E20) second value of xaxis on plot
-    datawc_y1 :: (float) (1.E20) first value of yaxis on plot
-    datawc_y2 :: (float) (1.E20) second value of yaxis on plot
+:param xmtics1: (Ex: '') dictionary with location of intermediate tics as keys for 1st side of y axis
+:type xmtics1: str/{float:str}
+:param xmtics2: (Ex: '') dictionary with location of intermediate tics as keys for 2nd side of y axis
+:type xmtics2: str/{float:str}
+:param ymtics1: (Ex: '') dictionary with location of intermediate tics as keys for 1st side of y axis
+:type ymtics1: str/{float:str}
+:param ymtics2: (Ex: '') dictionary with location of intermediate tics as keys for 2nd side of y axis
+:type ymtics2: str/{float:str}
+:param xticlabels1: (Ex: '*') values for labels on 1st side of x axis
+:type xticlabels1: str/{float:str}
+:param xticlabels2: (Ex: '*') values for labels on 2nd side of x axis
+:type xticlabels2: str/{float:str}
+:param yticlabels1: (Ex: '*') values for labels on 1st side of y axis
+:type yticlabels1: str/{float:str}
+:param yticlabels2: (Ex: '*') values for labels on 2nd side of y axis
+:type yticlabels2: str/{float:str}
+:param projection: (Ex: 'default') projection to use, name or object
+:type projection: str/vcs.projection.Proj
+:param datawc_x1: (Ex: 1.E20) first value of xaxis on plot
+:type datawc_x1: float
+:param datawc_x2: (Ex: 1.E20) second value of xaxis on plot
+:type datawc_x2: float
+:param datawc_y1: (Ex: 1.E20) first value of yaxis on plot
+:type datawc_y1: float
+:param datawc_y2: (Ex: 1.E20) second value of yaxis on plot
+:type datawc_y2: float
 """  # noqa
 graphics_method_core = """%s
 %s""" % (graphics_method_core_notime, data_time)
-axisconvert = """    %saxisconvert :: (str) ('linear') converting %saxis linear/log/log10/ln/exp/area_wt\n """
-xaxisconvert = axisconvert % ("x", "x")
-yaxisconvert = axisconvert % ("y", "y")
+axisconvert = """:param {axis}axisconvert: (Ex: 'linear') converting {axis}axis linear/log/log10/ln/exp/area_wt
+:type {axis}axisconvert: str\n"""
+xaxisconvert = axisconvert.format(axis="x")
+yaxisconvert = axisconvert.format(axis="y")
 axesconvert = xaxisconvert + yaxisconvert
 colorsdoc = """Options:::
            color1 :: (int) (16) value for color_1
@@ -176,29 +233,38 @@ markerdoc = """
 #############################################################################
 
 create_GM_input = """
-    new_GM_name :: (str) (0) name of the new graphics method object. If no name is given, then one will be created for use.
-    source_GM_name :: (str) (1) copy the contents of the source object to the newly created one. If no name is given, then the 'default' graphics methond contents is copied over to the new object.
+:param new_GM_name: (Ex: 'my_awesome_gm') name of the new graphics method object. If no name is given, then one will be created for use.
+:type new_GM_name: str
+:param source_GM_name: (Ex: 'default') copy the contents of the source object to the newly created one. If no name is given, then the 'default' graphics method contents is copied over to the new object.
+:type source_GM_name: str
 """  # noqa
 
 get_GM_input = """
-    GM_name :: (str) (0) retrieve the graphics method object of the given name. If no name is given, then retrieve the 'default' graphics method.
+:param GM_name: (Ex: 'default') retrieve the graphics method object of the given name. If no name is given, then retrieve the 'default' graphics method.
+:type GM_name: str
 """  # noqa
 
 plot_1D_input = """
-       slab :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) (0) Data at least 1D, last dimension will be plotted
+:param slab: (Ex: [1, 2]) Data at least 1D, last dimension will be plotted
+:type slab: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
 """  # noqa
 
 plot_2D_input = """
-       slab :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) (0) Data at least 2D, last 2 dimensions will be plotted
+:param slab: (Ex: [[0, 1]]) Data at least 2D, last 2 dimensions will be plotted
+:type slab: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
 """  # noqa
 
 plot_2_1D_input = """
-       slab_or_primary_object :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list/vcs.fillarea.Tf/vcs.line.Tl/vcs.marker.Tm/vcs.textcombined.Tc) (None) Data at least 1D, last dimension(s) will be plotted, or primary vcs object
+:param slab_or_primary_object: Data at least 1D, last dimension(s) will be plotted, or secondary vcs object
+:type slab_or_primary_object: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list/vcs.fillarea.Tf/vcs.line.Tl/vcs.marker.Tm/vcs.textcombined.Tc
 """  # noqa
 plot_2_1D_options = """
-       slab2 :: (cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list) (None) Data at least 1D, last dimension(s) will be plotted
-       template :: (str/vcs.template.P) ('default') vcs template
-       gm :: (str/vcs.boxfill.Gfb/vcs.isofill.Gfi/vcs.isoline.Gi/vcs.meshfill.Gfm/vcs.vector.Gv/vcs.scatter.GSp/vcs.outline.Go/vcs.outline.Gfo/vcs.taylor.Gtd/vcs.xvsy.GXY/vcs.xyvsy.GXy/vcs.yxvsx.GYx/vcsaddons.core.VCSaddon/vcs.continents.Gcon) ('default') graphic method to use
+:param slab2: Data at least 1D, last dimension(s) will be plotted
+:type slab2: cdms2.tvariable.TransientVariable/numpy.core.ma.MaskedArray/numpy.ndarray/list
+:param template: ('default') vcs template to use
+:type template: str/vcs.template.P
+:param gm: (Ex: 'default') graphic method to use
+:type gm: str/vcs.boxfill.Gfb/vcs.isofill.Gfi/vcs.isoline.Gi/vcs.meshfill.Gfm/vcs.vector.Gv/vcs.scatter.GSp/vcs.outline.Go/vcs.outline.Gfo/vcs.taylor.Gtd/vcs.unified1d.G1d/vcsaddons.core.VCSaddon
 """  # noqa
 #############################################################################
 #                                                                           #
@@ -206,41 +272,42 @@ plot_2_1D_options = """
 #                                                                           #
 #############################################################################
 plot_output = """
-       display ::  (vcs.displayplot.Dp) (0) no default
+:return: Display Plot object representing the plot.
+:rtype: vcs.displayplot.Dp
 """
 
 boxfill_output = """
-       boxfill ::  (vcs.boxfill.Gfb) (0) no default
+       boxfill :: (Ex: 0) no default
 """
 
 isofill_output = """
-       isofill ::  (vcs.isofill.Gfi) (0) no default
+       isofill :: (Ex: 0) no default
 """
 
 isoline_output = """
-       isoline ::  (vcs.isoline.Gi) (0) no default
+       isoline :: (Ex: 0) no default
 """
 
 yxvsx_output = """
-       yxvsx ::  (vcs.yxvsx.GYx) (0) no default
+       yxvsx :: (Ex: 0) no default
 """
 
 xyvsy_output = """
-       xyvsy ::  (vcs.xyvsy.GXy) (0) no default
+       xyvsy :: (Ex: 0) no default
 """
 
 xvsy_output = """
-       xvsy ::  (vcs.xvsy.GXY) (0) no default
+       xvsy :: (Ex: 0) no default
 """
 
 scatter_output = """
-       scatter ::  (vcs.scatter.GSp) (0) no default
+       scatter :: (Ex: 0) no default
 """
 
 outfill_output = """
-       outfill ::  (vcs.outfill.Gfo) (0) no default
+       outfill :: (Ex: 0) no default
 """
 
 outline_output = """
-       outline ::  (vcs.outline.Go) (0) no default
+       outline :: (Ex: 0) no default
 """
-- 
GitLab


From 667750c9d7e0ce13acdc123d1d103e3aa9d5b19b Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 13 Jul 2016 12:52:23 -0400
Subject: [PATCH 195/196] Fixed text and quality of vector graphics as exported

---
 Packages/vcs/vcs/VTKPlots.py | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 4cb9e5df2..b2f1dc63b 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -1109,17 +1109,18 @@ class VTKVCSBackend(object):
 
         # Since the patterns are applied as textures on vtkPolyData, enabling
         # background rasterization is required to write them out
-        if self._rasterPropsInVectorFormats:
-            gl.Write3DPropsAsRasterImageOn()
+        # if self._rasterPropsInVectorFormats:
+        #     gl.Write3DPropsAsRasterImageOn()
 
         gl.SetInput(self.renWin)
         gl.SetCompress(0)  # Do not compress
         gl.SetFilePrefix(".".join(file.split(".")[:-1]))
 
         if textAsPaths:
-            gl.TextAsPathOff()
-        else:
             gl.TextAsPathOn()
+        else:
+            gl.TextAsPathOff()
+
         if output_type == "svg":
             gl.SetFileFormatToSVG()
         elif output_type == "ps":
@@ -1252,16 +1253,16 @@ class VTKVCSBackend(object):
             texttable = vcs.gettexttable(texttable)
 
         from vtk_ui.text import text_dimensions
-        
+
         text_property = vtk.vtkTextProperty()
         info = self.canvasinfo()
         win_size = info["width"], info["height"]
         vcs2vtk.prepTextProperty(text_property, win_size, to=textorientation, tt=texttable)
-        
+
         dpi = self.renWin.GetDPI()
-        
+
         length = max(len(texttable.string), len(texttable.x), len(texttable.y))
-        
+
         strings = texttable.string + [texttable.string[-1]] * (length - len(texttable.string))
         xs = texttable.x + [texttable.x[-1]] * (length - len(texttable.x))
         ys = texttable.y + [texttable.y[-1]] * (length - len(texttable.y))
-- 
GitLab


From 8991302edff913ddeae4ede4e997e50e0f24c23c Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary <aashish.chaudhary@kitware.com>
Date: Wed, 13 Jul 2016 13:04:15 -0400
Subject: [PATCH 196/196] Export to vector works for transparent geometries as
 well

---
 Packages/vcs/vcs/VTKPlots.py | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index b2f1dc63b..ec388c726 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -688,21 +688,16 @@ class VTKVCSBackend(object):
         self.scaleLogo()
 
         # Decide whether to rasterize background in vector outputs
-        # Current criteria to rasterize:
+        # Current limitation to vectorize:
         #       * if fillarea style is either pattern or hatch
-        #       * if fillarea opacity is less than 100 for solid fill
         try:
             if gm.style and all(style != 'solid' for style in gm.style):
                 self._rasterPropsInVectorFormats = True
-            elif gm.opacity and not all(o == 100 for o in gm.opacity):
-                self._rasterPropsInVectorFormats = True
         except:
             pass
         try:
             if gm.fillareastyle in ['pattern', 'hatch']:
                 self._rasterPropsInVectorFormats = True
-            elif not all(o == 100 for o in gm.fillareaopacity):
-                self._rasterPropsInVectorFormats = True
         except:
             pass
 
@@ -1109,8 +1104,9 @@ class VTKVCSBackend(object):
 
         # Since the patterns are applied as textures on vtkPolyData, enabling
         # background rasterization is required to write them out
-        # if self._rasterPropsInVectorFormats:
-        #     gl.Write3DPropsAsRasterImageOn()
+
+        if self._rasterPropsInVectorFormats:
+            gl.Write3DPropsAsRasterImageOn()
 
         gl.SetInput(self.renWin)
         gl.SetCompress(0)  # Do not compress
-- 
GitLab