diff --git a/CMake/cdat_modules/uvcmetrics_pkg.cmake b/CMake/cdat_modules/uvcmetrics_pkg.cmake
index 7024a9580889a9e60382d7dc290b1ac90bc0c419..2f82940422c885fc5fed8ece03f8fd598d4b693a 100644
--- a/CMake/cdat_modules/uvcmetrics_pkg.cmake
+++ b/CMake/cdat_modules/uvcmetrics_pkg.cmake
@@ -6,7 +6,7 @@ set(UVCMETRICS_ZIP uvcmetrics-${UVCMETRICS_VERSION}.zip)
 #set(UVCMETRICS_SOURCE ${UVCMETRICS_URL}/${UVCMETRICS_ZIP})
 set(UVCMETRICS_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/uvcmetrics.git )
 set(UVCMETRICS_MD5)
-set(UVCMETRICS_BRANCH uvcdat-2.4.1)
+set(UVCMETRICS_BRANCH master)
 
 if (NOT CDAT_BUILD_LEAN)
   add_cdat_package(UVCMETRICS "" "" ON)
diff --git a/Packages/cdms2/Lib/avariable.py b/Packages/cdms2/Lib/avariable.py
index dd2a2792dbeacf7775403a4b7c96d2db295e66a6..f15d7a0691bad18d12128942b12b019aa124262e 100644
--- a/Packages/cdms2/Lib/avariable.py
+++ b/Packages/cdms2/Lib/avariable.py
@@ -363,6 +363,16 @@ class AbstractVariable(CdmsObj, Slab):
             if axisMatches(self.getAxis(i), axis_spec):
                 return i
         return -1
+    
+    def hasCellData(self):
+        '''
+        If any of the variable's axes has explicit bounds, we have cell data;
+        otherwise we have point data.
+        '''
+        for axis in self.getAxisList():
+            if (axis.getExplicitBounds() is not None):
+                return True
+        return False
 
     def getAxisListIndex (self, axes=None, omit=None, order=None):
         """Return a list of indices of axis objects;
@@ -511,7 +521,6 @@ class AbstractVariable(CdmsObj, Slab):
                 
         return result
 
-
     # Get an order string, such as "tzyx"
     def getOrder(self, ids=0):
         """getOrder(ids=0) returns the order string, such as tzyx.
@@ -915,7 +924,9 @@ class AbstractVariable(CdmsObj, Slab):
                 else:
                     delta_beg_wrap_dimvalue = ncyclesrev*cycle
 
-                axis.setBounds(axis.getBounds() - delta_beg_wrap_dimvalue)
+                isGeneric = [False]
+                b = axis.getBounds(isGeneric) - delta_beg_wrap_dimvalue
+                axis.setBounds(b, isGeneric=isGeneric[0])
                 
                 axis[:]= (axis[:] - delta_beg_wrap_dimvalue).astype(axis.typecode())
 
diff --git a/Packages/cdms2/Lib/axis.py b/Packages/cdms2/Lib/axis.py
index df27788541dc802083729638ad60af2c313ec576..209fbda429930b0252175d9a7136e19b019c6d99 100644
--- a/Packages/cdms2/Lib/axis.py
+++ b/Packages/cdms2/Lib/axis.py
@@ -105,8 +105,8 @@ def getAutoBounds():
     return _autobounds
 
 # Create a transient axis
-def createAxis(data, bounds=None, id=None, copy=0):
-    return TransientAxis(data, bounds, id, copy=copy)
+def createAxis(data, bounds=None, id=None, copy=0, genericBounds=False):
+    return TransientAxis(data, bounds=bounds, id=id, copy=copy, genericBounds=genericBounds)
 
 # Generate a Gaussian latitude axis, north-to-south
 def createGaussianAxis(nlat):
@@ -959,13 +959,41 @@ class AbstractAxis(CdmsObj):
     def isLinear(self):
         raise CDMSError, MethodNotImplemented
 
-    def getBounds(self):
+    def getBounds(self, isGeneric=None):
+        '''
+        isGeneric is a list with one boolean which says whether the bounds
+        were read from a file (False) or generated (True)
+        '''
         raise CDMSError, MethodNotImplemented
 
-    # Return None if not explicitly defined
     def getExplicitBounds(self):
+        '''
+        Return None if not explicitly defined
+        This is a way to determine if attributes are defined at cell
+        or at point level. If this function returns None attributes are
+        defined at points, otherwise they are defined at cells
+        '''
         raise CDMSError, MethodNotImplemented
 
+    def getBoundsForDualGrid(self, dualGrid):
+        '''
+        dualGrid changes the type of dataset from the current type to the dual.
+        So, if we have a point dataset we switch to a cell dataset and vice versa.
+        '''
+        explicitBounds = self.getExplicitBounds()
+        if (explicitBounds is None):
+            # point data
+            if (dualGrid):
+                return self.getBounds()
+            else:
+                return None
+        else:
+            # cell data
+            if (dualGrid):
+                return None
+            else:
+                return explicitBounds
+
     def setBounds(self, bounds):
         raise CDMSError, MethodNotImplemented
 
@@ -1279,7 +1307,8 @@ class AbstractAxis(CdmsObj):
         The stride k can be positive or negative. Wraparound is
         supported for longitude dimensions or those with a modulus attribute.
         """
-        fullBounds = self.getBounds()
+        isGeneric = [False]
+        fullBounds = self.getBounds(isGeneric)
         _debug=0
         _debugprefix="SS__XX subaxis "
         
@@ -1359,7 +1388,7 @@ class AbstractAxis(CdmsObj):
             else:
                 bounds = None
         
-        newaxis = TransientAxis(data, bounds, id=self.id, copy=1)
+        newaxis = TransientAxis(data, bounds, id=self.id, copy=1, genericBounds=isGeneric[0])
 
         if self.isLatitude(): newaxis.designateLatitude()
         if self.isLongitude(): newaxis.designateLongitude()
@@ -1440,17 +1469,18 @@ class AbstractAxis(CdmsObj):
         """clone (self, copyData=1)
         Return a copy of self as a transient axis.
         If copyData is 1, make a separate copy of the data."""
-        b = self.getBounds()
+        isGeneric = [False]
+        b = self.getBounds(isGeneric)
         if copyData==1:
             mycopy = createAxis(copy.copy(self[:]))
         else:
             mycopy = createAxis(self[:])
         mycopy.id = self.id
         try:
-            mycopy.setBounds(b)
+            mycopy.setBounds(b, isGeneric=isGeneric[0])
         except CDMSError:
             b = mycopy.genGenericBounds()
-            mycopy.setBounds(b)
+            mycopy.setBounds(b, isGeneric=False)
         for k, v in self.attributes.items():
            setattr(mycopy, k, v)
         return mycopy
@@ -1570,7 +1600,13 @@ class Axis(AbstractAxis):
         return self._node_.dataRepresent==cdmsNode.CdLinear
 
     # Return the bounds array, or generate a default if autoBounds mode is on
-    def getBounds(self):
+    def getBounds(self, isGeneric=None):
+        '''
+        If isGeneric is a list with one element, we set its element to True if the
+        bounds were generated and False if bounds were read from the file.
+        '''
+        if (isGeneric):
+            isGeneric[0] = False
         boundsArray = self.getExplicitBounds()
         try:
             self.validateBounds(boundsArray)
@@ -1578,6 +1614,8 @@ class Axis(AbstractAxis):
             boundsArray = None
         abopt = getAutoBounds()
         if boundsArray is None and (abopt==1 or (abopt==2 and (self.isLatitude() or self.isLongitude()))) :
+            if (isGeneric):
+                isGeneric[0] = True
             boundsArray = self.genGenericBounds()
             
         return boundsArray
@@ -1609,7 +1647,10 @@ class Axis(AbstractAxis):
 # In-memory coordinate axis
 class TransientAxis(AbstractAxis):
     axis_count = 0
-    def __init__(self, data, bounds=None, id=None, attributes=None, copy=0):
+    def __init__(self, data, bounds=None, id=None, attributes=None, copy=0, genericBounds=False):
+        '''
+        genericBounds specifies whether bounds were generated (True) or read from a file (False)
+        '''
         AbstractAxis.__init__(self, None, None)
         if id is None:
             TransientAxis.axis_count = TransientAxis.axis_count + 1
@@ -1646,7 +1687,8 @@ class TransientAxis(AbstractAxis):
             self._data_ = numpy.array(data)
 
         self._doubledata_ = None
-        self.setBounds(bounds)
+        self._genericBounds_ = genericBounds
+        self.setBounds(bounds, isGeneric=genericBounds)
 
     def __getitem__(self, key):
         return self._data_[key]
@@ -1663,10 +1705,15 @@ class TransientAxis(AbstractAxis):
     def __len__(self):
         return len(self._data_)
 
-    def getBounds(self):
+    def getBounds(self, isGeneric=None):
+        if (isGeneric):
+            isGeneric[0] = self._genericBounds_
         if self._bounds_ is not None:
             return copy.copy(self._bounds_)
         elif (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
+            if (isGeneric):
+                isGeneric[0] = True
+            self._genericBounds_ = True
             return self.genGenericBounds()
         else:
             return None
@@ -1675,14 +1722,17 @@ class TransientAxis(AbstractAxis):
         return self._data_
 
     def getExplicitBounds(self):
-        return copy.copy(self._bounds_)
+        if (self._genericBounds_):
+            return None
+        else:
+            return copy.copy(self._bounds_)
 
     # Set bounds. The persistent argument is for compatibility with
     # persistent versions, is ignored. Same for boundsid and index.
     #
     # mf 20010308 - add validate key word, by default do not validate
-    #
-    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None):
+    # isGeneric is True if bounds were generated, False if they were read from a file
+    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None, isGeneric=False):
         if bounds is not None:
             if isinstance(bounds, numpy.ma.MaskedArray):
                 bounds = numpy.ma.filled(bounds)
@@ -1700,9 +1750,11 @@ class TransientAxis(AbstractAxis):
                     bounds2[:,1]=bounds[1::]
                     bounds=bounds2
             self._bounds_ = copy.copy(bounds)
+            self._genericBounds_ = isGeneric
         else:
             if (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
                 self._bounds_ = self.genGenericBounds()
+                self._genericBounds_ = True
             else:
                 self._bounds_ = None
 
@@ -1745,7 +1797,7 @@ class TransientVirtualAxis(TransientAxis):
         "Return true iff coordinate values are implicitly defined."
         return 1
 
-    def setBounds(self, bounds):
+    def setBounds(self, bounds, isGeneric=False):
         "No boundaries on virtual axes"
         self._bounds_ = None
 
@@ -1953,13 +2005,19 @@ class FileAxis(AbstractAxis):
         return 0                        # All file axes are vector representation
 
     # Return the bounds array, or generate a default if autobounds mode is set
-    def getBounds(self):
+    # If isGeneric is a list with one element, we set its element to True if the
+    # bounds were generated and False if bounds were read from the file.
+    def getBounds(self, isGeneric=None):
+        if (isGeneric):
+            isGeneric[0] = False
         boundsArray = self.getExplicitBounds()
         try:
             boundsArray = self.validateBounds(boundsArray)
         except Exception,err:
             boundsArray = None
         if boundsArray is None and (getAutoBounds()==1 or (getAutoBounds()==2 and (self.isLatitude() or self.isLongitude()))):
+            if (isGeneric):
+                isGeneric[0] = True
             boundsArray = self.genGenericBounds()
             
         return boundsArray
@@ -1989,7 +2047,8 @@ class FileAxis(AbstractAxis):
     # index in the extended dimension (default is index=0).
     # If the bounds variable is new, use the name boundsid, or 'bounds_<varid>'
     # if unspecified.
-    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None):
+    # isGeneric is only used for TransientAxis
+    def setBounds(self, bounds, persistent=0, validate=0, index=None, boundsid=None, isGeneric=False):
         if persistent:
             if index is None:
                 if validate:
@@ -2250,7 +2309,7 @@ def axisMatches(axis, specification):
 
        3. an axis object; will match if it is the same object as axis.
     """   
-    if isinstance(specification, types.StringType):
+    if isinstance(specification, basestring):
         s = string.lower(specification)
         s = s.strip()
         while s[0] == '(':
diff --git a/Packages/cdms2/Lib/cdscan.py b/Packages/cdms2/Lib/cdscan.py
new file mode 100755
index 0000000000000000000000000000000000000000..59896b1b43a58b5a1192d31915eaa88cf3d13ae2
--- /dev/null
+++ b/Packages/cdms2/Lib/cdscan.py
@@ -0,0 +1,1688 @@
+#!/usr/bin/env python
+
+import sys
+import getopt
+import cdms2
+from cdms2.grid import lookupArray
+from cdms2.axis import calendarToTag, tagToCalendar
+from cdms2.cdmsobj import CdFromObject,CdString,CdScalar,CdFloat,CdDouble,CdShort,CdInt,CdLong
+import numpy
+import string
+import cdtime
+import os.path
+import copy
+import types
+from cdms2 import cdmsNode
+import re
+
+usage = """Usage:
+    cdscan [options] <files>
+
+    Scan a list of files producing a CDMS dataset in XML representation. See Notes below
+    for a more complete explanation.
+
+Arguments:
+
+    <files> is a list of file paths to scan. The files can be listed in any order, and may
+    be in multiple directories.  A file may also be a CDML dataset (.xml or .cdml), in
+    which case the dataset(s) and files are combined into a new dataset.
+
+Options:
+
+    -a alias_file: change variable names to the aliases defined in an alias file.
+                   Each line of the alias file consists of two blank separated
+                   fields: variable_id alias. 'variable_id' is the ID of the variable
+                   in the file, and 'alias' is the name that will be substituted for
+                   it in the output dataset. Only variables with entries in the alias_file
+                   are renamed.
+
+    -c calendar:   either "gregorian", "proleptic_gregorian", "julian", "noleap", or "360_day". Default:
+                   "gregorian". This option should be used with caution, as it will
+                   override any calendar information in the files.
+
+    -d dataset_id: dataset identifier. Default: "none"
+
+    -e newattr:	   Add or modify attributes of a file, variable, or
+		   axis. The form of 'newattr' is either:
+
+		   'var.attr = value' to modify a variable or attribute, or
+		   '.attr = value' to modify a global (file) attribute.
+
+		   In either case, 'value' may be quoted to preserve spaces
+		   or force the attribute to be treated as a string. If
+		   'value' is not quoted and the first character is a
+		   digit, it is converted to integer or
+		   floating-point. This option does not modify the input
+		   datafiles. See notes and examples below.
+
+    --exclude var,var,...
+                   Exclude specified variables. The argument
+                   is a comma-separated list of variables containing no blanks.
+                   In contrast to --exclude-file, this skips the variables regardless
+                   of the file(s) in which they are contained, but processes other
+                   variables in the files.
+                   Also see --include.
+
+    --exclude-file pattern
+                   Exclude files with a basename matching the regular expression pattern.
+                   In contrast to --exclude, this skips the file entirely. Multiple patterns
+                   may be listed by separating with vertical bars (e.g. abc|def ). Note
+                   that the match is to the initial part of the basename. For example, the
+                   pattern 'st' matches any basename starting with 'st'.
+
+    -f file_list:  file containing a list of absolute data file names, one per
+                   line. <files> arguments are ignored.
+
+    --forecast     generate a description of a forecast dataset.
+                   This is not compatible with the -i, -r, -t, or -l options.
+                   A file can contain data for exactly one forecast; its
+                   forecast_reference_time (aka run time, analysis time, starting time,
+                   generating time, tau=0 time) is specified by the nbdate,nbsec variables.
+                   Each file's time axis will be interpreted as the forecast_period (aka
+                   tau, the interval from the forecast_reference_time to the current time)
+                   regardless of its units, standard_name, or other attributes.
+
+    -h:            print a help message.
+
+    -i time_delta: scan time as a 'linear' dimension. This is useful if the time dimension
+                   is very long. The argument is the time delta, a float or integer.  For
+                   example, if the time delta is 6 hours, and the reference units are
+                   "hours since xxxx", set the interval delta to 6.  The default value is
+                   the difference of the first two timepoints.
+
+    --ignore-open-error:
+                   Ignore open errors. Print a warning and continue.
+
+    --include var,var,...
+                   Only include specified variables in the output. The argument
+                   is a comma-separated list of variables containing no blanks.
+                   Also see --exclude.
+
+    --include-file pattern
+                   Only include files with a basename matching the regular expression pattern.
+                   In contrast to --include, this skips files entirely if they do not
+                   match the pattern. Multiple patterns
+                   may be listed by separating with vertical bars (e.g. abc|def ). Note
+                   that the match is to the initial part of the basename. For example, the
+                   pattern 'st' matches any basename starting with 'st'.
+
+    -j:		   scan time as a vector dimension. Time values are listed
+		   individually. Turns off the -i option.
+
+    -l levels:     list of levels, comma-separated. Only specify if files are partitioned by
+                   levels.
+
+    -m levelid:    name of the vertical level dimension. The default is the name of the
+                   vertical level dimension
+
+    --notrim-lat:  Don't trim latitude values (in degrees) to the range [-90..90]. By default
+		   latitude values are trimmed.
+
+    -p template:   Compatibility with pre-V3.0 datasets. 'cdimport -h' describes template strings.
+
+    -q:            quiet mode
+
+    -r time_units: time units of the form "<units> since yyyy-mm-dd hh:mi:ss", where
+                   <units> is one of "year", "month", "day", "hour", "minute", "second".
+                   Trailing fields may be omitted. The default is the units of the first
+                   time dimension found.
+
+    -s suffix_file: Append a suffix to variable names, depending on the directory
+                   containing the data file.  This can be used to distinguish variables
+                   having the same name but generated by different models or ensemble
+                   runs. 'suffix_file' is the name of a file describing a mapping between
+                   directories and suffixes.  Each line consists of two blank-separated
+                   fields: 'directory' 'suffix'. Each file path is compared to the
+                   directories in the suffix file. If the file path is in that directory
+                   or a subdirectory, the corresponding suffix is appended to the variable
+                   IDs in the file. If more than one such directory is found, the first
+                   directory found is used. If no match is made, the variable ids are not
+                   altered.  Regular expressions can be used: see the example in the Notes
+                   section.
+
+    -t timeid:     id of the partitioned time dimension. The default is the name of the time
+                   dimension.
+
+    --time-linear tzero,delta,units[,calendar]
+                   Override the time dimensions(s) with a linear time dimension. The arguments are
+                   a comma-separated list:
+                   
+                   tzero is the initial time point, a floating-point value.
+                   delta is the time delta, floating-point.
+                   units are time units as specified in the [-r] option.
+                   calendar is optional, and is specified as in the [-c] option. If omitted, it
+                     defaults to the value specified by [-c], otherwise as specified in the file.
+
+                   Example: --time-linear '0,1,months since 1980,noleap'
+
+                   Note (6) compares this option with [-i] and [-r]
+
+    --var-locate 'var,file_pattern':
+                   Only scan a variable if the basename of the file matches the pattern. This
+                   may be used to resolve duplicate variable errors. var and file_pattern are
+                   separated by a comma, with no blanks.
+                   
+                   var is the name of the variable
+                   file_pattern is a regular expression following the Python re module syntax.
+
+                   Example: to scan variable ps from files starting with the string 'ps_':
+                     --var-locate 'ps,ps_.*'
+
+    -x xmlfile:    XML filename. By default, output is written to standard output.
+
+Example:
+
+    cdscan -c noleap -d test -x test.xml [uv]*.nc
+    cdscan -d pcmdi_6h -i 0.25 -r 'days since 1979-1-1' *6h*.ctl
+
+Notes:
+
+    (1) The files can be in netCDF, GrADS/GRIB, HDF, or DRS format, and can be listed in
+    any order. Most commonly, the files are the result of a single experiment, and the
+    'partitioned' dimension is time. The time dimension of a variable is the coordinate
+    variable having a name that starts with 'time' or having an attribute "axis='T'". If
+    this is not the case, specify the time dimension with the -t option. The time
+    dimension should be in the form supported by cdtime. If this is not the case (or to
+    override them) use the -r option.
+
+    (2) The basic form of the command is 'cdscan <files>'. By default, the time values are
+    listed explicitly in the output XML. This can cause a problem if the time dimension is
+    very long, say for 6-hourly data. To handle this the form 'cdscan -i delta <files>'
+    may be used. This generates a compact time representation of the form <start, length,
+    delta>. An exception is raised if the time dimension for a given file is not linear.
+
+    (3) Another form of the command is 'cdscan -l lev1,lev2,..,levn <files>'. This asserts
+    that the dataset is partitioned in both time and vertical level dimensions. The level
+    dimension of a variable is the dimension having a name that starts with "lev", or
+    having an attribute "axis=Z". If this is not the case, set the level name with the -m
+    option.
+
+    (4) An example of a suffix file:
+
+    /exp/pr/ncar-a  _ncar-a
+    /exp/pr/ecm-a   _ecm-a
+    /exp/ta/ncar-a  _ncar-a
+    /exp/ta/ecm-a   _ecm-a
+
+    For all files in directory /exp/pr/ncar-a or a subdirectory, the corresponding
+    variable ids will be appended with the suffix '_ncar-a'.  Regular expressions can be
+    used, as defined in the Python 're' module. For example, The previous example can be
+    replaced with the single line:
+
+    /exp/[^/]*/([^/]*) _\g<1>
+
+    Note the use of parentheses to delimit a group. The syntax \g<n> refers to the n-th
+    group matched in the regular expression, with the first group being n=1. The string
+    [^/]* matches any sequence of characters other than a forward slash.
+
+    (5) Adding or modifying attributes with the -e option:
+
+    time.units = "days since 1979-1-1"
+
+    sets the units of all variables/axes to "Days since 1979-1-1". Note
+    that since this is done before any other processing is done, it allows
+    overriding of non-COARDS time units.
+
+    .newattr=newvalue
+
+    Set the global file attribute 'newattr' to 'newvalue'.
+
+    (6) The [--time-linear] option overrides the time values in the file(s). The resulting
+    dimension does not have any gaps. In contrast, the [-i], [-r] options use the specified
+    time units (from [-r]), and calendar from [-c] if specified, to convert the file times
+    to the new units. The resulting linear dimension may have gaps.
+
+    In either case, the files are ordered by the time values in the files.
+
+    The [--time-linear] option should be used with caution, as it is applied to all the time
+    dimensions found.
+"""
+
+# Ensure that arrays are fully printed to XML files
+numpy.set_printoptions(threshold=numpy.inf)
+
+calendarMap = tagToCalendar
+
+reverseCalendarMap = calendarToTag
+
+attrPattern = re.compile(r'\s*(\w*)\.(\w+)\s*=\s*(.*)$')
+cdms2.setNetcdfUseParallelFlag(0)
+def timestamp():
+    "Generate a timestamp."
+    import time
+    y,m,d,h,mi,s,w,dy,ds = time.gmtime(time.time())
+    return "%d-%d-%d %d:%d:%d"%(y,m,d,h,mi,s)
+
+def timeindex(value, units, basetime, delta, calendar):
+    """ Calculate (t - basetime)/delu
+    where t = reltime(value, units)
+    and delu is the time interval (delta, delunits) (e.g., 1 month).
+    """
+    if string.find(units," as ")==-1:
+        tval = cdtime.reltime(value, units)
+    else:
+        tval = cdtime.abstime(value, units)
+    newval = tval.torel(basetime, calendar)
+    if delta is None:
+        return newval.value
+    else:
+        return newval.value/delta
+
+def combineKeys(dict, typedict, timeIsLinear=0, referenceDelta = None, forecast=None):
+    """ Combine dictionary keys into an axis.
+    dict: (i,j) => (path, axisname)
+    typedict is either timedict or levdict or fcdict.
+    timeIsLinear is true iff time has a linear representation.
+    referenceDelta is only used for error checks if timeIsLinear is true.
+    """
+
+    global verbose
+
+    # Sort the projected time, level indices
+    keys = dict.keys()
+    keys.sort()
+
+    axislist = []
+    prevend = None
+    prevpath = None
+    name0 = None
+    compressPart = []
+    partition = []
+    previ = 0
+    firstunits = None
+    prevvals = None
+    coordToInd = {(None,None):(None,None)}
+    linCoordToInd = {(None,None):(None,None)}
+    iadj = None
+    errorOccurred = 0
+    for i0, i1 in keys:
+        path, name = dict[(i0, i1)]
+        if name0 is None:
+            name0 = name
+        values, units, dummy = typedict[(path,name)]
+        if firstunits is None:
+            firstunits = units
+        if prevend is not None and prevend>=i0:
+            if prevend>=i1:
+                if verbose:
+                    print >> sys.stderr,  'Warning, file %s, dimension %s contains values in file %s'%(prevpath,name,path)
+                if timeIsLinear:
+                    iind = lookupArray(prevvals, values[0])
+                    jind = lookupArray(prevvals, values[-1])
+                else:
+                    iind = lookupArray(prevvals, i0)
+                    jind = lookupArray(prevvals, i1)
+                if len(values)!=(jind-iind+1):
+                    raise RuntimeError, 'Dimension %s in files %s [len(%s)=%d], %s [len(%s)=%d], is inconsistent'%(name, prevpath, name, (jind-iind+1), path, name, len(values))
+                coordToInd[(i0,i1)] = (iind, jind)
+                
+                prevspart, prevepart = partition[-1]
+                linCoordToInd[(i0,i1)] = (prevspart+iind, prevspart+jind+1)
+                continue
+            else:                       # Fix partial overlap
+                if timeIsLinear:
+                    jind = lookupArray(prevvals, values[0])
+                else:
+                    jind = lookupArray(prevvals, i0)
+                if verbose:
+                    print >> sys.stderr,  'Warning, file %s, dimension %s overlaps file %s, value=%f'%(prevpath,name,path,prevvals[jind])
+                previ, prevj = compressPart[-1]
+                prevj = previ + jind
+                axislist[-1] = prevvals[0:jind]
+                compressPart[-1] = (previ, prevj)
+                coordToInd[(prevvals[0], prevvals[-1])] = (previ, prevj)
+                previ = prevj
+
+                prevspart, prevepart = partition[-1]
+                prevepart = prevspart + jind
+                partition[-1] = (prevspart, prevepart)
+                linCoordToInd[(prevvals[0], prevvals[-1])] = (prevspart, prevepart)
+
+        axislist.append(values)
+        prevend = i1
+        prevpath = path
+        prevj = previ+len(values)
+        compressPart.append((previ, prevj))
+        coordToInd[(i0,i1)] = (previ, prevj)
+
+        if iadj is None:                # partition has to start with 0
+            iadj = int(i0)
+        spart = int(i0) - iadj
+        epart = int(i1) + 1 - iadj
+        partition.append((spart, epart))
+        linCoordToInd[(i0,i1)] = (spart, epart)
+        if timeIsLinear and len(values)!=(epart-spart):
+            # Find the bad values
+            diffs = values[1:]-values[:-1]
+            badindices = numpy.compress(numpy.not_equal(diffs,referenceDelta),range(len(values)))
+            badvalues = numpy.take(values, badindices)
+            if verbose:
+                print >> sys.stderr,  "Error: Missing values in %s after times: %s. Set delta with the -i option or turn off linear mode with the -j option."%(path,str(badvalues))
+            errorOccurred = 1
+
+        prevvals = values
+        previ = prevj
+        
+    fullaxis = numpy.ma.concatenate(axislist)
+    return fullaxis, name0, compressPart, coordToInd, firstunits, partition, linCoordToInd, errorOccurred
+
+def useKeys(dict, typedict, timeIsLinear=0, referenceDelta = None, forecast=None):
+    """ Use dictionary keys for an axis.  This is like combineKeys (same arguments, same return values,
+    was written by simplifying combineKeys), but this doesn't do nearly so much because this is
+    for an axis where there is no splitting across files, hence partitions are not needed.
+    dict: (i,j) => (path, axisname)
+    typedict is either timedict or levdict or fcdict.
+    timeIsLinear is true iff time has a linear representation.
+    referenceDelta is only used for error checks if timeIsLinear is true.
+    """
+    global verbose
+
+    # Sort the projected time, level indices
+    keys = dict.keys()
+    keys.sort()
+
+    axislist = []
+    name0 = None
+#    compressPart = []
+    compressPart = None
+#    partition = []
+    partition = None
+#    previ = 0
+    firstunits = None
+#    coordToInd = {(None,None):(None,None)}
+#    linCoordToInd = {(None,None):(None,None)}
+    coordToInd = None
+    linCoordToInd = None
+    errorOccurred = 0
+    for i0, i1 in keys:
+        path, name = dict[(i0, i1)]
+        if name0 is None:
+            name0 = name
+        values, units, dummy = typedict[(path,name)]
+        if firstunits is None:
+            firstunits = units
+        axislist.append(values)
+#        prevj = previ+len(values)
+#        coordToInd[(i0,i1)] = (previ, prevj)
+
+    fullaxis = numpy.ma.concatenate(axislist)
+    return fullaxis, name0, compressPart, coordToInd, firstunits, partition, linCoordToInd, errorOccurred
+
+def copyDict(dict):
+    """Copy a dictionary-like object dict to a true dictionary"""
+    result = {}
+    for key in dict.keys():
+        result[key] = dict[key]
+
+    return result
+
+def disambig(name, dict, num, comparator, value):
+    """ Make an unique name from name, wrt to the keys in dictionary dict.
+    Try using num first. comparator(value,dict[name]) returns 0 if equal, 1 if not.
+    """
+    if not dict.has_key(name) or not comparator(value, dict[name]):
+        uniqname = name
+    else:
+        uniqname = '%s_%d'%(name,num)
+        if dict.has_key(uniqname) and comparator(value, dict[uniqname]):
+            trial_name = uniqname
+            for letter in string.lowercase:
+                uniqname = '%s_%s'%(trial_name,letter)
+                if not dict.has_key(uniqname) or not comparator(value, dict[uniqname]):
+                    break
+            else:
+                raise 'Cannot make axis name unique: ',name
+
+    return uniqname
+
+def compareaxes(axis1, axis2):
+    """Return 0 if equal, 1 if not"""
+    return ((len(axis1)!=len(axis2)) or not numpy.ma.allclose(axis1[:],axis2[:]))
+
+def comparedomains(domain1, domain2):
+    """Return 0 if equal, 1 if not"""
+    if len(domain1)!=len(domain2):
+        return 1
+    for i in range(len(domain1)):
+        item1 = domain1[i]
+        item2 = domain2[i]
+        if type(item1)!=type(item2):
+            return 1
+        if type(item1)==types.StringType:
+            return item1!=item2
+        elif compareaxes(item1, item2):
+            return 1
+    return 0
+
+def compareVarDictValues(val1, val2):
+    # A vardict value is [domain, attributeDict, typecode]; two entries are
+    # considered equal (0) iff their domains compare equal.
+    return comparedomains(val1[0], val2[0])
+
+def cleanupAttrs(attrs):
+    # Normalize attribute values in-place: unwrap single-element numpy
+    # arrays to their scalar, stringify longer arrays, and drop a
+    # missing_value entry whose value is None.
+    for attname in attrs.keys():
+        attval = attrs[attname]
+        if type(attval) is numpy.ndarray:
+            # NOTE(review): len() raises TypeError on 0-d arrays -- presumably
+            # attribute arrays here are always at least 1-d; confirm.
+            if len(attval)==1:
+                attrs[attname] = attval[0]
+            else:
+                attrs[attname] = str(attval)
+    if attrs.has_key('missing_value') and attrs['missing_value'] is None:
+        del attrs['missing_value']
+
+def validateAttrs(node):
+    """Compare attributes against DTD.
+
+    For each external attribute whose DTD constraint requires a numeric
+    scalar but whose XML value is a string, try to coerce the value;
+    warn (when verbose) on failure, and remove a few known-numeric
+    attributes entirely if they cannot be coerced at all.
+    """
+
+    global verbose
+
+    if hasattr(node,'datatype'):
+        parenttype = node.datatype
+    else:
+        parenttype = None
+    atts = node.getExternalDict()
+    for attname in atts.keys():
+        (attval,datatype)=atts[attname] # (XML value, datatype)
+        constraint = node.extra.get(attname)
+        if constraint is not None:
+            (scaletype,reqtype)=constraint # (CdScalar|CdArray, required type)
+            if reqtype==CdFromObject:
+                # CdFromObject means: inherit the required type from the parent.
+                reqtype = parenttype
+            if reqtype!=datatype and datatype==CdString and scaletype==CdScalar:
+                if reqtype in (CdFloat,CdDouble) and type(attval)!=types.FloatType:
+                    try:
+                        attval = string.atof(attval)
+                    except:
+                        if verbose:
+                            print >> sys.stderr,  "Warning: %s=%s should be a float, id=%s"%(attname,attval,node.id),
+                        try:
+                            # Second chance: parse as integer, then widen to float.
+                            attval = string.atoi(attval)
+                            attval = float(attval)
+                            if verbose:
+                                print "(Recasting)"
+                            node.setExternalAttr(attname,attval)
+                        except:
+                            if attname in ['modulo', 'add_offset', 'scale_factor']:
+                                # These are meaningless as strings; drop them.
+                                if verbose:
+                                    print "(Removing)"
+                                attdict = node.getExternalDict()
+                                del attdict[attname]
+                            else:
+                                if verbose:
+                                    print ""
+                elif reqtype in (CdShort,CdInt,CdLong) and type(attval)!=types.IntType:
+                    try:
+                        attval = string.atoi(attval)
+                    except:
+                        if verbose:
+                            print >> sys.stderr,  "Warning: %s=%s should be an integer, id=%s"%(attname,attval,node.id),
+                        try:
+                            # Second chance: parse as float, then truncate to int.
+                            attval = string.atof(attval)
+                            attval = int(attval)
+                            if verbose:
+                                print "(Recasting)"
+                            node.setExternalAttr(attname,attval)
+                        except:
+                            if verbose:
+                                print ""
+
+def cloneWithLatCheck(axis):
+    """Clone an axis, ensuring that latitudes (in degrees) are in the range [-90:90]"""
+
+    global verbose
+    global notrimlat
+
+    axisvals = origvals = axis[:]
+    # Only clamp axes that identify as latitude with units beginning "degree"
+    # (e.g. degrees_north); notrimlat (the --notrim-lat option) disables it.
+    if axis.isLatitude() and hasattr(axis,"units") and string.lower(axis.units[0:6])=="degree":
+        if notrimlat==0:
+            axisvals = numpy.maximum(-90.0, numpy.minimum(90.0,axisvals))
+        if not numpy.ma.allclose(axisvals, origvals) and verbose:
+            print >> sys.stderr,  "Warning: resetting latitude values: ",origvals," to: ",axisvals
+
+    b = axis.getBounds()
+    mycopy = cdms2.createAxis(copy.copy(axisvals))
+    mycopy.id = axis.id
+    try:
+        mycopy.setBounds(b)
+    except CDMSError:
+        # The original bounds may be rejected by the transient axis;
+        # fall back to generated generic bounds.
+        b = mycopy.genGenericBounds()
+        mycopy.setBounds(b)
+    # Carry over all attributes of the original axis.
+    for k, v in axis.attributes.items():
+       setattr(mycopy, k, v)
+    return mycopy
+
+def addAttrs(fobj, eattrs):
+    """Add extra attributes to file/dataset fobj.
+    eattrs has the form [(varid,attr,value), (varid,attr,value), ...]
+    where if varid is '', set the global attribute."""
+    for evar,eattr,evalue in eattrs:
+        if evar=='':
+            fobj.__dict__[eattr] = evalue
+        else:
+            varobj = fobj[evar]
+            if varobj is not None:
+                varobj.__dict__[eattr] = evalue
+
+def setNodeDict(node, dict):
+    for key in dict.keys():
+        value = dict[key]
+        if (isinstance(value, numpy.integer) or isinstance(value, types.IntType)):
+            datatype = CdLong
+        elif (isinstance(value, numpy.floating) or isinstance(value, types.FloatType)):
+            datatype = CdDouble
+        else:
+            datatype = CdString
+        node.attribute[key]=(value,datatype)
+
+def initialize_filemap( filemap, timedict, levdict, timeid, extendDset, splitOnTime, \
+                        referenceTime, timeIsLinear, referenceDelta, splitOnLevel, \
+                        dirlen, overrideCalendar ):
+    # This function was formerly part of the body of "main".
+        # Initialize filemap : varid => (tc0, tc1, lc0, lc1, path, timeid, levid)
+        # where tc0 is the first time index relative to the reference time, tc1 the last,
+        # lc0 is the first level, lc1 the last, path is the filename, timeid is the id
+        # of the time dimension of the variable, levid is the id of the level dimension
+        # 
+        # timedict : (path, timeid) => (timearray, timeunits, calendar)
+        #
+        # levdict : (path, levelid) => (levelarray, levelunits, None)
+        #
+    initfilemap = cdms2.dataset.parseFileMap(extendDset.cdms_filemap)
+    dsetdirec = extendDset.directory
+    for namelist, slicelist in initfilemap:
+        for name in namelist:
+            var  = extendDset[name]
+            timeaxis = var.getTime()
+            if timeaxis is not None and not overrideCalendar:
+                calendar = timeaxis.getCalendar()
+            if splitOnTime and timeaxis is not None:
+                if hasattr(timeaxis, 'name_in_file'):
+                    timeid = timeaxis.name_in_file
+                else:
+                    timeid = timeaxis.id
+                if referenceTime is None:
+                    referenceTime = timeaxis.units
+                if timeIsLinear in [None,1]:
+                    timeIsLinear = timeaxis.isLinear()
+                    if timeIsLinear:
+                        if len(timeaxis)>1:
+                            referenceDelta = timeaxis[1]-timeaxis[0]
+                        else:
+                            referenceDelta = 1.0
+                    else:
+                        referenceDelta = None
+            else:
+                timeid = None
+            levelaxis = var.getLevel()
+            if splitOnLevel and levelaxis is not None:
+                if hasattr(levelaxis, 'name_in_file'):
+                    levid = levelaxis.name_in_file
+                else:
+                    levid = levelaxis.id
+            else:
+                levid = None
+
+            varmaplist = []
+            for t0, t1, lev0, lev1, path in slicelist:
+                fullpath = os.path.join(dsetdirec,path)
+                basepath = fullpath[dirlen:]
+                if t0 is not None:
+                    tc0 = timeindex(timeaxis[t0], timeaxis.units, referenceTime, referenceDelta, calendar)
+                    tc1 = timeindex(timeaxis[t1-1], timeaxis.units, referenceTime, referenceDelta, calendar)
+                    if not timedict.has_key((basepath, timeid, calendar)):
+                        values = timeaxis[t0:t1]
+                        timedict[(basepath, timeid)] = (values, timeaxis.units, calendar)
+                else:
+                    tc0 = tc1 = None
+                if lev0 is not None:
+                    lc0 = levelaxis[lev0]
+                    lc1 = levelaxis[lev1-1]
+                    if not levdict.has_key((basepath, levid, None)):
+                        values = levelaxis[lev0:lev1]
+                        levdict[(basepath, levid)] = (values, levelaxis.units, None)
+                else:
+                    lc0 = lc1 = None
+                varmaplist.append((tc0, tc1, lc0, lc1, basepath, timeid, levid, calendar))
+            if filemap.has_key(name):
+                filemap[name].extend(varmaplist)
+            else:
+                filemap[name] = varmaplist
+
+#---------------------------------------------------------------------------------------------
+
+# Module-level verbosity flag: nonzero prints progress messages and warnings;
+# main() resets it to 0 when the -q option is given.
+verbose = 1
+
+def main(argv):
+
+    global verbose
+    global notrimlat
+
+    try:
+        args, lastargs = getopt.getopt( \
+            argv[1:], "a:c:d:e:f:hi:jl:m:p:qr:s:t:x:", \
+            ["include=","include-file=","exclude=","exclude-file=","forecast","time-linear=", \
+             "notrim-lat","var-locate=","ignore-open-error" ] )
+    except getopt.error:
+        print >> sys.stderr,  sys.exc_value
+        print >> sys.stderr,  usage
+        sys.exit(0)
+
+    calendar = None
+    calenkey = None
+    timeid = None
+    levelid = None
+    notrimlat = 0
+    referenceTime = None
+    referenceDelta = None
+    readFromFile = 0
+    splitOnTime = 1
+    splitOnLevel = 0
+    datasetid = "none"
+    timeIsLinear = None
+    writeToStdout = 1
+    templatestr = None
+    timeIsVector = None
+    modelMapFile = None
+    aliasMapFile = None
+    overrideCalendar = 0
+    extraAttrs = []
+    extraDict = {}
+    includeList = None
+    excludeList = None
+    overrideTimeLinear = None
+    varLocate = None
+    ignoreOpenError = False
+    excludePattern = None
+    includePattern = None
+    forecast = False
+    for flag, arg in args:
+        if flag=='-a':
+            aliasMapFile = arg
+        elif flag=='-c':
+            calenkey = string.lower(arg)
+            calendar = calendarMap[calenkey]
+            overrideCalendar = 1
+        elif flag=='-d':
+            datasetid = arg
+        elif flag=='-e':
+            matchObj = attrPattern.match(arg)
+            if matchObj is None:
+                raise RuntimeError, "Expression must have form '[var].attr=value': %s"%arg
+            matchGroups = matchObj.groups()
+            if len(matchGroups)!=3:
+                raise RuntimeError, "Expression must have form '[var].attr=value': %s"%arg
+            matchValue = matchGroups[2]
+            if len(matchValue)>0 and (matchValue[0].isdigit() or matchValue[0] in ['"',"'","-","+"]): #"
+                matcheval = eval(matchValue)
+            else:
+                matcheval = str(matchValue)
+            extraAttrs.append((matchGroups[0], matchGroups[1], matcheval))
+        elif flag=='--exclude':
+            if arg[0]=='-':
+                raise RuntimeError, "--exclude option requires an argument"
+            excludeList = string.split(arg,',')
+        elif flag=='--exclude-file':
+            excludePattern = arg
+        elif flag=='-f':
+            readFromFile = 1
+            filelistpath = arg
+        elif flag=='--forecast':  # experimental forecast mode
+            forecast = True
+            splitOnTime = 0
+            splitOnLevel = 0
+        elif flag=='-h':
+            print usage
+            sys.exit(0)
+        elif flag=='-i':
+            splitOnTime = 1
+            referenceDelta = string.atof(arg)
+            timeIsLinear = 1
+            timeIsVector = None
+        elif flag=='--ignore-open-error':
+            ignoreOpenError = True
+        elif flag=='--include':
+            if arg[0]=='-':
+                raise RuntimeError, "--include option requires an argument"
+            includeList = string.split(arg,',')
+        elif flag=='--include-file':
+            includePattern = arg
+        elif flag=='-j':
+            timeIsVector = 1
+            timeIsLinear = None
+        elif flag=='-l':
+            splitOnLevel = 1
+            levelstr = string.split(arg,',')
+            levellist = map(string.atof, levelstr)
+            levels = numpy.array(levellist)
+            levels = numpy.sort(levels)
+        elif flag=='-m':
+            levelid = arg
+            args.append(('-e','%s.axis=Z'%levelid)) # Add axis=Z attribute
+        elif flag=='--notrim-lat':
+            notrimlat = 1
+        elif flag=='-p':
+            templatestr = arg
+        elif flag=='-q':
+            verbose = 0
+        elif flag=='-r':
+            splitOnTime = 1
+            referenceTime = arg
+        elif flag=='-s':
+            modelMapFile = arg
+        elif flag=='-t':
+            splitOnTime = 1
+            timeid = arg
+            args.append(('-e','%s.axis=T'%timeid)) # Add axis=T attribute
+        elif flag=='--time-linear':
+            targlist = string.split(arg,',')
+            ttzero = string.atof(targlist[0])
+            tdelta = string.atof(targlist[1])
+            tunits = string.strip(targlist[2])
+            if len(targlist)==4:
+                tcalendar = string.strip(targlist[3])
+            else:
+                tcalendar = None
+            overrideTimeLinear = [ttzero,tdelta,tunits,tcalendar]
+        elif flag=='--var-locate':
+            if varLocate is None:
+                varLocate = {}
+            vname, pattern = string.split(arg,',')
+            varLocate[vname]=pattern
+        elif flag=='-x':
+            writeToStdout = 0
+            xmlpath = arg
+
+    # If overriding time, process time as vector so that no gaps result
+    if overrideTimeLinear is not None:
+        timeIsVector = 1
+        timeIsLinear = None
+        if overrideCalendar==1:
+            overrideTimeLinear[3]=calenkey
+
+    if verbose:
+        print 'Finding common directory ...'
+    if readFromFile:
+        f = open(filelistpath)
+        lastargs = f.readlines()
+        f.close()
+
+    # Ignore blank paths
+    realargs = []
+    for arg in lastargs:
+        sarg = string.strip(arg)
+        if len(sarg)>0:
+            realargs.append(sarg)
+    lastargs = realargs
+
+    # Split lastargs into files and datasets
+    fileargs = []
+    dsetargs = []
+    for arg in lastargs:
+        base, suffix = os.path.splitext(arg)
+        if string.lower(suffix) in ['.xml','.cdml']:
+            dsetargs.append(arg)
+        else:
+            fileargs.append(arg)
+
+    # Generate a list of pathnames for datasets
+    dsetfiles = []
+    for path in dsetargs:
+        dset = cdms2.open(path)
+        if not hasattr(dset, 'cdms_filemap'):
+            raise RuntimeError,'Dataset must have a cdms_filemap attribute: '+path
+        if not hasattr(dset, 'directory'):
+            raise RuntimeError,'Dataset must have a directory attribute: '+path
+        dsetdirec = dset.directory
+        initfilemap = cdms2.dataset.parseFileMap(dset.cdms_filemap)
+        for namelist, slicelist in initfilemap:
+            for t0, t1, lev0, lev1, path in slicelist:
+                dsetfiles.append(os.path.join(dsetdirec, path))
+    augmentedArgs = fileargs + dsetfiles
+
+    # Find the common directory
+    directory = os.path.commonprefix(augmentedArgs)
+    firstpath = augmentedArgs[0][len(directory):]
+    if not os.path.isfile(os.path.join(directory,firstpath)):
+        dnew = os.path.dirname(directory)
+        if len(dnew)>0 and directory[len(dnew)]=='/':
+            directory = dnew+'/'
+        else:
+            directory = dnew
+    if verbose:
+        print 'Common directory:',directory
+
+    dirlen = len(directory)
+
+    if templatestr is not None:
+        if os.path.isabs(templatestr):
+            templatestr = templatestr[dirlen:]
+        templatere, ignore = cdms2.cdmsobj.templateToRegex(templatestr)
+        template = re.compile(templatere+'$')
+    else:
+        template = None
+
+    axisdict = {}
+    vardict = {}
+    filemap = {}
+    timedict = {}
+    levdict = {}
+    fcdict = {}
+    global_attrs = None
+    fctau0 = None
+
+    if modelMapFile is not None:
+        mfile = open(modelMapFile)
+        modelMap = {}
+        modelDirs = []
+        for line in mfile.readlines():
+            mdirec, model = string.split(line)
+            modelMap[mdirec] = model
+            modelDirs.append(mdirec)
+        mfile.close()
+
+    if aliasMapFile is not None:
+        afile = open(aliasMapFile)
+        aliasMap = {}
+        for line in afile.readlines():
+            if line[0] not in ["'",'"']: #"
+                varid, alias = string.split(line)
+            else:
+                dummy, varid, alias = string.split(line,line[0])
+                alias = string.strip(alias)
+            aliasMap[varid] = alias
+        afile.close()
+
+    # Save extra attribute information for new axes
+    for evar, eattr, evalue in extraAttrs:
+        if evar=='':
+            continue
+        if extraDict.has_key(evar):
+            curval = extraDict[evar]
+            curval.append((eattr,evalue))
+        else:
+            extraDict[evar] = [(eattr,evalue)]
+
+    #---------------------------------------------------------------------------------------------
+    # Initialize dictionaries if adding to an existing dataset
+    if verbose and len(dsetargs)>0:
+        print 'Scanning datasets ...'
+    for extendPath in dsetargs:
+        if verbose:
+            print extendPath
+        extendDset = cdms2.open(extendPath)
+
+        # Add/modify attributes
+        addAttrs(extendDset, extraAttrs)
+
+        # Copy the global attribute dictionary if necessary. Note that copy.copy
+        # can't be used here, since .attributes is now a 'fake' dictionary.
+        if global_attrs is None:
+            global_attrs = copyDict(extendDset.attributes)
+
+        # Initialize filemap : varid => (tc0, tc1, lc0, lc1, path, timeid, levid)
+        # where tc0 is the first time index relative to the reference time, tc1 the last,
+        # lc0 is the first level, lc1 the last, path is the filename, timeid is the id
+        # of the time dimension of the variable, levid is the id of the level dimension
+        # 
+        # timedict : (path, timeid) => (timearray, timeunits, calendar)
+        #
+        # levdict : (path, levelid) => (levelarray, levelunits, None)
+        #
+        initialize_filemap( filemap, timedict, levdict, timeid, extendDset, splitOnTime, \
+                            referenceTime, timeIsLinear, referenceDelta, splitOnLevel, \
+                            dirlen, overrideCalendar )
+
+        # axisdict : id => transient_axis
+        #   for non-partitioned axes only
+        #
+        tempmap = {}
+        for axis in extendDset.axes.values():
+            if not ( (splitOnTime and (axis.isTime() or axis.id==timeid)) or \
+                     (splitOnLevel and (axis.isLevel() or axis.id==levelid)) ):
+                axis = cloneWithLatCheck(axis)
+                if axisdict.has_key(axis.id):
+                    currentaxis = axisdict[axis.id]
+
+                    # Check that the axis has the same length and values as the saved value. If not,
+                    # create an unambiguous name in the axis dictionary.
+                    if compareaxes(axis, currentaxis):
+                        sepname = disambig(axis.id, axisdict, len(axis), compareaxes, axis)
+                        axis.name_in_file = axis.id
+                        oldid = axis.id
+                        axis.id = sepname
+                        axisdict[sepname] = axis
+                        tempmap[oldid] = sepname
+                else:
+                    axisdict[axis.id] = axis
+
+        # vardict : varid => [domain, attributeDict, typecode]
+        #   where domain = [axis_or_id, axis_or_id,...]
+        #   and axis_or_id is the id of a partitioned dimension, or
+        #   the transient axis object associated with a non-partitioned dimension
+        #
+        for var in extendDset.variables.values():
+            tempdomain = []
+            for id in var.getAxisIds():
+                if tempmap.has_key(id):
+                    id = tempmap[id]
+                if axisdict.has_key(id):
+                    tempdomain.append(axisdict[id])
+                else:
+                    axis = extendDset[id]
+                    if hasattr(axis,'name_in_file'):
+                        id = axis.name_in_file
+                    tempdomain.append(id)
+            varattrs = copyDict(var.attributes)
+            vardict[var.id] = [tempdomain, varattrs, var.typecode()]
+
+        extendDset.close()
+
+        # end of loop "for extendPath in dsetargs"
+
+    #---------------------------------------------------------------------------------------------
+    if verbose:
+        print 'Scanning files ...'
+
+    boundsmap = {}                      # boundsmap : varid => timebounds_id
+    boundsdict = {}                     # Same as vardict for time bounds
+    for path in fileargs:
+        path = string.strip(path)
+
+        # Check if the path is included
+        if includePattern is not None:
+            base = os.path.basename(path)
+            mobj = re.match(includePattern, base)
+            if mobj is None:
+                continue
+
+        # Check if the path is excluded
+        if excludePattern is not None:
+            base = os.path.basename(path)
+            mobj = re.match(excludePattern, base)
+            if mobj is not None:
+                continue
+
+        if verbose:
+            print path
+        try:
+            f = cdms2.open(path)
+        except:
+            if not ignoreOpenError:
+                raise RuntimeError,'Error opening file '+path
+            else:
+                print >> sys.stderr,  'Warning: cannot open file, skipping: %s'%path
+                continue
+
+        # Add/modify attributes
+        addAttrs(f, extraAttrs)
+
+        # Determine the variable ID suffix, if any
+        varsuffix = None
+        if modelMapFile is not None:
+            for direc in modelDirs:
+                mo = re.match(direc, path)
+                if mo is not None:
+                    suffixPattern = modelMap[direc]
+                    def gensuffix(m, mo=mo):
+                        i = string.atoi(m.group(1))
+                        return mo.group(i)
+                    varsuffix = re.sub(r'\\g<(\d)>', gensuffix, suffixPattern)
+                    break
+
+        # Copy the global attribute dictionary if necessary. Note that copy.copy
+        # can't be used here, since .attributes is now a 'fake' dictionary.
+        if global_attrs is None:
+            global_attrs = copyDict(f.attributes)
+
+        basepath = path[dirlen:]
+        if template is not None and template.match(basepath) is None:
+            if verbose:
+                print >> sys.stderr,  'Warning: path %s does not match template %s'%(basepath, templatestr)
+
+        # Find time boundary variables
+        boundsids = []
+        if splitOnTime:
+            tmpdict = {}
+            for axisname in f.axes.keys():
+                axis = f[axisname]
+                #was if axis.isTime() and hasattr(axis, 'bounds'):
+                if axis.isTime() and (axis.getBounds() is not None):
+                    tmpdict[axis.bounds] = 1
+            boundsids = tmpdict.keys()
+
+        # For forecasts, get the time at which the forecast begins (tau=0) which
+        # is nbdate,nbsec
+        if forecast:
+            nbdate = numpy.int( f('nbdate') )  # f('nbdate') is numpy.int32 which gets truncated
+            nbsec = f('nbsec')
+            fctau0 = nbdate*100000 + nbsec  # hopefully nbsec<(seconds per day)=86400<100000
+            fctau0time = cdtime.abstime( nbdate,"day as %Y%m%d" )
+            fctau0time = fctau0time.add( nbsec, cdtime.Seconds )  # fctau0 as type comptime
+            fc_time_attrs = []
+
+        varnames = f.variables.keys()
+
+        # Try to force all axes to be included, but only small ones, length<100.
+        # This section was motivated by a need to preserve the cloud axes isccp_prs,isccp_tau.
+        # If we ever need to preserve longer axes as well, we could create one variable per axis...
+        crude_var_axes = [ [ ax[0] for ax in var.getDomain() ] for var in f.variables.values() ]
+        var_axes = set().union( *crude_var_axes )
+        other_axes = list( set(f.axes.values()) - var_axes )
+        if len(other_axes)>0:
+            other_axes = [ax for ax in other_axes if len(ax)<100]
+            other_axes.sort( key=(lambda ax:ax.id) )
+            axisvar = cdms2.createVariable( numpy.ones([len(ax) for ax in other_axes]),
+                                            axes=other_axes, id='allaxesdummy')
+            axisvar.autoApiInfo = None    # all CdmsObj objects have this attribute, but for unknown
+            # reasons datasetnode.dump() fails trying to dump this attribute's default value (jfp)
+            varnames.append( axisvar.id )
+        # ...try to force all axes to be considered
+
+        varnames.sort()
+        for varname in varnames:
+
+            # If --var-locate is specified for the variable, match the basename before processing
+            if varLocate is not None and varLocate.has_key(varname):
+                varpattern = varLocate[varname]
+                base = os.path.basename(path)
+                mobj = re.match(varpattern, base)
+                if mobj is None:
+                    continue
+
+            # was var = f.variables[varname]
+            if varname=='allaxesdummy':
+                var = axisvar
+            else:
+                var = f.variables[varname]
+
+            # Reset the variable ID to any specified alias
+            if aliasMapFile is not None:
+                varalias = aliasMap.get(var.id)
+                if varalias is not None:
+                    var.name_in_file = var.id
+                    var.id = varalias
+                    varname = varalias
+
+            # Append a suffix to the variable ID, if applicable
+            if varsuffix is not None:
+                if not hasattr(var, 'name_in_file'):
+                    var.name_in_file = var.id
+                var.id += varsuffix
+                varname += varsuffix
+
+            varentry = [None]*9         # [timestart, timeend, levstart, levend, path, timeid, levid, calendar, fctau0]
+            varentry[4] = basepath
+            varentry[8] = fctau0
+
+            # Generate a temporary domain entry, and
+            # create axis dictionary entries.
+            domain = var.getDomain()
+            if forecast:
+                tempdomain = ['fctau0']
+            else:
+                tempdomain = []         # List of axis names and/or objects (if not partitioned)
+            for axis, start, length, truelen in domain:
+                if (splitOnTime and (axis.isTime() or axis.id==timeid)) or \
+                   (splitOnLevel and (axis.isLevel() or axis.id==levelid)):
+                    tempdomain.append(axis.id)
+                elif forecast and  (axis.isTime() or axis.id==timeid):
+                    # time axis isn't split but needs special treatment for forecasts
+                    tempdomain.append(axis.id)
+                    fc_time_attrs.append(axis.attributes)
+                else:
+                    axis = cloneWithLatCheck(axis) # Transient copy
+                    if axisdict.has_key(axis.id):
+                        currentaxis = axisdict[axis.id]
+
+                        # Check that the axis has the same length and values as the saved value. If not,
+                        # create an unambiguous name in the axis dictionary.
+                        if compareaxes(axis, currentaxis):
+                            sepname = disambig(axis.id, axisdict, len(axis), compareaxes, axis)
+                            axis.name_in_file = axis.id
+                            axis.id = sepname
+
+                            # Fix boundary variable names if using suffixes.
+                            if varsuffix is not None and hasattr(axis, 'bounds'):
+                                axis.bounds += varsuffix
+                            axisdict[sepname] = axis
+                        else:
+                            axis = currentaxis
+                    else:
+                        # Fix boundary variable names if using suffixes.
+                        if varsuffix is not None and hasattr(axis, 'bounds'):
+                            axis.bounds += varsuffix
+                        axisdict[axis.id] = axis
+                    tempdomain.append(axis)
+
+            # Create a dictionary entry for the variable if not already there.
+            if var.id in boundsids:
+                boundsattrs = copyDict(var.attributes)
+                boundsdict[var.id] = [tempdomain, boundsattrs, var.typecode()]
+                continue                # Don't set a filemap entry until axes are sorted out
+            elif not vardict.has_key(var.id):
+                varattrs = copyDict(var.attributes)
+                if varsuffix is not None or aliasMapFile is not None:
+                    varattrs['name_in_file'] = var.name_in_file
+                vardict[var.id] = [tempdomain, varattrs, var.typecode()]
+            else:
+                currentdomain, attrs, tcode = vardict[var.id]
+                if comparedomains(currentdomain, tempdomain):
+                    sepname = disambig(var.id, vardict, var.size(), compareVarDictValues, (tempdomain, None))
+                    saveid = var.id
+                    varname  = var.id = sepname
+                    varattrs = copyDict(var.attributes)
+                    var.name_in_file = varattrs['name_in_file']  = saveid
+                    vardict[sepname] = [tempdomain, varattrs, var.typecode()]
+
+            # Create a filemap entry for this variable/file, if split on time or forecast
+            axisids = map(lambda x: x[0].id, var.getDomain())
+            if splitOnTime or forecast:
+                vartime = None
+                if timeid is not None:
+                    if timeid in axisids:
+                        vartime = f.axes.get(timeid)
+                    else:
+                        if verbose:
+                            print >> sys.stderr,  'Warning, time axis %s not found, -t option ignored'%timeid
+                if vartime is None:
+                    vartime = var.getTime()
+                if vartime is not None:
+                    if not overrideCalendar:
+                        calendar = vartime.getCalendar()
+                    if referenceTime is None:
+                        referenceTime = vartime.units
+                    if verbose and not forecast:
+                        print 'Setting reference time units to', referenceTime
+                    if timeIsLinear is None and timeIsVector is None:
+                        timeIsLinear = (string.lower(string.split(referenceTime)[0]) in ['hour','hours','minute','minutes','second','seconds'])
+                        if timeIsLinear and verbose:
+                            print 'Setting time representation to "linear"' #'
+                    if timeIsLinear and referenceDelta is None:
+                        if len(vartime)>1:
+                            time1 = timeindex(vartime[1], vartime.units, referenceTime, None, calendar)
+                            time0 = timeindex(vartime[0], vartime.units, referenceTime, None, calendar)
+                            referenceDelta = time1 - time0
+                        else:
+                            referenceDelta = 1
+                        if verbose:
+                            print 'Setting time delta to', referenceDelta
+
+#                    starttime = vartime[0]
+#                    endtime = vartime[-1]
+                    startindex = timeindex(vartime[0], vartime.units, referenceTime, referenceDelta, calendar)
+                    endindex = timeindex(vartime[-1], vartime.units, referenceTime, referenceDelta, calendar)
+                    if forecast:
+                        # split on forecast, hence no split on time 
+                        varentry[0] = None
+                        varentry[1] = None
+                        referenceTime = None
+                    else:
+                        varentry[0] = startindex
+                        varentry[1] = endindex
+                    varentry[5] = vartime.id
+                    varentry[7] = calendar
+
+                    if not timedict.has_key((basepath,vartime.id)):
+                        values = vartime[:]
+                        timedict[(basepath,vartime.id)] = (values, vartime.units, calendar)
+
+            if splitOnLevel:
+                varlev = None
+                if (levelid is not None) and (levelid in axisids):
+                    varlev = f.axes.get(levelid)
+                if varlev is None:
+                    varlev = var.getLevel()
+                if varlev is not None:
+                    startlev = varlev[0]
+                    if type(startlev) is numpy.ndarray:
+                        startlev = startlev[0]
+                    endlev = varlev[-1]
+                    if type(endlev) is numpy.ndarray:
+                        endlev = endlev[0]
+                    varentry[2] = startlev
+                    varentry[3] = endlev
+                    varentry[6] = varlev.id
+
+                    if not levdict.has_key((basepath, varlev.id, None)):
+                        values = varlev[:]
+                        levdict[(basepath,varlev.id)] = (values, varlev.units, None)
+
+            if forecast:
+                if not fcdict.has_key((basepath, 'fctau0')):
+                    fcdict[(basepath, 'fctau0')] = ( [fctau0], None, None )
+
+            if filemap.has_key(varname):
+                filemap[varname].append(tuple(varentry))
+            else:
+                filemap[varname] = [tuple(varentry)]
+
+            # Set boundsmap : varid => timebounds_id
+            #was if splitOnTime and vartime is not None and hasattr(vartime, "bounds") and not boundsmap.has_key(varname):
+            if splitOnTime and vartime is not None and (vartime.getBounds() is not None) and\
+                    not boundsmap.has_key(varname):
+                boundsmap[varname] = vartime.bounds
+
+            # End of loop "for varname in varnames"
+
+        f.close()
+        # End of loop "for path in fileargs"
+
+    #---------------------------------------------------------------------------------------------
+
+    # Generate varindex, by combining variable names with
+    # identical varentry values.
+    varindex = []
+    varnames = filemap.keys()
+    varnames.sort()
+    for varname in varnames:
+        varentry = filemap[varname]
+        varentry.sort()
+
+        for varindexname, varindexvalue in varindex:
+            if varentry == varindexvalue:
+                varindexname.append(varname)
+                break
+        else:
+            varindex.append(([varname],varentry))
+
+    # If a variable is not a function of one of the partitioned dimensions,
+    # no indexing is necessary: just read from the first file containing it.
+    for varlist, slicelist in varindex:
+        slice0 = slicelist[0]
+        a,b,c,d,path0,timename,levname,calen,fctau0 = slice0
+        if (a,b,c,d,fctau0)==(None,None,None,None,None):
+            del slicelist[1:]
+
+    # Change times to constant units
+    sameCalendars = 1                   # True iff all time calendars are the same
+    prevcal = None
+    if forecast:
+        # The data files' time axis is interpreted to be tau time, i.e. the forecast_period.
+        # Find the axis, and remember it in timedict.
+        for key in timedict.keys():
+            values, units, calendar = timedict[key]
+            if prevcal is not None and calendar != prevcal:
+                sameCalendars = 0
+            prevcal = calendar
+            if string.find(units," as ")==-1:
+                time0 = cdtime.reltime(values[0],units)
+            else:
+                time0 = cdtime.abstime(values[0],units)
+            offset = time0.torel( units, calendar ).value  # normally will be 0
+            values = values+offset-values[0]
+            # Switch units from "normal" time such as "days since 2001-06-01"
+            # to "basic" time such as "days", which makes sense for a forecast_period.
+            baslen = time0.units.find(' since ')
+            basic_units = time0.units[0:baslen]  # e.g. 'days'
+            fc_units = basic_units
+            timedict[key] = (values, fc_units, calendar)
+    else:       # splitOnTime is true
+        for key in timedict.keys():
+            values, units, calendar = timedict[key]
+            if prevcal is not None and calendar != prevcal:
+                sameCalendars = 0
+            prevcal = calendar
+            if string.find(units," as ")==-1:
+                time0 = cdtime.reltime(values[0],units)
+            else:
+                time0 = cdtime.abstime(values[0],units)
+            offset = time0.torel(referenceTime, calendar).value
+            values = values+offset-values[0]
+            timedict[key] = (values, referenceTime, calendar)
+
+    if sameCalendars and prevcal is not None:
+        calenkey = reverseCalendarMap[calendar]
+        
+    if forecast:
+        # For forecasts, make sure that the above has made all timedict values the same.
+        # >>> It's conceivable that different forecasts will have different time (really, tau)
+        # >>> axes.  If so, at this point we'll want to merge and mask all the time values, so
+        # >>> that all variables can have the same time axis..  For now, just raise an error
+        # >>> if there are time axis differences at this point.
+        values0,units0,calendar0 = timedict[ timedict.keys()[0] ]
+        timedict_same = all( [ ((values0==values).all() and units0==units and calendar0==calendar) \
+                               for (values,units,calendar) in timedict.values() ] )
+        if not timedict_same:
+            raise CDMSError, 'cdscan is confused about times for a forecast set'
+        # Earlier we had saved all the time axis attributes.  Keep whatever they have in common.
+        fc_time_attr = fc_time_attrs[0]
+        for fcta in fc_time_attrs:             # go through all time attributes (each a dictionary)
+            for attrn in fc_time_attr.keys():
+                if not fcta.has_key(attrn):
+                    del fc_time_attr[attrn]    # key attrn isn't in all time attributes
+                elif fcta[attrn]!=fc_time_attr[attrn]:
+                    del fc_time_attr[attrn]    # not all time attributes have the same value for attrn
+        # At this point fc_time_attr is the dictionary of those time attributes which are common to
+        # all time axes encountered (in the context of a forecast dataset).
+        # Finally, add the appropriate standard_name to it, if we haven't already gotten one from
+        # the data file.  If the file has anything other than 'forecast_period', it's wrong, but
+        # we'll stick with it anyway.
+        if not 'standard_name' in fc_time_attr.keys():
+            fc_time_attr['standard_name'] = 'forecast_period'
+        
+    # Create partitioned axes
+    axes = []
+    masterCoordToInd = {}               # varkey => (timeCoordToInd, levCoordToInd)
+    errorOccurred = 0
+    for varlist, varentry in varindex:
+
+        # Project time, level indices
+        timeproj = {}
+        levproj = {}
+        fctproj = {}
+        for time0, time1, lev0, lev1, path, timename, levname, calendar, fctau0 in varentry:
+            if timename is not None:
+                timeproj[(time0, time1)] = (path, timename)
+            if levname is not None:
+                try:
+                    levproj[(lev0, lev1)] = (path, levname)
+                except:
+                    print >> sys.stderr,  'Cannot hash level %s range (%f,%f)'%(levname,lev0,lev1)
+                    print >> sys.stderr,  type(lev0)
+                    raise
+            if fctau0 is not None:
+                fctproj[(fctau0,fctau0)] = (path, 'fctau0')
+
+        # and combine the projected indices into axes
+        timeCoordToInd = None
+        timelinCoordToInd = None
+        if splitOnTime and timename is not None:
+            fullaxis, name, partition, timeCoordToInd, units, opartition, timelinCoordToInd, errflag = \
+                      combineKeys(timeproj, timedict, timeIsLinear, referenceDelta)
+            axes.append( ( varlist,fullaxis,name,partition,timeCoordToInd,units,opartition, \
+                           timelinCoordToInd, calendar ) )
+            if errflag: errorOccurred = 1
+        levCoordToInd = None
+        if splitOnLevel and levname is not None:
+            fullaxis, name, partition, levCoordToInd, units, opartition, levlinCoordToInd, errflag = \
+                      combineKeys(levproj, levdict)
+            axes.append((varlist,fullaxis,name,partition,levCoordToInd,units,opartition,levlinCoordToInd, None))
+            if errflag: errorOccurred = 1
+        fcCoordToInd = None
+        if forecast:
+            fullaxis, name, partition, fcCoordToInd, units, opartition, fclinCoordToInd, errflag = \
+                      combineKeys(fctproj, fcdict, forecast=forecast)
+            axes.append((varlist,fullaxis,name,partition,fcCoordToInd,units,opartition,fclinCoordToInd, None))
+            if errflag: errorOccurred = 1
+            if len(timeproj)>0:     # i.e., if time is in this variable's domain.
+                # The useKeys call is like combineKeys, except that it's for a variable not partitioned
+                # among files.  It just sets up axis data and (in the context of this variable loop)
+                # propagates what's in timedict to every variable with time in its domain.
+                fullaxis, name, partition, timeCoordToInd, units, opartition, timelinCoordToInd, errflag = \
+                          useKeys(timeproj, timedict, timeIsLinear, referenceDelta)
+                axes.append( (varlist,fullaxis,name,partition,timeCoordToInd,units,opartition, \
+                              timelinCoordToInd, calendar) )
+                if errflag: errorOccurred = 1
+            
+
+        masterCoordToInd[varlist[0]] = (timeCoordToInd, levCoordToInd, timelinCoordToInd, fcCoordToInd)
+
+    if errorOccurred:
+        raise RuntimeError, 'Error(s) determining axis values - see previous message(s)'
+    
+    # Eliminate duplicate axes
+    axes2 = []
+    for vlist1, axis1, name1, partition1, coordToInd1, units1, opartition1, linCoordToInd1, calen1 in axes:
+        for vlist2, axis2, name2, partition2, coordToInd2, units2, opartition2, linCoordToInd2, calen2 in axes2:
+            if len(axis1)==len(axis2) and name1==name2 and partition1==partition2 and units1==units2 and \
+                   numpy.ma.allclose(axis1,axis2)==1 and calen1==calen2:
+                vlist2.extend(vlist1)
+                break
+        else:
+            axes2.append((copy.copy(vlist1),axis1, name1, partition1, coordToInd1, units1, opartition1, \
+                          linCoordToInd1, calen1))
+
+    # For each axis described by axis2, disambiguate its name, create the axis object, etc.
+    assignedBounds = {}
+    for vlist, axis, name, partition, coordToInd, units, opartition, linCoordToInd, calendar in axes2:
+        # print vlist, coordToInd
+        uniqname = disambig(name, axisdict, len(axis), compareaxes, axis)
+        axisobj = cdms2.createAxis(axis)
+        axisobj.name_in_file = name
+        axisobj.id = uniqname
+        axisobj.units = units
+        if forecast and axisobj.isTime():   # For forecasts, give the time axis some saved attributes.
+            for attr in fc_time_attr.keys():
+                if not hasattr(axisobj,attr):
+                    setattr(axisobj,attr,fc_time_attr[attr])
+        if timeIsLinear and axisobj.isTime():
+            axisobj.partition = numpy.ma.ravel(numpy.ma.array(opartition))
+            axisobj.length = axisobj.partition[-1]-axisobj.partition[0]
+            mopartition = numpy.array(opartition)
+            partition_length = numpy.sum(mopartition[:,1]-mopartition[:,0])
+            if partition_length<axisobj.length:
+                axisobj.partition_length = partition_length
+        elif partition is not None:
+            axisobj.partition = numpy.ma.ravel(numpy.ma.array(partition))
+        if axisobj.isTime():
+            axisobj.calendar = reverseCalendarMap[calendar]
+        # axisobj.reference_partition = str(numpy.ma.ravel(numpy.ma.array(opartition)))
+        if not axisdict.has_key(uniqname):
+            axisdict[uniqname] = axisobj
+        for varname in vlist:
+            domain, attributes, tcode = vardict[varname]
+            for i in range(len(domain)):
+                item = domain[i]
+                if type(item)==types.StringType and item==name:
+                    domain[i] = axisobj
+
+        # Add bounds variables to vardict, varindex
+        if axisobj.isTime():
+            reprVar = vlist[0]              # 'Representative' variable having this time axis
+            if boundsmap.has_key(reprVar):
+                boundsname = boundsmap[reprVar]
+                boundsinfo = boundsdict[boundsname]
+                boundsattrs = boundsinfo[1]
+                if uniqname!=name:
+                    boundsattrs['name_in_file'] = boundsname
+                    boundsname = uniqname+'_bnds'
+                if not assignedBounds.has_key(boundsname):
+                    axisobj.bounds = boundsname
+                    for varids, ranges in varindex:
+                        if reprVar in varids:
+                            varids.append(boundsname)
+                    tmpdom = boundsinfo[0]
+                    if type(tmpdom[1])==types.StringType:
+                        bndsobj = tmpdom[0]
+                        boundsdomain = (bndsobj, axisobj)
+                    else:
+                        bndsobj = tmpdom[1]
+                        boundsdomain = (axisobj, bndsobj)
+                    vardict[boundsname] = (boundsdomain, boundsinfo[1], boundsinfo[2])
+                    assignedBounds[boundsname] = 1
+
+    # Collapse like indices in filemap. For example, transform
+    # [x,[[0,10,-,-,file1], [0,10,-,-,file2]]] into
+    # [x,[[0,10,-,-,file1]]]
+    # This occurs for variables such as time boundaries, which are
+    # often duplicated in different files.
+    cdms_filemap_list = []
+    duplicatevars = {}
+    for varindexname, varindexvalue in varindex:
+        timeCoordToInd, levCoordToInd, linCoordToInd, fcCoordToInd = masterCoordToInd[varindexname[0]]
+        newslicedict = {}
+        for time0, time1, lev0, lev1, path, timename, levname, calendar, fctau0 in varindexvalue:
+            if timeCoordToInd is not None:
+                if timeIsLinear:
+                    i0, i1 = linCoordToInd[(time0, time1)]
+                else:
+                    i0, i1 = timeCoordToInd[(time0, time1)]
+            else:
+                i0 = i1 = None
+            if levCoordToInd is not None:
+                j0, j1 = levCoordToInd[(lev0, lev1)]
+            else:
+                j0 = j1 = None
+            if newslicedict.has_key((i0,i1,j0,j1,fctau0)):
+                currentpath = newslicedict[(i0,i1,j0,j1,fctau0)]
+                if not duplicatevars.has_key(tuple(varindexname)):
+                    duplicatevars[tuple(varindexname)] = (currentpath, path)
+            else:
+                newslicedict[(i0,i1,j0,j1,fctau0)] = path
+        keys = newslicedict.keys()
+        keys.sort()
+        newslicelist = []
+        for i0,i1,j0,j1,fctau0 in keys:
+            path = newslicedict[(i0,i1,j0,j1,fctau0)]
+            newslicelist.append([i0, i1, j0, j1, fctau0, path])
+        cdms_filemap_list.append([varindexname, newslicelist])
+
+    # Check if any duplicated variables are a function of longitude or latitude.
+    # Raise an exception if so.
+    illegalvars = []
+    for varlist in duplicatevars.keys():
+        for varname in varlist:
+            if (excludeList is not None) and (varname in excludeList):
+                continue
+            dom, attrs, tcode = vardict[varname]
+            for axisobj in dom:
+                if axisobj.isLatitude() or axisobj.isLongitude():
+                    path1, path2 = duplicatevars[varlist]
+                    illegalvars.append((varname, path1, path2))
+    if len(illegalvars)>0:
+        raise RuntimeError, "Variable '%s' is duplicated, and is a function of lat or lon: files %s, %s"%illegalvars[0]
+        
+    if verbose and len(duplicatevars.values())>0:
+        print >> sys.stderr,  'Duplicate variables:'
+        for varlist in duplicatevars.keys():
+            path1, path2 = duplicatevars[varlist]
+            print >> sys.stderr,  '\t',varlist,'\t',path1,'\t',path2
+
+    # Generate the cdms_filemap attribute
+    cdms_filemap = str(cdms_filemap_list)
+    cdms_filemap = string.replace(cdms_filemap, ' ', '')
+    cdms_filemap = string.replace(cdms_filemap, 'None', '-')
+    cdms_filemap = string.replace(cdms_filemap, '"', '') #"
+    cdms_filemap = string.replace(cdms_filemap, "'", '')
+
+    # Dump to XML
+    datasetnode = cdmsNode.DatasetNode(datasetid)
+    global_attrs['cdms_filemap'] = cdms_filemap
+    global_attrs['directory'] = directory
+    if sameCalendars and calenkey is not None:
+        global_attrs['calendar'] = calenkey
+    elif global_attrs.has_key('calendar'):
+        del global_attrs['calendar']
+    cleanupAttrs(global_attrs)
+    # datasetnode.setExternalDict(global_attrs)
+    setNodeDict(datasetnode, global_attrs)
+    validateAttrs(datasetnode)
+
+    timeWasOverridden = 0
+    keys = axisdict.keys()
+    keys.sort()
+    for key in keys:
+        axis = axisdict[key]
+        tcode = axis.typecode()
+        if tcode in [numpy.float32, numpy.float, numpy.int16, numpy.int32, numpy.int, numpy.intc, numpy.int8]:
+            tcode = numpy.sctype2char(tcode)
+        cdtype = cdmsNode.NumericToCdType[tcode]
+        node = cdmsNode.AxisNode(axis.id, len(axis), cdtype)
+
+        # Override the time axis as a linear axis
+        if axis.isTime() and (overrideTimeLinear is not None):
+            ttzero = overrideTimeLinear[0]
+            ttdelta = overrideTimeLinear[1]
+            axis.units = overrideTimeLinear[2]
+            if overrideTimeLinear[3] is None:
+                axis.calendar = reverseCalendarMap[axis.getCalendar()]
+            else:
+                axis.calendar = overrideTimeLinear[3]
+            linearnode = cdmsNode.LinearDataNode(ttzero, ttdelta, len(axis))
+            node.setLinearData(linearnode)
+            if verbose:
+                if timeWasOverridden==0:
+                    print "Overriding values for axis '%s'"%axis.id
+                else:
+                    print >> sys.stderr,  'Warning, overriding more than one time axis (%s)'%axis.id
+            timeWasOverridden = 1
+
+        # Represent time as linear axis using time values in the file
+        elif axis.isTime() and timeIsLinear:
+            reference_length = axis.partition[-1]-axis.partition[0]
+            linearnode = cdmsNode.LinearDataNode(axis[0], referenceDelta, reference_length)
+            node.setLinearData(linearnode)
+        else:
+            try:
+                node.setData(axis[:])
+            except cdms2.cdmsNode.NotMonotonicError:
+                if verbose:
+                    print >> sys.stderr,  'Warning: Axis values for axis %s are not monotonic:'%axis.id,axis[:]
+                    print >> sys.stderr,  'Warning: Resetting axis %s values to:'%axis.id, numpy.arange(len(axis))
+                node.setData(numpy.arange(len(axis)))
+        axisattrs = copyDict(axis.attributes)
+
+        # Make sure that new axes have attribute mods
+        if extraDict.has_key(key):
+            for eattr, evalue in extraDict[key]:
+                axisattrs[eattr] = evalue
+        cleanupAttrs(axisattrs)
+        # node.setExternalDict(axisattrs)
+        setNodeDict(node, axisattrs)
+        validateAttrs(node)
+        datasetnode.addId(axis.id, node)
+
+    keys = vardict.keys()
+    keys.sort()
+    for key in keys:
+        if (includeList is not None) and (key not in includeList):
+            continue
+        if (excludeList is not None) and (key in excludeList):
+            continue
+        domain, attrs, tcode = vardict[key]
+        if tcode in [numpy.float32, numpy.float, numpy.int16, numpy.int32, numpy.int, numpy.intc, numpy.int8]:
+            tcode = numpy.sctype2char(tcode)
+        domainNode = cdmsNode.DomainNode()
+        cdtype = cdmsNode.NumericToCdType[tcode]
+        node = cdmsNode.VariableNode(key, cdtype, domainNode)
+        cleanupAttrs(attrs)
+        # node.setExternalDict(attrs)
+        setNodeDict(node, attrs)
+        validateAttrs(node)
+        for axis in domain:
+            if hasattr(axis,'length'):
+                length = axis.length
+            else:
+                length = len(axis)
+            try:
+                elemnode = cdmsNode.DomElemNode(axis.id, 0, length)
+            except AttributeError:
+                print >> sys.stderr,  'Axis %s for variable %s does not have attribute "id"'%(`axis`, key)
+            if hasattr(axis, 'partition_length'):
+                elemnode.setExternalAttr('partition_length',axis.partition_length)
+            domainNode.add(elemnode)
+        datasetnode.addId(key, node)
+
+    # Add the Conventions attribute if not present
+    conventions = datasetnode.getExternalAttr('Conventions')
+    if conventions is None: datasetnode.setExternalAttr('Conventions','')
+    if templatestr is not None:
+        datasetnode.setExternalAttr('template',templatestr)
+
+    # Add/modify history
+    history = datasetnode.getExternalAttr('history')
+    if history is None:
+        history = ""
+    stringargv = reduce(lambda x,y: x+' '+y, argv)
+    stringtime = "\n[%s] "%timestamp()
+    if len(stringargv)<=256:
+        history += stringtime+stringargv
+    else:
+        history += stringtime+stringargv[:256]+" ..."
+    datasetnode.setExternalAttr('history',history)
+
+    ## datasetnode.validate()
+    if writeToStdout:
+        datasetnode.dump()
+    else:
+        datasetnode.dump(xmlpath)
+        if verbose:
+            print xmlpath,'written'
+
+#--------------------------------------------------------------------------------------------------------------------------
+if __name__ == '__main__':
+    main(sys.argv)
+    try:
+        from mpi4py import MPI
+        comm = MPI.Comm.Get_parent()
+        comm.send('done', dest=0)
+    except:
+        pass
+        
diff --git a/Packages/cdms2/Lib/grid.py b/Packages/cdms2/Lib/grid.py
index f11ca1764d923153698d053963009c7dd4640b80..9930f263a676ab3312f9705336ffcc4f83fc9013 100644
--- a/Packages/cdms2/Lib/grid.py
+++ b/Packages/cdms2/Lib/grid.py
@@ -496,14 +496,6 @@ class AbstractRectGrid(AbstractGrid):
         else:
             latbnds = lat.genGenericBounds()
 
-        # Stretch latitude bounds to +/- 90.0
-        if ascending:
-            latbnds[0,0] = min(latbnds[0,0],-90.0)
-            latbnds[-1,1] = max(latbnds[-1,1],90.0)
-        else:
-            latbnds[0,0] = max(latbnds[0,0],+90.0)
-            latbnds[-1,1] = min(latbnds[-1,1],-90.0)
-
         # Get longitude bounds
         lon = self.getLongitude()
         if len(lon)>1:
diff --git a/Packages/cdms2/Lib/tvariable.py b/Packages/cdms2/Lib/tvariable.py
index 27cab8156879a543d753028bb062ac01ae52f128..152875adf58480778e567c0cb7b0f59074fff31a 100644
--- a/Packages/cdms2/Lib/tvariable.py
+++ b/Packages/cdms2/Lib/tvariable.py
@@ -384,8 +384,9 @@ class TransientVariable(AbstractVariable,numpy.ma.MaskedArray):
         if n < 0: n = n + self.rank()
         if not isinstance(axis, AbstractAxis):
             raise CDMSError,"copydimension, other not an axis."
-        b = axis.getBounds()
-        mycopy = createAxis(axis[:], b)
+        isGeneric = [False]
+        b = axis.getBounds(isGeneric)
+        mycopy = createAxis(axis[:], b, genericBounds=isGeneric[0])
         mycopy.id = axis.id
         for k, v in axis.attributes.items():
            setattr(mycopy, k, v)
diff --git a/Packages/cdms2/Script/cdscan b/Packages/cdms2/Script/cdscan
deleted file mode 100755
index 59896b1b43a58b5a1192d31915eaa88cf3d13ae2..0000000000000000000000000000000000000000
--- a/Packages/cdms2/Script/cdscan
+++ /dev/null
@@ -1,1688 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import getopt
-import cdms2
-from cdms2.grid import lookupArray
-from cdms2.axis import calendarToTag, tagToCalendar
-from cdms2.cdmsobj import CdFromObject,CdString,CdScalar,CdFloat,CdDouble,CdShort,CdInt,CdLong
-import numpy
-import string
-import cdtime
-import os.path
-import copy
-import types
-from cdms2 import cdmsNode
-import re
-
-usage = """Usage:
-    cdscan [options] <files>
-
-    Scan a list of files producing a CDMS dataset in XML representation. See Notes below
-    for a more complete explanation.
-
-Arguments:
-
-    <files> is a list of file paths to scan. The files can be listed in any order, and may
-    be in multiple directories.  A file may also be a CDML dataset (.xml or .cdml), in
-    which case the dataset(s) and files are combined into a new dataset.
-
-Options:
-
-    -a alias_file: change variable names to the aliases defined in an alias file.
-                   Each line of the alias file consists of two blank separated
-                   fields: variable_id alias. 'variable_id' is the ID of the variable
-                   in the file, and 'alias' is the name that will be substituted for
-                   it in the output dataset. Only variables with entries in the alias_file
-                   are renamed.
-
-    -c calendar:   either "gregorian", "proleptic_gregorian", "julian", "noleap", or "360_day". Default:
-                   "gregorian". This option should be used with caution, as it will
-                   override any calendar information in the files.
-
-    -d dataset_id: dataset identifier. Default: "none"
-
-    -e newattr:	   Add or modify attributes of a file, variable, or
-		   axis. The form of 'newattr' is either:
-
-		   'var.attr = value' to modify a variable or attribute, or
-		   '.attr = value' to modify a global (file) attribute.
-
-		   In either case, 'value' may be quoted to preserve spaces
-		   or force the attribute to be treated as a string. If
-		   'value' is not quoted and the first character is a
-		   digit, it is converted to integer or
-		   floating-point. This option does not modify the input
-		   datafiles. See notes and examples below.
-
-    --exclude var,var,...
-                   Exclude specified variables. The argument
-                   is a comma-separated list of variables containing no blanks.
-                   In contrast to --exclude-file, this skips the variables regardless
-                   of the file(s) in which they are contained, but processes other
-                   variables in the files.
-                   Also see --include.
-
-    --exclude-file pattern
-                   Exclude files with a basename matching the regular expression pattern.
-                   In contrast to --exclude, this skips the file entirely. Multiple patterns
-                   may be listed by separating with vertical bars (e.g. abc|def ). Note
-                   that the match is to the initial part of the basename. For example, the
-                   pattern 'st' matches any basename starting with 'st'.
-
-    -f file_list:  file containing a list of absolute data file names, one per
-                   line. <files> arguments are ignored.
-
-    --forecast     generate a description of a forecast dataset.
-                   This is not compatible with the -i, -r, -t, or -l options.
-                   A file can contain data for exactly one forecast; its
-                   forecast_reference_time (aka run time, analysis time, starting time,
-                   generating time, tau=0 time) is specified by the nbdate,nbsec variables.
-                   Each file's time axis will be interpreted as the forecast_period (aka
-                   tau, the interval from the forecast_reference_time to the current time)
-                   regardless of its units, standard_name, or other attributes.
-
-    -h:            print a help message.
-
-    -i time_delta: scan time as a 'linear' dimension. This is useful if the time dimension
-                   is very long. The argument is the time delta, a float or integer.  For
-                   example, if the time delta is 6 hours, and the reference units are
-                   "hours since xxxx", set the interval delta to 6.  The default value is
-                   the difference of the first two timepoints.
-
-    --ignore-open-error:
-                   Ignore open errors. Print a warning and continue.
-
-    --include var,var,...
-                   Only include specified variables in the output. The argument
-                   is a comma-separated list of variables containing no blanks.
-                   Also see --exclude.
-
-    --include-file pattern
-                   Only include files with a basename matching the regular expression pattern.
-                   In contrast to --include, this skips files entirely if they do not
-                   match the pattern. Multiple patterns
-                   may be listed by separating with vertical bars (e.g. abc|def ). Note
-                   that the match is to the initial part of the basename. For example, the
-                   pattern 'st' matches any basename starting with 'st'.
-
-    -j:		   scan time as a vector dimension. Time values are listed
-		   individually. Turns off the -i option.
-
-    -l levels:     list of levels, comma-separated. Only specify if files are partitioned by
-                   levels.
-
-    -m levelid:    name of the vertical level dimension. The default is the name of the
-                   vertical level dimension
-
-    --notrim-lat:  Don't trim latitude values (in degrees) to the range [-90..90]. By default
-		   latitude values are trimmed.
-
-    -p template:   Compatibility with pre-V3.0 datasets. 'cdimport -h' describes template strings.
-
-    -q:            quiet mode
-
-    -r time_units: time units of the form "<units> since yyyy-mm-dd hh:mi:ss", where
-                   <units> is one of "year", "month", "day", "hour", "minute", "second".
-                   Trailing fields may be omitted. The default is the units of the first
-                   time dimension found.
-
-    -s suffix_file: Append a suffix to variable names, depending on the directory
-                   containing the data file.  This can be used to distinguish variables
-                   having the same name but generated by different models or ensemble
-                   runs. 'suffix_file' is the name of a file describing a mapping between
-                   directories and suffixes.  Each line consists of two blank-separated
-                   fields: 'directory' 'suffix'. Each file path is compared to the
-                   directories in the suffix file. If the file path is in that directory
-                   or a subdirectory, the corresponding suffix is appended to the variable
-                   IDs in the file. If more than one such directory is found, the first
-                   directory found is used. If no match is made, the variable ids are not
-                   altered.  Regular expressions can be used: see the example in the Notes
-                   section.
-
-    -t timeid:     id of the partitioned time dimension. The default is the name of the time
-                   dimension.
-
-    --time-linear tzero,delta,units[,calendar]
-                   Override the time dimensions(s) with a linear time dimension. The arguments are
-                   a comma-separated list:
-                   
-                   tzero is the initial time point, a floating-point value.
-                   delta is the time delta, floating-point.
-                   units are time units as specified in the [-r] option.
-                   calendar is optional, and is specified as in the [-c] option. If omitted, it
-                     defaults to the value specified by [-c], otherwise as specified in the file.
-
-                   Example: --time-linear '0,1,months since 1980,noleap'
-
-                   Note (6) compares this option with [-i] and [-r]
-
-    --var-locate 'var,file_pattern':
-                   Only scan a variable if the basename of the file matches the pattern. This
-                   may be used to resolve duplicate variable errors. var and file_pattern are
-                   separated by a comma, with no blanks.
-                   
-                   var is the name of the variable
-                   file_pattern is a regular expression following the Python re module syntax.e
-
-                   Example: to scan variable ps from files starting with the string 'ps_':
-                     --var-locate 'ps,ps_.*'
-
-    -x xmlfile:    XML filename. By default, output is written to standard output.
-
-Example:
-
-    cdscan -c noleap -d test -x test.xml [uv]*.nc
-    cdscan -d pcmdi_6h -i 0.25 -r 'days since 1979-1-1' *6h*.ctl
-
-Notes:
-
-    (1) The files can be in netCDF, GrADS/GRIB, HDF, or DRS format, and can be listed in
-    any order. Most commonly, the files are the result of a single experiment, and the
-    'partitioned' dimension is time. The time dimension of a variable is the coordinate
-    variable having a name that starts with 'time' or having an attribute "axis='T'". If
-    this is not the case, specify the time dimension with the -t option. The time
-    dimension should be in the form supported by cdtime. If this is not the case (or to
-    override them) use the -r option.
-
-    (2) The basic form of the command is 'cdscan <files>'. By default, the time values are
-    listed explicitly in the output XML. This can cause a problem if the time dimension is
-    very long, say for 6-hourly data. To handle this the form 'cdscan -i delta <files>'
-    may be used. This generates a compact time representation of the form <start, length,
-    delta>. An exception is raised if the time dimension for a given file is not linear.
-
-    (3) Another form of the command is 'cdscan -l lev1,lev2,..,levn <files>'. This asserts
-    that the dataset is partitioned in both time and vertical level dimensions. The level
-    dimension of a variable is the dimension having a name that starts with "lev", or
-    having an attribute "axis=Z". If this is not the case, set the level name with the -m
-    option.
-
-    (4) An example of a suffix file:
-
-    /exp/pr/ncar-a  _ncar-a
-    /exp/pr/ecm-a   _ecm-a
-    /exp/ta/ncar-a  _ncar-a
-    /exp/ta/ecm-a   _ecm-a
-
-    For all files in directory /exp/pr/ncar-a or a subdirectory, the corresponding
-    variable ids will be appended with the suffix '_ncar-a'.  Regular expressions can be
-    used, as defined in the Python 're' module. For example, The previous example can be
-    replaced with the single line:
-
-    /exp/[^/]*/([^/]*) _\g<1>
-
-    Note the use of parentheses to delimit a group. The syntax \g<n> refers to the n-th
-    group matched in the regular expression, with the first group being n=1. The string
-    [^/]* matches any sequence of characters other than a forward slash.
-
-    (5) Adding or modifying attributes with the -e option:
-
-    time.units = "days since 1979-1-1"
-
-    sets the units of all variables/axes to "Days since 1979-1-1". Note
-    that since this is done before any other processing is done, it allows
-    overriding of non-COARDS time units.
-
-    .newattr=newvalue
-
-    Set the global file attribute 'newattr' to 'newvalue'.
-
-    (6) The [--time-linear] option overrides the time values in the file(s). The resulting
-    dimension does not have any gaps. In contrast, the [-i], [-r] options use the specified
-    time units (from [-r]), and calendar from [-c] if specified, to convert the file times
-    to the new units. The resulting linear dimension may have gaps.
-
-    In either case, the files are ordered by the time values in the files.
-
-    The [--time-linear] option should be used with caution, as it is applied to all the time
-    dimensions found.
-"""
-
-# Ensure that arrays are fully printed to XML files
-numpy.set_printoptions(threshold=numpy.inf)
-
-calendarMap = tagToCalendar
-
-reverseCalendarMap = calendarToTag
-
-attrPattern = re.compile(r'\s*(\w*)\.(\w+)\s*=\s*(.*)$')
-cdms2.setNetcdfUseParallelFlag(0)
-def timestamp():
-    "Generate a timestamp."
-    import time
-    y,m,d,h,mi,s,w,dy,ds = time.gmtime(time.time())
-    return "%d-%d-%d %d:%d:%d"%(y,m,d,h,mi,s)
-
-def timeindex(value, units, basetime, delta, calendar):
-    """ Calculate (t - basetime)/delu
-    where t = reltime(value, units)
-    and delu is the time interval (delta, delunits) (e.g., 1 month).
-    """
-    if string.find(units," as ")==-1:
-        tval = cdtime.reltime(value, units)
-    else:
-        tval = cdtime.abstime(value, units)
-    newval = tval.torel(basetime, calendar)
-    if delta is None:
-        return newval.value
-    else:
-        return newval.value/delta
-
-def combineKeys(dict, typedict, timeIsLinear=0, referenceDelta = None, forecast=None):
-    """ Combine dictionary keys into an axis.
-    dict: (i,j) => (path, axisname)
-    typedict is either timedict or levdict or fcdict.
-    timeIsLinear is true iff time has a linear representation.
-    referenceDelta is only used for error checks if timeIsLinear is true.
-    """
-
-    global verbose
-
-    # Sort the projected time, level indices
-    keys = dict.keys()
-    keys.sort()
-
-    axislist = []
-    prevend = None
-    prevpath = None
-    name0 = None
-    compressPart = []
-    partition = []
-    previ = 0
-    firstunits = None
-    prevvals = None
-    coordToInd = {(None,None):(None,None)}
-    linCoordToInd = {(None,None):(None,None)}
-    iadj = None
-    errorOccurred = 0
-    for i0, i1 in keys:
-        path, name = dict[(i0, i1)]
-        if name0 is None:
-            name0 = name
-        values, units, dummy = typedict[(path,name)]
-        if firstunits is None:
-            firstunits = units
-        if prevend is not None and prevend>=i0:
-            if prevend>=i1:
-                if verbose:
-                    print >> sys.stderr,  'Warning, file %s, dimension %s contains values in file %s'%(prevpath,name,path)
-                if timeIsLinear:
-                    iind = lookupArray(prevvals, values[0])
-                    jind = lookupArray(prevvals, values[-1])
-                else:
-                    iind = lookupArray(prevvals, i0)
-                    jind = lookupArray(prevvals, i1)
-                if len(values)!=(jind-iind+1):
-                    raise RuntimeError, 'Dimension %s in files %s [len(%s)=%d], %s [len(%s)=%d], is inconsistent'%(name, prevpath, name, (jind-iind+1), path, name, len(values))
-                coordToInd[(i0,i1)] = (iind, jind)
-                
-                prevspart, prevepart = partition[-1]
-                linCoordToInd[(i0,i1)] = (prevspart+iind, prevspart+jind+1)
-                continue
-            else:                       # Fix partial overlap
-                if timeIsLinear:
-                    jind = lookupArray(prevvals, values[0])
-                else:
-                    jind = lookupArray(prevvals, i0)
-                if verbose:
-                    print >> sys.stderr,  'Warning, file %s, dimension %s overlaps file %s, value=%f'%(prevpath,name,path,prevvals[jind])
-                previ, prevj = compressPart[-1]
-                prevj = previ + jind
-                axislist[-1] = prevvals[0:jind]
-                compressPart[-1] = (previ, prevj)
-                coordToInd[(prevvals[0], prevvals[-1])] = (previ, prevj)
-                previ = prevj
-
-                prevspart, prevepart = partition[-1]
-                prevepart = prevspart + jind
-                partition[-1] = (prevspart, prevepart)
-                linCoordToInd[(prevvals[0], prevvals[-1])] = (prevspart, prevepart)
-
-        axislist.append(values)
-        prevend = i1
-        prevpath = path
-        prevj = previ+len(values)
-        compressPart.append((previ, prevj))
-        coordToInd[(i0,i1)] = (previ, prevj)
-
-        if iadj is None:                # partition has to start with 0
-            iadj = int(i0)
-        spart = int(i0) - iadj
-        epart = int(i1) + 1 - iadj
-        partition.append((spart, epart))
-        linCoordToInd[(i0,i1)] = (spart, epart)
-        if timeIsLinear and len(values)!=(epart-spart):
-            # Find the bad values
-            diffs = values[1:]-values[:-1]
-            badindices = numpy.compress(numpy.not_equal(diffs,referenceDelta),range(len(values)))
-            badvalues = numpy.take(values, badindices)
-            if verbose:
-                print >> sys.stderr,  "Error: Missing values in %s after times: %s. Set delta with the -i option or turn off linear mode with the -j option."%(path,str(badvalues))
-            errorOccurred = 1
-
-        prevvals = values
-        previ = prevj
-        
-    fullaxis = numpy.ma.concatenate(axislist)
-    return fullaxis, name0, compressPart, coordToInd, firstunits, partition, linCoordToInd, errorOccurred
-
-def useKeys(dict, typedict, timeIsLinear=0, referenceDelta = None, forecast=None):
-    """ Use dictionary keys for an axis.  This is like combineKeys (same arguments, same return values,
-    was written by simplifying combineKeys), but this doesn't do nearly so much because this is
-    for an axis where there is no splitting across files, hence partitions are not needed.
-    dict: (i,j) => (path, axisname)
-    typedict is either timedict or levdict or fcdict.
-    timeIsLinear is true iff time has a linear representation.
-    referenceDelta is only used for error checks if timeIsLinear is true.
-    """
-    global verbose
-
-    # Sort the projected time, level indices
-    keys = dict.keys()
-    keys.sort()
-
-    axislist = []
-    name0 = None
-#    compressPart = []
-    compressPart = None
-#    partition = []
-    partition = None
-#    previ = 0
-    firstunits = None
-#    coordToInd = {(None,None):(None,None)}
-#    linCoordToInd = {(None,None):(None,None)}
-    coordToInd = None
-    linCoordToInd = None
-    errorOccurred = 0
-    for i0, i1 in keys:
-        path, name = dict[(i0, i1)]
-        if name0 is None:
-            name0 = name
-        values, units, dummy = typedict[(path,name)]
-        if firstunits is None:
-            firstunits = units
-        axislist.append(values)
-#        prevj = previ+len(values)
-#        coordToInd[(i0,i1)] = (previ, prevj)
-
-    fullaxis = numpy.ma.concatenate(axislist)
-    return fullaxis, name0, compressPart, coordToInd, firstunits, partition, linCoordToInd, errorOccurred
-
-def copyDict(dict):
-    """Copy a dictionary-like object dict to a true dictionary"""
-    result = {}
-    for key in dict.keys():
-        result[key] = dict[key]
-
-    return result
-
-def disambig(name, dict, num, comparator, value):
-    """ Make an unique name from name, wrt to the keys in dictionary dict.
-    Try using num first. comparator(value,dict[name]) returns 0 if equal, 1 if not.
-    """
-    if not dict.has_key(name) or not comparator(value, dict[name]):
-        uniqname = name
-    else:
-        uniqname = '%s_%d'%(name,num)
-        if dict.has_key(uniqname) and comparator(value, dict[uniqname]):
-            trial_name = uniqname
-            for letter in string.lowercase:
-                uniqname = '%s_%s'%(trial_name,letter)
-                if not dict.has_key(uniqname) or not comparator(value, dict[uniqname]):
-                    break
-            else:
-                raise 'Cannot make axis name unique: ',name
-
-    return uniqname
-
-def compareaxes(axis1, axis2):
-    """Return 0 if equal, 1 if not"""
-    return ((len(axis1)!=len(axis2)) or not numpy.ma.allclose(axis1[:],axis2[:]))
-
-def comparedomains(domain1, domain2):
-    """Return 0 if equal, 1 if not"""
-    if len(domain1)!=len(domain2):
-        return 1
-    for i in range(len(domain1)):
-        item1 = domain1[i]
-        item2 = domain2[i]
-        if type(item1)!=type(item2):
-            return 1
-        if type(item1)==types.StringType:
-            return item1!=item2
-        elif compareaxes(item1, item2):
-            return 1
-    return 0
-
-def compareVarDictValues(val1, val2):
-    return comparedomains(val1[0], val2[0])
-
-def cleanupAttrs(attrs):
-    for attname in attrs.keys():
-        attval = attrs[attname]
-        if type(attval) is numpy.ndarray:
-            if len(attval)==1:
-                attrs[attname] = attval[0]
-            else:
-                attrs[attname] = str(attval)
-    if attrs.has_key('missing_value') and attrs['missing_value'] is None:
-        del attrs['missing_value']
-
-def validateAttrs(node):
-    """Compare attributes against DTD."""
-
-    global verbose
-
-    if hasattr(node,'datatype'):
-        parenttype = node.datatype
-    else:
-        parenttype = None
-    atts = node.getExternalDict()
-    for attname in atts.keys():
-        (attval,datatype)=atts[attname] # (XML value, datatype)
-        constraint = node.extra.get(attname)
-        if constraint is not None:
-            (scaletype,reqtype)=constraint # (CdScalar|CdArray, required type)
-            if reqtype==CdFromObject:
-                reqtype = parenttype
-            if reqtype!=datatype and datatype==CdString and scaletype==CdScalar:
-                if reqtype in (CdFloat,CdDouble) and type(attval)!=types.FloatType:
-                    try:
-                        attval = string.atof(attval)
-                    except:
-                        if verbose:
-                            print >> sys.stderr,  "Warning: %s=%s should be a float, id=%s"%(attname,attval,node.id),
-                        try:
-                            attval = string.atoi(attval)
-                            attval = float(attval)
-                            if verbose:
-                                print "(Recasting)"
-                            node.setExternalAttr(attname,attval)
-                        except:
-                            if attname in ['modulo', 'add_offset', 'scale_factor']:
-                                if verbose:
-                                    print "(Removing)"
-                                attdict = node.getExternalDict()
-                                del attdict[attname]
-                            else:
-                                if verbose:
-                                    print ""
-                elif reqtype in (CdShort,CdInt,CdLong) and type(attval)!=types.IntType:
-                    try:
-                        attval = string.atoi(attval)
-                    except:
-                        if verbose:
-                            print >> sys.stderr,  "Warning: %s=%s should be an integer, id=%s"%(attname,attval,node.id),
-                        try:
-                            attval = string.atof(attval)
-                            attval = int(attval)
-                            if verbose:
-                                print "(Recasting)"
-                            node.setExternalAttr(attname,attval)
-                        except:
-                            if verbose:
-                                print ""
-
-def cloneWithLatCheck(axis):
-    """Clone an axis, ensuring that latitudes (in degrees) are in the range [-90:90]"""
-
-    global verbose
-    global notrimlat
-
-    axisvals = origvals = axis[:]
-    if axis.isLatitude() and hasattr(axis,"units") and string.lower(axis.units[0:6])=="degree":
-        if notrimlat==0:
-            axisvals = numpy.maximum(-90.0, numpy.minimum(90.0,axisvals))
-        if not numpy.ma.allclose(axisvals, origvals) and verbose:
-            print >> sys.stderr,  "Warning: resetting latitude values: ",origvals," to: ",axisvals
-
-    b = axis.getBounds()
-    mycopy = cdms2.createAxis(copy.copy(axisvals))
-    mycopy.id = axis.id
-    try:
-        mycopy.setBounds(b)
-    except CDMSError:
-        b = mycopy.genGenericBounds()
-        mycopy.setBounds(b)
-    for k, v in axis.attributes.items():
-       setattr(mycopy, k, v)
-    return mycopy
-
-def addAttrs(fobj, eattrs):
-    """Add extra attributes to file/dataset fobj.
-    eattrs has the form [(varid,attr,value), (varid,attr,value), ...]
-    where if varid is '', set the global attribute."""
-    for evar,eattr,evalue in eattrs:
-        if evar=='':
-            fobj.__dict__[eattr] = evalue
-        else:
-            varobj = fobj[evar]
-            if varobj is not None:
-                varobj.__dict__[eattr] = evalue
-
-def setNodeDict(node, dict):
-    for key in dict.keys():
-        value = dict[key]
-        if (isinstance(value, numpy.integer) or isinstance(value, types.IntType)):
-            datatype = CdLong
-        elif (isinstance(value, numpy.floating) or isinstance(value, types.FloatType)):
-            datatype = CdDouble
-        else:
-            datatype = CdString
-        node.attribute[key]=(value,datatype)
-
-def initialize_filemap( filemap, timedict, levdict, timeid, extendDset, splitOnTime, \
-                        referenceTime, timeIsLinear, referenceDelta, splitOnLevel, \
-                        dirlen, overrideCalendar ):
-    # This function was formerly part of the body of "main".
-        # Initialize filemap : varid => (tc0, tc1, lc0, lc1, path, timeid, levid)
-        # where tc0 is the first time index relative to the reference time, tc1 the last,
-        # lc0 is the first level, lc1 the last, path is the filename, timeid is the id
-        # of the time dimension of the variable, levid is the id of the level dimension
-        # 
-        # timedict : (path, timeid) => (timearray, timeunits, calendar)
-        #
-        # levdict : (path, levelid) => (levelarray, levelunits, None)
-        #
-    initfilemap = cdms2.dataset.parseFileMap(extendDset.cdms_filemap)
-    dsetdirec = extendDset.directory
-    for namelist, slicelist in initfilemap:
-        for name in namelist:
-            var  = extendDset[name]
-            timeaxis = var.getTime()
-            if timeaxis is not None and not overrideCalendar:
-                calendar = timeaxis.getCalendar()
-            if splitOnTime and timeaxis is not None:
-                if hasattr(timeaxis, 'name_in_file'):
-                    timeid = timeaxis.name_in_file
-                else:
-                    timeid = timeaxis.id
-                if referenceTime is None:
-                    referenceTime = timeaxis.units
-                if timeIsLinear in [None,1]:
-                    timeIsLinear = timeaxis.isLinear()
-                    if timeIsLinear:
-                        if len(timeaxis)>1:
-                            referenceDelta = timeaxis[1]-timeaxis[0]
-                        else:
-                            referenceDelta = 1.0
-                    else:
-                        referenceDelta = None
-            else:
-                timeid = None
-            levelaxis = var.getLevel()
-            if splitOnLevel and levelaxis is not None:
-                if hasattr(levelaxis, 'name_in_file'):
-                    levid = levelaxis.name_in_file
-                else:
-                    levid = levelaxis.id
-            else:
-                levid = None
-
-            varmaplist = []
-            for t0, t1, lev0, lev1, path in slicelist:
-                fullpath = os.path.join(dsetdirec,path)
-                basepath = fullpath[dirlen:]
-                if t0 is not None:
-                    tc0 = timeindex(timeaxis[t0], timeaxis.units, referenceTime, referenceDelta, calendar)
-                    tc1 = timeindex(timeaxis[t1-1], timeaxis.units, referenceTime, referenceDelta, calendar)
-                    if not timedict.has_key((basepath, timeid, calendar)):
-                        values = timeaxis[t0:t1]
-                        timedict[(basepath, timeid)] = (values, timeaxis.units, calendar)
-                else:
-                    tc0 = tc1 = None
-                if lev0 is not None:
-                    lc0 = levelaxis[lev0]
-                    lc1 = levelaxis[lev1-1]
-                    if not levdict.has_key((basepath, levid, None)):
-                        values = levelaxis[lev0:lev1]
-                        levdict[(basepath, levid)] = (values, levelaxis.units, None)
-                else:
-                    lc0 = lc1 = None
-                varmaplist.append((tc0, tc1, lc0, lc1, basepath, timeid, levid, calendar))
-            if filemap.has_key(name):
-                filemap[name].extend(varmaplist)
-            else:
-                filemap[name] = varmaplist
-
-#---------------------------------------------------------------------------------------------
-
-verbose = 1
-
-def main(argv):
-
-    global verbose
-    global notrimlat
-
-    try:
-        args, lastargs = getopt.getopt( \
-            argv[1:], "a:c:d:e:f:hi:jl:m:p:qr:s:t:x:", \
-            ["include=","include-file=","exclude=","exclude-file=","forecast","time-linear=", \
-             "notrim-lat","var-locate=","ignore-open-error" ] )
-    except getopt.error:
-        print >> sys.stderr,  sys.exc_value
-        print >> sys.stderr,  usage
-        sys.exit(0)
-
-    calendar = None
-    calenkey = None
-    timeid = None
-    levelid = None
-    notrimlat = 0
-    referenceTime = None
-    referenceDelta = None
-    readFromFile = 0
-    splitOnTime = 1
-    splitOnLevel = 0
-    datasetid = "none"
-    timeIsLinear = None
-    writeToStdout = 1
-    templatestr = None
-    timeIsVector = None
-    modelMapFile = None
-    aliasMapFile = None
-    overrideCalendar = 0
-    extraAttrs = []
-    extraDict = {}
-    includeList = None
-    excludeList = None
-    overrideTimeLinear = None
-    varLocate = None
-    ignoreOpenError = False
-    excludePattern = None
-    includePattern = None
-    forecast = False
-    for flag, arg in args:
-        if flag=='-a':
-            aliasMapFile = arg
-        elif flag=='-c':
-            calenkey = string.lower(arg)
-            calendar = calendarMap[calenkey]
-            overrideCalendar = 1
-        elif flag=='-d':
-            datasetid = arg
-        elif flag=='-e':
-            matchObj = attrPattern.match(arg)
-            if matchObj is None:
-                raise RuntimeError, "Expression must have form '[var].attr=value': %s"%arg
-            matchGroups = matchObj.groups()
-            if len(matchGroups)!=3:
-                raise RuntimeError, "Expression must have form '[var].attr=value': %s"%arg
-            matchValue = matchGroups[2]
-            if len(matchValue)>0 and (matchValue[0].isdigit() or matchValue[0] in ['"',"'","-","+"]): #"
-                matcheval = eval(matchValue)
-            else:
-                matcheval = str(matchValue)
-            extraAttrs.append((matchGroups[0], matchGroups[1], matcheval))
-        elif flag=='--exclude':
-            if arg[0]=='-':
-                raise RuntimeError, "--exclude option requires an argument"
-            excludeList = string.split(arg,',')
-        elif flag=='--exclude-file':
-            excludePattern = arg
-        elif flag=='-f':
-            readFromFile = 1
-            filelistpath = arg
-        elif flag=='--forecast':  # experimental forecast mode
-            forecast = True
-            splitOnTime = 0
-            splitOnLevel = 0
-        elif flag=='-h':
-            print usage
-            sys.exit(0)
-        elif flag=='-i':
-            splitOnTime = 1
-            referenceDelta = string.atof(arg)
-            timeIsLinear = 1
-            timeIsVector = None
-        elif flag=='--ignore-open-error':
-            ignoreOpenError = True
-        elif flag=='--include':
-            if arg[0]=='-':
-                raise RuntimeError, "--include option requires an argument"
-            includeList = string.split(arg,',')
-        elif flag=='--include-file':
-            includePattern = arg
-        elif flag=='-j':
-            timeIsVector = 1
-            timeIsLinear = None
-        elif flag=='-l':
-            splitOnLevel = 1
-            levelstr = string.split(arg,',')
-            levellist = map(string.atof, levelstr)
-            levels = numpy.array(levellist)
-            levels = numpy.sort(levels)
-        elif flag=='-m':
-            levelid = arg
-            args.append(('-e','%s.axis=Z'%levelid)) # Add axis=Z attribute
-        elif flag=='--notrim-lat':
-            notrimlat = 1
-        elif flag=='-p':
-            templatestr = arg
-        elif flag=='-q':
-            verbose = 0
-        elif flag=='-r':
-            splitOnTime = 1
-            referenceTime = arg
-        elif flag=='-s':
-            modelMapFile = arg
-        elif flag=='-t':
-            splitOnTime = 1
-            timeid = arg
-            args.append(('-e','%s.axis=T'%timeid)) # Add axis=T attribute
-        elif flag=='--time-linear':
-            targlist = string.split(arg,',')
-            ttzero = string.atof(targlist[0])
-            tdelta = string.atof(targlist[1])
-            tunits = string.strip(targlist[2])
-            if len(targlist)==4:
-                tcalendar = string.strip(targlist[3])
-            else:
-                tcalendar = None
-            overrideTimeLinear = [ttzero,tdelta,tunits,tcalendar]
-        elif flag=='--var-locate':
-            if varLocate is None:
-                varLocate = {}
-            vname, pattern = string.split(arg,',')
-            varLocate[vname]=pattern
-        elif flag=='-x':
-            writeToStdout = 0
-            xmlpath = arg
-
-    # If overriding time, process time as vector so that no gaps result
-    if overrideTimeLinear is not None:
-        timeIsVector = 1
-        timeIsLinear = None
-        if overrideCalendar==1:
-            overrideTimeLinear[3]=calenkey
-
-    if verbose:
-        print 'Finding common directory ...'
-    if readFromFile:
-        f = open(filelistpath)
-        lastargs = f.readlines()
-        f.close()
-
-    # Ignore blank paths
-    realargs = []
-    for arg in lastargs:
-        sarg = string.strip(arg)
-        if len(sarg)>0:
-            realargs.append(sarg)
-    lastargs = realargs
-
-    # Split lastargs into files and datasets
-    fileargs = []
-    dsetargs = []
-    for arg in lastargs:
-        base, suffix = os.path.splitext(arg)
-        if string.lower(suffix) in ['.xml','.cdml']:
-            dsetargs.append(arg)
-        else:
-            fileargs.append(arg)
-
-    # Generate a list of pathnames for datasets
-    dsetfiles = []
-    for path in dsetargs:
-        dset = cdms2.open(path)
-        if not hasattr(dset, 'cdms_filemap'):
-            raise RuntimeError,'Dataset must have a cdms_filemap attribute: '+path
-        if not hasattr(dset, 'directory'):
-            raise RuntimeError,'Dataset must have a directory attribute: '+path
-        dsetdirec = dset.directory
-        initfilemap = cdms2.dataset.parseFileMap(dset.cdms_filemap)
-        for namelist, slicelist in initfilemap:
-            for t0, t1, lev0, lev1, path in slicelist:
-                dsetfiles.append(os.path.join(dsetdirec, path))
-    augmentedArgs = fileargs + dsetfiles
-
-    # Find the common directory
-    directory = os.path.commonprefix(augmentedArgs)
-    firstpath = augmentedArgs[0][len(directory):]
-    if not os.path.isfile(os.path.join(directory,firstpath)):
-        dnew = os.path.dirname(directory)
-        if len(dnew)>0 and directory[len(dnew)]=='/':
-            directory = dnew+'/'
-        else:
-            directory = dnew
-    if verbose:
-        print 'Common directory:',directory
-
-    dirlen = len(directory)
-
-    if templatestr is not None:
-        if os.path.isabs(templatestr):
-            templatestr = templatestr[dirlen:]
-        templatere, ignore = cdms2.cdmsobj.templateToRegex(templatestr)
-        template = re.compile(templatere+'$')
-    else:
-        template = None
-
-    axisdict = {}
-    vardict = {}
-    filemap = {}
-    timedict = {}
-    levdict = {}
-    fcdict = {}
-    global_attrs = None
-    fctau0 = None
-
-    if modelMapFile is not None:
-        mfile = open(modelMapFile)
-        modelMap = {}
-        modelDirs = []
-        for line in mfile.readlines():
-            mdirec, model = string.split(line)
-            modelMap[mdirec] = model
-            modelDirs.append(mdirec)
-        mfile.close()
-
-    if aliasMapFile is not None:
-        afile = open(aliasMapFile)
-        aliasMap = {}
-        for line in afile.readlines():
-            if line[0] not in ["'",'"']: #"
-                varid, alias = string.split(line)
-            else:
-                dummy, varid, alias = string.split(line,line[0])
-                alias = string.strip(alias)
-            aliasMap[varid] = alias
-        afile.close()
-
-    # Save extra attribute information for new axes
-    for evar, eattr, evalue in extraAttrs:
-        if evar=='':
-            continue
-        if extraDict.has_key(evar):
-            curval = extraDict[evar]
-            curval.append((eattr,evalue))
-        else:
-            extraDict[evar] = [(eattr,evalue)]
-
-    #---------------------------------------------------------------------------------------------
-    # Initialize dictionaries if adding to an existing dataset
-    if verbose and len(dsetargs)>0:
-        print 'Scanning datasets ...'
-    for extendPath in dsetargs:
-        if verbose:
-            print extendPath
-        extendDset = cdms2.open(extendPath)
-
-        # Add/modify attributes
-        addAttrs(extendDset, extraAttrs)
-
-        # Copy the global attribute dictionary if necessary. Note that copy.copy
-        # can't be used here, since .attributes is now a 'fake' dictionary.
-        if global_attrs is None:
-            global_attrs = copyDict(extendDset.attributes)
-
-        # Initialize filemap : varid => (tc0, tc1, lc0, lc1, path, timeid, levid)
-        # where tc0 is the first time index relative to the reference time, tc1 the last,
-        # lc0 is the first level, lc1 the last, path is the filename, timeid is the id
-        # of the time dimension of the variable, levid is the id of the level dimension
-        # 
-        # timedict : (path, timeid) => (timearray, timeunits, calendar)
-        #
-        # levdict : (path, levelid) => (levelarray, levelunits, None)
-        #
-        initialize_filemap( filemap, timedict, levdict, timeid, extendDset, splitOnTime, \
-                            referenceTime, timeIsLinear, referenceDelta, splitOnLevel, \
-                            dirlen, overrideCalendar )
-
-        # axisdict : id => transient_axis
-        #   for non-partitioned axes only
-        #
-        tempmap = {}
-        for axis in extendDset.axes.values():
-            if not ( (splitOnTime and (axis.isTime() or axis.id==timeid)) or \
-                     (splitOnLevel and (axis.isLevel() or axis.id==levelid)) ):
-                axis = cloneWithLatCheck(axis)
-                if axisdict.has_key(axis.id):
-                    currentaxis = axisdict[axis.id]
-
-                    # Check that the axis has the same length and values as the saved value. If not,
-                    # create an unambiguous name in the axis dictionary.
-                    if compareaxes(axis, currentaxis):
-                        sepname = disambig(axis.id, axisdict, len(axis), compareaxes, axis)
-                        axis.name_in_file = axis.id
-                        oldid = axis.id
-                        axis.id = sepname
-                        axisdict[sepname] = axis
-                        tempmap[oldid] = sepname
-                else:
-                    axisdict[axis.id] = axis
-
-        # vardict : varid => [domain, attributeDict, typecode]
-        #   where domain = [axis_or_id, axis_or_id,...]
-        #   and axis_or_id is the id of a partitioned dimension, or
-        #   the transient axis object associated with a non-partitioned dimension
-        #
-        for var in extendDset.variables.values():
-            tempdomain = []
-            for id in var.getAxisIds():
-                if tempmap.has_key(id):
-                    id = tempmap[id]
-                if axisdict.has_key(id):
-                    tempdomain.append(axisdict[id])
-                else:
-                    axis = extendDset[id]
-                    if hasattr(axis,'name_in_file'):
-                        id = axis.name_in_file
-                    tempdomain.append(id)
-            varattrs = copyDict(var.attributes)
-            vardict[var.id] = [tempdomain, varattrs, var.typecode()]
-
-        extendDset.close()
-
-        # end of loop "for extendPath in dsetargs"
-
-    #---------------------------------------------------------------------------------------------
-    if verbose:
-        print 'Scanning files ...'
-
-    boundsmap = {}                      # boundsmap : varid => timebounds_id
-    boundsdict = {}                     # Same as vardict for time bounds
-    for path in fileargs:
-        path = string.strip(path)
-
-        # Check if the path is included
-        if includePattern is not None:
-            base = os.path.basename(path)
-            mobj = re.match(includePattern, base)
-            if mobj is None:
-                continue
-
-        # Check if the path is excluded
-        if excludePattern is not None:
-            base = os.path.basename(path)
-            mobj = re.match(excludePattern, base)
-            if mobj is not None:
-                continue
-
-        if verbose:
-            print path
-        try:
-            f = cdms2.open(path)
-        except:
-            if not ignoreOpenError:
-                raise RuntimeError,'Error opening file '+path
-            else:
-                print >> sys.stderr,  'Warning: cannot open file, skipping: %s'%path
-                continue
-
-        # Add/modify attributes
-        addAttrs(f, extraAttrs)
-
-        # Determine the variable ID suffix, if any
-        varsuffix = None
-        if modelMapFile is not None:
-            for direc in modelDirs:
-                mo = re.match(direc, path)
-                if mo is not None:
-                    suffixPattern = modelMap[direc]
-                    def gensuffix(m, mo=mo):
-                        i = string.atoi(m.group(1))
-                        return mo.group(i)
-                    varsuffix = re.sub(r'\\g<(\d)>', gensuffix, suffixPattern)
-                    break
-
-        # Copy the global attribute dictionary if necessary. Note that copy.copy
-        # can't be used here, since .attributes is now a 'fake' dictionary.
-        if global_attrs is None:
-            global_attrs = copyDict(f.attributes)
-
-        basepath = path[dirlen:]
-        if template is not None and template.match(basepath) is None:
-            if verbose:
-                print >> sys.stderr,  'Warning: path %s does not match template %s'%(basepath, templatestr)
-
-        # Find time boundary variables
-        boundsids = []
-        if splitOnTime:
-            tmpdict = {}
-            for axisname in f.axes.keys():
-                axis = f[axisname]
-                #was if axis.isTime() and hasattr(axis, 'bounds'):
-                if axis.isTime() and (axis.getBounds() is not None):
-                    tmpdict[axis.bounds] = 1
-            boundsids = tmpdict.keys()
-
-        # For forecasts, get the time at which the forecast begins (tau=0) which
-        # is nbdate,nbsec
-        if forecast:
-            nbdate = numpy.int( f('nbdate') )  # f('nbdate') is numpy.int32 which gets truncated
-            nbsec = f('nbsec')
-            fctau0 = nbdate*100000 + nbsec  # hopefully nbsec<(seconds per day)=86400<100000
-            fctau0time = cdtime.abstime( nbdate,"day as %Y%m%d" )
-            fctau0time = fctau0time.add( nbsec, cdtime.Seconds )  # fctau0 as type comptime
-            fc_time_attrs = []
-
-        varnames = f.variables.keys()
-
-        # Try to force all axes to be included, but only small ones, length<100.
-        # This section was motivated by a need to preserve the cloud axes isccp_prs,isccp_tau.
-        # If we ever need to preserve longer axes as well, we could create one variable per axis...
-        crude_var_axes = [ [ ax[0] for ax in var.getDomain() ] for var in f.variables.values() ]
-        var_axes = set().union( *crude_var_axes )
-        other_axes = list( set(f.axes.values()) - var_axes )
-        if len(other_axes)>0:
-            other_axes = [ax for ax in other_axes if len(ax)<100]
-            other_axes.sort( key=(lambda ax:ax.id) )
-            axisvar = cdms2.createVariable( numpy.ones([len(ax) for ax in other_axes]),
-                                            axes=other_axes, id='allaxesdummy')
-            axisvar.autoApiInfo = None    # all CdmsObj objects have this attribute, but for unknown
-            # reasons datasetnode.dump() fails trying to dump this attribute's default value (jfp)
-            varnames.append( axisvar.id )
-        # ...try to force all axes to be considered
-
-        varnames.sort()
-        for varname in varnames:
-
-            # If --var-locate is specified for the variable, match the basename before processing
-            if varLocate is not None and varLocate.has_key(varname):
-                varpattern = varLocate[varname]
-                base = os.path.basename(path)
-                mobj = re.match(varpattern, base)
-                if mobj is None:
-                    continue
-
-            # was var = f.variables[varname]
-            if varname=='allaxesdummy':
-                var = axisvar
-            else:
-                var = f.variables[varname]
-
-            # Reset the variable ID to any specified alias
-            if aliasMapFile is not None:
-                varalias = aliasMap.get(var.id)
-                if varalias is not None:
-                    var.name_in_file = var.id
-                    var.id = varalias
-                    varname = varalias
-
-            # Append a suffix to the variable ID, if applicable
-            if varsuffix is not None:
-                if not hasattr(var, 'name_in_file'):
-                    var.name_in_file = var.id
-                var.id += varsuffix
-                varname += varsuffix
-
-            varentry = [None]*9         # [timestart, timeend, levstart, levend, path, timeid, levid, calendar, fctau0]
-            varentry[4] = basepath
-            varentry[8] = fctau0
-
-            # Generate a temporary domain entry, and
-            # create axis dictionary entries.
-            domain = var.getDomain()
-            if forecast:
-                tempdomain = ['fctau0']
-            else:
-                tempdomain = []         # List of axis names and/or objects (if not partitioned)
-            for axis, start, length, truelen in domain:
-                if (splitOnTime and (axis.isTime() or axis.id==timeid)) or \
-                   (splitOnLevel and (axis.isLevel() or axis.id==levelid)):
-                    tempdomain.append(axis.id)
-                elif forecast and  (axis.isTime() or axis.id==timeid):
-                    # time axis isn't split but needs special treatment for forecasts
-                    tempdomain.append(axis.id)
-                    fc_time_attrs.append(axis.attributes)
-                else:
-                    axis = cloneWithLatCheck(axis) # Transient copy
-                    if axisdict.has_key(axis.id):
-                        currentaxis = axisdict[axis.id]
-
-                        # Check that the axis has the same length and values as the saved value. If not,
-                        # create an unambiguous name in the axis dictionary.
-                        if compareaxes(axis, currentaxis):
-                            sepname = disambig(axis.id, axisdict, len(axis), compareaxes, axis)
-                            axis.name_in_file = axis.id
-                            axis.id = sepname
-
-                            # Fix boundary variable names if using suffixes.
-                            if varsuffix is not None and hasattr(axis, 'bounds'):
-                                axis.bounds += varsuffix
-                            axisdict[sepname] = axis
-                        else:
-                            axis = currentaxis
-                    else:
-                        # Fix boundary variable names if using suffixes.
-                        if varsuffix is not None and hasattr(axis, 'bounds'):
-                            axis.bounds += varsuffix
-                        axisdict[axis.id] = axis
-                    tempdomain.append(axis)
-
-            # Create a dictionary entry for the variable if not already there.
-            if var.id in boundsids:
-                boundsattrs = copyDict(var.attributes)
-                boundsdict[var.id] = [tempdomain, boundsattrs, var.typecode()]
-                continue                # Don't set a filemap entry until axes are sorted out
-            elif not vardict.has_key(var.id):
-                varattrs = copyDict(var.attributes)
-                if varsuffix is not None or aliasMapFile is not None:
-                    varattrs['name_in_file'] = var.name_in_file
-                vardict[var.id] = [tempdomain, varattrs, var.typecode()]
-            else:
-                currentdomain, attrs, tcode = vardict[var.id]
-                if comparedomains(currentdomain, tempdomain):
-                    sepname = disambig(var.id, vardict, var.size(), compareVarDictValues, (tempdomain, None))
-                    saveid = var.id
-                    varname  = var.id = sepname
-                    varattrs = copyDict(var.attributes)
-                    var.name_in_file = varattrs['name_in_file']  = saveid
-                    vardict[sepname] = [tempdomain, varattrs, var.typecode()]
-
-            # Create a filemap entry for this variable/file, if split on time or forecast
-            axisids = map(lambda x: x[0].id, var.getDomain())
-            if splitOnTime or forecast:
-                vartime = None
-                if timeid is not None:
-                    if timeid in axisids:
-                        vartime = f.axes.get(timeid)
-                    else:
-                        if verbose:
-                            print >> sys.stderr,  'Warning, time axis %s not found, -t option ignored'%timeid
-                if vartime is None:
-                    vartime = var.getTime()
-                if vartime is not None:
-                    if not overrideCalendar:
-                        calendar = vartime.getCalendar()
-                    if referenceTime is None:
-                        referenceTime = vartime.units
-                    if verbose and not forecast:
-                        print 'Setting reference time units to', referenceTime
-                    if timeIsLinear is None and timeIsVector is None:
-                        timeIsLinear = (string.lower(string.split(referenceTime)[0]) in ['hour','hours','minute','minutes','second','seconds'])
-                        if timeIsLinear and verbose:
-                            print 'Setting time representation to "linear"' #'
-                    if timeIsLinear and referenceDelta is None:
-                        if len(vartime)>1:
-                            time1 = timeindex(vartime[1], vartime.units, referenceTime, None, calendar)
-                            time0 = timeindex(vartime[0], vartime.units, referenceTime, None, calendar)
-                            referenceDelta = time1 - time0
-                        else:
-                            referenceDelta = 1
-                        if verbose:
-                            print 'Setting time delta to', referenceDelta
-
-#                    starttime = vartime[0]
-#                    endtime = vartime[-1]
-                    startindex = timeindex(vartime[0], vartime.units, referenceTime, referenceDelta, calendar)
-                    endindex = timeindex(vartime[-1], vartime.units, referenceTime, referenceDelta, calendar)
-                    if forecast:
-                        # split on forecast, hence no split on time 
-                        varentry[0] = None
-                        varentry[1] = None
-                        referenceTime = None
-                    else:
-                        varentry[0] = startindex
-                        varentry[1] = endindex
-                    varentry[5] = vartime.id
-                    varentry[7] = calendar
-
-                    if not timedict.has_key((basepath,vartime.id)):
-                        values = vartime[:]
-                        timedict[(basepath,vartime.id)] = (values, vartime.units, calendar)
-
-            if splitOnLevel:
-                varlev = None
-                if (levelid is not None) and (levelid in axisids):
-                    varlev = f.axes.get(levelid)
-                if varlev is None:
-                    varlev = var.getLevel()
-                if varlev is not None:
-                    startlev = varlev[0]
-                    if type(startlev) is numpy.ndarray:
-                        startlev = startlev[0]
-                    endlev = varlev[-1]
-                    if type(endlev) is numpy.ndarray:
-                        endlev = endlev[0]
-                    varentry[2] = startlev
-                    varentry[3] = endlev
-                    varentry[6] = varlev.id
-
-                    if not levdict.has_key((basepath, varlev.id, None)):
-                        values = varlev[:]
-                        levdict[(basepath,varlev.id)] = (values, varlev.units, None)
-
-            if forecast:
-                if not fcdict.has_key((basepath, 'fctau0')):
-                    fcdict[(basepath, 'fctau0')] = ( [fctau0], None, None )
-
-            if filemap.has_key(varname):
-                filemap[varname].append(tuple(varentry))
-            else:
-                filemap[varname] = [tuple(varentry)]
-
-            # Set boundsmap : varid => timebounds_id
-            #was if splitOnTime and vartime is not None and hasattr(vartime, "bounds") and not boundsmap.has_key(varname):
-            if splitOnTime and vartime is not None and (vartime.getBounds() is not None) and\
-                    not boundsmap.has_key(varname):
-                boundsmap[varname] = vartime.bounds
-
-            # End of loop "for varname in varnames"
-
-        f.close()
-        # End of loop "for path in fileargs"
-
-    #---------------------------------------------------------------------------------------------
-
-    # Generate varindex, by combining variable names with
-    # identical varentry values.
-    varindex = []
-    varnames = filemap.keys()
-    varnames.sort()
-    for varname in varnames:
-        varentry = filemap[varname]
-        varentry.sort()
-
-        for varindexname, varindexvalue in varindex:
-            if varentry == varindexvalue:
-                varindexname.append(varname)
-                break
-        else:
-            varindex.append(([varname],varentry))
-
-    # If a variable is not a function of one of the partitioned dimensions,
-    # no indexing is necessary: just read from the first file containing it.
-    for varlist, slicelist in varindex:
-        slice0 = slicelist[0]
-        a,b,c,d,path0,timename,levname,calen,fctau0 = slice0
-        if (a,b,c,d,fctau0)==(None,None,None,None,None):
-            del slicelist[1:]
-
-    # Change times to constant units
-    sameCalendars = 1                   # True iff all time calendars are the same
-    prevcal = None
-    if forecast:
-        # The data files' time axis is interpreted to be tau time, i.e. the forecast_period.
-        # Find the axis, and remember it in timedict.
-        for key in timedict.keys():
-            values, units, calendar = timedict[key]
-            if prevcal is not None and calendar != prevcal:
-                sameCalendars = 0
-            prevcal = calendar
-            if string.find(units," as ")==-1:
-                time0 = cdtime.reltime(values[0],units)
-            else:
-                time0 = cdtime.abstime(values[0],units)
-            offset = time0.torel( units, calendar ).value  # normally will be 0
-            values = values+offset-values[0]
-            # Switch units from "normal" time such as "days since 2001-06-01"
-            # to "basic" time such as "days", which makes sense for a forecast_period.
-            baslen = time0.units.find(' since ')
-            basic_units = time0.units[0:baslen]  # e.g. 'days'
-            fc_units = basic_units
-            timedict[key] = (values, fc_units, calendar)
-    else:       # splitOnTime is true
-        for key in timedict.keys():
-            values, units, calendar = timedict[key]
-            if prevcal is not None and calendar != prevcal:
-                sameCalendars = 0
-            prevcal = calendar
-            if string.find(units," as ")==-1:
-                time0 = cdtime.reltime(values[0],units)
-            else:
-                time0 = cdtime.abstime(values[0],units)
-            offset = time0.torel(referenceTime, calendar).value
-            values = values+offset-values[0]
-            timedict[key] = (values, referenceTime, calendar)
-
-    if sameCalendars and prevcal is not None:
-        calenkey = reverseCalendarMap[calendar]
-        
-    if forecast:
-        # For forecasts, make sure that the above has made all timedict values the same.
-        # >>> It's conceivable that different forecasts will have different time (really, tau)
-        # >>> axes.  If so, at this point we'll want to merge and mask all the time values, so
-        # >>> that all variables can have the same time axis..  For now, just raise an error
-        # >>> if there are time axis differences at this point.
-        values0,units0,calendar0 = timedict[ timedict.keys()[0] ]
-        timedict_same = all( [ ((values0==values).all() and units0==units and calendar0==calendar) \
-                               for (values,units,calendar) in timedict.values() ] )
-        if not timedict_same:
-            raise CDMSError, 'cdscan is confused about times for a forecast set'
-        # Earlier we had saved all the time axis attributes.  Keep whatever they have in common.
-        fc_time_attr = fc_time_attrs[0]
-        for fcta in fc_time_attrs:             # go through all time attributes (each a dictionary)
-            for attrn in fc_time_attr.keys():
-                if not fcta.has_key(attrn):
-                    del fc_time_attr[attrn]    # key attrn isn't in all time attributes
-                elif fcta[attrn]!=fc_time_attr[attrn]:
-                    del fc_time_attr[attrn]    # not all time attributes have the same value for attrn
-        # At this point fc_time_attr is the dictionary of those time attributes which are common to
-        # all time axes encountered (in the context of a forecast dataset).
-        # Finally, add the appropriate standard_name to it, if we haven't already gotten one from
-        # the data file.  If the file has anything other than 'forecast_period', it's wrong, but
-        # we'll stick with it anyway.
-        if not 'standard_name' in fc_time_attr.keys():
-            fc_time_attr['standard_name'] = 'forecast_period'
-        
-    # Create partitioned axes
-    axes = []
-    masterCoordToInd = {}               # varkey => (timeCoordToInd, levCoordToInd)
-    errorOccurred = 0
-    for varlist, varentry in varindex:
-
-        # Project time, level indices
-        timeproj = {}
-        levproj = {}
-        fctproj = {}
-        for time0, time1, lev0, lev1, path, timename, levname, calendar, fctau0 in varentry:
-            if timename is not None:
-                timeproj[(time0, time1)] = (path, timename)
-            if levname is not None:
-                try:
-                    levproj[(lev0, lev1)] = (path, levname)
-                except:
-                    print >> sys.stderr,  'Cannot hash level %s range (%f,%f)'%(levname,lev0,lev1)
-                    print >> sys.stderr,  type(lev0)
-                    raise
-            if fctau0 is not None:
-                fctproj[(fctau0,fctau0)] = (path, 'fctau0')
-
-        # and combine the projected indices into axes
-        timeCoordToInd = None
-        timelinCoordToInd = None
-        if splitOnTime and timename is not None:
-            fullaxis, name, partition, timeCoordToInd, units, opartition, timelinCoordToInd, errflag = \
-                      combineKeys(timeproj, timedict, timeIsLinear, referenceDelta)
-            axes.append( ( varlist,fullaxis,name,partition,timeCoordToInd,units,opartition, \
-                           timelinCoordToInd, calendar ) )
-            if errflag: errorOccurred = 1
-        levCoordToInd = None
-        if splitOnLevel and levname is not None:
-            fullaxis, name, partition, levCoordToInd, units, opartition, levlinCoordToInd, errflag = \
-                      combineKeys(levproj, levdict)
-            axes.append((varlist,fullaxis,name,partition,levCoordToInd,units,opartition,levlinCoordToInd, None))
-            if errflag: errorOccurred = 1
-        fcCoordToInd = None
-        if forecast:
-            fullaxis, name, partition, fcCoordToInd, units, opartition, fclinCoordToInd, errflag = \
-                      combineKeys(fctproj, fcdict, forecast=forecast)
-            axes.append((varlist,fullaxis,name,partition,fcCoordToInd,units,opartition,fclinCoordToInd, None))
-            if errflag: errorOccurred = 1
-            if len(timeproj)>0:     # i.e., if time is in this variable's domain.
-                # The useKeys call is like combineKeys, except that it's for a variable not partitioned
-                # among files.  It just sets up axis data and (in the context of this variable loop)
-                # propagates what's in timedict to every variable with time in its domain.
-                fullaxis, name, partition, timeCoordToInd, units, opartition, timelinCoordToInd, errflag = \
-                          useKeys(timeproj, timedict, timeIsLinear, referenceDelta)
-                axes.append( (varlist,fullaxis,name,partition,timeCoordToInd,units,opartition, \
-                              timelinCoordToInd, calendar) )
-                if errflag: errorOccurred = 1
-            
-
-        masterCoordToInd[varlist[0]] = (timeCoordToInd, levCoordToInd, timelinCoordToInd, fcCoordToInd)
-
-    if errorOccurred:
-        raise RuntimeError, 'Error(s) determining axis values - see previous message(s)'
-    
-    # Eliminate duplicate axes
-    axes2 = []
-    for vlist1, axis1, name1, partition1, coordToInd1, units1, opartition1, linCoordToInd1, calen1 in axes:
-        for vlist2, axis2, name2, partition2, coordToInd2, units2, opartition2, linCoordToInd2, calen2 in axes2:
-            if len(axis1)==len(axis2) and name1==name2 and partition1==partition2 and units1==units2 and \
-                   numpy.ma.allclose(axis1,axis2)==1 and calen1==calen2:
-                vlist2.extend(vlist1)
-                break
-        else:
-            axes2.append((copy.copy(vlist1),axis1, name1, partition1, coordToInd1, units1, opartition1, \
-                          linCoordToInd1, calen1))
-
-    # For each axis described by axis2, disambiguate its name, create the axis object, etc.
-    assignedBounds = {}
-    for vlist, axis, name, partition, coordToInd, units, opartition, linCoordToInd, calendar in axes2:
-        # print vlist, coordToInd
-        uniqname = disambig(name, axisdict, len(axis), compareaxes, axis)
-        axisobj = cdms2.createAxis(axis)
-        axisobj.name_in_file = name
-        axisobj.id = uniqname
-        axisobj.units = units
-        if forecast and axisobj.isTime():   # For forecasts, give the time axis some saved attributes.
-            for attr in fc_time_attr.keys():
-                if not hasattr(axisobj,attr):
-                    setattr(axisobj,attr,fc_time_attr[attr])
-        if timeIsLinear and axisobj.isTime():
-            axisobj.partition = numpy.ma.ravel(numpy.ma.array(opartition))
-            axisobj.length = axisobj.partition[-1]-axisobj.partition[0]
-            mopartition = numpy.array(opartition)
-            partition_length = numpy.sum(mopartition[:,1]-mopartition[:,0])
-            if partition_length<axisobj.length:
-                axisobj.partition_length = partition_length
-        elif partition is not None:
-            axisobj.partition = numpy.ma.ravel(numpy.ma.array(partition))
-        if axisobj.isTime():
-            axisobj.calendar = reverseCalendarMap[calendar]
-        # axisobj.reference_partition = str(numpy.ma.ravel(numpy.ma.array(opartition)))
-        if not axisdict.has_key(uniqname):
-            axisdict[uniqname] = axisobj
-        for varname in vlist:
-            domain, attributes, tcode = vardict[varname]
-            for i in range(len(domain)):
-                item = domain[i]
-                if type(item)==types.StringType and item==name:
-                    domain[i] = axisobj
-
-        # Add bounds variables to vardict, varindex
-        if axisobj.isTime():
-            reprVar = vlist[0]              # 'Representative' variable having this time axis
-            if boundsmap.has_key(reprVar):
-                boundsname = boundsmap[reprVar]
-                boundsinfo = boundsdict[boundsname]
-                boundsattrs = boundsinfo[1]
-                if uniqname!=name:
-                    boundsattrs['name_in_file'] = boundsname
-                    boundsname = uniqname+'_bnds'
-                if not assignedBounds.has_key(boundsname):
-                    axisobj.bounds = boundsname
-                    for varids, ranges in varindex:
-                        if reprVar in varids:
-                            varids.append(boundsname)
-                    tmpdom = boundsinfo[0]
-                    if type(tmpdom[1])==types.StringType:
-                        bndsobj = tmpdom[0]
-                        boundsdomain = (bndsobj, axisobj)
-                    else:
-                        bndsobj = tmpdom[1]
-                        boundsdomain = (axisobj, bndsobj)
-                    vardict[boundsname] = (boundsdomain, boundsinfo[1], boundsinfo[2])
-                    assignedBounds[boundsname] = 1
-
-    # Collapse like indices in filemap. For example, transform
-    # [x,[[0,10,-,-,file1], [0,10,-,-,file2]]] into
-    # [x,[[0,10,-,-,file1]]]
-    # This occurs for variables such as time boundaries, which are
-    # often duplicated in different files.
-    cdms_filemap_list = []
-    duplicatevars = {}
-    for varindexname, varindexvalue in varindex:
-        timeCoordToInd, levCoordToInd, linCoordToInd, fcCoordToInd = masterCoordToInd[varindexname[0]]
-        newslicedict = {}
-        for time0, time1, lev0, lev1, path, timename, levname, calendar, fctau0 in varindexvalue:
-            if timeCoordToInd is not None:
-                if timeIsLinear:
-                    i0, i1 = linCoordToInd[(time0, time1)]
-                else:
-                    i0, i1 = timeCoordToInd[(time0, time1)]
-            else:
-                i0 = i1 = None
-            if levCoordToInd is not None:
-                j0, j1 = levCoordToInd[(lev0, lev1)]
-            else:
-                j0 = j1 = None
-            if newslicedict.has_key((i0,i1,j0,j1,fctau0)):
-                currentpath = newslicedict[(i0,i1,j0,j1,fctau0)]
-                if not duplicatevars.has_key(tuple(varindexname)):
-                    duplicatevars[tuple(varindexname)] = (currentpath, path)
-            else:
-                newslicedict[(i0,i1,j0,j1,fctau0)] = path
-        keys = newslicedict.keys()
-        keys.sort()
-        newslicelist = []
-        for i0,i1,j0,j1,fctau0 in keys:
-            path = newslicedict[(i0,i1,j0,j1,fctau0)]
-            newslicelist.append([i0, i1, j0, j1, fctau0, path])
-        cdms_filemap_list.append([varindexname, newslicelist])
-
-    # Check if any duplicated variables are a function of longitude or latitude.
-    # Raise an exception if so.
-    illegalvars = []
-    for varlist in duplicatevars.keys():
-        for varname in varlist:
-            if (excludeList is not None) and (varname in excludeList):
-                continue
-            dom, attrs, tcode = vardict[varname]
-            for axisobj in dom:
-                if axisobj.isLatitude() or axisobj.isLongitude():
-                    path1, path2 = duplicatevars[varlist]
-                    illegalvars.append((varname, path1, path2))
-    if len(illegalvars)>0:
-        raise RuntimeError, "Variable '%s' is duplicated, and is a function of lat or lon: files %s, %s"%illegalvars[0]
-        
-    if verbose and len(duplicatevars.values())>0:
-        print >> sys.stderr,  'Duplicate variables:'
-        for varlist in duplicatevars.keys():
-            path1, path2 = duplicatevars[varlist]
-            print >> sys.stderr,  '\t',varlist,'\t',path1,'\t',path2
-
-    # Generate the cdms_filemap attribute
-    cdms_filemap = str(cdms_filemap_list)
-    cdms_filemap = string.replace(cdms_filemap, ' ', '')
-    cdms_filemap = string.replace(cdms_filemap, 'None', '-')
-    cdms_filemap = string.replace(cdms_filemap, '"', '') #"
-    cdms_filemap = string.replace(cdms_filemap, "'", '')
-
-    # Dump to XML
-    datasetnode = cdmsNode.DatasetNode(datasetid)
-    global_attrs['cdms_filemap'] = cdms_filemap
-    global_attrs['directory'] = directory
-    if sameCalendars and calenkey is not None:
-        global_attrs['calendar'] = calenkey
-    elif global_attrs.has_key('calendar'):
-        del global_attrs['calendar']
-    cleanupAttrs(global_attrs)
-    # datasetnode.setExternalDict(global_attrs)
-    setNodeDict(datasetnode, global_attrs)
-    validateAttrs(datasetnode)
-
-    timeWasOverridden = 0
-    keys = axisdict.keys()
-    keys.sort()
-    for key in keys:
-        axis = axisdict[key]
-        tcode = axis.typecode()
-        if tcode in [numpy.float32, numpy.float, numpy.int16, numpy.int32, numpy.int, numpy.intc, numpy.int8]:
-            tcode = numpy.sctype2char(tcode)
-        cdtype = cdmsNode.NumericToCdType[tcode]
-        node = cdmsNode.AxisNode(axis.id, len(axis), cdtype)
-
-        # Override the time axis as a linear axis
-        if axis.isTime() and (overrideTimeLinear is not None):
-            ttzero = overrideTimeLinear[0]
-            ttdelta = overrideTimeLinear[1]
-            axis.units = overrideTimeLinear[2]
-            if overrideTimeLinear[3] is None:
-                axis.calendar = reverseCalendarMap[axis.getCalendar()]
-            else:
-                axis.calendar = overrideTimeLinear[3]
-            linearnode = cdmsNode.LinearDataNode(ttzero, ttdelta, len(axis))
-            node.setLinearData(linearnode)
-            if verbose:
-                if timeWasOverridden==0:
-                    print "Overriding values for axis '%s'"%axis.id
-                else:
-                    print >> sys.stderr,  'Warning, overriding more than one time axis (%s)'%axis.id
-            timeWasOverridden = 1
-
-        # Represent time as linear axis using time values in the file
-        elif axis.isTime() and timeIsLinear:
-            reference_length = axis.partition[-1]-axis.partition[0]
-            linearnode = cdmsNode.LinearDataNode(axis[0], referenceDelta, reference_length)
-            node.setLinearData(linearnode)
-        else:
-            try:
-                node.setData(axis[:])
-            except cdms2.cdmsNode.NotMonotonicError:
-                if verbose:
-                    print >> sys.stderr,  'Warning: Axis values for axis %s are not monotonic:'%axis.id,axis[:]
-                    print >> sys.stderr,  'Warning: Resetting axis %s values to:'%axis.id, numpy.arange(len(axis))
-                node.setData(numpy.arange(len(axis)))
-        axisattrs = copyDict(axis.attributes)
-
-        # Make sure that new axes have attribute mods
-        if extraDict.has_key(key):
-            for eattr, evalue in extraDict[key]:
-                axisattrs[eattr] = evalue
-        cleanupAttrs(axisattrs)
-        # node.setExternalDict(axisattrs)
-        setNodeDict(node, axisattrs)
-        validateAttrs(node)
-        datasetnode.addId(axis.id, node)
-
-    keys = vardict.keys()
-    keys.sort()
-    for key in keys:
-        if (includeList is not None) and (key not in includeList):
-            continue
-        if (excludeList is not None) and (key in excludeList):
-            continue
-        domain, attrs, tcode = vardict[key]
-        if tcode in [numpy.float32, numpy.float, numpy.int16, numpy.int32, numpy.int, numpy.intc, numpy.int8]:
-            tcode = numpy.sctype2char(tcode)
-        domainNode = cdmsNode.DomainNode()
-        cdtype = cdmsNode.NumericToCdType[tcode]
-        node = cdmsNode.VariableNode(key, cdtype, domainNode)
-        cleanupAttrs(attrs)
-        # node.setExternalDict(attrs)
-        setNodeDict(node, attrs)
-        validateAttrs(node)
-        for axis in domain:
-            if hasattr(axis,'length'):
-                length = axis.length
-            else:
-                length = len(axis)
-            try:
-                elemnode = cdmsNode.DomElemNode(axis.id, 0, length)
-            except AttributeError:
-                print >> sys.stderr,  'Axis %s for variable %s does not have attribute "id"'%(`axis`, key)
-            if hasattr(axis, 'partition_length'):
-                elemnode.setExternalAttr('partition_length',axis.partition_length)
-            domainNode.add(elemnode)
-        datasetnode.addId(key, node)
-
-    # Add the Conventions attribute if not present
-    conventions = datasetnode.getExternalAttr('Conventions')
-    if conventions is None: datasetnode.setExternalAttr('Conventions','')
-    if templatestr is not None:
-        datasetnode.setExternalAttr('template',templatestr)
-
-    # Add/modify history
-    history = datasetnode.getExternalAttr('history')
-    if history is None:
-        history = ""
-    stringargv = reduce(lambda x,y: x+' '+y, argv)
-    stringtime = "\n[%s] "%timestamp()
-    if len(stringargv)<=256:
-        history += stringtime+stringargv
-    else:
-        history += stringtime+stringargv[:256]+" ..."
-    datasetnode.setExternalAttr('history',history)
-
-    ## datasetnode.validate()
-    if writeToStdout:
-        datasetnode.dump()
-    else:
-        datasetnode.dump(xmlpath)
-        if verbose:
-            print xmlpath,'written'
-
-#--------------------------------------------------------------------------------------------------------------------------
-if __name__ == '__main__':
-    main(sys.argv)
-    try:
-        from mpi4py import MPI
-        comm = MPI.Comm.Get_parent()
-        comm.send('done', dest=0)
-    except:
-        pass
-        
diff --git a/Packages/cdms2/Script/cdscan b/Packages/cdms2/Script/cdscan
new file mode 120000
index 0000000000000000000000000000000000000000..11d1e0bf94069e9ee435058dd2b6ba8e9fdb1187
--- /dev/null
+++ b/Packages/cdms2/Script/cdscan
@@ -0,0 +1 @@
+../Lib/cdscan.py
\ No newline at end of file
diff --git a/Packages/testing/__init__.py b/Packages/testing/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/Packages/testing/common.py b/Packages/testing/common.py
new file mode 100644
index 0000000000000000000000000000000000000000..7a32bcec518658892de1f1247214389961097eb0
--- /dev/null
+++ b/Packages/testing/common.py
@@ -0,0 +1,22 @@
+def test_values_setting(gm,attributes,good_values=[],bad_values=[]):
+  if isinstance(attributes,str):
+    attributes=[attributes,]
+  for att in attributes:
+    for val in good_values:
+      setattr(gm,att,val)
+    for val in bad_values:
+      try:
+        setattr(gm,att,val)
+        success = True
+      except:
+        success = False
+      else:
+        if success:
+          if hasattr(gm,"g_name"):
+            nm = gm.g_name
+          elif hasattr(gm,"s_name"):
+            nm = gm.s_name
+          else:
+            nm=gm.p_name
+          raise Exception,"Should not be able to set %s attribute '%s' to %s" % (nm,att,repr(val))
+          sys.exit(1)
diff --git a/testing/checkimage.py b/Packages/testing/regression.py
similarity index 75%
rename from testing/checkimage.py
rename to Packages/testing/regression.py
index e0ad9db82a7ed0adb35b080d9da48b929ce712e7..25255fdc0dadf55162f3c61d8b9310a77a91f321 100644
--- a/testing/checkimage.py
+++ b/Packages/testing/regression.py
@@ -9,11 +9,38 @@ import numpy
 import vtk
 import os
 import os.path
+import re
 import sys
 import logging
+import vcs
 
 defaultThreshold=10.0
 
+def init(*args, **kwargs):
+    testingDir = os.path.join(os.path.dirname(__file__), "..")
+    sys.path.append(testingDir)
+
+    if ((('bg' in kwargs and kwargs['bg']) or ('bg' not in kwargs)) and
+        ('geometry' not in kwargs)):
+        vcsinst = vcs.init(*args, **dict(kwargs, bg=1))
+        vcsinst.setbgoutputdimensions(1200, 1091, units="pixels")
+    else:
+        vcsinst = vcs.init(*args, **dict(kwargs, bg=0))
+
+    vcsinst.setantialiasing(0)
+    vcsinst.drawlogooff()
+    return vcsinst
+
+def run(vcsinst, fname, baseline=sys.argv[1], threshold=defaultThreshold):
+    """Export plot to a png and exit after comparsion."""
+    vcsinst.png(fname)
+    sys.exit(check_result_image(fname, baseline, threshold))
+
+def run_wo_terminate(vcsinst, fname, baseline=sys.argv[1], threshold=defaultThreshold):
+    """Export plot to a png and return comparison with baseline."""
+    vcsinst.png(fname)
+    return check_result_image(fname, baseline, threshold)
+
 def image_compare(testImage, baselineImage):
     imageDiff = vtk.vtkImageDifference()
     imageDiff.SetInputData(testImage)
@@ -40,18 +67,20 @@ def image_from_file(fname):
         print "Problem opening file '%s': %s"%(fname,err)
         return None
 
+# find alternate baselines for fname of the form basename_d.ext
+# where fname = basename.ext and d is a digit between 1 and 9
 def find_alternates(fname):
     dirname = os.path.dirname(fname)
     prefix, ext = os.path.splitext(os.path.split(fname)[1])
     files = os.listdir(dirname)
     results = [fname]
     for i in files:
-        if i.startswith(prefix) and i.endswith(ext) and i != prefix+ext:
+        if (re.match(prefix + '_[1-9]' + ext, i)):
             results.append(os.path.join(dirname, i))
     return results
 
-def check_result_image(fname, baselinefname, threshold = defaultThreshold,
-                       baseline = True, cleanup=True):
+def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold,
+                       baseline=True, cleanup=True):
     testImage = image_from_file(fname)
     if testImage is None:
         print "Testing image missing, test failed."
@@ -116,6 +145,7 @@ def check_result_image(fname, baselinefname, threshold = defaultThreshold,
     printDart("ValidImage", "image/png", os.path.abspath(bestFilename), "File")
     return -1
 
+
 def main():
     if len(sys.argv) != 4:
         print "Error:"
diff --git a/Packages/testing/setup.py b/Packages/testing/setup.py
new file mode 100755
index 0000000000000000000000000000000000000000..85c270ed8642eae41095cfbf680307ec57e53428
--- /dev/null
+++ b/Packages/testing/setup.py
@@ -0,0 +1,13 @@
+import os, sys
+from distutils.core import setup
+import cdat_info
+
+sys.path.append(os.environ.get('BUILD_DIR',"build"))
+
+setup(name="testing",
+      version=cdat_info.Version,
+      description="Testing infrastructure for cdat",
+      url="http://uvcdat.llnl.gov",
+      packages=['testing'],
+      package_dir = {'testing': '', }
+)
diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py
index 860f7858d82f7eb06be0c3e2179a595c45dac9ab..49361248ced19bc284b41fbd38b4decf4c1e2bd2 100644
--- a/Packages/vcs/vcs/Canvas.py
+++ b/Packages/vcs/vcs/Canvas.py
@@ -73,6 +73,9 @@ import vcs.manageElements  # noqa
 import configurator  # noqa
 from projection import round_projections  # noqa
 
+# Python < 3 DeprecationWarning ignored by default
+warnings.simplefilter('default')
+
 
 class SIGNAL(object):
 
@@ -2244,13 +2247,13 @@ Options:::
     # Set alias for the secondary drawtextcombined.
     drawtext = drawtextcombined
 
-    _plot_keywords_ = ['variable', 'grid', 'xaxis', 'yaxis', 'xrev', 'yrev', 'continents', 'xarray', 'yarray',
-                       'name', 'time', 'units', 'ymd', 'file_comment',
-                       'xbounds', 'ybounds', 'xname', 'yname', 'xunits', 'yunits', 'xweights', 'yweights',
-                       'comment1', 'comment2', 'comment3', 'comment4', 'hms', 'long_name', 'zaxis',
-                       'zarray', 'zname', 'zunits', 'taxis', 'tarray', 'tname', 'tunits', 'waxis', 'warray',
-                       'wname', 'wunits', 'bg', 'ratio', 'donotstoredisplay', 'render', 'continents_line']
+    _plot_keywords_ = ['variable', 'grid', 'xaxis', 'xarray',  'xrev', 'yaxis', 'yarray', 'yrev', 'continents',
+                       'xbounds', 'ybounds', 'zaxis', 'zarray', 'taxis', 'tarray', 'waxis', 'warray', 'bg', 'ratio',
+                       'donotstoredisplay', 'render', 'continents_line', "display_name"]
 
+    _deprecated_plot_keywords_ = ["time", "units", "file_comment", "xname", "yname", "zname", "tname", "wname",
+                                  "xunits", "yunits", "zunits", "tunits", "wunits", "comment1", "comment2", "comment3",
+                                  "comment4", "long_name"]
     # def replot(self):
     #    """ Clears and plots with last used plot arguments
     #    """
@@ -2297,28 +2300,10 @@ Options:::
             '3',3: y dim will be 3 times bigger than x dim (restricted to original tempalte.data area
             Adding a 't' at the end of the ratio, makes the tickmarks and boxes move along.
 
-    Variable attribute keys:
-       comment1         = string   #Comment plotted above file_comment
-       comment2         = string   #Comment plotted above comment1
-       comment3         = string   #Comment plotted above comment2
-       comment4         = string   #Comment plotted above comment4
-       file_comment     = string   #Comment (defaults to file.comment)
-       hms              = string (hh:mm:ss) #Hour, minute, second
-       long_name        = string   #Descriptive variable name
-       name             = string   #Variable name (defaults to var.id)
-       time             = cdtime   #instance (relative or absolute),
-                                    cdtime, reltime or abstime value
-       units            = string   #Variable units
-       ymd              = string (yy/mm/dd) #Year, month, day
-
     Dimension attribute keys (dimension length=n):
        [x|y|z|t|w]array = NumPy array of length n    # x or y Dimension values
        [x|y|z|t|w]array = NumPy array of length n    # x or y Dimension values
        [x|y]bounds       = NumPy array of shape (n,2) # x or y Dimension boundaries
-       [x|y|z|t|w]name   = string                     # x or y Dimension name
-       [x|y|z|t|w]units  = string                     # x or y Dimension units
-       [x|y]weights      = NumPy array of length n    # x or y Dimension weights (used to
-                                                        calculate area-weighted mean)
 
     CDMS object:
        [x|y|z|t|w]axis   = CDMS axis object           # x or y Axis
@@ -2345,7 +2330,17 @@ Options:::
                                                       # Values 6 through 11 signify the line type
                                                       # defined by the files data_continent_other7
                                                       # through data_continent_other12.
-
+        continents_line = vcs.getline("default")    # VCS line object to define continent appearance
+        donotstoredisplay = True|False              # Whether the displayplot object generated by this plot are stored
+        render = True|False                         # Whether to actually render the plot or not (useful for doing a
+                                                    # bunch of plots in a row)
+        display_name = "__display_123"                # VCS Display plot name (used to prevent duplicate display plots)
+        ratio = 1.5|"autot"|"auto"                   # Ratio of height/width for the plot; autot and auto will choose a
+                                                     # "good" ratio for you.
+        plot_based_dual_grid = True | False          # Plot the actual grid or the dual grid based on what is
+                                                     # needed by the plot: isofill, isoline, vector need
+                                                     # point attributes, boxfill and meshfill need cell attributes
+                                                     # The default is True (if the parameter is not specified).
     Graphics Output in Background Mode:
        bg                 = 0|1   # if ==1, create images in the background
                                                              (Don't display the VCS Canvas)
@@ -3464,8 +3459,13 @@ Options:::
                     "unknown taylordiagram graphic method: %s" %
                     arglist[4])
             t.plot(arglist[0], canvas=self, template=arglist[2], **keyargs)
-            nm, src = self.check_name_source(None, "default", "display")
-            dn = displayplot.Dp(nm)
+
+            dname = keyargs.get("display_name")
+            if dname is not None:
+                dn = vcs.elements["display"][dname]
+            else:
+                nm, src = self.check_name_source(None, "default", "display")
+                dn = displayplot.Dp(nm)
             dn.continents = self.getcontinentstype()
             dn.continents_line = self.getcontinentsline()
             dn.template = arglist[2]
@@ -3473,7 +3473,6 @@ Options:::
             dn.g_name = arglist[4]
             dn.array = arglist[:2]
             dn.extradisplays = t.displays
-# dn.array=arglist[0]
             for p in slab_changed_attributes.keys():
                 tmp = slab_changed_attributes[p]
                 if tmp == (None, None):
@@ -3502,11 +3501,14 @@ Options:::
                     doratio == "0" or doratio[:4] == "auto"):
                 doratio = "1t"
             for keyarg in keyargs.keys():
-                if keyarg not in self.__class__._plot_keywords_ + \
-                        self.backend._plot_keywords:
-                    warnings.warn(
-                        'Unrecognized vcs plot keyword: %s, assuming backend (%s) keyword' %
-                        (keyarg, self.backend.type))
+                if keyarg not in self.__class__._plot_keywords_ + self.backend._plot_keywords:
+                    if keyarg in self.__class__._deprecated_plot_keywords_:
+                        warnings.warn("Deprecation Warning: Keyword '%s' will be removed in the next version"
+                                      "of UV-CDAT." % keyarg)
+                    else:
+                        warnings.warn(
+                            'Unrecognized vcs plot keyword: %s, assuming backend (%s) keyword' %
+                            (keyarg, self.backend.type))
 
             if arglist[0] is not None or 'variable' in keyargs:
                 arglist[0] = self._reconstruct_tv(arglist, keyargs)
@@ -3725,9 +3727,13 @@ Options:::
             else:
                 returned_kargs = self.backend.plot(*arglist, **keyargs)
                 if not keyargs.get("donotstoredisplay", False):
-                    nm, src = self.check_name_source(
-                        None, "default", "display")
-                    dn = displayplot.Dp(nm, parent=self)
+                    dname = keyargs.get("display_name")
+                    if dname is not None:
+                        dn = vcs.elements['display'][dname]
+                    else:
+                        nm, src = self.check_name_source(
+                            None, "default", "display")
+                        dn = displayplot.Dp(nm, parent=self)
                     dn.template = arglist[2]
                     dn.g_type = arglist[3]
                     dn.g_name = arglist[4]
@@ -3872,6 +3878,9 @@ Options:::
     a.plot(array,'default','isofill','quick')
     a.clear()
 
+  Internally, update() calls clear() to assist in removing plots. The preserve_display argument is used to
+  make sure that the display plots that are associated with the current canvas are not eliminated, and
+  are still able to be used to regenerate the plots.
 """
         if self.animate.created():
             self.animate.close()
@@ -3879,6 +3888,10 @@ Options:::
             self.configurator.stop_animating()
         self.animate_info = []
         self.animate.update_animate_display_list()
+
+        preserve_display = kargs.get("preserve_display", False)
+        if "preserve_display" in kargs:
+            del kargs["preserve_display"]
         self.backend.clear(*args, **kargs)
         for nm in self.display_names:
             # Lets look at elements created by dispaly production
@@ -3893,7 +3906,8 @@ Options:::
                     for k in new_elts[e]:
                         if k in vcs.elements[e].keys():
                             del(vcs.elements[e][k])
-            del(vcs.elements["display"][nm])
+            if not preserve_display:
+                del(vcs.elements["display"][nm])
         self.display_names = []
         return
 
@@ -4102,6 +4116,9 @@ Options:::
     a.geometry(450,337)
 
 """
+        if len(args) == 0:
+            return self.backend.geometry()
+
         if (args[0] <= 0) or (args[1] <= 0):
             raise ValueError(
                 'Error -  The width and height values must be an integer greater than 0.')
@@ -5301,48 +5318,8 @@ Options:::
     ##########################################################################
     def gs(self, filename='noname.gs', device='png256',
            orientation=None, resolution='792x612'):
-        """
- Function: gs
 
- Description of Function:
-    This routine allows the user to save the VCS canvas in one of the many
-    GhostScript (gs) file types (also known as devices). To view other
-    GhostScript devices, issue the command "gs --help" at the terminal
-    prompt. Device names include: bmp256, epswrite, jpeg, jpeggray,
-    pdfwrite, png256, png16m, sgirgb, tiffpack, and tifflzw. By default
-    the device = 'png256'.
-
-    If no path/file name is given and no previously created gs file has been
-    designated, then file
-
-        /$HOME/%s/default.gs
-
-    will be used for storing gs images. However, if a previously created gs
-    file exist, then this output file will be used for storage.
-
-    By default, the page orientation is the canvas' orientation.
-    To translate the page orientation to portrait mode (p), set the parameter orientation = 'p'.
-    To translate the page orientation to landscape mode (l), set the parameter orientation = 'l'.
-
-    The gs command is used to create a single gs file at this point. The user
-    can use other tools to append separate image files.
-
- Example of Use:
-    a=vcs.init()
-    a.plot(array)
-    a.gs('example') #defaults: device='png256', orientation='l' and resolution='792x612'
-    a.gs(filename='example.tif', device='tiffpack', orientation='l', resolution='800x600')
-    a.gs(filename='example.pdf', device='pdfwrite', orientation='l', resolution='200x200')
-    a.gs(filename='example.jpg', device='jpeg', orientation='p', resolution='1000x1000')
-""" % (self._dotdir)
-        if orientation is None:
-            orientation = self.orientation()[0]
-        r = resolution.split('x')
-        f1 = f1 = float(r[0]) / 1100.0 * 100.0
-        f2 = f2 = float(r[1]) / 849.85 * 100.0
-        resolution = "%4.1fx%4.1f" % (f2, f1)
-        nargs = (filename, device, orientation, resolution)
-        return self.canvas.gs(*nargs)
+        warnings.warn("Export to GhostScript is no longer supported", DeprecationWarning)
 
     ##########################################################################
     #                                                                           #
diff --git a/Packages/vcs/vcs/VCS_validation_functions.py b/Packages/vcs/vcs/VCS_validation_functions.py
index 7595d900e36282404ce77d083d7af5ec5ef07984..2968d7832f7ab4af7e01f0cd9275dd91d40848a4 100644
--- a/Packages/vcs/vcs/VCS_validation_functions.py
+++ b/Packages/vcs/vcs/VCS_validation_functions.py
@@ -243,6 +243,18 @@ def checkListOfNumbers(self, name, value, minvalue=None,
     return list(value)
 
 
+def checkInStringList(self, name, value, options):
+    checkName(self, name, value)
+    if value not in options:
+        self.checkedRaise(
+            self,
+            value,
+            ValueError,
+            'Invalid value ' + value + '. Valid options are: ' +
+            ','.join(self.scaleoptions))
+    return value
+
+
 def checkFont(self, name, value):
     if (value is None):
         pass
diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py
index 64de3e67760f65ada3fa0bad8216f11ad010c368..814719536babb8a66187c206f0cf9fea40b315fb 100644
--- a/Packages/vcs/vcs/VTKPlots.py
+++ b/Packages/vcs/vcs/VTKPlots.py
@@ -46,18 +46,16 @@ class VTKVCSBackend(object):
         self._renderers = {}
         self._plot_keywords = [
             'cdmsfile',
-            'cell_coordinates'
-            # used to render the continents
-            'continents_renderer',
+            'cell_coordinates',
             # dataset bounds in lon/lat coordinates
             'dataset_bounds',
             # This may be smaller than the data viewport. It is used
             # if autot is passed
             'ratio_autot_viewport',
-            # used to render the dataset
-            'dataset_renderer',
-            # dataset scale: (xScale, yScale)
-            'dataset_scale',
+            # used to render the dataset for clicked point info (hardware selection)
+            'surface_renderer',
+            # (xScale, yScale) - datasets can be scaled using the window ratio
+            'surface_scale',
             # the same as vcs.utils.getworldcoordinates for now. getworldcoordinates uses
             # gm.datawc_... or, if that is not set, it uses data axis margins (without bounds).
             'plotting_dataset_bounds',
@@ -73,7 +71,7 @@ class VTKVCSBackend(object):
         # Initially set to 16x Multi-Sampled Anti-Aliasing
         self.antialiasing = 8
         self._rasterPropsInVectorFormats = False
-        self._initialGeometry = geometry
+        self._geometry = geometry
 
         if renWin is not None:
             self.renWin = renWin
@@ -138,99 +136,92 @@ class VTKVCSBackend(object):
             d = vcs.elements["display"][dnm]
             if d.array[0] is None:
                 continue
-            t = vcs.elements["template"][d.template]
-            gm = vcs.elements[d.g_type][d.g_name]
-            # for non-linear projection or for meshfill. Meshfill is wrapped at
-            # VTK level, so vcs calculations do not work.
-            if gm.projection != "linear" or gm.g_name == 'Gfm':
-                selector = vtk.vtkHardwareSelector()
-                datasetRenderer = d.backend['dataset_renderer']
-                continentsRenderer = d.backend.get('continents_renderer')
-                dataset = d.backend['vtk_backend_grid']
-                if (datasetRenderer and dataset):
-                    selector.SetRenderer(datasetRenderer)
-                    selector.SetArea(xy[0], xy[1], xy[0], xy[1])
-                    selector.SetFieldAssociation(vtk.vtkDataObject.FIELD_ASSOCIATION_CELLS)
-                    # We want to be able see information behind continents
-                    if (continentsRenderer):
-                        continentsRenderer.SetDraw(False)
-                    selection = selector.Select()
-                    if (continentsRenderer):
-                        continentsRenderer.SetDraw(True)
-                    if (selection.GetNumberOfNodes() > 0):
-                        selectionNode = selection.GetNode(0)
-                        prop = selectionNode.GetProperties().Get(selectionNode.PROP())
-                        if (prop):
-                            cellIds = prop.GetMapper().GetInput().GetCellData().GetGlobalIds()
-                            if (cellIds):
-                                # scalar value
-                                a = selectionNode.GetSelectionData().GetArray(0)
-                                geometryId = a.GetValue(0)
-                                cellId = cellIds.GetValue(geometryId)
-                                scalars = dataset.GetCellData().GetScalars()
-                                value = scalars.GetValue(cellId)
-                                geoTransform = d.backend['vtk_backend_geo']
-                                if (geoTransform):
-                                    geoTransform.Inverse()
-                                # Use the world picker to get world coordinates
-                                # we deform the dataset, so we need to fix the
-                                # world picker using xScale, yScale
-                                xScale, yScale = d.backend['dataset_scale']
-                                worldPicker = vtk.vtkWorldPointPicker()
-                                worldPicker.Pick(xy[0], xy[1], 0, datasetRenderer)
-                                worldPosition = list(worldPicker.GetPickPosition())
-                                if (xScale > yScale):
-                                    worldPosition[0] /= (xScale/yScale)
-                                else:
-                                    worldPosition[1] /= (yScale/xScale)
-                                lonLat = worldPosition
-                                if (geoTransform):
-                                    geoTransform.InternalTransformPoint(worldPosition, lonLat)
-                                    geoTransform.Inverse()
-                                st += "Var: %s\n" % d.array[0].id
-                                if (float("inf") not in lonLat):
-                                    st += "X=%4.1f\nY=%4.1f\n" % (lonLat[0], lonLat[1])
-                                st += "Value: %g" % value
-            else:
-                if t.data.x1 <= x <= t.data.x2 and t.data.y1 <= y <= t.data.y2:
-                    x1, x2, y1, y2 = vcs.utils.getworldcoordinates(gm,
-                                                                   d.array[0].getAxis(-1),
-                                                                   d.array[0].getAxis(-2))
-
-                    X = (x - t.data.x1) / (t.data.x2 - t.data.x1) * (x2 - x1) + x1
-                    Y = (y - t.data.y1) / (t.data.y2 - t.data.y1) * (y2 - y1) + y1
-
-                    # Ok we now have the X/Y values we need to figure out the
-                    # indices
-                    try:
-                        I = d.array[0].getAxis(-1).mapInterval((X, X, 'cob'))[0]
-                        try:
-                            J = d.array[
-                                0].getAxis(-2).mapInterval((Y, Y, 'cob'))[0]
-                            # Values at that point
-                            V = d.array[0][..., J, I]
-                        except:
-                            V = d.array[0][..., I]
-                        if isinstance(V, numpy.ndarray):
-                            # Grab the appropriate time slice
-                            if self.canvas.animate.created():
-                                t = self.canvas.animate.frame_num
-                                try:
-                                    taxis = V.getTime()
-                                    V = V(time=taxis[t % len(taxis)]).flat[0]
-                                except:
-                                    V = V.flat[0]
+            # Use the hardware selector to determine the cell id we clicked on
+            selector = vtk.vtkHardwareSelector()
+            surfaceRenderer = d.backend['surface_renderer']
+            dataset = d.backend['vtk_backend_grid']
+            if (surfaceRenderer and dataset):
+                selector.SetRenderer(surfaceRenderer)
+                selector.SetArea(xy[0], xy[1], xy[0], xy[1])
+                selector.SetFieldAssociation(vtk.vtkDataObject.FIELD_ASSOCIATION_CELLS)
+                # We only want to render the surface for selection
+                renderers = self.renWin.GetRenderers()
+                renderers.InitTraversal()
+                while(True):
+                    renderer = renderers.GetNextItem()
+                    if (renderer is None):
+                        break
+                    renderer.SetDraw(False)
+                surfaceRenderer.SetDraw(True)
+                selection = selector.Select()
+                renderers.InitTraversal()
+                while(True):
+                    renderer = renderers.GetNextItem()
+                    if (renderer is None):
+                        break
+                    renderer.SetDraw(True)
+                surfaceRenderer.SetDraw(False)
+                if (selection.GetNumberOfNodes() > 0):
+                    selectionNode = selection.GetNode(0)
+                    prop = selectionNode.GetProperties().Get(selectionNode.PROP())
+                    if (prop):
+                        cellIds = prop.GetMapper().GetInput().GetCellData().GetGlobalIds()
+                        if (cellIds):
+                            st += "Var: %s\n" % d.array[0].id
+                            # cell attribute
+                            a = selectionNode.GetSelectionData().GetArray(0)
+                            geometryId = a.GetValue(0)
+                            cellId = cellIds.GetValue(geometryId)
+                            attributes = dataset.GetCellData().GetScalars()
+                            if (attributes is None):
+                                attributes = dataset.GetCellData().GetVectors()
+                            elementId = cellId
+
+                            geoTransform = d.backend['vtk_backend_geo']
+                            if (geoTransform):
+                                geoTransform.Inverse()
+                            # Use the world picker to get world coordinates
+                            # we deform the dataset, so we need to fix the
+                            # world picker using xScale, yScale
+                            xScale, yScale = d.backend['surface_scale']
+                            worldPicker = vtk.vtkWorldPointPicker()
+                            worldPicker.Pick(xy[0], xy[1], 0, surfaceRenderer)
+                            worldPosition = list(worldPicker.GetPickPosition())
+                            if (xScale > yScale):
+                                worldPosition[0] /= (xScale/yScale)
                             else:
-                                V = V.flat[0]
-                        try:
-                            st += "Var: %s\nX[%i] = %4.1f\nY[%i] = %4.1f\nValue: %g" % (
-                                d.array[0].id, I, X, J, Y, V)
-                        except:
-                            st += "Var: %s\nX = %4.1f\nY[%i] = %4.1f\nValue: %g" % (
-                                d.array[0].id, X, I, Y, V)
-                    except:
-                        st += "Var: %s\nX=%g\nY=%g\nValue = N/A" % (
-                            d.array[0].id, X, Y)
+                                worldPosition[1] /= (yScale/xScale)
+                            lonLat = worldPosition
+                            if (attributes is None):
+                                # if point dataset, return the value for the closest point
+                                cell = dataset.GetCell(cellId)
+                                closestPoint = [0, 0, 0]
+                                subId = vtk.mutable(0)
+                                pcoords = [0, 0, 0]
+                                dist2 = vtk.mutable(0)
+                                weights = [0] * cell.GetNumberOfPoints()
+                                cell.EvaluatePosition(worldPosition, closestPoint,
+                                                      subId, pcoords, dist2, weights)
+                                indexMax = numpy.argmax(weights)
+                                pointId = cell.GetPointId(indexMax)
+                                attributes = dataset.GetPointData().GetScalars()
+                                if (attributes is None):
+                                    attributes = dataset.GetPointData().GetVectors()
+                                elementId = pointId
+                            if (geoTransform):
+                                geoTransform.InternalTransformPoint(worldPosition, lonLat)
+                                geoTransform.Inverse()
+                            if (float("inf") not in lonLat):
+                                st += "X=%4.1f\nY=%4.1f\n" % (lonLat[0], lonLat[1])
+                            # get the cell value or the closest point value
+                            if (attributes):
+                                if (attributes.GetNumberOfComponents() > 1):
+                                    v = attributes.GetTuple(elementId)
+                                    st += "Value: (%g, %g)" % (v[0], v[1])
+                                else:
+                                    value = attributes.GetValue(elementId)
+                                    st += "Value: %g" % value
+
         if st == "":
             return
         ren = vtk.vtkRenderer()
@@ -299,23 +290,21 @@ class VTKVCSBackend(object):
             parg.append(d.g_type)
             parg.append(d.g_name)
             plots_args.append(parg)
-            kwarg = {}
+            key = {"display_name": dnm}
             if d.ratio is not None:
-                kwarg["ratio"] = d.ratio
-
-            kwarg["continents"] = d.continents
-            kwarg["continents_line"] = d.continents_line
-
-            key_args.append(kwarg)
+                key["ratio"] = d.ratio
+            key["continents"] = d.continents
+            key["continents_line"] = d.continents_line
+            key_args.append(key)
 
-        # Have to pull out the UI layer so it doesn't get borked by the clear
+        # Have to pull out the UI layer so it doesn't get borked by the clear
         self.hideGUI()
 
         if self.canvas.configurator is not None:
             restart_anim = self.canvas.configurator.animation_timer is not None
         else:
             restart_anim = False
-        self.canvas.clear(render=False)
+        self.canvas.clear(render=False, preserve_display=True)
 
         for i, pargs in enumerate(plots_args):
             self.canvas.plot(*pargs, render=False, **key_args[i])
@@ -385,9 +374,9 @@ class VTKVCSBackend(object):
             # turning off antialiasing by default
             # mostly so that pngs are same accross platforms
             self.renWin.SetMultiSamples(self.antialiasing)
-            if self._initialGeometry is not None:
-                width = self._initialGeometry["width"]
-                height = self._initialGeometry["height"]
+            if self._geometry is not None:
+                width = self._geometry["width"]
+                height = self._geometry["height"]
             else:
                 width = None
                 height = None
@@ -446,9 +435,9 @@ class VTKVCSBackend(object):
             if (self.bg):
                 height = self.canvas.bgY
                 width = self.canvas.bgX
-            elif (self._initialGeometry):
-                height = self._initialGeometry['height']
-                width = self._initialGeometry['width']
+            elif (self._geometry):
+                height = self._geometry['height']
+                width = self._geometry['width']
             else:
                 height = self.canvas.bgY
                 width = self.canvas.bgX
@@ -555,8 +544,18 @@ class VTKVCSBackend(object):
         else:
             return True
 
-    def geometry(self, x, y, *args):
-        self.renWin.SetSize(x, y)
+    def geometry(self, *args):
+        if len(args) == 0:
+            return self._geometry
+        if len(args) < 2:
+            raise TypeError("Function takes zero arguments or at least two "
+                            "<width, height> arguments. Got " + str(len(args)))
+        x = args[0]
+        y = args[1]
+
+        if self.renWin is not None:
+            self.renWin.SetSize(x, y)
+        self._geometry = {'width': x, 'height': y}
         self._lastSize = (x, y)
 
     def flush(self):
@@ -757,7 +756,10 @@ class VTKVCSBackend(object):
                 plot.onClosing(cell)
 
     def plotContinents(self, wc, projection, wrap, vp, priority, **kargs):
-        contData = vcs2vtk.prepContinents(self.canvas._continentspath())
+        continents_path = self.canvas._continentspath()
+        if continents_path is None:
+            return (None, 1, 1)
+        contData = vcs2vtk.prepContinents(continents_path)
         contMapper = vtk.vtkPolyDataMapper()
         contMapper.SetInputData(contData)
         contActor = vtk.vtkActor()
@@ -858,9 +860,9 @@ class VTKVCSBackend(object):
                     ren = self.createRenderer()
                     self.renWin.AddRenderer(ren)
                     self.setLayer(ren, 1)
-                    self._renderers[(None, None, None)] = ren
+                    self._renderers[(None, None, None)] = (ren, 1, 1)
                 else:
-                    ren = self._renderers[(None, None, None)]
+                    ren, xratio, yratio = self._renderers[(None, None, None)]
                 tt, to = crdate.name.split(":::")
                 tt = vcs.elements["texttable"][tt]
                 to = vcs.elements["textorientation"][to]
@@ -895,9 +897,9 @@ class VTKVCSBackend(object):
                     ren = self.createRenderer()
                     self.renWin.AddRenderer(ren)
                     self.setLayer(ren, 1)
-                    self._renderers[(None, None, None)] = ren
+                    self._renderers[(None, None, None)] = (ren, 1, 1)
                 else:
-                    ren = self._renderers[(None, None, None)]
+                    ren, xratio, yratio = self._renderers[(None, None, None)]
                 tt, to = zname.name.split(":::")
                 tt = vcs.elements["texttable"][tt]
                 to = vcs.elements["textorientation"][to]
diff --git a/Packages/vcs/vcs/boxfill.py b/Packages/vcs/vcs/boxfill.py
index 231b228399c3dd71ca6ab2f34e9e5fb97470f12d..b403895fca8b42969695f855c6596da348d9eed3 100755
--- a/Packages/vcs/vcs/boxfill.py
+++ b/Packages/vcs/vcs/boxfill.py
@@ -24,6 +24,7 @@ import vcs
 import cdtime
 import VCS_validation_functions
 import xmldocs
+import numpy
 import warnings
 
 
@@ -734,6 +735,76 @@ class Gfb(object):
         self.yaxisconvert = yat
     xyscale.__doc__ = xmldocs.xyscaledoc
 
+    def getlevels(self, varmin, varmax):
+        if self.boxfill_type == "custom":
+            return self.levels
+
+        nlev = float(self.color_2 - self.color_1 + 1)
+        autolevels = False
+
+        if numpy.allclose(self.level_1, 1.e20) or numpy.allclose(self.level_2, 1.e20):
+            autolevels = True
+            low_end = varmin
+            high_end = varmax
+        else:
+            low_end = self.level_1
+            high_end = self.level_2
+
+        if self.boxfill_type == "log10":
+            low_end = numpy.ma.log10(low_end)
+            high_end = numpy.ma.log10(high_end)
+
+        if autolevels:
+            # Use nice values for the scale
+            scale = vcs.mkscale(low_end, high_end)
+            low_end = scale[0]
+            high_end = scale[-1]
+
+        dx = (high_end - low_end) / nlev
+
+        if dx == 0:
+            high_end += .00001
+            return [low_end, high_end]
+        float_epsilon = numpy.finfo(numpy.float32).eps
+        contourLevels = numpy.arange(low_end, high_end + float_epsilon, dx)
+
+        return contourLevels
+
+    def getlegendlabels(self, levels):
+        if self.legend:
+            return self.legend
+
+        if numpy.allclose(self.level_1, 1.e20) or numpy.allclose(self.level_2, 1.e20):
+            autolevels = True
+        else:
+            autolevels = False
+
+        if len(levels) > 12:
+            scale = vcs.mkscale(levels[0], levels[-1])
+            if autolevels:
+                return vcs.mklabels(scale)
+            else:
+                # Create our own scale
+                dx = (self.level_2 - self.level_1) / float(len(scale) - 1)
+                real_values = [self.level_1, self.level_2]
+                float_epsilon = numpy.finfo(numpy.float32).eps
+                levels = numpy.arange(levels[0], levels[-1] + float_epsilon, dx)
+        else:
+            real_values = levels
+
+        # Need to line up the levels and the labels, so we'll massage the label positions
+        max_round = 0
+        for l in real_values:
+            round_pos = 0
+            while numpy.round(l, round_pos) != l:
+                round_pos += 1
+            max_round = max(max_round, round_pos)
+
+        round_values = [numpy.round(l, max_round) for l in levels]
+        round_labels = vcs.mklabels(round_values, "list")
+
+        return {lev: label for lev, label in zip(levels, round_labels)}
+
     ###########################################################################
     #                                                                         #
     # List out boxfill graphics method members (attributes).                  #
diff --git a/Packages/vcs/vcs/template.py b/Packages/vcs/vcs/template.py
index 5e29417457418aba48e704fae1c0134be4abd2bb..fd2ee2f0c820559e8b017e20b3cd39b15f1a57eb 100644
--- a/Packages/vcs/vcs/template.py
+++ b/Packages/vcs/vcs/template.py
@@ -56,6 +56,18 @@ def _setgen(self, name, cls, value):
     setattr(self, "_%s" % name, value)
 
 
+def epsilon_gte(a, b):
+    """a >= b, using floating point epsilon value."""
+    float_epsilon = numpy.finfo(numpy.float32).eps
+    return -float_epsilon < a - b
+
+
+def epsilon_lte(a, b):
+    """a <= b, using floating point epsilon value."""
+    float_epsilon = numpy.finfo(numpy.float32).eps
+    return float_epsilon > a - b
+
+
 # read .scr file
 def process_src(nm, code):
     """Takes VCS script code (string) as input and generates boxfill gm from it"""
@@ -1848,7 +1860,7 @@ class P(object):
                    startlong])
         # Now make sure we have a legend
         if isinstance(levels[0], list):
-            # Ok these are nono contiguous levels, we will use legend only if
+            # Ok these are non-contiguous levels, we will use legend only if
             # it's a perfect match
             for i, l in enumerate(levels):
                 lt = l[0]
@@ -1873,29 +1885,31 @@ class P(object):
         else:
             if legend is None:
                 legend = vcs.mklabels(levels)
+            # We'll use the less precise float epsilon since this is just for labels
             if levels[0] < levels[1]:
-                ecompfunc = numpy.less_equal
-                compfunc = numpy.less
+                comparison = epsilon_lte
             else:
-                ecompfunc = numpy.greater_equal
-                compfunc = numpy.greater
+                comparison = epsilon_gte
+
+            def in_bounds(x):
+                return comparison(levels[0], x) and comparison(x, levels[-1])
+
             dlong = dD / (len(levels) - 1)
+
             for l in legend.keys():
-                if not compfunc(l, levels[0]) and not compfunc(levels[-1], l):
+                if in_bounds(l):
                     for i in range(len(levels) - 1):
-                        if ecompfunc(levels[i], l) and ecompfunc(
-                                l, levels[i + 1]):
-                            # Ok we're between 2 levels, let's add the legend
-                            # first let's figure out where to put it
-                            loc = i * dlong  # position at beginnig of level
-                            # Adds the distance from beginnig of level box
-                            loc += (l - levels[i]) / \
-                                (levels[i + 1] - levels[i]) * dlong
-                            loc += startlong  # Figures out the begining
-    # loc=((l-levels[0])/(levels[-1]-levels[0]))*dD+startlong
-                            Ll.append([loc, loc])
+                        # if legend key is (inclusive) between levels[i] and levels[i+1]
+                        if comparison(levels[i], l) and comparison(l, levels[i + 1]):
+                            # first let's figure out where to put the legend label
+                            location = i * dlong  # position at beginning of level
+                            # Adds the distance from beginning of level box
+                            location += (l - levels[i]) / (levels[i + 1] - levels[i]) * dlong
+                            location += startlong  # Figures out the beginning
+
+                            Ll.append([location, location])
                             Sl.append([startshrt, startshrt + dshrt])
-                            Lt.append(loc)
+                            Lt.append(location)
                             St.append(startshrt + dshrt * 1.4)
                             Tt.append(legend[l])
                             break
diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py
index 03815f71ac7247d331f2be1d8a4595651cbfc965..e69466eab54845a3ad4a00e1533069ee62c73327 100644
--- a/Packages/vcs/vcs/utils.py
+++ b/Packages/vcs/vcs/utils.py
@@ -994,21 +994,16 @@ def mklabels(vals, output='dict'):
         amax = float(numpy.ma.maximum(vals))
     #  Number of digit on the left of decimal point
     idigleft = int(numpy.ma.floor(numpy.ma.log10(amax))) + 1
+
     # Now determine the number of significant figures
     idig = 0
     for i in range(nvals):
         aa = numpy.ma.power(10., -idigleft)
         while abs(round(aa * vals[i]) - aa * vals[i]) > .000001:
             aa = aa * 10.
-        idig = numpy.ma.maximum(
-            idig,
-            numpy.ma.floor(
-                numpy.ma.log10(
-                    aa *
-                    numpy.ma.power(
-                        10.,
-                        idigleft))))
+        idig = numpy.ma.maximum(idig, numpy.ma.floor(numpy.ma.log10(aa * numpy.ma.power(10., idigleft))))
     idig = int(idig)
+
     # Now does the writing part
     lbls = []
     # First if we need an E format
@@ -1634,7 +1629,7 @@ def getgraphicsmethod(type, name):
     return copy_mthd
 
 
-def creategraphicsmethod(gtype, name):
+def creategraphicsmethod(gtype, gname='default', name=None):
     if gtype in ['isoline', 'Gi']:
         func = vcs.createisoline
     elif gtype in ['isofill', 'Gfi']:
@@ -1657,11 +1652,17 @@ def creategraphicsmethod(gtype, name):
         func = vcs.createvector
     elif gtype in ['taylordiagram', 'Gtd']:
         func = vcs.createtaylordiagram
-    elif isinstance(type, vcsaddons.core.VCSaddon):
-        func = type.creategm
+    elif gtype == '3d_scalar':
+        func = vcs.create3d_scalar
+    elif gtype == '3d_dual_scalar':
+        func = vcs.create3d_dual_scalar
+    elif gtype == '3d_vector':
+        func = vcs.create3d_vector
+    elif isinstance(gtype, vcsaddons.core.VCSaddon):
+        func = gtype.creategm
     else:
         return None
-    copy_mthd = func(source=name)
+    copy_mthd = func(name=name, source=gname)
     return copy_mthd
 
 
diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py
index d6c449b05ca2dac9959f95813adf172c32fb232b..81142492c5ea1e771b1f50924b183ba470d3608f 100644
--- a/Packages/vcs/vcs/vcs2vtk.py
+++ b/Packages/vcs/vcs/vcs2vtk.py
@@ -180,142 +180,48 @@ def handleProjectionEdgeCases(projection, data):
     return data
 
 
-def genGridOnPoints(data1, gm, deep=True, grid=None, geo=None,
-                    data2=None):
-    continents = False
-    projection = vcs.elements["projection"][gm.projection]
-    xm, xM, ym, yM = None, None, None, None
-    useStructuredGrid = True
-
-    data1 = handleProjectionEdgeCases(projection, data1)
-    if data2 is not None:
-        data2 = handleProjectionEdgeCases(projection, data2)
-
-    try:
-        g = data1.getGrid()
-        if grid is None:
-            x = g.getLongitude()[:]
-            y = g.getLatitude()[:]
-            xm = x[0]
-            xM = x[-1]
-            ym = y[0]
-            yM = y[-1]
-        continents = True
-        wrap = [0, 360]
-        # Ok need unstrctured grid
-        if isinstance(g, cdms2.gengrid.AbstractGenericGrid):
-            useStructuredGrid = False
-    except:
-        # hum no grid that's much easier
-        wrap = None
-        if grid is None:
-            x = data1.getAxis(-1)[:]
-            y = data1.getAxis(-2)[:]
-            xm = x[0]
-            xM = x[-1]
-            ym = y[0]
-            yM = y[-1]
-
-    if grid is None:
-        if x.ndim == 1:
-            y = y[:, numpy.newaxis] * numpy.ones(x.shape)[numpy.newaxis, :]
-            x = x[numpy.newaxis, :] * numpy.ones(y.shape)
-        x = x.flatten()
-        y = y.flatten()
-        sh = list(x.shape)
-        sh.append(1)
-        x = numpy.reshape(x, sh)
-        y = numpy.reshape(y, sh)
-        # Ok we have our points in 2D let's create unstructured points grid
-        if xm is None:
-            xm = x.min()
-        if xM is None:
-            xM = x.max()
-        if ym is None:
-            ym = y.min()
-        if yM is None:
-            yM = y.max()
-        z = numpy.zeros(x.shape)
-        m3 = numpy.concatenate((x, y), axis=1)
-        m3 = numpy.concatenate((m3, z), axis=1)
-        deep = True
-        pts = vtk.vtkPoints()
-        # Convert nupmy array to vtk ones
-        ppV = numpy_to_vtk_wrapper(m3, deep=deep)
-        pts.SetData(ppV)
-        xm, xM, ym, yM, tmp, tmp2 = pts.GetBounds()
-    else:
-        xm, xM, ym, yM, tmp, tmp2 = grid.GetPoints().GetBounds()
-        vg = grid
-    oldpts = pts
-    if geo is None:
-        bounds = pts.GetBounds()
-        xm, xM, ym, yM = [bounds[0], bounds[1], bounds[2], bounds[3]]
-        # We use zooming feature (gm.datawc) for linear and polar projections.
-        # We use wrapped coordinates for doing the projection
-        # such that parameters like the central meridian are set correctly.
-        if (gm.g_name == 'Gfm'):
-            # axes are not lon/lat for meshfill
-            wc = [gm.datawc_x1, gm.datawc_x2, gm.datawc_y1, gm.datawc_y2]
-        else:
-            wc = vcs.utils.getworldcoordinates(gm,
-                                               data1.getAxis(-1),
-                                               data1.getAxis(-2))
-        geo, geopts = project(pts, projection, getWrappedBounds(
-            wc, [xm, xM, ym, yM], wrap))
-        pts = geopts
-    # Sets the vertices into the grid
-    if grid is None:
-        if useStructuredGrid:
-            vg = vtk.vtkStructuredGrid()
-            vg.SetDimensions(data1.shape[1], data1.shape[0], 1)
-        else:
-            vg = vtk.vtkUnstructuredGrid()
-        vg.SetPoints(oldpts)
-        vg.SetPoints(pts)
-    else:
-        vg = grid
-    scalar = numpy_to_vtk_wrapper(data1.filled(0.).flat,
-                                  deep=False)
-    scalar.SetName("scalar")
-    vg.GetPointData().SetScalars(scalar)
-    out = {"vtk_backend_grid": vg,
-           "xm": xm,
-           "xM": xM,
-           "ym": ym,
-           "yM": yM,
-           "continents": continents,
-           "wrap": wrap,
-           "geo": geo,
-           "data": data1,
-           "data2": data2
-           }
-    return out
-
-
-# Returns the bounds list for 'axis'. If axis has n elements the
-# bounds list will have n+1 elements
-def getBoundsList(axis):
-    bounds = numpy.zeros(len(axis) + 1)
-    try:
-        axisBounds = axis.getBounds()
+def getBoundsList(axis, hasCellData, dualGrid):
+    '''
+    Returns the bounds list for 'axis'. If axis has n elements the
+    bounds list will have n+1 elements
+    If there are no explicit bounds in the file, we return None.
+    '''
+    needsCellData = (hasCellData != dualGrid)
+    axisBounds = axis.getBoundsForDualGrid(dualGrid)
+    # we still have to generate bounds for non lon-lat axes, because
+    # the default in axis.py is 2 (generate bounds only for lat/lon axis)
+    # this is used for non lon-lat plots - by default numpy arrays are POINT data
+    if (not axis.isLatitude() and not axis.isLongitude() and needsCellData):
+        axisBounds = axis.genGenericBounds()
+    if (axisBounds is not None):
+        bounds = numpy.zeros(len(axis) + 1)
         if (axis[0] < axis[-1]):
             # axis is increasing
-            bounds[:len(axis)] = axisBounds[:, 0]
-            bounds[len(axis)] = axisBounds[-1, 1]
+            if (axisBounds[0][0] < axisBounds[0][1]):
+                # interval is increasing
+                bounds[:len(axis)] = axisBounds[:, 0]
+                bounds[len(axis)] = axisBounds[-1, 1]
+            else:
+                # interval is decreasing
+                bounds[:len(axis)] = axisBounds[:, 1]
+                bounds[len(axis)] = axisBounds[-1, 0]
         else:
             # axis is decreasing
-            bounds[:len(axis)] = axisBounds[:, 1]
-            bounds[len(axis)] = axisBounds[-1, 0]
-    except Exception:
-        # No luck we have to generate bounds ourselves
-        bounds[1:-1] = (axis[:-1] + axis[1:]) / 2.
-        bounds[0] = axis[0] - (axis[1] - axis[0]) / 2.
-        bounds[-1] = axis[-1] + (axis[-1] - axis[-2]) / 2.
-    return bounds
+            if (axisBounds[0][0] < axisBounds[0][1]):
+                # interval is increasing
+                bounds[:len(axis)] = axisBounds[:, 1]
+                bounds[len(axis)] = axisBounds[-1, 0]
+            else:
+                # interval is decreasing
+                bounds[:len(axis)] = axisBounds[:, 0]
+                bounds[len(axis)] = axisBounds[-1, 1]
+        return bounds
+    else:
+        return None
 
 
-def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
+def genGrid(data1, data2, gm, deep=True, grid=None, geo=None, genVectors=False,
+            dualGrid=False):
     continents = False
     wrap = None
     m3 = None
@@ -325,6 +231,8 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
     projection = vcs.elements["projection"][gm.projection]
 
     data1 = handleProjectionEdgeCases(projection, data1)
+    if data2 is not None:
+        data2 = handleProjectionEdgeCases(projection, data2)
 
     try:  # First try to see if we can get a mesh out of this
         g = data1.getGrid()
@@ -388,6 +296,7 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
         # Ok a simple structured grid is enough
         if grid is None:
             vg = vtk.vtkStructuredGrid()
+        hasCellData = data1.hasCellData()
         if g is not None:
             # Ok we have grid
             continents = True
@@ -406,38 +315,49 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
                 lon = data1.getAxis(-1)
                 lat = data1.getAxis(-2)
                 # Ok let's try to get the bounds
-                lon2 = getBoundsList(lon)
-                lat2 = getBoundsList(lat)
+                lon2 = getBoundsList(lon, hasCellData, dualGrid)
+                lat2 = getBoundsList(lat, hasCellData, dualGrid)
+                if (lon2 is not None and lat2 is not None):
+                    lon3 = lon2
+                    lat3 = lat2
+                else:
+                    lon3 = lon
+                    lat3 = lat
+                    cellData = False
                 # Note that m,M is min,max for an increasing list
                 # and max,min for a decreasing list
-                xm = lon2[0]
-                xM = lon2[-1]
-                ym = lat2[0]
-                yM = lat2[-1]
-
-                lat = lat2[:, numpy.newaxis] * \
-                    numpy.ones(lon2.shape)[numpy.newaxis, :]
-                lon = lon2[numpy.newaxis,
-                           :] * numpy.ones(lat2.shape)[:,
-                                                       numpy.newaxis]
+                xm = lon3[0]
+                xM = lon3[-1]
+                ym = lat3[0]
+                yM = lat3[-1]
+
+                lat = lat3[:, numpy.newaxis] * numpy.ones(lon3.shape)[numpy.newaxis, :]
+                lon = lon3[numpy.newaxis, :] * numpy.ones(lat3.shape)[:, numpy.newaxis]
         elif grid is None:
             # No grid info from data, making one up
             data1 = cdms2.asVariable(data1)
             lon = data1.getAxis(-1)
             lat = data1.getAxis(-2)
             # Ok let's try to get the bounds
-            lon2 = getBoundsList(lon)
-            lat2 = getBoundsList(lat)
+            lon2 = getBoundsList(lon, hasCellData, dualGrid)
+            lat2 = getBoundsList(lat, hasCellData, dualGrid)
+            if (lon2 is not None and lat2 is not None):
+                lon3 = lon2
+                lat3 = lat2
+            else:
+                lon3 = lon
+                lat3 = lat
+                cellData = False
             # Note that m,M is min,max for an increasing list
             # and max,min for a decreasing list
-            xm = lon2[0]
-            xM = lon2[-1]
-            ym = lat2[0]
-            yM = lat2[-1]
-            lat = lat2[:, numpy.newaxis] * \
-                numpy.ones(lon2.shape)[numpy.newaxis, :]
-            lon = lon2[numpy.newaxis, :] * \
-                numpy.ones(lat2.shape)[:, numpy.newaxis]
+            xm = lon3[0]
+            xM = lon3[-1]
+            ym = lat3[0]
+            yM = lat3[-1]
+            lat = lat3[:, numpy.newaxis] * \
+                numpy.ones(lon3.shape)[numpy.newaxis, :]
+            lon = lon3[numpy.newaxis, :] * \
+                numpy.ones(lat3.shape)[:, numpy.newaxis]
         if grid is None:
             vg.SetDimensions(lat.shape[1], lat.shape[0], 1)
             lon = numpy.ma.ravel(lon)
@@ -461,15 +381,23 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
                     ym = lat.min()
                     yM = lat.max()
 
-    # scalar data
-    scalar = numpy_to_vtk_wrapper(data1.filled(0.).flat,
-                                  deep=False)
-    scalar.SetName("scalar")
-    gridForScalar = grid if grid else vg
+    # attribute data
+    gridForAttribute = grid if grid else vg
+    if genVectors:
+        attribute = generateVectorArray(data1, data2, gridForAttribute)
+    else:
+        attribute = numpy_to_vtk_wrapper(data1.filled(0.).flat,
+                                         deep=False)
+        attribute.SetName("scalar")
     if cellData:
-        gridForScalar.GetCellData().SetScalars(scalar)
+        attributes = gridForAttribute.GetCellData()
+    else:
+        attributes = gridForAttribute.GetPointData()
+    if genVectors:
+        attributes.SetVectors(attribute)
     else:
-        gridForScalar.GetPointData().SetScalars(scalar)
+        attributes.SetScalars(attribute)
+
     if grid is None:
         # First create the points/vertices (in vcs terms)
         pts = vtk.vtkPoints()
@@ -489,8 +417,8 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
             # wrapping
             pedigreeId = vtk.vtkIntArray()
             pedigreeId.SetName("PedigreeIds")
-            pedigreeId.SetNumberOfTuples(scalar.GetNumberOfTuples())
-            for i in range(0, scalar.GetNumberOfTuples()):
+            pedigreeId.SetNumberOfTuples(attribute.GetNumberOfTuples())
+            for i in range(0, attribute.GetNumberOfTuples()):
                 pedigreeId.SetValue(i, i)
             if cellData:
                 vg.GetCellData().SetPedigreeIds(pedigreeId)
@@ -533,7 +461,8 @@ def genGrid(data1, data2, gm, deep=True, grid=None, geo=None):
            "wrap": wrap,
            "geo": geo,
            "cellData": cellData,
-           "data": data1
+           "data": data1,
+           "data2": data2
            }
     return out
 
@@ -880,6 +809,7 @@ def doWrap(Act, wc, wrap=[0., 360], fastClip=True):
     if wrap is None:
         return Act
     Mapper = Act.GetMapper()
+    Mapper.Update()
     data = Mapper.GetInput()
     # insure that GLOBALIDS are not removed by the append filter
     attributes = data.GetCellData()
@@ -1651,16 +1581,16 @@ def __build_ld__():
 
 def stippleLine(prop, line_type):
     if line_type == 'long-dash':
-        prop.SetLineStipplePattern(int('1111111100000000', 2))
+        prop.SetLineStipplePattern(int('0000111111111111', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'dot':
-        prop.SetLineStipplePattern(int('1010101010101010', 2))
+        prop.SetLineStipplePattern(int('0101010101010101', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'dash':
-        prop.SetLineStipplePattern(int('1111000011110000', 2))
+        prop.SetLineStipplePattern(int('0001111100011111', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'dash-dot':
-        prop.SetLineStipplePattern(int('0011110000110011', 2))
+        prop.SetLineStipplePattern(int('0101111101011111', 2))
         prop.SetLineStippleRepeatFactor(1)
     elif line_type == 'solid':
         prop.SetLineStipplePattern(int('1111111111111111', 2))
@@ -1853,27 +1783,8 @@ def generateVectorArray(data1, data2, vtk_grid):
     w = numpy.concatenate((u, v), axis=1)
     w = numpy.concatenate((w, z), axis=1)
 
-    # HACK The grid returned by vtk2vcs.genGrid is not the same size as the
-    # data array. I'm not sure where the issue is...for now let's just zero-pad
-    # data array so that we can at least test rendering until Charles gets
-    # back from vacation:
-    wLen = len(w)
-    numPts = vtk_grid.GetNumberOfPoints()
-    if wLen != numPts:
-        warnings.warn("!!! Warning during vector plotting: "
-                      "Number of points does not "
-                      "match the number of vectors to be "
-                      "glyphed (%s points vs %s "
-                      "vectors). The vectors will be "
-                      "padded/truncated to match for "
-                      "rendering purposes, but the resulting "
-                      "image should not be "
-                      "trusted." % (numPts, wLen))
-        newShape = (numPts,) + w.shape[1:]
-        w = numpy.ma.resize(w, newShape)
-
     w = numpy_to_vtk_wrapper(w, deep=False)
-    w.SetName("vectors")
+    w.SetName("vector")
     return w
 
 
diff --git a/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py b/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
index 7726521728c320fc47d2f969ff9bf1cf209dc095..f2a3ea602014b8ab46bece9e8e42ecb5fbdc1f26 100644
--- a/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/boxfillpipeline.py
@@ -24,6 +24,7 @@ class BoxfillPipeline(Pipeline2D):
         self._contourLabels = None
         self._mappers = None
         self._customBoxfillArgs = {}
+        self._needsCellData = True
 
     def _updateScalarData(self):
         """Overrides baseclass implementation."""
@@ -47,57 +48,11 @@ class BoxfillPipeline(Pipeline2D):
 
     def _updateContourLevelsAndColorsForBoxfill(self):
         """Set contour information for a standard boxfill."""
-        # Compute levels
-        nlev = (self._gm.color_2 - self._gm.color_1) + 1
-        if numpy.allclose(self._gm.level_1, 1.e20) or \
-           numpy.allclose(self._gm.level_2, 1.e20):
-            self._contourLevels = vcs.mkscale(self._scalarRange[0],
-                                              self._scalarRange[1])
-            if len(self._contourLevels) == 1:  # constant value ?
-                self._contourLevels = [self._contourLevels[0],
-                                       self._contourLevels[0] + .00001]
-            self._contourLabels = vcs.mklabels(self._contourLevels)
-            dx = (self._contourLevels[-1] - self._contourLevels[0]) / nlev
-            self._contourLevels = numpy.arange(self._contourLevels[0],
-                                               self._contourLevels[-1] + dx,
-                                               dx)
-        else:
-            if self._gm.boxfill_type == "log10":
-                levslbls = vcs.mkscale(numpy.ma.log10(self._gm.level_1),
-                                       numpy.ma.log10(self._gm.level_2))
-                self._contourLevels = vcs.mkevenlevels(
-                    numpy.ma.log10(self._gm.level_1),
-                    numpy.ma.log10(self._gm.level_2), nlev=nlev)
-            else:
-                levslbls = vcs.mkscale(self._gm.level_1, self._gm.level_2)
-                self._contourLevels = vcs.mkevenlevels(self._gm.level_1,
-                                                       self._gm.level_2,
-                                                       nlev=nlev)
-            if len(self._contourLevels) > 25:
-                # Too many colors/levels need to prettyfy this for legend
-                self._contourLabels = vcs.mklabels(levslbls)
-                # Make sure extremes are in
-                legd2 = vcs.mklabels([self._contourLevels[0],
-                                      self._contourLevels[-1]])
-                self._contourLabels.update(legd2)
-            else:
-                self._contourLabels = vcs.mklabels(self._contourLevels)
-            if self._gm.boxfill_type == "log10":
-                logLabels = {}
-                for key in self._contourLabels.keys():
-                    value = self._contourLabels[key]
-                    newKey = float(numpy.ma.log10(value))
-                    logLabels[newKey] = value
-                self._contourLabels = logLabels
-
+        self._contourLevels = self._gm.getlevels(self._scalarRange[0], self._scalarRange[1])
+        self._contourLabels = self._gm.getlegendlabels(self._contourLevels)
         # Use consecutive colors:
         self._contourColors = range(self._gm.color_1, self._gm.color_2 + 1)
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-
     def _plotInternal(self):
         """Overrides baseclass implementation."""
         # Special case for custom boxfills:
@@ -175,8 +130,6 @@ class BoxfillPipeline(Pipeline2D):
                 geo=self._vtkGeoTransform,
                 priority=self._template.data.priority,
                 create_renderer=(dataset_renderer is None))
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
 
         for act in patternActors:
             if self._vtkGeoTransform is None:
@@ -256,7 +209,6 @@ class BoxfillPipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
 
     def _plotInternalBoxfill(self):
         """Implements the logic to render a non-custom boxfill."""
@@ -286,7 +238,7 @@ class BoxfillPipeline(Pipeline2D):
 
         # Colortable bit
         # make sure length match
-        numLevels = len(self._contourLevels)
+        numLevels = len(self._contourLevels) - 1
         while len(self._contourColors) < numLevels:
             self._contourColors.append(self._contourColors[-1])
 
diff --git a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
index e2ee5360ef183cf0d2247ba4f2d87da43dc760e5..55098f9e5c36e23239a7e5b39f78050dbe6e2189 100644
--- a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py
@@ -13,44 +13,19 @@ class IsofillPipeline(Pipeline2D):
 
     def __init__(self, gm, context_):
         super(IsofillPipeline, self).__init__(gm, context_)
-
-    def _updateVTKDataSet(self):
-        """Overrides baseclass implementation."""
-        # Force point data for isoline/isofill
-        genGridDict = vcs2vtk.genGridOnPoints(self._data1, self._gm,
-                                              deep=False,
-                                              grid=self._vtkDataSet,
-                                              geo=self._vtkGeoTransform)
-        genGridDict["cellData"] = False
-        self._data1 = genGridDict["data"]
-        self._updateFromGenGridDict(genGridDict)
+        self._needsCellData = False
 
     def _updateContourLevelsAndColors(self):
         self._updateContourLevelsAndColorsGeneric()
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        if self._useCellScalars:
-            # Sets data to point instead of just cells
-            c2p = vtk.vtkCellDataToPointData()
-            c2p.SetInputData(self._vtkDataSet)
-            c2p.Update()
-            # For contouring duplicate points seem to confuse it
-            self._vtkPolyDataFilter.SetInputConnection(c2p.GetOutputPort())
-        else:
-            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-        self._vtkPolyDataFilter.Update()
-        self._resultDict["vtk_backend_filter"] = self._vtkPolyDataFilter
-
     def _plotInternal(self):
         """Overrides baseclass implementation."""
 
-        prepedContours = self._prepContours()
-        tmpLevels = prepedContours["tmpLevels"]
-        tmpIndices = prepedContours["tmpIndices"]
-        tmpColors = prepedContours["tmpColors"]
-        tmpOpacities = prepedContours["tmpOpacities"]
+        preppedContours = self._prepContours()
+        tmpLevels = preppedContours["tmpLevels"]
+        tmpIndices = preppedContours["tmpIndices"]
+        tmpColors = preppedContours["tmpColors"]
+        tmpOpacities = preppedContours["tmpOpacities"]
         style = self._gm.fillareastyle
 
         luts = []
@@ -183,8 +158,6 @@ class IsofillPipeline(Pipeline2D):
                 geo=self._vtkGeoTransform,
                 priority=self._template.data.priority,
                 create_renderer=(dataset_renderer is None))
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
         for act in patternActors:
             self._context().fitToViewport(
                 act, vp,
@@ -251,4 +224,3 @@ class IsofillPipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
diff --git a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
index af79a45b862c6329ae9151f640936c5666233a66..3406824f0bba7d938922ff394c9b833b053638fe 100644
--- a/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/isolinepipeline.py
@@ -12,21 +12,7 @@ class IsolinePipeline(Pipeline2D):
 
     def __init__(self, gm, context_):
         super(IsolinePipeline, self).__init__(gm, context_)
-
-    def _updateVTKDataSet(self):
-        """Overrides baseclass implementation."""
-        # Force point data for isoline/isofill
-        genGridDict = vcs2vtk.genGridOnPoints(self._data1, self._gm,
-                                              deep=False,
-                                              grid=self._vtkDataSet,
-                                              geo=self._vtkGeoTransform)
-        genGridDict["cellData"] = False
-        self._data1 = genGridDict["data"]
-        self._updateFromGenGridDict(genGridDict)
-
-        data = vcs2vtk.numpy_to_vtk_wrapper(self._data1.filled(0.).flat,
-                                            deep=False)
-        self._vtkDataSet.GetPointData().SetScalars(data)
+        self._needsCellData = False
 
     def _updateContourLevelsAndColors(self):
         """Overrides baseclass implementation."""
@@ -49,20 +35,6 @@ class IsolinePipeline(Pipeline2D):
         # Contour colors:
         self._contourColors = self._gm.linecolors
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        if self._useCellScalars:
-            # Sets data to point instead of just cells
-            c2p = vtk.vtkCellDataToPointData()
-            c2p.SetInputData(self._vtkDataSet)
-            c2p.Update()
-            # For contouring duplicate points seem to confuse it
-            self._vtkPolyDataFilter.SetInputConnection(c2p.GetOutputPort())
-        else:
-            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-        self._resultDict["vtk_backend_filter"] = self._vtkPolyDataFilter
-
     def _plotInternal(self):
         """Overrides baseclass implementation."""
         tmpLevels = []
@@ -164,7 +136,7 @@ class IsolinePipeline(Pipeline2D):
             numLevels = len(l)
 
             cot = vtk.vtkContourFilter()
-            if self._useCellScalars:
+            if self._hasCellData:
                 cot.SetInputConnection(self._vtkPolyDataFilter.GetOutputPort())
             else:
                 cot.SetInputData(self._vtkDataSet)
@@ -172,7 +144,6 @@ class IsolinePipeline(Pipeline2D):
 
             for n in range(numLevels):
                 cot.SetValue(n, l[n])
-            cot.SetValue(numLevels, l[-1])
             # TODO remove update
             cot.Update()
 
@@ -301,8 +272,6 @@ class IsolinePipeline(Pipeline2D):
                 create_renderer=(dataset_renderer is None))
 
             countLevels += len(l)
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
         if len(textprops) > 0:
             self._resultDict["vtk_backend_contours_labels_text_properties"] = \
                 textprops
@@ -360,4 +329,3 @@ class IsolinePipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
diff --git a/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py b/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
index 90c06718d2d5a0b8a45e39acc625e7610d537cfe..64a95c4e31f014bc5a665a8e0a3cc83b7285cdab 100644
--- a/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/meshfillpipeline.py
@@ -15,6 +15,7 @@ class MeshfillPipeline(Pipeline2D):
         super(MeshfillPipeline, self).__init__(gm, context_)
 
         self._patternActors = []
+        self._needsCellData = True
 
     def _updateScalarData(self):
         """Overrides baseclass implementation."""
@@ -25,16 +26,6 @@ class MeshfillPipeline(Pipeline2D):
     def _updateContourLevelsAndColors(self):
         self._updateContourLevelsAndColorsGeneric()
 
-    def _createPolyDataFilter(self):
-        """Overrides baseclass implementation."""
-        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
-        if self._useCellScalars:
-            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
-        else:
-            p2c = vtk.vtkPointDataToCellData()
-            p2c.SetInputData(self._vtkDataSet)
-            self._vtkPolyDataFilter.SetInputConnection(p2c.GetOutputPort())
-
     def _plotInternal(self):
 
         prepedContours = self._prepContours()
@@ -204,8 +195,6 @@ class MeshfillPipeline(Pipeline2D):
                 geo=self._vtkGeoTransform,
                 priority=self._template.data.priority,
                 create_renderer=(dataset_renderer is None))
-        self._resultDict['dataset_renderer'] = dataset_renderer
-        self._resultDict['dataset_scale'] = (xScale, yScale)
         for act in self._patternActors:
             if self._vtkGeoTransform is None:
                 # If using geofilter on wireframed does not get wrapped not sure
@@ -279,7 +268,6 @@ class MeshfillPipeline(Pipeline2D):
                 vp, self._template.data.priority,
                 vtk_backend_grid=self._vtkDataSet,
                 dataset_bounds=self._vtkDataSetBounds)
-            self._resultDict['continents_renderer'] = continents_renderer
 
     def getPlottingBounds(self):
         """gm.datawc if it is set or dataset_bounds
diff --git a/Packages/vcs/vcs/vcsvtk/pipeline2d.py b/Packages/vcs/vcs/vcsvtk/pipeline2d.py
index 4d370db8aebe87ca6b037739ed1bde71b4a729d3..dc12f3f5cdb050a01b5009a99e2a9731806b14f5 100644
--- a/Packages/vcs/vcs/vcsvtk/pipeline2d.py
+++ b/Packages/vcs/vcs/vcsvtk/pipeline2d.py
@@ -1,9 +1,10 @@
 from .pipeline import Pipeline
 from .. import vcs2vtk
 
-import vcs
-import numpy
 import fillareautils
+import numpy
+import vcs
+import vtk
 import warnings
 
 
@@ -45,8 +46,11 @@ class IPipeline2D(Pipeline):
         - _useContinents: Whether or not to plot continents.
         - _dataWrapModulo: Wrap modulo as [YMax, XMax], in degrees. 0 means
             'no wrapping'.
-        - _useCellScalars: True if data is applied to cell, false if data is
+        - _hasCellData: True if data is applied to cell, false if data is
             applied to points.
+        - _needsCellData: True if the plot needs cell scalars, false if
+            the plot needs point scalars
+        - _needsVectors: True if the plot needs vectors, false if it needs scalars
         - _scalarRange: The range of _data1 as tuple(float min, float max)
         - _maskedDataMapper: The mapper used to render masked data.
     """
@@ -74,7 +78,9 @@ class IPipeline2D(Pipeline):
         self._colorMap = None
         self._useContinents = None
         self._dataWrapModulo = None
-        self._useCellScalars = None
+        self._hasCellData = None
+        self._needsCellData = None
+        self._needsVectors = False
         self._scalarRange = None
         self._maskedDataMapper = None
 
@@ -82,7 +88,7 @@ class IPipeline2D(Pipeline):
         """Create _data1 and _data2 from _originalData1 and _originalData2."""
         raise NotImplementedError("Missing override.")
 
-    def _updateVTKDataSet(self):
+    def _updateVTKDataSet(self, plotBasedDualGrid):
         """Apply the vcs data to _vtkDataSet, creating it if necessary."""
         raise NotImplementedError("Missing override.")
 
@@ -272,10 +278,13 @@ class Pipeline2D(IPipeline2D):
 
         # Preprocess the input scalar data:
         self._updateScalarData()
+        self._min = self._data1.min()
+        self._max = self._data1.max()
         self._scalarRange = vcs.minmax(self._data1)
 
         # Create/update the VTK dataset.
-        self._updateVTKDataSet()
+        plotBasedDualGrid = kargs.get('plot_based_dual_grid', True)
+        self._updateVTKDataSet(plotBasedDualGrid)
 
         # Update the results:
         self._resultDict["vtk_backend_grid"] = self._vtkDataSet
@@ -308,19 +317,65 @@ class Pipeline2D(IPipeline2D):
         """Overrides baseclass implementation."""
         self._data1 = self._context().trimData2D(self._originalData1)
         self._data2 = self._context().trimData2D(self._originalData2)
-        self._min = self._data1.min()
-        self._max = self._data1.max()
 
-    def _updateVTKDataSet(self):
-        """Overrides baseclass implementation."""
+    def _updateVTKDataSet(self, plotBasedDualGrid):
+        """
+        """
+        if (plotBasedDualGrid):
+            hasCellData = self._data1.hasCellData()
+            dualGrid = (hasCellData != self._needsCellData)
+        else:
+            dualGrid = False
         genGridDict = vcs2vtk.genGrid(self._data1, self._data2, self._gm,
                                       deep=False,
                                       grid=self._vtkDataSet,
-                                      geo=self._vtkGeoTransform)
-
+                                      geo=self._vtkGeoTransform, genVectors=self._needsVectors,
+                                      dualGrid=dualGrid)
         self._data1 = genGridDict["data"]
+        self._data2 = genGridDict["data2"]
         self._updateFromGenGridDict(genGridDict)
 
+    def _createPolyDataFilter(self):
+        """This is only used when we use the grid stored in the file for all plots."""
+        self._vtkPolyDataFilter = vtk.vtkDataSetSurfaceFilter()
+        if self._hasCellData == self._needsCellData:
+            self._vtkPolyDataFilter.SetInputData(self._vtkDataSet)
+        elif self._hasCellData:
+            # use cells but needs points
+            c2p = vtk.vtkCellDataToPointData()
+            c2p.PassCellDataOn()
+            c2p.SetInputData(self._vtkDataSet)
+            self._vtkPolyDataFilter.SetInputConnection(c2p.GetOutputPort())
+        else:
+            # use points but needs cells
+            p2c = vtk.vtkPointDataToCellData()
+            p2c.SetInputData(self._vtkDataSet)
+            # For contouring duplicate points seem to confuse it
+            self._vtkPolyDataFilter.SetInputConnection(p2c.GetOutputPort())
+        self._vtkPolyDataFilter.Update()
+        self._resultDict["vtk_backend_filter"] = self._vtkPolyDataFilter
+        # create an actor and a renderer for the surface mesh.
+        # this is used for displaying point information using the hardware selection
+        mapper = vtk.vtkPolyDataMapper()
+        mapper.SetInputConnection(self._vtkPolyDataFilter.GetOutputPort())
+        act = vtk.vtkActor()
+        act.SetMapper(mapper)
+        vp = self._resultDict.get(
+            'ratio_autot_viewport',
+            [self._template.data.x1, self._template.data.x2,
+             self._template.data.y1, self._template.data.y2])
+        plotting_dataset_bounds = self.getPlottingBounds()
+        surface_renderer, xScale, yScale = self._context().fitToViewport(
+            act, vp,
+            wc=plotting_dataset_bounds, geoBounds=self._vtkDataSet.GetBounds(),
+            geo=self._vtkGeoTransform,
+            priority=self._template.data.priority,
+            create_renderer=True)
+        self._resultDict['surface_renderer'] = surface_renderer
+        self._resultDict['surface_scale'] = (xScale, yScale)
+        if (surface_renderer):
+            surface_renderer.SetDraw(False)
+
     def _updateFromGenGridDict(self, genGridDict):
         """Overrides baseclass implementation."""
         self._vtkDataSet = genGridDict['vtk_backend_grid']
@@ -329,7 +384,7 @@ class Pipeline2D(IPipeline2D):
         self._useContinents = genGridDict['continents']
         self._dataWrapModulo = genGridDict['wrap']
         self._vtkGeoTransform = genGridDict['geo']
-        self._useCellScalars = genGridDict['cellData']
+        self._hasCellData = genGridDict['cellData']
 
     def _createMaskedDataMapper(self):
         """Overrides baseclass implementation."""
@@ -338,11 +393,11 @@ class Pipeline2D(IPipeline2D):
         if color is not None:
             color = self.getColorIndexOrRGBA(_colorMap, color)
         self._maskedDataMapper = vcs2vtk.putMaskOnVTKGrid(
-            self._data1, self._vtkDataSet, color, self._useCellScalars,
+            self._data1, self._vtkDataSet, color, self._hasCellData,
             deep=False)
 
         self._resultDict["vtk_backend_missing_mapper"] = (
-            self._maskedDataMapper, color, self._useCellScalars)
+            self._maskedDataMapper, color, self._hasCellData)
 
     def getPlottingBounds(self):
         """gm.datawc if it is set or dataset_bounds if there is not geographic projection
diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
index b8c4eaead4e706c4c38146a67ebcb359deeab0c5..642884bc6c3170400a498d75e6926b4ff7cd2355 100644
--- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
+++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py
@@ -1,67 +1,49 @@
-from .pipeline import Pipeline
+from .pipeline2d import Pipeline2D
 
 import vcs
 from vcs import vcs2vtk
 import vtk
 
 
-class VectorPipeline(Pipeline):
+class VectorPipeline(Pipeline2D):
 
     """Implementation of the Pipeline interface for VCS vector plots."""
 
     def __init__(self, gm, context_):
         super(VectorPipeline, self).__init__(gm, context_)
+        self._needsCellData = False
+        self._needsVectors = True
 
-    def plot(self, data1, data2, tmpl, grid, transform, **kargs):
+    def _plotInternal(self):
         """Overrides baseclass implementation."""
         # Preserve time and z axis for plotting these inof in rendertemplate
-        geo = None  # to make flake8 happy
         projection = vcs.elements["projection"][self._gm.projection]
-        returned = {}
-        taxis = data1.getTime()
-        if data1.ndim > 2:
-            zaxis = data1.getAxis(-3)
+        taxis = self._originalData1.getTime()
+        scaleFactor = 1.0
+
+        if self._originalData1.ndim > 2:
+            zaxis = self._originalData1.getAxis(-3)
         else:
             zaxis = None
 
-        # Ok get3 only the last 2 dims
-        data1 = self._context().trimData2D(data1)
-        data2 = self._context().trimData2D(data2)
-
         scale = 1.0
         lat = None
         lon = None
 
-        latAccessor = data1.getLatitude()
-        lonAccesrsor = data1.getLongitude()
+        latAccessor = self._data1.getLatitude()
+        lonAccessor = self._data1.getLongitude()
         if latAccessor:
             lat = latAccessor[:]
-        if lonAccesrsor:
-            lon = lonAccesrsor[:]
-
-        gridGenDict = vcs2vtk.genGridOnPoints(data1, self._gm, deep=False, grid=grid,
-                                              geo=transform, data2=data2)
-
-        data1 = gridGenDict["data"]
-        data2 = gridGenDict["data2"]
-        geo = gridGenDict["geo"]
-
-        grid = gridGenDict['vtk_backend_grid']
-        xm = gridGenDict['xm']
-        xM = gridGenDict['xM']
-        ym = gridGenDict['ym']
-        yM = gridGenDict['yM']
-        continents = gridGenDict['continents']
-        self._dataWrapModulo = gridGenDict['wrap']
-        geo = gridGenDict['geo']
-
-        if geo is not None:
+        if lonAccessor:
+            lon = lonAccessor[:]
+
+        if self._vtkGeoTransform is not None:
             newv = vtk.vtkDoubleArray()
             newv.SetNumberOfComponents(3)
             newv.InsertTupleValue(0, [lon.min(), lat.min(), 0])
             newv.InsertTupleValue(1, [lon.max(), lat.max(), 0])
 
-            vcs2vtk.projectArray(newv, projection, [xm, xM, ym, yM])
+            vcs2vtk.projectArray(newv, projection, self._vtkDataSetBounds)
             dimMin = [0, 0, 0]
             dimMax = [0, 0, 0]
 
@@ -81,19 +63,6 @@ class VectorPipeline(Pipeline):
         else:
             scale = 1.0
 
-        returned["vtk_backend_grid"] = grid
-        returned["vtk_backend_geo"] = geo
-        missingMapper = vcs2vtk.putMaskOnVTKGrid(data1, grid, None, False,
-                                                 deep=False)
-
-        # None/False are for color and cellData
-        # (sent to vcs2vtk.putMaskOnVTKGrid)
-        returned["vtk_backend_missing_mapper"] = (missingMapper, None, False)
-
-        w = vcs2vtk.generateVectorArray(data1, data2, grid)
-
-        grid.GetPointData().AddArray(w)
-
         # Vector attempt
         l = self._gm.line
         if l is None:
@@ -117,24 +86,80 @@ class VectorPipeline(Pipeline):
         arrow.SetOutputPointsPrecision(vtk.vtkAlgorithm.DOUBLE_PRECISION)
         arrow.FilledOff()
 
+        polydata = self._vtkPolyDataFilter.GetOutput()
+        vectors = polydata.GetPointData().GetVectors()
+
+        if self._gm.scaletype == 'constant' or\
+           self._gm.scaletype == 'constantNNormalize' or\
+           self._gm.scaletype == 'constantNLinear':
+            scaleFactor = scale * 2.0 * self._gm.scale
+        else:
+            scaleFactor = 1.0
+
         glyphFilter = vtk.vtkGlyph2D()
-        glyphFilter.SetInputData(grid)
-        glyphFilter.SetInputArrayToProcess(1, 0, 0, 0, "vectors")
+        glyphFilter.SetInputData(polydata)
+        glyphFilter.SetInputArrayToProcess(1, 0, 0, 0, "vector")
         glyphFilter.SetSourceConnection(arrow.GetOutputPort())
         glyphFilter.SetVectorModeToUseVector()
 
         # Rotate arrows to match vector data:
         glyphFilter.OrientOn()
+        glyphFilter.ScalingOn()
 
-        # Scale to vector magnitude:
         glyphFilter.SetScaleModeToScaleByVector()
-        glyphFilter.SetScaleFactor(scale * 2.0 * self._gm.scale)
 
-        # These are some unfortunately named methods. It does *not* clamp the
-        # scale range to [min, max], but rather remaps the range
-        # [min, max] --> [0, 1].
-        glyphFilter.ClampingOn()
-        glyphFilter.SetRange(0.01, 1.0)
+        if self._gm.scaletype == 'normalize' or self._gm.scaletype == 'linear' or\
+           self._gm.scaletype == 'constantNNormalize' or self._gm.scaletype == 'constantNLinear':
+
+            # Find the min and max vector magnitudes
+            maxNorm = vectors.GetMaxNorm()
+
+            if maxNorm == 0:
+                maxNorm = 1.0
+
+            if self._gm.scaletype == 'normalize' or self._gm.scaletype == 'constantNNormalize':
+                scaleFactor /= maxNorm
+
+            if self._gm.scaletype == 'linear' or self._gm.scaletype == 'constantNLinear':
+                minNorm = None
+                maxNorm = None
+
+                noOfComponents = vectors.GetNumberOfComponents()
+                for i in range(0, vectors.GetNumberOfTuples()):
+                    norm = vtk.vtkMath.Norm(vectors.GetTuple(i), noOfComponents)
+
+                    if (minNorm is None or norm < minNorm):
+                        minNorm = norm
+                    if (maxNorm is None or norm > maxNorm):
+                        maxNorm = norm
+
+                if maxNorm == 0:
+                    maxNorm = 1.0
+
+                scalarArray = vtk.vtkDoubleArray()
+                scalarArray.SetNumberOfComponents(1)
+                scalarArray.SetNumberOfValues(vectors.GetNumberOfTuples())
+
+                oldRange = maxNorm - minNorm
+                oldRange = 1.0 if oldRange == 0.0 else oldRange
+
+                # New range min, max.
+                newRangeValues = self._gm.scalerange
+                newRange = newRangeValues[1] - newRangeValues[0]
+
+                for i in range(0, vectors.GetNumberOfTuples()):
+                    norm = vtk.vtkMath.Norm(vectors.GetTuple(i), noOfComponents)
+                    newValue = (((norm - minNorm) * newRange) / oldRange) + newRangeValues[0]
+                    scalarArray.SetValue(i, newValue)
+                polydata.GetPointData().SetScalars(scalarArray)
+
+                # Scale to vector magnitude:
+                # NOTE: Currently we compute our own scaling factor since VTK does
+                # it by clamping the values > max to max  and values < min to min
+                # and not remap the range.
+                glyphFilter.SetScaleModeToScaleByScalar()
+
+        glyphFilter.SetScaleFactor(scaleFactor)
 
         mapper = vtk.vtkPolyDataMapper()
 
@@ -152,22 +177,20 @@ class VectorPipeline(Pipeline):
 
         plotting_dataset_bounds = vcs2vtk.getPlottingBounds(
             vcs.utils.getworldcoordinates(self._gm,
-                                          data1.getAxis(-1),
-                                          data1.getAxis(-2)),
-            [xm, xM, ym, yM], geo)
+                                          self._data1.getAxis(-1),
+                                          self._data1.getAxis(-2)),
+            self._vtkDataSetBounds, self._vtkGeoTransform)
         x1, x2, y1, y2 = plotting_dataset_bounds
-        if geo is None:
+        if self._vtkGeoTransform is None:
             wc = plotting_dataset_bounds
         else:
             xrange = list(act.GetXRange())
             yrange = list(act.GetYRange())
             wc = [xrange[0], xrange[1], yrange[0], yrange[1]]
 
-        if (transform and kargs.get('ratio', '0') == 'autot'):
-            returned['ratio_autot_viewport'] = self._processRatioAutot(tmpl, grid)
-
-        vp = returned.get('ratio_autot_viewport',
-                          [tmpl.data.x1, tmpl.data.x2, tmpl.data.y1, tmpl.data.y2])
+        vp = self._resultDict.get('ratio_autot_viewport',
+                                  [self._template.data.x1, self._template.data.x2,
+                                   self._template.data.y1, self._template.data.y2])
         # look for previous dataset_bounds different than ours and
         # modify the viewport so that the datasets are alligned
         # Hack to fix the case when the user does not specify gm.datawc_...
@@ -189,31 +212,29 @@ class VectorPipeline(Pipeline):
         dataset_renderer, xScale, yScale = self._context().fitToViewport(
             act, vp,
             wc=wc,
-            priority=tmpl.data.priority,
+            priority=self._template.data.priority,
             create_renderer=True)
-        returned['dataset_renderer'] = dataset_renderer
-        returned['dataset_scale'] = (xScale, yScale)
-        bounds = [min(xm, xM), max(xm, xM), min(ym, yM), max(ym, yM)]
-        kwargs = {'vtk_backend_grid': grid,
-                  'dataset_bounds': bounds,
+        kwargs = {'vtk_backend_grid': self._vtkDataSet,
+                  'dataset_bounds': self._vtkDataSetBounds,
                   'plotting_dataset_bounds': plotting_dataset_bounds}
-        if ('ratio_autot_viewport' in returned):
+        if ('ratio_autot_viewport' in self._resultDict):
             kwargs["ratio_autot_viewport"] = vp
-        returned.update(self._context().renderTemplate(
-            tmpl, data1,
+        self._resultDict.update(self._context().renderTemplate(
+            self._template, self._data1,
             self._gm, taxis, zaxis, **kwargs))
 
         if self._context().canvas._continents is None:
-            continents = False
-        if continents:
+            self._useContinents = False
+        if self._useContinents:
             continents_renderer, xScale, yScale = self._context().plotContinents(
                 plotting_dataset_bounds, projection,
-                self._dataWrapModulo, vp, tmpl.data.priority,
-                vtk_backend_grid=grid,
-                dataset_bounds=bounds)
-            returned["continents_renderer"] = continents_renderer
-        returned["vtk_backend_actors"] = [[act, plotting_dataset_bounds]]
-        returned["vtk_backend_glyphfilters"] = [glyphFilter]
-        returned["vtk_backend_luts"] = [[None, None]]
-
-        return returned
+                self._dataWrapModulo, vp, self._template.data.priority,
+                vtk_backend_grid=self._vtkDataSet,
+                dataset_bounds=self._vtkDataSetBounds)
+        self._resultDict["vtk_backend_actors"] = [[act, plotting_dataset_bounds]]
+        self._resultDict["vtk_backend_glyphfilters"] = [glyphFilter]
+        self._resultDict["vtk_backend_luts"] = [[None, None]]
+
+    def _updateContourLevelsAndColors(self):
+        """Overrides baseclass implementation."""
+        pass
diff --git a/Packages/vcs/vcs/vector.py b/Packages/vcs/vcs/vector.py
index acea94c047c2ecc223dbe8b2df17f8c13c09cc54..2c49bfd30d2bda86271c9bf267e9a26176605f58 100755
--- a/Packages/vcs/vcs/vector.py
+++ b/Packages/vcs/vcs/vector.py
@@ -131,7 +131,7 @@ def process_src(nm, code):
 class Gv(object):
 
     """
- Class:	Gv				# Vector
+ Class: Gv              # Vector
 
  Description of Gv Class:
     The vector graphics method displays a vector plot of a 2D vector field. Vectors
@@ -145,76 +145,76 @@ class Gv(object):
     entry.
 
  Other Useful Functions:
-         a=vcs.init()			# Constructor
-         a.show('vector')		# Show predefined vector graphics methods
-         a.show('line')			# Show predefined VCS line objects
-         a.setcolormap("AMIP")		# Change the VCS color Map
-         a.vector(s1, s2, v,'default')	# Plot data 's1', and 's2' with vector 'v'
+         a=vcs.init()           # Constructor
+         a.show('vector')       # Show predefined vector graphics methods
+         a.show('line')         # Show predefined VCS line objects
+         a.setcolormap("AMIP")      # Change the VCS color Map
+         a.vector(s1, s2, v,'default')  # Plot data 's1', and 's2' with vector 'v'
                                          and 'default' template
-         a.update()		 	# Updates the VCS Canvas at user's request
-         a.mode=1, or 0 	 	# If 1, then automatic update, else if
+         a.update()         # Updates the VCS Canvas at user's request
+         a.mode=1, or 0         # If 1, then automatic update, else if
                                           0, then use update function to
                                           update the VCS Canvas.
 
  Example of Use:
     a=vcs.init()
     To Create a new instance of vector use:
-     vc=a.createvector('new','quick')	# Copies content of 'quick' to 'new'
-     vc=a.createvector('new') 		# Copies content of 'default' to 'new'
+     vc=a.createvector('new','quick')   # Copies content of 'quick' to 'new'
+     vc=a.createvector('new')       # Copies content of 'default' to 'new'
 
     To Modify an existing vector use:
      vc=a.getvector('AMIP_psl')
 
-    vc.list()  				# Will list all the vector attribute values
-    vc.projection='linear'   		# Can only be 'linear'
+    vc.list()               # Will list all the vector attribute values
+    vc.projection='linear'          # Can only be 'linear'
     lon30={-180:'180W',-150:'150W',0:'Eq'}
     vc.xticlabels1=lon30
     vc.xticlabels2=lon30
-    vc.xticlabels(lon30, lon30)  	# Will set them both
+    vc.xticlabels(lon30, lon30)     # Will set them both
     vc.xmtics1=''
     vc.xmtics2=''
-    vc.xmtics(lon30, lon30)  		# Will set them both
+    vc.xmtics(lon30, lon30)         # Will set them both
     vc.yticlabels1=lat10
     vc.yticlabels2=lat10
-    vc.yticlabels(lat10, lat10)  	# Will set them both
+    vc.yticlabels(lat10, lat10)     # Will set them both
     vc.ymtics1=''
     vc.ymtics2=''
-    vc.ymtics(lat10, lat10)  		# Will set them both
+    vc.ymtics(lat10, lat10)         # Will set them both
     vc.datawc_y1=-90.0
     vc.datawc_y2=90.0
     vc.datawc_x1=-180.0
     vc.datawc_x2=180.0
-    vc.datawc(-90, 90, -180, 180)  	# Will set them all
+    vc.datawc(-90, 90, -180, 180)   # Will set them all
     xaxisconvert='linear'
     yaxisconvert='linear'
-    vc.xyscale('linear', 'area_wt')  	# Will set them both
+    vc.xyscale('linear', 'area_wt')     # Will set them both
 
     Specify the line style:
-     vc.line=0 				# Same as vc.line='solid'
-     vc.line=1 				# Same as vc.line='dash'
-     vc.line=2 				# Same as vc.line='dot'
-     vc.line=3 				# Same as vc.line='dash-dot'
-     vc.line=4 				# Same as vc.line='long-dot'
+     vc.line=0              # Same as vc.line='solid'
+     vc.line=1              # Same as vc.line='dash'
+     vc.line=2              # Same as vc.line='dot'
+     vc.line=3              # Same as vc.line='dash-dot'
+     vc.line=4              # Same as vc.line='long-dot'
 
     Specify the line color of the vectors:
-     vc.linecolor=16   			# Color range: 16 to 230, default line color is black
-     vc.linewidth=1   			# Width range: 1 to 100, default size is 1
+     vc.linecolor=16            # Color range: 16 to 230, default line color is black
+     vc.linewidth=1             # Width range: 1 to 100, default size is 1
 
     Specify the vector scale factor:
-     vc.scale=2.0   			# Can be an integer or float
+     vc.scale=2.0               # Can be an integer or float
 
     Specify the vector alignment:
-     vc.alignment=0			# Same as vc.alignment='head'
-     vc.alignment=1			# Same as vc.alignment='center'
-     vc.alignment=2			# Same as vc.alignment='tail'
+     vc.alignment=0         # Same as vc.alignment='head'
+     vc.alignment=1         # Same as vc.alignment='center'
+     vc.alignment=2         # Same as vc.alignment='tail'
 
     Specify the vector type:
-      vc.type=0   			# Same as vc.type='arrow head'
-      vc.type=1   			# Same as vc.type='wind barbs'
-      vc.type=2   			# Same as vc.type='solid arrow head'
+      vc.type=0             # Same as vc.type='arrow head'
+      vc.type=1             # Same as vc.type='wind barbs'
+      vc.type=2             # Same as vc.type='solid arrow head'
 
     Specify the vector reference:
-      vc.reference=4    		# Can be an integer or float
+      vc.reference=4            # Can be an integer or float
 """
     __slots__ = [
         'name',
@@ -244,6 +244,9 @@ class Gv(object):
         'type',
         'reference',
         'colormap',
+        'scaleoptions',
+        'scaletype',
+        'scalerange',
         '_name',
         '_xaxisconvert',
         '_yaxisconvert',
@@ -270,9 +273,13 @@ class Gv(object):
         '_type',
         '_reference',
         '_colormap',
+        '_scaleoptions',
+        '_scaletype',
+        '_scalerange',
     ]
 
     colormap = VCS_validation_functions.colormap
+    scaleoptions = ('off', 'constant', 'normalize', 'linear', 'constantNNormalize', 'constantNLinear')
 
     def _getname(self):
         return self._name
@@ -528,6 +535,30 @@ class Gv(object):
         self._alignment = value
     alignment = property(_getalignment, _setalignment)
 
+    def _getscaletype(self):
+        return self._scaletype
+
+    def _setscaletype(self, value):
+        value = VCS_validation_functions.checkInStringList(self,
+                                                           'scaletype',
+                                                           value,
+                                                           self.scaleoptions)
+        self._scaletype = value
+    scaletype = property(_getscaletype, _setscaletype)
+
+    def _getscalerange(self):
+        return self._scalerange
+
+    def _setscalerange(self, value):
+        value = VCS_validation_functions.checkListOfNumbers(self,
+                                                            'scalerange',
+                                                            value,
+                                                            minvalue=0.0,
+                                                            minelements=2,
+                                                            maxelements=2)
+        self._scalerange = value
+    scalerange = property(_getscalerange, _setscalerange)
+
     def __init__(self, Gv_name, Gv_name_src='default'):
                 #                                                         #
                 ###########################################################
@@ -568,6 +599,8 @@ class Gv(object):
             self._datawc_timeunits = "days since 2000"
             self._datawc_calendar = 135441
             self._colormap = None
+            self._scaletype = self.scaleoptions[4]
+            self._scalerange = [0.1, 1.0]
         else:
             if isinstance(Gv_name_src, Gv):
                 Gv_name_src = Gv_name_src.name
@@ -583,7 +616,9 @@ class Gv(object):
                         'datawc_x2', 'xaxisconvert', 'yaxisconvert',
                         'line', 'linecolor', 'linewidth',
                         'datawc_timeunits', 'datawc_calendar', 'colormap',
-                        'scale', 'alignment', 'type', 'reference']:
+                        'scale', 'alignment', 'type', 'reference', 'scaletype',
+                        'scalerange']:
+
                 setattr(self, att, getattr(src, att))
         # Ok now we need to stick in the elements
         vcs.elements["vector"][Gv_name] = self
@@ -660,6 +695,8 @@ class Gv(object):
         print "alignment = ", self.alignment
         print "type = ", self.type
         print "reference = ", self.reference
+        print "scaletype = ", self.scaletype
+        print "scalerange = ", self.scalerange
 
     ##########################################################################
     #                                                                           #
@@ -798,6 +835,9 @@ class Gv(object):
             fp.write("%s.linecolor = %s\n" % (unique_name, self.linecolor))
             fp.write("%s.linewidth = %s\n" % (unique_name, self.linewidth))
             fp.write("%s.scale = %s\n" % (unique_name, self.scale))
+            fp.write("%s.scaletype = %s\n" % (unique_name, self.scaletype))
+            fp.write("%s.scalerange = %s\n" % (unique_name, self.scalerange))
+            fp.write("%s.scaleoptions = %s\n" % (unique_name, self.scaleoptions))
             fp.write("%s.alignment = '%s'\n" % (unique_name, self.alignment))
             fp.write("%s.type = '%s'\n" % (unique_name, self.type))
             fp.write("%s.reference = %s\n\n" % (unique_name, self.reference))
@@ -814,5 +854,5 @@ class Gv(object):
 
 
 ###############################################################################
-#        END OF FILE							      #
+#        END OF FILE                                  #
 ###############################################################################
diff --git a/README.md b/README.md
index f79ed08fbec182bbdbd28897107afe77d9c2dec1..5968858fef233c8927baf12e51ffdf946665ba1d 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,12 @@
 uvcdat
 ======
 [![build status](https://travis-ci.org/UV-CDAT/uvcdat.svg?branch=master)](https://travis-ci.org/UV-CDAT/uvcdat/builds)
-[![stable version](http://img.shields.io/badge/stable version-2.4.0-brightgreen.svg)](https://github.com/UV-CDAT/uvcdat/releases/tag/2.4.0)
+[![stable version](http://img.shields.io/badge/stable version-2.4.1-brightgreen.svg)](https://github.com/UV-CDAT/uvcdat/releases/tag/2.4.1)
 ![platforms](http://img.shields.io/badge/platforms-linux | osx-lightgrey.svg)
-[![DOI](https://zenodo.org/badge/doi/10.5281/zenodo.45136.svg)](http://dx.doi.org/10.5281/zenodo.45136)
+[![DOI](https://zenodo.org/badge/doi/10.5281/zenodo.50101.svg)](http://dx.doi.org/10.5281/zenodo.50101)
+
+[![Anaconda-Server Badge](https://anaconda.org/uvcdat/uvcdat/badges/installer/conda.svg)](https://conda.anaconda.org/uvcdat)
+[![Anaconda-Server Badge](https://anaconda.org/uvcdat/uvcdat/badges/downloads.svg)](https://anaconda.org/uvcdat/uvcdat)
 
 Developed by partnering with ESGF and the community to create a larger problem-solving environment, UV-CDAT is an open source, easy-to-use application that links together disparate software subsystems and packages to form an integrated environment for analysis and visualization. This project seeks to advance climate science by fulfilling computational and diagnostic/visualization capabilities needed for DOE's climate research.
 
@@ -13,4 +16,4 @@ Developing and Contributing
 ------
 We'd love to get contributions from you! Please take a look at the
 [Contribution Documents](CONTRIBUTING.md) to see how to get your changes merged
-in.
\ No newline at end of file
+in.
diff --git a/installation/control.py b/installation/control.py
index 7a3a520919de91bf4f33afcfaeeafc668ab153af..49ed5d9af6e3957ee7d7a99d02142e76623cdd4e 100644
--- a/installation/control.py
+++ b/installation/control.py
@@ -1,7 +1,7 @@
 # This file is used to control the behavior of install.py.
 
 # The search path is used if the X11 directories aren't configured.
-x11search = ['/usr/X11R6', '/usr/X11R6.5.1', 
+x11search = ['/usr/X11R6', '/usr/X11R6.5.1',
              '/usr/X11R6.4','/usr','/usr/openwin','/opt']
 # Here is where they are on OSF1 and perhaps similar systems
 x11OSF1lib = ['/usr/lib/X11', '/usr/lib']
@@ -48,14 +48,14 @@ else:
     make_code = 'make'
 
 # List of packages to be built
-packages = [ 
+packages = [
     "Packages/pydebug",
     "Packages/cdtime",
     "Packages/demo",
     "Packages/help",
     "Packages/regrid2",
-    "Packages/cdms2", 
-    "Packages/esg", 
+    "Packages/cdms2",
+    "Packages/esg",
     "Packages/ncml",
     "Packages/DV3D",
     "Packages/vcs",
@@ -63,9 +63,10 @@ packages = [
     "Packages/cdutil",
     "Packages/unidata",
     "Packages/xmgrace",
-    "Packages/genutil", 
+    "Packages/genutil",
     "Packages/Thermo",
     "Packages/WK",
     "Packages/gui_support",
     "Packages/distarray",
+    "Packages/testing",
     ]
diff --git a/testing/dv3d/TestManager.py b/testing/dv3d/TestManager.py
index 51ed57183a7e28edd7c9265a39b9979e9c5e7a04..94e7e365edf951b4391c26c33882869baff2854e 100644
--- a/testing/dv3d/TestManager.py
+++ b/testing/dv3d/TestManager.py
@@ -10,7 +10,7 @@ import vcs, os, sys, shutil, collections, subprocess
 TestingDir=os.path.dirname(__file__)
 pth = os.path.join(TestingDir,"..")
 sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
 DefaultSampleFile = "geos5-sample.nc"
 DefaultSampleVar = "uwnd"
@@ -128,8 +128,8 @@ class vcsTest:
         test_image = '.'.join( [ self.name, 'test', 'png' ] )
         self.canvas.png( test_image, width = 900, height = 600 )
 
-        ret = checkimage.check_result_image( test_image, self.image_name,\
-                checkimage.defaultThreshold+3. )
+        ret = regression.check_result_image( test_image, self.image_name,\
+                regression.defaultThreshold+3. )
 
         if  interactive:
             print "Type <Enter> to continue and update ref image ( type 'n' to skip update )."
diff --git a/testing/metrics/diags_test.py b/testing/metrics/diags_test.py
index ff71c8d66a87bae57008225b80066ad2b60660bc..2d4131cb80972fdaaa09492cfd859f9c3c55c771 100755
--- a/testing/metrics/diags_test.py
+++ b/testing/metrics/diags_test.py
@@ -12,7 +12,7 @@ import sys, os, shutil, tempfile, subprocess
 import cdms2, numpy
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 import argparse, pdb
 
 class DiagTest(object):
@@ -95,7 +95,7 @@ class DiagTest(object):
     def execute(self, test_str, imagefilename, imagethreshold, ncfiles, rtol, atol):
         print test_str
         if imagethreshold is None:  # user didn't specify a value
-     	    imagethreshold = checkimage.defaultThreshold
+            imagethreshold = regression.defaultThreshold
         # Silence annoying messages about how to set the NetCDF file type.  Anything will do.
         cdms2.setNetcdfShuffleFlag(0)
         cdms2.setNetcdfDeflateFlag(0)
@@ -118,7 +118,7 @@ class DiagTest(object):
             imagebaselinefname = os.path.join( self.baselinepath, imagefilename )
             #pdb.set_trace()
             print "OK THRESHOLD IS:",imagethreshold
-            graphics_result = checkimage.check_result_image( imagefname, imagebaselinefname, imagethreshold )
+            graphics_result = regression.check_result_image( imagefname, imagebaselinefname, imagethreshold )
             print "Graphics file", imagefname, "match difference:", graphics_result
             
             #initialize to successful graphics check
diff --git a/testing/metrics/diagtest01.py b/testing/metrics/diagtest01.py
index b8cf415f0a89644a5d34ff0df2b89251f1efc36c..09197a1e3a9e370e23620fea8fb8129bf370af89 100755
--- a/testing/metrics/diagtest01.py
+++ b/testing/metrics/diagtest01.py
@@ -58,7 +58,7 @@ varid = 'T'
 vard = package.all_variables( filetable1, filetable2, sname )
 var = vard[varid]
 
-plot = sclass( [filetable1], [filetable2], varid, seasonid )
+plot = sclass( [filetable1], [filetable2], varid, seasonid, plotparms = { 'model':{}, 'obs':{}, 'diff':{} } )
 res = plot.compute()
 if res is not None:
     if res.__class__.__name__ is 'uvc_composite_plotspec':
diff --git a/testing/regrid/testEsmfRegridPeriodictyRegional.py b/testing/regrid/testEsmfRegridPeriodictyRegional.py
index 2b92795255d6675dd6296b18d036c882935eb798..53a1a706b1620c4df58c68b4db26cf403001333f 100644
--- a/testing/regrid/testEsmfRegridPeriodictyRegional.py
+++ b/testing/regrid/testEsmfRegridPeriodictyRegional.py
@@ -1,9 +1,4 @@
-import vcs,cdms2
-import os,sys
-import EzTemplate
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, vcs, cdms2, EzTemplate, testing.regression as regression
 
 data = sys.argv[1]
 png = sys.argv[2]
@@ -38,11 +33,7 @@ s_esmf_lin.id = "ESMF Linear"
 s_esmf_con = s.regrid(grid_dest,regridTool="esmf",regridMethod="conservative")
 s_esmf_lin.id = "ESMF Conservative"
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x=regression.init()
 t=x.createtemplate()
 t.blank()
 t.data.priority=1
@@ -60,7 +51,5 @@ x.plot(s,M.get(),gm,bg=1)
 x.plot(s_regrid2,M.get(),gm,bg=1)
 x.plot(s_esmf_lin,M.get(),gm,bg=1)
 x.plot(s_esmf_con,M.get(),gm,bg=1)
-x.png("esmf_issue_1125")
 
-ret = checkimage.check_result_image("esmf_issue_1125.png",png,checkimage.defaultThreshold)
-sys.exit(ret)
+ret = regression.run(x, "esmf_issue_1125.png", png)
diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt
index a5faf6e242845f3ea9e8bb6e105668cb877358ea..bee8b9a45fa74b327c8faf7225b9d337b8aa04c6 100644
--- a/testing/vcs/CMakeLists.txt
+++ b/testing/vcs/CMakeLists.txt
@@ -381,6 +381,10 @@ cdat_add_test(test_vcs_missing_colorname
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_missing_colorname.py
   "${BASELINE_DIR}/test_vcs_missing_colorname.png"
   )
+cdat_add_test(test_vcs_geometry
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_geometry.py
+  )
 ##############################################################################
 #
 # These tests perform plotting and need sample data
@@ -900,6 +904,16 @@ cdat_add_test(test_vcs_settings_color_name_rgba
       ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py
       "${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png"
       )
+    cdat_add_test(test_vcs_vectors_scale_options
+      "${PYTHON_EXECUTABLE}"
+      ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png"
+      "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png"
+      )
   endif()
 endif()
 
@@ -935,6 +949,11 @@ cdat_add_test(test_vcs_configurator_resize
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_configurator_resize.py
   ${BASELINE_DIR}/test_vcs_configurator_resize.png
 )
+cdat_add_test(vcs_test_fewer_colors_than_levels
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_fewer_colors_than_levels.py
+  ${BASELINE_DIR}/test_fewer_colors_than_levels.png
+)
 
 cdat_add_test(test_vcs_colorpicker_appearance
   "${PYTHON_EXECUTABLE}"
@@ -942,33 +961,22 @@ cdat_add_test(test_vcs_colorpicker_appearance
   ${BASELINE_DIR}/test_vcs_colorpicker_appearance.png
 )
 
-cdat_add_test(test_vcs_click_info
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info.png
-  a_boxfill
-)
-
-cdat_add_test(test_vcs_click_info_mollweide_boxfill
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info_mollweide_boxfill.png
-  a_mollweide_boxfill
-)
-
-cdat_add_test(test_vcs_click_info_meshfill
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info_meshfill.png
-  a_meshfill
-)
 
-cdat_add_test(test_vcs_click_info_robinson_meshfill
-  "${PYTHON_EXECUTABLE}"
-  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
-  ${BASELINE_DIR}/test_vcs_click_info_robinson_meshfill.png
-  a_robinson_meshfill
-)
+foreach(plot a_boxfill a_mollweide_boxfill a_meshfill a_robinson_meshfill
+        a_isofill a_isoline vector_default)
+  string(SUBSTRING ${plot} 0 2 plot_prefix)
+  if (${plot_prefix} STREQUAL "a_")
+    string(SUBSTRING ${plot} 2 -1 plot_name)
+  else ()
+    string(REGEX MATCH "[^_]+" plot_name ${plot})
+  endif ()
+  cdat_add_test(test_vcs_click_info_${plot_name}
+    "${PYTHON_EXECUTABLE}"
+    ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_click_info.py
+    "${BASELINE_DIR}/test_vcs_click_info_${plot_name}.png"
+    ${plot}
+    )
+endforeach()
 
 
 cdat_add_test(test_vcs_mercator_edge
@@ -989,40 +997,54 @@ cdat_add_test(test_vcs_large_pattern_hatch
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_large_pattern_hatch.py
   ${BASELINE_DIR}/test_vcs_large_pattern_hatch.png
 )
-# crashes on mac commenting out for release`
-#foreach(x_over_y 0.5 2)
-#  # a_meshfill does not work yet, as meshfills are wrapped which is not known to VCS
-#  foreach(plot a_boxfill a_mollweide_boxfill a_robinson_meshfill a_lambert_isofill a_robinson_isoline)
-#    foreach(mode foreground background)
-#      string(SUBSTRING ${plot} 2 -1 plot_name)
-#      cdat_add_test(test_vcs_autot_axis_titles_${mode}_${plot_name}_${x_over_y}
-#        "${PYTHON_EXECUTABLE}"
-#        ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_autot_axis_titles.py
-#        "${BASELINE_DIR}/test_vcs_autot_axis_titles_${plot_name}_${x_over_y}.png"
-#        ${mode}
-#        ${plot}
-#        ${x_over_y}
-#        )
-#    endforeach()
-#  endforeach()
-#endforeach()
+
+foreach(x_over_y 0.5 2)
+ # a_meshfill does not work yet, as meshfills are wrapped which is not known to VCS
+ foreach(plot a_boxfill a_mollweide_boxfill a_robinson_meshfill a_lambert_isofill a_robinson_isoline)
+   foreach(mode foreground background)
+     string(SUBSTRING ${plot} 2 -1 plot_name)
+     cdat_add_test(test_vcs_autot_axis_titles_${mode}_${plot_name}_${x_over_y}
+       "${PYTHON_EXECUTABLE}"
+       ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_autot_axis_titles.py
+       "${BASELINE_DIR}/test_vcs_autot_axis_titles_${plot_name}_${x_over_y}.png"
+       ${mode}
+       ${plot}
+       ${x_over_y}
+       )
+   endforeach()
+ endforeach()
+endforeach()
+
 cdat_add_test(test_vcs_boxfill_lambert_crash
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lambert_crash.py
   "${BASELINE_DIR}/test_vcs_boxfill_lambert_crash.png"
   "${UVCDAT_GIT_TESTDATA_DIR}/data/NCEP_09_climo.nc"
 )
+
+cdat_add_test(test_vcs_line_patterns
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_line_patterns.py
+  "${BASELINE_DIR}/test_vcs_line_patterns.png"
+)
+
 cdat_add_test(test_vcs_init_open_sizing
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_init_open_sizing.py
 )
-# Rename baseline
+## Rename baseline
 cdat_add_test(test_vcs_matplotlib_colormap
   "${PYTHON_EXECUTABLE}"
   ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_matplotlib_colormap.py
   ${BASELINE_DIR}/test_vcs_matplotlib_colormap.png
 )
 
+cdat_add_test(test_vcs_no_continents
+  "${PYTHON_EXECUTABLE}"
+  ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_no_continents.py
+  ${BASELINE_DIR}/test_vcs_no_continents.png
+)
+
 
 add_subdirectory(vtk_ui)
 add_subdirectory(editors)
diff --git a/testing/vcs/test_fewer_colors_than_levels.py b/testing/vcs/test_fewer_colors_than_levels.py
new file mode 100644
index 0000000000000000000000000000000000000000..c500cf19e3b9aafcde84c2ad33c91ad3fd780391
--- /dev/null
+++ b/testing/vcs/test_fewer_colors_than_levels.py
@@ -0,0 +1,15 @@
+import os, sys, cdms2, vcs, testing.regression as regression
+
+dataset = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+data = dataset("clt")
+
+canvas = regression.init()
+
+boxfill = canvas.createboxfill()
+
+boxfill.color_1 = 242
+boxfill.color_2 = 250
+
+canvas.plot(data, boxfill, bg=1)
+
+regression.run(canvas, "test_fewer_colors_than_levels.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1D_datawc.py b/testing/vcs/test_vcs_1D_datawc.py
index 1595a6cd712a277f3cb4e233969cfad24975a324..8e671bb9338f5015f6417b4f6972dd9b98ea4b5b 100644
--- a/testing/vcs/test_vcs_1D_datawc.py
+++ b/testing/vcs/test_vcs_1D_datawc.py
@@ -4,15 +4,13 @@ import vcs,numpy,cdms2,MV2,os,sys
 src=sys.argv[1]
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
 
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import testing.regression as regression
+
+x = regression.init()
 yx =x.createyxvsx()
 
-data = """-11.14902417  -9.17390922  -7.29515002  
+data = """-11.14902417  -9.17390922  -7.29515002
 -7.51774549  -8.63608171
   -10.4827395   -9.93859485  -7.3394366   -5.39241468  -5.74825567
      -6.74967902  -7.09622319  -5.93836983  -4.04592997  -2.65591499
@@ -30,14 +28,9 @@ data = numpy.array(data,dtype=numpy.float)
 data = MV2.array(data)
 yx.datawc_x1 = 0
 yx.datawc_x2 = 80
-yx.datawc_y1 =-12 
-yx.datawc_y2 = 12 
+yx.datawc_y1 =-12
+yx.datawc_y2 = 12
 
 
 x.plot(data,yx,bg=1)
-fnm = "test_vcs_1D_datawc.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_1D_datawc.png", src)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1D_datawc_missing.py b/testing/vcs/test_vcs_1D_datawc_missing.py
index 07e6f560aa52cb33414af3a3156092634117c18c..d6caabc8e9c0320a9bca0519ce5771c2dfd67a30 100644
--- a/testing/vcs/test_vcs_1D_datawc_missing.py
+++ b/testing/vcs/test_vcs_1D_datawc_missing.py
@@ -1,19 +1,11 @@
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-yx =x.createyxvsx()
+x = regression.init()
+yx = x.createyxvsx()
 
 data = """
--999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. 
+-999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999. -999.
 0.059503571833625334
 0.059503571833625334 0.05664014775641405 0.05193557222118004
 0.04777129850801233 0.0407139313814465 0.029382624830271705
@@ -42,16 +34,5 @@ data = """
 """.split()
 data = numpy.array(data,dtype=numpy.float)
 data = MV2.masked_less(data,-900)
-#yx.datawc_x1 = 0
-#yx.datawc_x2 = 80
-##yx.datawc_y1 =-12 
-#yx.datawc_y2 = 12 
-
-
 x.plot(data,yx,bg=1)
-fnm = "test_vcs_1D_datawc_missing.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_1D_datawc_missing.png")
diff --git a/testing/vcs/test_vcs_1D_with_manyDs.py b/testing/vcs/test_vcs_1D_with_manyDs.py
index 1caba446e1b601e781c9f4bf495c8c337f9741bc..28a6a7a8bc7f6427da02fdff5ef732af4affdfff 100644
--- a/testing/vcs/test_vcs_1D_with_manyDs.py
+++ b/testing/vcs/test_vcs_1D_with_manyDs.py
@@ -1,31 +1,9 @@
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
+x = regression.init()
 d = numpy.sin(numpy.arange(100))
-d=numpy.reshape(d,(10,10))
-
-
+d = numpy.reshape(d,(10,10))
 one = x.create1d()
-
 x.plot(d,one,bg=1)
-
-
-fnm = "test_1D_with_manyDs.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
-
+regression.run(x, "test_1D_with_manyDs.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1d_in_boxfill.py b/testing/vcs/test_vcs_1d_in_boxfill.py
index 292fe98a3cc7ab7dbbc24986fc7c3a771eaed4e3..1da5743947f7ac7602a2bfdfd726016fbd52b5f7 100644
--- a/testing/vcs/test_vcs_1d_in_boxfill.py
+++ b/testing/vcs/test_vcs_1d_in_boxfill.py
@@ -1,29 +1,8 @@
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
+x = regression.init()
 d = numpy.sin(numpy.arange(100))
-
 b = x.createboxfill()
-
 x.plot(d,b,bg=1)
-
-
-fnm = "test_1d_in_boxfill.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
-
+regression.run(x, "test_1d_in_boxfill.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py b/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py
index c350e4c5992b9f7e07450a8e128daac896707796..f850f977e2626779fa515806c5052ad279f48bf8 100644
--- a/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py
+++ b/testing/vcs/test_vcs_1d_marker_not_shown_if_xaxis_flipped.py
@@ -1,42 +1,15 @@
-import vcs
-import numpy
-import MV2
-import cdms2
-import sys
-import os
 
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
+x = regression.init()
 data = MV2.array([4,5,6,7,1,3,7,9,])+230.
-
 p = cdms2.createAxis([2,5,100,200,500,800,850,1000])
-
 data.setAxis(0,p)
-
 data.id="jim"
-
 gm=x.create1d()
-
 gm.linewidth=0
 gm.datawc_x1=1000
 gm.datawc_x2=0
-
 gm.markersize=30
-
 x.plot(data,gm,bg=1)
-
-fnm = "test_1d_marker_not_shown_if_xaxis_flipped.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_1d_marker_not_shown_if_xaxis_flipped.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_1d_missing.py b/testing/vcs/test_vcs_1d_missing.py
index 3a6880bdd5e556f628a02dbe3da73e1180a101a0..8c124e09c617d26b7102fb05fe6ed498c971c749 100644
--- a/testing/vcs/test_vcs_1d_missing.py
+++ b/testing/vcs/test_vcs_1d_missing.py
@@ -3,15 +3,11 @@ import vcs,numpy,cdms2,MV2,os,sys
 src=sys.argv[1]
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 yx =x.createyxvsx()
-
-data = """-11.14902417  -9.17390922  -7.29515002  
+data = """-11.14902417  -9.17390922  -7.29515002
 -7.51774549  -8.63608171
   -10.4827395   -9.93859485  -7.3394366   -5.39241468  -5.74825567
      -6.74967902  -7.09622319  -5.93836983  -4.04592997  -2.65591499
@@ -30,17 +26,5 @@ data = MV2.array(data)
 
 data=MV2.masked_where(MV2.logical_and(data>-4,data<-2),data)
 
-#yx.datawc_x1 = 0
-#yx.datawc_x2 = 80
-##yx.datawc_y1 =-12 
-#yx.datawc_y2 = 12 
-
-
-x.plot(data,yx,bg=1)
-fnm = "test_vcs_1d_missing.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.plot(data, yx, bg=1)
+regression.run(x, "test_vcs_1d_missing.png", src)
diff --git a/testing/vcs/test_vcs_animate_boxfill.py b/testing/vcs/test_vcs_animate_boxfill.py
index 2bbd53c267f71f2259b7a56f10024cc85e5c889c..641b59edb259be7e5183213a31910673e923e39c 100644
--- a/testing/vcs/test_vcs_animate_boxfill.py
+++ b/testing/vcs/test_vcs_animate_boxfill.py
@@ -1,20 +1,14 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
-
 gm=x.createboxfill()
 x.plot(s,gm,bg=1)
 x.animate.create()
@@ -26,8 +20,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isofill.py b/testing/vcs/test_vcs_animate_isofill.py
index ba5ea0e8d6c7faba449fe0c5ff19c35b419aba79..591d6636213fbed88d9058f1e5bacd981634e257 100644
--- a/testing/vcs/test_vcs_animate_isofill.py
+++ b/testing/vcs/test_vcs_animate_isofill.py
@@ -1,20 +1,13 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
+x = regression.init()
 gm=x.createisofill()
 x.plot(s,gm,bg=1)
 x.animate.create()
@@ -26,8 +19,8 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),
+         regression.defaultThreshold)
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline.py b/testing/vcs/test_vcs_animate_isoline.py
index 41e0c7318b7b84f7b094a8dd2d8d0b7339456734..cea333d3bb425d480f6395a2e45329c32295514a 100644
--- a/testing/vcs/test_vcs_animate_isoline.py
+++ b/testing/vcs/test_vcs_animate_isoline.py
@@ -1,33 +1,24 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-iso=x.createisoline()
-x.plot(s,iso,bg=1)
+x = regression.init()
+iso = x.createisoline()
+x.plot(s,iso, bg=1)
 x.animate.create()
 print "Saving now"
 prefix= os.path.split(__file__)[1][:-3]
 x.animate.save("%s.mp4"%prefix)
-pngs = x.animate.close(preserve_pngs = True) # so we can look at them again
+pngs = x.animate.close(preserve_pngs=True) # so we can look at them again
 src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline_colored.py b/testing/vcs/test_vcs_animate_isoline_colored.py
index 4fed45c864e7b84daccaedb5a95568d597b89daf..4519ac16c021af59bc89f6dbdf016409df02aa41 100644
--- a/testing/vcs/test_vcs_animate_isoline_colored.py
+++ b/testing/vcs/test_vcs_animate_isoline_colored.py
@@ -1,19 +1,12 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,12)) # read only 12 times steps to speed up things
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 
 iso=x.createisoline()
 levs = range(0,101,10)
@@ -34,8 +27,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline_text_labels.py b/testing/vcs/test_vcs_animate_isoline_text_labels.py
index 7a3be1fc5e902cc8159884ec55732a1c0a8c2b6a..0e83c02c6a4e54f45a53f945603c977a4deabaad 100644
--- a/testing/vcs/test_vcs_animate_isoline_text_labels.py
+++ b/testing/vcs/test_vcs_animate_isoline_text_labels.py
@@ -1,11 +1,7 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
@@ -27,8 +23,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py b/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
index cf009e4a8714e228eacf635c6c782226b015ab6a..02711e2196bf069a509d7f5da55f7c660421b3a2 100644
--- a/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
+++ b/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py
@@ -1,11 +1,7 @@
-import vcs
-import cdms2
-import os
-import sys
-import time
+import vcs, numpy, cdms2, MV2, os, sys, time, testing.regression as regression
+
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",slice(0,12)) # read only 12 times steps to speed up things
@@ -36,7 +32,7 @@ pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
   print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_animate_meshfill.py b/testing/vcs/test_vcs_animate_meshfill.py
index 78d6b1551635e0a75ab33e555ccb268108a8bff4..aa89a8639d123ac00461cb0500bbdfcebb2a5049 100644
--- a/testing/vcs/test_vcs_animate_meshfill.py
+++ b/testing/vcs/test_vcs_animate_meshfill.py
@@ -1,12 +1,7 @@
-import vcs
-import cdms2
-import os
-import sys
-import MV2
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
-import checkimage
 
 f=cdms2.open(os.path.join(vcs.sample_data,"sampleCurveGrid4.nc"))
 s=f("sample")
@@ -38,8 +33,7 @@ src_pth = sys.argv[1]
 pth = os.path.join(src_pth,prefix)
 ret = 0
 for p in pngs:
-  print "Checking:",p
-  ret += checkimage.check_result_image(p,os.path.join(pth,os.path.split(p)[1]),checkimage.defaultThreshold)
+  ret += regression.check_result_image(p,os.path.join(pth,os.path.split(p)[1]))
 if ret == 0:
     os.removedirs(os.path.split(p)[0])
     os.remove("%s.mp4" % prefix)
diff --git a/testing/vcs/test_vcs_antialiasing.py b/testing/vcs/test_vcs_antialiasing.py
index 34f43e040c458c8b8fa7c0707e28c7e167be07d0..f744d3e04f1e0c67a3f40aa27e7f738f01355a07 100644
--- a/testing/vcs/test_vcs_antialiasing.py
+++ b/testing/vcs/test_vcs_antialiasing.py
@@ -1,4 +1,3 @@
-
 import vcs
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_aspect_ratio.py b/testing/vcs/test_vcs_aspect_ratio.py
index 1e59304a378381c0620426e3d9fce5a0642ef0b4..9e3cb376760e2402787fc3bbff6040e3c55ed128 100644
--- a/testing/vcs/test_vcs_aspect_ratio.py
+++ b/testing/vcs/test_vcs_aspect_ratio.py
@@ -1,27 +1,20 @@
-
-import vcs
-import cdms2
-import sys
-import os
-import MV2
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
+src = sys.argv[1]
 pth0 = os.path.dirname(__file__)
 pth = os.path.join(pth0,"..")
 sys.path.append(pth)
-import checkimage
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",time=slice(0,1),squeeze=1)
-gm=vcs.createisofill()
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",time=slice(0,1),squeeze=1)
+gm = vcs.createisofill()
 
 def plot_a_ratio(s,gm,ratio):
     ret = 0
-    x=vcs.init()
-x.drawlogooff()
+    x = regression.init()
+    x.drawlogooff()
     x.open()
     x.geometry(400,800)
-    y=vcs.init()
+    y = regression.init()
     y.open()
     y.geometry(800,400)
     for X in [x,y]:
@@ -32,19 +25,13 @@ x.drawlogooff()
             orient = "port"
         fnm = "aspect_ratio_%s_%s.png" % (orient,ratio)
         X.png(fnm)
-        print "fnm:",fnm
         src = os.path.join(pth0,fnm)
-        print "src:",src
-        ret += checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+        ret += regression.check_result_image(fnm, src)
     return ret
 
-ret = 0 
+ret = 0
 for ratio in ["1t","2t",".5t","autot"]:
     ret  += plot_a_ratio(s,gm,ratio)
 
 
-sys.exit(ret)
-
-
-
-
+sys.exit(ret)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_auto_time_labels.py b/testing/vcs/test_vcs_auto_time_labels.py
index 2dfa661a9bc8105e8b9ec4f83a0a1f46105f192d..e5422b29cc3db5edfcc5ef3cbe7e4a051ce86dc4 100644
--- a/testing/vcs/test_vcs_auto_time_labels.py
+++ b/testing/vcs/test_vcs_auto_time_labels.py
@@ -1,18 +1,7 @@
-import vcs,cdms2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",longitude=slice(34,35),squeeze=1)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import vcs, cdms2, os, sys, testing.regression as regression
+
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",longitude=slice(34,35),squeeze=1)
+x = regression.init()
 x.plot(s,bg=1)
-fnm = "test_vcs_auto_time_labels.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_auto_time_labels.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_autot_axis_titles.py b/testing/vcs/test_vcs_autot_axis_titles.py
index ec485ccc0ac32598733cb75dfb627862bffda314..2728cbd6d5ad8f1cd1d5e360114a8244aa820c22 100644
--- a/testing/vcs/test_vcs_autot_axis_titles.py
+++ b/testing/vcs/test_vcs_autot_axis_titles.py
@@ -1,7 +1,4 @@
-import vcs
-import cdms2
-import os
-import sys
+import vcs, cdms2, os, sys, testing.regression as regression
 
 testConfig = {'a_boxfill': ('clt.nc', 'clt'),
               'a_mollweide_boxfill': ('clt.nc', 'clt'),
@@ -11,27 +8,23 @@ testConfig = {'a_boxfill': ('clt.nc', 'clt'),
               'a_robinson_isoline': ('clt.nc', 'clt')}
 
 # Tests if ratio=autot works correctly for background and foreground plots
-src = sys.argv[1]
 bg = 1
 if (sys.argv[2] == 'foreground'):
     bg = 0
 plot = sys.argv[3]
 x_over_y = sys.argv[4]
 if (x_over_y == '0.5'):
-    xSize = 400
-    ySize = 800
+    xSize = 250
+    ySize = 500
 else:
     xSize = 800
     ySize = 400
 pth = os.path.join(os.path.dirname(__file__), "..")
 sys.path.append(pth)
 
-import checkimage
-
 f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
 s = f(testConfig[plot][1])
-
-x = vcs.init(bg=bg, geometry=(xSize, ySize))
+x = regression.init(bg=bg, geometry=(xSize, ySize))
 
 # graphics method
 if (plot.find('boxfill') != -1):
@@ -49,11 +42,5 @@ else:
 x.setantialiasing(0)
 x.drawlogooff()
 x.plot(s, gm, ratio="autot")
-name = "test_autot_axis_titles_" + plot[2:] + "_" + x_over_y + "_" + str(bg) + ".png"
-x.png(name)
-
-print "name:", name
-print "src:", src
-
-ret = checkimage.check_result_image(name, src, checkimage.defaultThreshold)
-sys.exit(ret)
+name = "test_vcs_autot_axis_titles_" + plot[2:] + "_" + x_over_y + "_" + str(bg) + ".png"
+regression.run(x, name, sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_bad_time_units.py b/testing/vcs/test_vcs_bad_time_units.py
index b89c2f7a77eaea79208af0c20e221cfce8aa9cfa..0bdcd16c2cff8ae6a6260b461b930819a902ccca 100644
--- a/testing/vcs/test_vcs_bad_time_units.py
+++ b/testing/vcs/test_vcs_bad_time_units.py
@@ -1,8 +1,8 @@
-import cdms2,vcs
-import os,sys
+import cdms2, vcs
+import os, sys
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,1))
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,1))
 s.getTime().units="XXX-))rvv"
-x=vcs.init()
-x.plot(s,bg=1)
+x = vcs.init()
+x.plot(s, bg=1)
diff --git a/testing/vcs/test_vcs_basic_gms.py b/testing/vcs/test_vcs_basic_gms.py
index daa144d9dd3dbbc734b704be1bcbf664f14da406..2cffb86e645cc221da0c5dbc649cf6e223b85c16 100644
--- a/testing/vcs/test_vcs_basic_gms.py
+++ b/testing/vcs/test_vcs_basic_gms.py
@@ -1,6 +1,4 @@
-
-import sys,os
-import argparse
+import argparse, os, sys, cdms2, MV2, testing.regression as regression, vcs, vtk
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -25,20 +23,9 @@ args = p.parse_args(sys.argv[1:])
 
 gm_type= args.gm
 src = args.src
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
-
 bg = not args.show
 
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -143,7 +130,7 @@ print "fnm:",fnm
 print "src:",src
 if args.show:
     raw_input("Press Enter")
-ret = checkimage.check_result_image(fnm+'.png',src,20., cleanup=not args.keep)
+ret = regression.check_result_image(fnm+'.png',src,20., cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_basic_text.py b/testing/vcs/test_vcs_basic_text.py
index d46bd4f48c15be83c90d872ccd0d1673c067105e..d69f37ce009d3c4488583cba7b151196977e8c2b 100644
--- a/testing/vcs/test_vcs_basic_text.py
+++ b/testing/vcs/test_vcs_basic_text.py
@@ -1,11 +1,6 @@
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
+x = regression.init()
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
 txt=x.createtext()
@@ -16,10 +11,4 @@ txt.halign = "center"
 txt.valign="base"
 txt.angle=45
 x.plot(txt,bg=1)
-fnm = "test_basic_text.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_basic_text.png", sys.argv[1])
diff --git a/testing/vcs/test_vcs_basic_vectors.py b/testing/vcs/test_vcs_basic_vectors.py
index 561f7f2d83d071fdaf456c0544fe9db61cfd1d10..37626e670fbe8cf81e09f0d8201f0ce29792a738 100644
--- a/testing/vcs/test_vcs_basic_vectors.py
+++ b/testing/vcs/test_vcs_basic_vectors.py
@@ -1,6 +1,5 @@
+import argparse, os, sys, numpy, cdms2, MV2, vcs, vtk
 
-import sys,os
-import argparse
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -18,21 +17,11 @@ args = p.parse_args(sys.argv[1:])
 
 if not args.show:
   src = args.src
-  pth = os.path.join(os.path.dirname(__file__),"..")
-  sys.path.append(pth)
-  import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
-import numpy
+  import testing.regression as regression
 
 bg = not args.show
 
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -95,7 +84,7 @@ else:
   x.png(fnm)
   print "fnm:",fnm
   print "src:",src
-  ret = checkimage.check_result_image(fnm+'.png',src,checkimage.defaultThreshold, cleanup=not args.keep)
+  ret = regression.check_result_image(fnm+'.png',src, regression.defaultThreshold, cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py b/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
index c012c09ca7aefc9f26bf1571bf57d0f628b9ac23..7d81b898171f9490cb568dce8248a91726351c92 100644
--- a/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
+++ b/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py
@@ -1,30 +1,12 @@
-import vcs
-import cdms2
-import os
-import sys
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,1),squeeze=1)
-
-x=vcs.init()
-x.drawlogooff()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-gm=x.createboxfill()
-gm.boxfill_type="custom"
-gm.levels=[1.e20,1.e20]
-gm.ext_1="y"
-gm.ext_2="y"
-
-x.plot(s,gm,bg=1)
-fnm = "test_box_custom_as_def_vistrails_exts.png"
-src =sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+import os, sys, cdms2, vcs, testing.regression as regression
+
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,1),squeeze=1)
+x = regression.init()
+gm = x.createboxfill()
+gm.boxfill_type = "custom"
+gm.levels = [1.e20,1.e20]
+gm.ext_1 = "y"
+gm.ext_2 = "y"
+x.plot(s, gm, bg=1)
+regression.run(x, "test_box_custom_as_def_vistrails_exts.png", sys.argv[1])
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py b/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py
index b2cdf8319aa12ca23005bd1987f3a9a50f5e7f8c..2444cc8a824828490febba882f66cfbfe68e3356 100644
--- a/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py
+++ b/testing/vcs/test_vcs_boxfill_10x10_masked_numpy.py
@@ -1,23 +1,9 @@
+import vcs, numpy, os, sys, testing.regression as regression
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+s = numpy.sin(numpy.arange(100))
+s = numpy.reshape(s,(10,10))
+s = numpy.ma.masked_greater(s,.5)
 
-s= numpy.sin(numpy.arange(100))
-s=numpy.reshape(s,(10,10))
-s=numpy.ma.masked_greater(s,.5)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.plot(s,bg=1)
-fnm= "test_vcs_boxfill_10x10_masked_numpy.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x = regression.init()
+x.plot(s, bg=1)
+regression.run(x, "test_vcs_boxfill_10x10_masked_numpy.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_10x10_numpy.py b/testing/vcs/test_vcs_boxfill_10x10_numpy.py
index 07910519842a4a6dbe703d21865b3987a98f1d19..765917942d2638515a0e7884cd3a22200bea5457 100644
--- a/testing/vcs/test_vcs_boxfill_10x10_numpy.py
+++ b/testing/vcs/test_vcs_boxfill_10x10_numpy.py
@@ -1,22 +1,7 @@
+import vcs, numpy, os, sys, testing.regression as regression
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-s= numpy.sin(numpy.arange(100))
-s=numpy.reshape(s,(10,10))
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.plot(s,bg=1)
-fnm= "test_vcs_boxfill_10x10_numpy.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+s = numpy.sin(numpy.arange(100))
+s = numpy.reshape(s,(10,10))
+x = regression.init()
+x.plot(s, bg=1)
+regression.run(x, "test_vcs_boxfill_10x10_numpy.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_custom.py b/testing/vcs/test_vcs_boxfill_custom.py
index 16685d9715b881f9c9066ec2c084eee89d89e235..5330a11037c4b4956cf3e9a3074a98251edbbe69 100644
--- a/testing/vcs/test_vcs_boxfill_custom.py
+++ b/testing/vcs/test_vcs_boxfill_custom.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import cdms2, os, sys, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -24,14 +18,5 @@ boxfill.boxfill_type = 'custom'
 canvas.plot(clt, boxfill, bg=1)
 
 # Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
 # Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom.png")
diff --git a/testing/vcs/test_vcs_boxfill_custom_ext1.py b/testing/vcs/test_vcs_boxfill_custom_ext1.py
index dd16ab8376b60ac29220087479b1ba63cc95aa76..7a5e2005ec0707d2efe1f1d1434c7e6ff97447b5 100644
--- a/testing/vcs/test_vcs_boxfill_custom_ext1.py
+++ b/testing/vcs/test_vcs_boxfill_custom_ext1.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import cdms2, os, sys, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -28,14 +22,4 @@ boxfill.fillareacolors=vcs.getcolors(boxfill.levels)
 canvas.plot(clt, boxfill, bg=1)
 
 # Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_ext1.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_ext1.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py b/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
index 68b5a9a41dc74c696afbdae00193f3e3a7868df1..74407167163244afa4790eb3d62508e9333275a2 100644
--- a/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -29,14 +23,4 @@ boxfill.fillareacolors=vcs.getcolors(boxfill.levels)
 canvas.plot(clt, boxfill, bg=1)
 
 # Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_ext1_ext2.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_ext1_ext2.png")
diff --git a/testing/vcs/test_vcs_boxfill_custom_ext2.py b/testing/vcs/test_vcs_boxfill_custom_ext2.py
index 959fc2c6c04307817d9f64a2bae284e0298966b0..d45950bdbbd3f0d3fe8c606fa77051779ea8fb65 100644
--- a/testing/vcs/test_vcs_boxfill_custom_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_custom_ext2.py
@@ -3,6 +3,8 @@ import os
 import sys
 import vcs
 
+import testing.regression as regression
+
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 clt = dataFile("clt")
@@ -10,10 +12,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -27,15 +26,4 @@ boxfill.fillareacolors=vcs.getcolors(boxfill.levels)
 
 canvas.plot(clt, boxfill, bg=1)
 
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_ext2.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_ext2.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py b/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
index 7363d22508420a31aa96728a8d3e4f18acb158ac..b84db2bb52f77af46460f6c446146ad2e988658d 100644
--- a/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
+++ b/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import cdms2, os, sys, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,10 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill=canvas.createboxfill()
@@ -25,16 +19,4 @@ boxfill.levels=levels
 boxfill.fillareacolors=vcs.getcolors(levels)
 
 canvas.plot(clt, boxfill, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_boxfill_custom_no_default_levels.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_boxfill_custom_no_default_levels.png")
diff --git a/testing/vcs/test_vcs_boxfill_decreasing_latitude.py b/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
index 009b947a9e67b5536baf38d88b9346b3ab5e5387..6cbc0f0163099434d91848c18ad3283d5b941082 100755
--- a/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
+++ b/testing/vcs/test_vcs_boxfill_decreasing_latitude.py
@@ -1,23 +1,10 @@
 #!/usr/bin/env python
-import cdms2
-import cdutil
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import cdms2, cdutil, os, sys, vcs, testing.regression as regression
 
 f = cdms2.open(sys.argv[2])
 ice = f("variable_6")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 900, units="pixels")
+x = regression.init()
 
-#gm = x.createisofill()
-#gm.label = "y"
 gm = x.createboxfill()
 gm.boxfill_type = "custom"
 
@@ -44,12 +31,8 @@ tmpl.legend.y1 = .03
 tmpl.legend.y2 = .055
 tmpl.max.priority = 1
 
-#tmpl.crdate.priority=1
-#tmpl.crdate.x=.8
-#tmpl.crdate.y=.95
 txt = x.createtext()
 txt.height = 20
-#txt.color=242
 txt.valign = "half"
 txt.halign = "center"
 
@@ -68,9 +51,5 @@ gm.fillareacolors = cols
 gm.datawc_y2 = 30
 gm.datawc_y1 = 90
 
-x.plot(ice, gm, tmpl, bg = 1)
-fnm = "test_boxfill_decreasing_latitude.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
-
+x.plot(ice, gm, tmpl, bg=1)
+regression.run(x, "test_boxfill_decreasing_latitude.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lambert_crash.py b/testing/vcs/test_vcs_boxfill_lambert_crash.py
index f1827882f1203ea684d07ed9ec4752be5712ceda..f445ba5551b64e7c3ae648f9132a05cff8f117d9 100644
--- a/testing/vcs/test_vcs_boxfill_lambert_crash.py
+++ b/testing/vcs/test_vcs_boxfill_lambert_crash.py
@@ -1,17 +1,10 @@
 #!/usr/bin/env python
-import cdms2
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import cdms2, os, sys, vcs, testing.regression as regression
 
 f = cdms2.open(sys.argv[2])
-a=f("Z3")
+a = f("Z3")
 
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200, 900, units="pixels")
@@ -24,6 +17,4 @@ x.plot(a(latitude=(20,60),longitude=(-160,-120)),b, bg=1)
 fileName = os.path.basename(__file__)
 fileName = os.path.splitext(fileName)[0]
 fileName += '.png'
-x.png(fileName)
-ret = checkimage.check_result_image(fileName, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+ret = regression.run(x, fileName)
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2.py b/testing/vcs/test_vcs_boxfill_lev1_lev2.py
index 5c69d7af3d4ea82c3bb6c5681dec17108230608b..6c1d985d7d09d26d64672912efd20e22767f3573 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2.py
@@ -1,25 +1,10 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=.5
-b.level_2=14.5
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt",slice(0,1),squeeze=1)
+b = x.createboxfill()
+b.level_1 = .5
+b.level_2 = 14.5
+x.plot(s, b, bg=1)
+regression.run(x, "test_vcs_boxfill_lev1_lev2.png")
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
index 594949238043b7f2c8dd77471f4713ecf0dec349..67f87029e9acc16cc0edf1f1775802f7a160e5a3 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py
@@ -1,26 +1,11 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=20
-b.level_2=80
-b.ext_1="y"
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ext1.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt",slice(0,1),squeeze=1)
+b = x.createboxfill()
+b.level_1 = 20
+b.level_2 = 80
+b.ext_1 = "y"
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ext1.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
index 9e355d1e18518afb127a2273566eeecc75f32f62..dc7958c596d9b9a9d852948709c5ac0a702b6e51 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py
@@ -1,27 +1,12 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=20
-b.level_2=80
-b.ext_1="y"
-b.ext_2="y"
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ext1_ext2.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt", slice(0,1), squeeze=1)
+b = x.createboxfill()
+b.level_1 = 20
+b.level_2 = 80
+b.ext_1 = "y"
+b.ext_2 = "y"
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ext1_ext2.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
index 375c93d8aac93d2ee545f9715c8fd293cdc3ffd6..398325eab83f0f4d398c585fda47af1728439952 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py
@@ -1,26 +1,11 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-f=cdms2.open(vcs.sample_data+"/clt.nc")
-s=f("clt",slice(0,1),squeeze=1)
-b=x.createboxfill()
-b.level_1=20
-b.level_2=80
-b.ext_2="y"
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ext2.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/clt.nc")
+s = f("clt",slice(0,1),squeeze=1)
+b = x.createboxfill()
+b.level_1 = 20
+b.level_2 = 80
+b.ext_2 = "y"
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ext2.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py b/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
index 984179e5ce0a0119433196c354bfebf330557f96..d2a39a1ba2364f5bda55da49d18040306a95e4b2 100644
--- a/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
+++ b/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py
@@ -1,28 +1,11 @@
-
-import cdms2,sys,vcs,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-f=cdms2.open(vcs.sample_data+"/ta_ncep_87-6-88-4.nc")
-s=f("ta",slice(0,1),longitude=slice(34,35),squeeze=1)-273.15
-s=cdms2.MV2.masked_less(s,-45.)
-b=x.createboxfill()
-b.level_1=-40
-b.level_2=40
-x.plot(s,b,bg=1)
-
-fnm= "test_boxfill_lev1_lev2_ta_missing.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-raw_input()
-
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
+f = cdms2.open(vcs.sample_data+"/ta_ncep_87-6-88-4.nc")
+s = f("ta",slice(0,1),longitude=slice(34,35),squeeze=1)-273.15
+s = cdms2.MV2.masked_less(s,-45.)
+b = x.createboxfill()
+b.level_1 = -40
+b.level_2 = 40
+x.plot(s, b, bg=1)
+regression.run(x, "test_boxfill_lev1_lev2_ta_missing.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_polar.py b/testing/vcs/test_vcs_boxfill_polar.py
index e4f534c9dc733ec4102e8683a1c68b1fbaf486af..869d09802ce01dddb4b61a9be2b1b3cba8aeab0c 100644
--- a/testing/vcs/test_vcs_boxfill_polar.py
+++ b/testing/vcs/test_vcs_boxfill_polar.py
@@ -1,33 +1,16 @@
-#!/usr/bin/env python
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
 
 f = cdms2.open(vcs.sample_data + "/clt.nc")
-a=f("clt")
+a = f("clt")
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 900, units="pixels")
-
-p=x.getprojection("polar")
-b=x.createboxfill()
-b.projection=p
-#b.datawc_y1 = 90
-#b.datawc_y2 = -90
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
+p = x.getprojection("polar")
+b = x.createboxfill()
+b.projection = p
 x.plot(a(latitude=(90,-90)), b, bg=1)
 
 fileName = os.path.basename(__file__)
 fileName = os.path.splitext(fileName)[0]
 fileName += '.png'
-x.png(fileName)
-ret = checkimage.check_result_image(fileName, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fileName)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_boxfill_robinson_wrap.py b/testing/vcs/test_vcs_boxfill_robinson_wrap.py
index 81b3206bdfe0d24c720daabb16ae3f1967196304..de66e516ccfae93f93138f34eea7ecd2bcf1c304 100644
--- a/testing/vcs/test_vcs_boxfill_robinson_wrap.py
+++ b/testing/vcs/test_vcs_boxfill_robinson_wrap.py
@@ -1,15 +1,7 @@
-#!/usr/bin/env python
-import cdms2, cdutil, genutil
-import vcs,os
-import sys
+import os, sys, cdms2, cdutil, genutil, vcs, testing.regression as regression
 
 # This tests if extending the longitude to more than 360 decrees is handled correctly by
 # proj4. See https://github.com/UV-CDAT/uvcdat/issues/1728 for more information.
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-
 cdmsfile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 clt2 = cdmsfile('clt')
 clt3 = clt2(latitude=(-90.0, 90.0),squeeze=1,longitude=(-180, 200.0),time=('1979-01', '1988-12'),)
@@ -19,7 +11,4 @@ kwargs = {}
 kwargs[ 'cdmsfile' ] = cdmsfile.id
 kwargs['bg'] = 1
 canvas.plot(clt3, gmBoxfill, **kwargs)
-fnm = "test_robinson_wrap.png"
-canvas.png(fnm)
-ret = checkimage.check_result_image(fnm, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_robinson_wrap.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_canvas_background.py b/testing/vcs/test_vcs_canvas_background.py
index 1d39b330d26fa37397aa5251ffcd1a91ff1d589b..2c72b51f3d4b8048d919982971193c2159d1abc4 100644
--- a/testing/vcs/test_vcs_canvas_background.py
+++ b/testing/vcs/test_vcs_canvas_background.py
@@ -1,19 +1,6 @@
-import vcs, cdms2, os, sys
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(500,500,units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
 
+x = regression.init()
 x.backgroundcolor = (255, 255, 255)
 x.open()
-fnm = "test_backgroundcolor_white.png"
-x.png(fnm)
-
-src=sys.argv[1]
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_backgroundcolor_white.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_canvas_background_update.py b/testing/vcs/test_vcs_canvas_background_update.py
index 454f0ef0993fc6d1c1b77ffb34bcabe239f0b5a5..80f79d2aa9f70da67888f39b5a44c397196b1b6b 100644
--- a/testing/vcs/test_vcs_canvas_background_update.py
+++ b/testing/vcs/test_vcs_canvas_background_update.py
@@ -1,22 +1,8 @@
-import vcs, cdms2, os, sys
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x = vcs.init()
-
-x.drawlogooff()
-x.setbgoutputdimensions(500,500, units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
 
+x = regression.init()
 x.backgroundcolor = (255, 255, 255)
 x.open()
 x.backgroundcolor = (255, 255, 0)
 x.update()
-fnm = "test_backgroundcolor_yellow.png"
-x.png(fnm)
-
-src=sys.argv[1]
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.check_result_image(x, "test_backgroundcolor_yellow.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_click_info.py b/testing/vcs/test_vcs_click_info.py
index 0fe587792bfd0832a90bd76c1a52d3065c3e1a9a..8d55e77c6501fb37dc06e653bb6fa7c1294c6a0d 100644
--- a/testing/vcs/test_vcs_click_info.py
+++ b/testing/vcs/test_vcs_click_info.py
@@ -1,41 +1,44 @@
-import cdms2
-import sys
-import vcs
-import os
+import os, sys, cdms2, vcs, testing.regression as regression
 
 testConfig = {'a_boxfill': ('clt.nc', 'clt', (200, 200)),
               'a_mollweide_boxfill': ('clt.nc', 'clt', (222, 322)),
+              'a_isofill': ('clt.nc', 'clt', (200, 200)),
+              'a_isoline': ('clt.nc', 'clt', (200, 200)),
+              'vector_default': ('clt.nc', ('u', 'v'), (200, 200)),
               'a_meshfill': ('sampleCurveGrid4.nc', 'sample', (222, 322)),
               'a_robinson_meshfill': ('sampleCurveGrid4.nc', 'sample', (222, 322))}
 
 # Tests if the info produced when clicking on a map is correct.
 src = sys.argv[1]
 plot = sys.argv[2]
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-# Needs to set the size of window so it is consistent accross
-# test platforms
-x.open(814, 606)
-
-# data
-f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
-s = f(testConfig[plot][1])
+x = regression.init(bg=False, geometry=(800, 600))
 
+vector = False
 # graphics method
 if (plot.find('boxfill') != -1):
     gm = x.getboxfill(plot)
 elif (plot.find('meshfill') != -1):
     gm = x.getmeshfill(plot)
+elif (plot.find('isofill') != -1):
+    gm = x.getisofill(plot)
+elif (plot.find('isoline') != -1):
+    gm = x.getisoline(plot)
+elif (plot.find('vector') != -1):
+    gm = x.getvector(plot[plot.index('_') + 1:])
+    vector = True
 else:
     print "Invalid plot"
     sys.exit(13)
 
-# Has to plot in foreground to simulate a click
-x.plot(s, gm)
+# data
+f = cdms2.open(vcs.sample_data + "/" + testConfig[plot][0])
+if (vector):
+    u = f(testConfig[plot][1][0])
+    v = f(testConfig[plot][1][1])
+    x.plot(u, v, gm)
+else:
+    s = f(testConfig[plot][1])
+    x.plot(s, gm)
 
 # Simulate a click -- VTK Specific
 location = testConfig[plot][2]
@@ -47,7 +50,4 @@ fileName = os.path.basename(src)
 fileName = os.path.splitext(fileName)[0]
 fileName += '.png'
 
-x.png(fileName, width=814, height= 606)
-
-ret = checkimage.check_result_image(fileName, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fileName)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_close.py b/testing/vcs/test_vcs_close.py
index 7bf00722497445219bf28ee8ba4981e2c9f7e438..3457a648ea2c22aa437845104391c2a424c1244a 100644
--- a/testing/vcs/test_vcs_close.py
+++ b/testing/vcs/test_vcs_close.py
@@ -1,7 +1,5 @@
 import os, sys, vcs, cdms2
-#import checkimage
 
-#src=sys.argv[1]
 pth = os.path.join(os.path.dirname(__file__),"..")
 sys.path.append(pth)
 cdmsfile = cdms2.open(vcs.sample_data+"/clt.nc")
@@ -9,8 +7,4 @@ data = cdmsfile('clt')
 x = vcs.init()
 x.plot(data, bg=1)
 x.close()
-#x.plot(data[4][1:89], bg=1)
-#fnm = "test_vcs_close.png"
-#x.png(fnm)
-#ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
 sys.exit(0)
diff --git a/testing/vcs/test_vcs_colormaps_source.py b/testing/vcs/test_vcs_colormaps_source.py
index 1dcc7d8807f99c751e29e0ad9145e5dc5b3758cc..2cfc027217b17a03f8bc23b474b222f80c3b1056 100644
--- a/testing/vcs/test_vcs_colormaps_source.py
+++ b/testing/vcs/test_vcs_colormaps_source.py
@@ -1,27 +1,12 @@
-import vcs
-import argparse
-import cdms2
-import  os
-import sys
-
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import argparse, os, sys, cdms2, vcs, testing.regression as regression
 
 parser = argparse.ArgumentParser()
-
 parser.add_argument("-g",dest="gm",default="boxfill",choices = ["boxfill","isofill","meshfill","isoline","vector","1d"])
 parser.add_argument("-s",dest="src",default="vcs",choices=["vcs","canvas","gm"])
 parser.add_argument("-b",dest="baseline")
-
-
 args = parser.parse_args()
 
-x=vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200, 1091, units="pixels")
-x.drawlogooff()
+x = regression.init()
 
 exec("gm = x.create%s()" % args.gm)
 
@@ -55,7 +40,4 @@ else:
 fnm = "test_vcs_colormaps_source_%s_%s.png" % (args.gm,args.src)
 x.png(fnm)
 baselineImage = args.baseline
-ret = checkimage.check_result_image(fnm, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+ret = regression.run(x, fnm, baselineImage)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_colorpicker_appearance.py b/testing/vcs/test_vcs_colorpicker_appearance.py
index c925340711465ac60dc4ad92644ac33878b0095c..4ccba61fd2747ca23eb4ccd6eafdb94ad67a482e 100644
--- a/testing/vcs/test_vcs_colorpicker_appearance.py
+++ b/testing/vcs/test_vcs_colorpicker_appearance.py
@@ -1,7 +1,6 @@
 import vcs, vtk
 
 picker = vcs.colorpicker.ColorPicker(500, 250, None, 0)
-
 win = picker.render_window
 
 win.Render()
@@ -16,8 +15,6 @@ png_writer.Write()
 
 import sys, os
 if len(sys.argv) > 1:
-    src = sys.argv[1]
-    sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
-    import checkimage
-    ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
+    import testing.regression as regression
+    ret = regression.check_result_image(fnm, sys.argv[1])
     sys.exit(ret)
diff --git a/testing/vcs/test_vcs_configurator_resize.py b/testing/vcs/test_vcs_configurator_resize.py
index b6179626d422de52f577790291fb1d434078d59c..bd3490afbdd8280b66b1a69231dc360624e7a51e 100644
--- a/testing/vcs/test_vcs_configurator_resize.py
+++ b/testing/vcs/test_vcs_configurator_resize.py
@@ -4,15 +4,16 @@ x = vcs.init()
 x.open()
 x.configure()
 
-x.backend.renWin.SetSize(814, 303)
-
 fnm = "test_vcs_configurator_resize.png"
 
 win = x.backend.renWin
-win.Render()
+win.SetSize(814, 303)
+
 out_filter = vtk.vtkWindowToImageFilter()
 out_filter.SetInput(win)
 
+win.Render()
+
 png_writer = vtk.vtkPNGWriter()
 png_writer.SetFileName(fnm)
 png_writer.SetInputConnection(out_filter.GetOutputPort())
@@ -20,9 +21,7 @@ png_writer.Write()
 
 import sys, os
 if len(sys.argv) > 1:
-    pth = os.path.join(os.path.dirname(__file__), "..")
-    sys.path.append(pth)
-    import checkimage
+    import testing.regression as regression
     src = sys.argv[1]
-    ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-    sys.exit(ret)
+    ret = regression.check_result_image(fnm, src)
+    sys.exit(ret)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_continents.py b/testing/vcs/test_vcs_continents.py
index 9ae3d62de208e3058801b73c6a94fbbfcdef161e..c102df9de47557d2e523979d1bb77744b5c6d54c 100644
--- a/testing/vcs/test_vcs_continents.py
+++ b/testing/vcs/test_vcs_continents.py
@@ -1,22 +1,14 @@
-import cdms2
-import os
-import sys
-import vcs
-import EzTemplate
+import os, sys, EzTemplate, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 clt = dataFile("clt", time="1979-1-1", squeeze=1)
 
-
 # Zero out the array so we can see the continents clearly
 clt[:] = 0
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 boxfill = canvas.createboxfill()
@@ -64,15 +56,4 @@ for i in range(12):
         canvas.plot(clt, template, boxfill, continents=7, continents_line=cont_line, bg=1)
         os.environ["UVCDAT_DIR"] = current_dotdir
 
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_continents.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    25)
-sys.exit(ret)
+regression.run(canvas, "test_continents.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_create_get.py b/testing/vcs/test_vcs_create_get.py
index ec525d1b4d72a56d750e8dd8e69e3648daea4811..adb879d64013d1882f21186439f805ffc0d220d6 100644
--- a/testing/vcs/test_vcs_create_get.py
+++ b/testing/vcs/test_vcs_create_get.py
@@ -1,4 +1,3 @@
-
 import vcs
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_draw_logo_on.py b/testing/vcs/test_vcs_draw_logo_on.py
index 65271eb15103b46aa4bdb53823a0aea40477e45e..4a0c28d2cdd93d8fe057eab8fe4b7bfa4e6cd1ed 100644
--- a/testing/vcs/test_vcs_draw_logo_on.py
+++ b/testing/vcs/test_vcs_draw_logo_on.py
@@ -1,19 +1,9 @@
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = vcs.init()
 a=numpy.arange(100)
 a.shape=(10,10)
 x.plot(a,bg=1)
 fnm = "test_vcs_draw_logo_on.png"
 x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.check_result_image(fnm, sys.argv[1])
diff --git a/testing/vcs/test_vcs_dump_json.json b/testing/vcs/test_vcs_dump_json.json
index b79b1319c3ec2fde786acbb559bb9a5f41e1e6e1..d408449871bef2eb96b994ed7f64ff41f6696f10 100644
--- a/testing/vcs/test_vcs_dump_json.json
+++ b/testing/vcs/test_vcs_dump_json.json
@@ -1,6 +1,6 @@
 {
  "G1d": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "colormap": null, 
    "datawc_calendar": 135441, 
    "datawc_timeunits": "days since 2000", 
@@ -29,7 +29,7 @@
   }
  }, 
  "Gfb": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "boxfill_type": "linear", 
    "color_1": 16, 
    "color_2": 239, 
@@ -70,7 +70,7 @@
   }
  }, 
  "Gfi": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "colormap": null, 
    "datawc_calendar": 135441, 
    "datawc_timeunits": "days since 2000", 
@@ -110,7 +110,7 @@
   }
  }, 
  "Gfm": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "colormap": null, 
    "datawc_calendar": 135441, 
    "datawc_timeunits": "days since 2000", 
@@ -153,7 +153,7 @@
   }
  }, 
  "Gi": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "angle": [
     35.0
    ], 
@@ -210,8 +210,50 @@
    "yticlabels2": "*"
   }
  }, 
+ "Gv": {
+  "vcs_instance": {
+   "alignment": "center", 
+   "colormap": null, 
+   "datawc_calendar": 135441, 
+   "datawc_timeunits": "days since 2000", 
+   "datawc_x1": 1e+20, 
+   "datawc_x2": 1e+20, 
+   "datawc_y1": 1e+20, 
+   "datawc_y2": 1e+20, 
+   "line": null, 
+   "linecolor": null, 
+   "linewidth": null, 
+   "projection": "linear", 
+   "reference": 1e+20, 
+   "scale": 1.0, 
+   "scaleoptions": [
+    "off", 
+    "constant", 
+    "normalize", 
+    "linear", 
+    "constantNNormalize", 
+    "constantNLinear"
+   ], 
+   "scalerange": [
+    0.1, 
+    1.0
+   ], 
+   "scaletype": "constantNNormalize", 
+   "type": "arrows", 
+   "xaxisconvert": "linear", 
+   "xmtics1": "", 
+   "xmtics2": "", 
+   "xticlabels1": "*", 
+   "xticlabels2": "*", 
+   "yaxisconvert": "linear", 
+   "ymtics1": "", 
+   "ymtics2": "", 
+   "yticlabels1": "*", 
+   "yticlabels2": "*"
+  }
+ }, 
  "P": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "box1": {
     "line": "default", 
     "priority": 1, 
@@ -579,7 +621,7 @@
   }
  }, 
  "Proj": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "parameters": [
     1e+20, 
     1e+20, 
@@ -601,7 +643,7 @@
   }
  }, 
  "Tf": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "color": [
     1
    ], 
@@ -632,7 +674,7 @@
   }
  }, 
  "Tl": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "color": [
     1
    ], 
@@ -662,7 +704,7 @@
   }
  }, 
  "Tm": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "color": [
     1
    ], 
@@ -691,7 +733,7 @@
   }
  }, 
  "To": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "angle": 0, 
    "halign": 0, 
    "height": 14, 
@@ -700,7 +742,7 @@
   }
  }, 
  "Tt": {
-  "Charles.Doutriaux": {
+  "vcs_instance": {
    "backgroundcolor": 0, 
    "backgroundopacity": 0, 
    "color": 1, 
diff --git a/testing/vcs/test_vcs_dump_json.py b/testing/vcs/test_vcs_dump_json.py
index aca6215b89fb5196da3ae09054dec9d2e3a07f63..421606c4d836ff0a9d048529fc152acbde577a21 100644
--- a/testing/vcs/test_vcs_dump_json.py
+++ b/testing/vcs/test_vcs_dump_json.py
@@ -1,33 +1,35 @@
 
 import filecmp
 import vcs,numpy,os,sys
-src=sys.argv[1]
+src = sys.argv[1]
 if os.path.exists("test_vcs_dump_json.json"):
     os.remove("test_vcs_dump_json.json")
 
-b = vcs.createboxfill("Charles.Doutriaux")
+b = vcs.createboxfill("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createisofill("Charles.Doutriaux")
+b = vcs.createisofill("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createisoline("Charles.Doutriaux")
+b = vcs.createisoline("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createmeshfill("Charles.Doutriaux")
+b = vcs.createmeshfill("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.create1d("Charles.Doutriaux")
+b = vcs.create1d("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createfillarea("Charles.Doutriaux")
+b = vcs.createfillarea("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createtext("Charles.Doutriaux")
+b = vcs.createvector("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createline("Charles.Doutriaux")
+b = vcs.createtext("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createmarker("Charles.Doutriaux")
+b = vcs.createline("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createtemplate("Charles.Doutriaux")
+b = vcs.createmarker("vcs_instance")
 b.script("test_vcs_dump_json","a")
-b = vcs.createprojection("Charles.Doutriaux")
+b = vcs.createtemplate("vcs_instance")
+b.script("test_vcs_dump_json","a")
+b = vcs.createprojection("vcs_instance")
 b.script("test_vcs_dump_json","a")
 
-assert(filecmp.cmp("test_vcs_dump_json.json",src))
+assert(filecmp.cmp("test_vcs_dump_json.json", src))
 
 
diff --git a/testing/vcs/test_vcs_fillarea_basics_no_plot.py b/testing/vcs/test_vcs_fillarea_basics_no_plot.py
index 870aae9be8c592ec896dac0eaf854aa4f8282b62..9a8e38ce7f388be647fad7b227695fc5c56d2af5 100644
--- a/testing/vcs/test_vcs_fillarea_basics_no_plot.py
+++ b/testing/vcs/test_vcs_fillarea_basics_no_plot.py
@@ -1,9 +1,5 @@
-
-import vcs
-import numpy
-import cdtime
-
-from vcs_test_common import *
+import numpy, cdtime, vcs
+from testing.common import test_values_setting
 
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_fillarea_transparency.py b/testing/vcs/test_vcs_fillarea_transparency.py
index dc3a8bf4bd052b00fe31ab47223c03fe3d943e27..831b3e029f53b28111ff5a92005e79e9590bd54b 100644
--- a/testing/vcs/test_vcs_fillarea_transparency.py
+++ b/testing/vcs/test_vcs_fillarea_transparency.py
@@ -1,13 +1,6 @@
-import vcs
-import sys,os
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, sys, os, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 
 fa1 = x.createfillarea()
 
@@ -29,8 +22,4 @@ x.plot(fa1,bg=True)
 x.plot(fa2,bg=True)
 
 fnm = os.path.split(__file__[:-2]+"png")[-1]
-x.png(fnm)
-src = sys.argv[1]
-
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_first_png_blank.py b/testing/vcs/test_vcs_first_png_blank.py
index d11c59e6d5e1fec5a9c12c03882fd1117ff7c1d5..1e0bd8e2802838828c38281b2904c6ce5bd58291 100644
--- a/testing/vcs/test_vcs_first_png_blank.py
+++ b/testing/vcs/test_vcs_first_png_blank.py
@@ -1,20 +1,7 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 T=f('clt')
-v = vcs.init()
-v.setantialiasing(0)
-v.setbgoutputdimensions(1200,1091,units="pixels")
+v = regression.init()
 v.plot(T,bg=1)
-# This will write a blank plot to a file:
-fnm = "first_png_blank.png"
-v.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(v, 'first_png_blank.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipNone.py b/testing/vcs/test_vcs_flipNone.py
index a76e271f1fe620f1a5c0aefcee5e34ee232151bf..79b69ffddbe985a8bc6240cb33f599c5bafb986f 100644
--- a/testing/vcs/test_vcs_flipNone.py
+++ b/testing/vcs/test_vcs_flipNone.py
@@ -1,26 +1,8 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
-
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1,level=(0,10000))
 x.plot(s,bg=1)
-fnm = "test_vcs_flipNone.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipNone.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipX.py b/testing/vcs/test_vcs_flipX.py
index 0de7001aacba52022175308cb0b678550ada7137..e211bf16a771dfbb98fff5f20d19939d4dc4bb3f 100644
--- a/testing/vcs/test_vcs_flipX.py
+++ b/testing/vcs/test_vcs_flipX.py
@@ -1,26 +1,8 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
-
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1,latitude=(90,-90),level=(0,10000))
 x.plot(s,bg=1)
-fnm = "test_vcs_flipX.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipX.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipXY.py b/testing/vcs/test_vcs_flipXY.py
index 8dd0f8d89332c0385a283847c97579e6fdf9bd2f..779a0fe7e523343a00b8210d0b126f15937c94e8 100644
--- a/testing/vcs/test_vcs_flipXY.py
+++ b/testing/vcs/test_vcs_flipXY.py
@@ -1,26 +1,9 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
 
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1,latitude=(90,-90))
 x.plot(s,bg=1)
-fnm = "test_vcs_flipXY.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipXY.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_flipY.py b/testing/vcs/test_vcs_flipY.py
index 7194f3f3067aae4f18c1ea6f0d63e116ec51fa07..5efa5774512a55cc3e91a4f7480a598c3e41e354 100644
--- a/testing/vcs/test_vcs_flipY.py
+++ b/testing/vcs/test_vcs_flipY.py
@@ -1,26 +1,8 @@
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-
-
 vr = "ta"
 s=f(vr,slice(0,1),longitude=slice(90,91),squeeze=1)
 x.plot(s,bg=1)
-fnm = "test_vcs_flipY.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, 'test_vcs_flipY.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_gen_meshfill.py b/testing/vcs/test_vcs_gen_meshfill.py
index b726a20a692a6c89bae24f4f1dacd041e8769157..e5994055465ca30e5db4181ad23eb3aa5dd95c9d 100644
--- a/testing/vcs/test_vcs_gen_meshfill.py
+++ b/testing/vcs/test_vcs_gen_meshfill.py
@@ -1,39 +1,16 @@
+import os, sys, numpy, vcs, testing.regression as regression
 
-import vcs,numpy,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 
 data = numpy.array([1,2,3,4])
-
 blon = numpy.array([-1,1,1,0,-1])
 blat = numpy.array([0,0,1,2,1])
-
 acell=numpy.array([blat,blon])
 bcell = numpy.array([blat,blon+2.5])
 ccell = numpy.array([blat+2.5,blon+2.5])
 dcell = numpy.array([blat+2.5,blon])
-
 mesh = numpy.array([acell,bcell,ccell,dcell])
-
 m=x.createmeshfill()
 
 x.plot(data,mesh,m,bg=1)
-
-
-fnm = "test_vcs_gen_meshfill.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
-
+regression.run(x, "test_vcs_gen_meshfill.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_geometry.py b/testing/vcs/test_vcs_geometry.py
new file mode 100644
index 0000000000000000000000000000000000000000..578bfb67d1c82a4c3232d2620c762191cc92e883
--- /dev/null
+++ b/testing/vcs/test_vcs_geometry.py
@@ -0,0 +1,32 @@
+import sys, vcs
+
+# This will check if we can set the geometry
+# at the initialization of canvas
+canvas = vcs.init(geometry=(600, 400))
+canvas.open()
+
+if dict(width=600, height=400) != canvas.geometry():
+    canvas.close()
+    sys.exit(1)
+
+canvas.close()
+
+canvas2 = vcs.init()
+
+# This will check if we can safely set the geometry even
+# though the canvas window has not been created yet
+canvas2.geometry(400, 400)
+canvas2.open()
+if dict(width=400, height=400) != canvas2.geometry():
+    canvas2.close()
+    sys.exit(1)
+
+# This will check if we can dynamically change the geometry
+canvas2.geometry(500, 400)
+canvas2.geometry(500, 500)
+if dict(width=500, height=500) != canvas2.geometry():
+    canvas2.close()
+    sys.exit(1)
+
+canvas2.close()
+sys.exit(0)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_gms_animate_projected_plots.py b/testing/vcs/test_vcs_gms_animate_projected_plots.py
index 2e521cd4ad292b5ae1a4c1606b73df550103e752..3de234e418cb9ab65b0b5c1f16b30ac573e39d86 100644
--- a/testing/vcs/test_vcs_gms_animate_projected_plots.py
+++ b/testing/vcs/test_vcs_gms_animate_projected_plots.py
@@ -1,15 +1,5 @@
 # Test animation of projected plots
-
-import argparse
-import cdms2
-import MV2
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage  # noqa
+import argparse, os, sys, cdms2, MV2, vcs, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Testing animation of projected plots")
 p.add_argument("--gm_type", dest="gm", help="gm to test")
@@ -19,18 +9,13 @@ p.add_argument("--source", dest="src", help="path to baseline image")
 p.add_argument("--keep", dest="keep", action="store_true", default=False,
                help="Save images, even if baseline matches.")
 p.add_argument("--threshold", dest="threshold", type=int,
-               default=checkimage.defaultThreshold,
+               default=regression.defaultThreshold,
                help="Threshold value for image differnces")
 
 args = p.parse_args(sys.argv[1:])
 
 gm_type = args.gm
-
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1091, units="pixels")
-
+x = regression.init()
 s = None
 
 if gm_type == "meshfill":
@@ -72,7 +57,7 @@ pngs = x.animate.close(preserve_pngs=True)  # so we can look at them again
 ret = 0
 pdir = os.path.split(pngs[0])[0]
 p = pdir + os.sep + "anim_0.png"
-ret = checkimage.check_result_image(p, args.src, args.threshold)
+ret = regression.check_result_image(p, args.src, args.threshold)
 if ret == 0 and not args.keep:
     for f in pngs:
         if os.path.isfile(f):
diff --git a/testing/vcs/test_vcs_gms_patterns_hatches.py b/testing/vcs/test_vcs_gms_patterns_hatches.py
index 4030429a0d02cfe2b2b4afb070722fca29cde8fd..a7681a4205b962e7a28da73a7250f5a351bfcf6d 100644
--- a/testing/vcs/test_vcs_gms_patterns_hatches.py
+++ b/testing/vcs/test_vcs_gms_patterns_hatches.py
@@ -1,14 +1,4 @@
-# Test the use of patterns/hatches for plots
-
-import argparse
-import cdms2
-import os
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage  # noqa
+import argparse, os, sys, cdms2, vcs, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Patterns/hatches testing code for vcs gms")
 p.add_argument("--source", dest="src", help="source image file")
@@ -22,7 +12,7 @@ p.add_argument("--lat2", dest="lat2", default=90, type=float, help="Last latitud
 p.add_argument("--lon1", dest="lon1", default=-180, type=float, help="First Longitude")
 p.add_argument("--lon2", dest="lon2", default=180, type=float, help="Last Longitude")
 p.add_argument("--keep", dest="keep", action="store_true", help="Save image, even if baseline matches.")
-p.add_argument("--threshold", dest="threshold", type=int, default=checkimage.defaultThreshold,
+p.add_argument("--threshold", dest="threshold", type=int, default=regression.defaultThreshold,
         help="Default threshold")
 p.add_argument("--non-contiguous", dest="contig", default=True, action="store_false", help="use non contiguous levels")
 
@@ -98,7 +88,7 @@ fnm += nm_xtra
 x.png(fnm)
 print "fnm:", fnm
 print "src:", src
-ret = checkimage.check_result_image(fnm+'.png', src,
+ret = regression.check_result_image(fnm+'.png', src,
                                     args.threshold,
                                     cleanup=not args.keep)
 if args.show:
diff --git a/testing/vcs/test_vcs_hatches_patterns.py b/testing/vcs/test_vcs_hatches_patterns.py
index 8cd1766f0db987dfb385f30bf9d277dfaee5f90a..8a467259c835be3ca76c319952eea6318bae31e8 100644
--- a/testing/vcs/test_vcs_hatches_patterns.py
+++ b/testing/vcs/test_vcs_hatches_patterns.py
@@ -1,17 +1,6 @@
-import os
-import sys
-import vcs
+import os, sys, vcs, testing.regression as regression
 
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-baselineImage = sys.argv[1]
-
-# Initialize the graphics canvas
-x = vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200, 1091, units="pixels")
+x = regression.init()
 
 # Create a test plot for listing all the hatches and patterns
 style_list = []
@@ -77,11 +66,4 @@ plot_title.y = [.9]
 x.plot(plot_title, bg=1)
 x.plot(fill_test, bg=1)
 x.plot(fill_info, bg=1)
-
-testImage = os.path.abspath("test_vcs_hatches_patterns.png")
-x.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(x, "test_vcs_hatches_patterns.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_import.py b/testing/vcs/test_vcs_import.py
index c1224212518fcbde53cc84adca3a94c58c0561ec..01c3d0447cccc402b1ec4d3c3e759249a6e58e04 100644
--- a/testing/vcs/test_vcs_import.py
+++ b/testing/vcs/test_vcs_import.py
@@ -1,3 +1,2 @@
-
 import vcs
 
diff --git a/testing/vcs/test_vcs_infinity.py b/testing/vcs/test_vcs_infinity.py
index 235f551ebd9d12058082808a30525047260fd88d..daf40601683bed2b57271251687da2ae742f1b9a 100644
--- a/testing/vcs/test_vcs_infinity.py
+++ b/testing/vcs/test_vcs_infinity.py
@@ -1,28 +1,16 @@
-
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import numpy
+import os, sys, numpy, MV2, cdms2, testing.regression as regression
 
 s= numpy.sin(numpy.arange(100))
-s=numpy.reshape(s,(10,10))
+s = numpy.reshape(s,(10,10))
 
 s[4,6] = numpy.inf
 s[7,9] = numpy.NINF
 s[9,2] = numpy.nan
 
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
 x.plot(s,bg=1)
 fnm = "infinity.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_iso_celine_part1.py b/testing/vcs/test_vcs_iso_celine_part1.py
index 586918d36ac55ae19827b0243fe215e9ba7374a3..a8fcc5ae72162cc06e29c599ed834bad0b6bb3d6 100644
--- a/testing/vcs/test_vcs_iso_celine_part1.py
+++ b/testing/vcs/test_vcs_iso_celine_part1.py
@@ -1,22 +1,13 @@
+import os,sys, MV2, numpy, vcs, cdms2, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
 src=sys.argv[1]
 pth0 = os.path.dirname(__file__)
-pth = os.path.join(pth0,"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(pth0,"celine.nc"))
-s=f("data")
-x=vcs.init()
+f = cdms2.open(os.path.join(pth0,"celine.nc"))
+s = f("data")
+x = regression.init()
 x.setantialiasing(0)
-x.drawlogooff()
 x.scriptrun(os.path.join(pth0,"celine.json"))
-i=x.getisofill("celine")
+i = x.getisofill("celine")
 x.plot(s,i,bg=1)
 fnm = "test_celine_iso.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_iso_celine_part2.py b/testing/vcs/test_vcs_iso_celine_part2.py
index f99f821da90cb03eaef8519b6834c207cbb20a1d..c1c1df5b1c8507b88a72a1a2076ddc88d4dafc34 100644
--- a/testing/vcs/test_vcs_iso_celine_part2.py
+++ b/testing/vcs/test_vcs_iso_celine_part2.py
@@ -1,26 +1,15 @@
+import os, sys, MV2, numpy, vcs, cdms2, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
 pth0 = os.path.dirname(__file__)
-pth = os.path.join(pth0,"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(pth0,"celine.nc"))
-s=f("data")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
+f = cdms2.open(os.path.join(pth0,"celine.nc"))
+s = f("data")
+x = regression.init()
 x.scriptrun(os.path.join(pth0,"celine.json"))
-i=x.getisofill("celine")
-b=vcs.createboxfill()
+i = x.getisofill("celine")
+b = vcs.createboxfill()
 b.levels=i.levels
 b.fillareacolors=i.fillareacolors
 b.boxfill_type="custom"
 x.plot(s,b,bg=1)
 fnm = "test_celine_iso_2.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isofill_data_read_north_to_south.py b/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
index 5f12f65b0b0cfe87815ea99a076b46061970d5cf..8d1cc949c142c9356c3ec31f6129a15e807ba4cf 100644
--- a/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
+++ b/testing/vcs/test_vcs_isofill_data_read_north_to_south.py
@@ -1,24 +1,10 @@
-import cdms2
-import vcs
-import sys
-import os
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
+
 f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 clt = f("clt",latitude=(80.0, 38.0),squeeze=1,longitude=(-180.0, 180.0),time=slice(0,1))
-x = vcs.init()
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.setantialiasing(0)
+x = regression.init()
 gm = vcs.createisofill()
 gm.projection="polar"
 x.plot( clt,gm,bg=1)
 fnm = os.path.split(__file__)[-1][:-2]+"png"
-src= sys.argv[1]
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isofill_isoline_labels.py b/testing/vcs/test_vcs_isofill_isoline_labels.py
index d230fc8fc4732218a8829f64529bd83e3b7eb403..192ffc4bf904f4eb0da3b0bef6f711856524e4e4 100644
--- a/testing/vcs/test_vcs_isofill_isoline_labels.py
+++ b/testing/vcs/test_vcs_isofill_isoline_labels.py
@@ -1,24 +1,10 @@
-import vcs,cdms2,sys,os
-
-baselineImage = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 isofill = canvas.createisofill()
-
-# First plot the isofill
 canvas.plot(data, isofill, bg=1)
-
 isoline = canvas.createisoline()
 isoline.label="y"
 texts=[]
@@ -35,13 +21,6 @@ for i in range(10):
 isoline.text = texts
 isoline.linecolors = colors
 
-# Next plot the isolines with labels
+# Plot the isolines with labels
 canvas.plot(data, isoline, bg=1)
-
-testImage = os.path.abspath("test_isofill_isoline_labels.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(canvas, "test_isofill_isoline_labels.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isofill_mask_cell_shift.py b/testing/vcs/test_vcs_isofill_mask_cell_shift.py
index 9bc8671131e5bd215abdc918cc114667e8af018b..199b1586d466b7b4dcc2df3f6b516ce3be3b2aea 100644
--- a/testing/vcs/test_vcs_isofill_mask_cell_shift.py
+++ b/testing/vcs/test_vcs_isofill_mask_cell_shift.py
@@ -1,25 +1,8 @@
-import os,sys
-import MV2
-import vcs
-import cdms2
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt",slice(0,1),latitude=(30,70),longitude=(-130,-60))
-s2=MV2.masked_greater(s,65.)
+import os, sys, MV2, cdms2, vcs, testing.regression as regression
 
+x = regression.init()
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt",slice(0,1),latitude=(30, 70),longitude=(-130, -60))
+s2 = MV2.masked_greater(s, 65.)
 x.plot(s2,"default","isofill",bg=1)
-fnm = "test_vcs_isofill_mask_cell_shift.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_isofill_mask_cell_shift.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_labels.py b/testing/vcs/test_vcs_isoline_labels.py
index 77033f2c822cc29c9efda6ac54df7dcccffc5cfe..46023543162646912f649feb99dded3566708009 100644
--- a/testing/vcs/test_vcs_isoline_labels.py
+++ b/testing/vcs/test_vcs_isoline_labels.py
@@ -1,20 +1,8 @@
-import vcs,cdms2,sys,os
-
-# ('/path/to/filename', '.extension')
-baseline = os.path.splitext(sys.argv[1])
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 isoline = canvas.createisoline()
 isoline.label="y"
 texts=[]
@@ -33,24 +21,17 @@ isoline.text = texts
 # First test using isoline.text[...].color
 canvas.plot(data, isoline, bg=1)
 
+baseline = os.path.splitext(sys.argv[1])
 baselineImage = "%s%s"%baseline
-testImage = os.path.abspath("test_isoline_labels.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
+ret = regression.run_wo_terminate(canvas, "test_isoline_labels.png", baselineImage)
 
 # Now set isoline.linecolors and test again.
 canvas.clear()
 isoline.linecolors = colors
 canvas.plot(data, isoline, bg=1)
-
 baselineImage = "%s%d%s"%(baseline[0], 2, baseline[1])
 testImage = os.path.abspath("test_isoline_labels2.png")
-canvas.png(testImage)
-
-ret += checkimage.check_result_image(testImage, baselineImage,
-                                     checkimage.defaultThreshold)
+ret += regression.run_wo_terminate(canvas, testImage, baselineImage)
 
 # Now set isoline.textcolors and test again.
 canvas.clear()
@@ -59,9 +40,6 @@ canvas.plot(data, isoline, bg=1)
 
 baselineImage = "%s%d%s"%(baseline[0], 3, baseline[1])
 testImage = os.path.abspath("test_isoline_labels3.png")
-canvas.png(testImage)
-
-ret += checkimage.check_result_image(testImage, baselineImage,
-                                     checkimage.defaultThreshold)
+ret += regression.run_wo_terminate(canvas, testImage, baselineImage)
 
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_isoline_labels_background.py b/testing/vcs/test_vcs_isoline_labels_background.py
index da1d1e6ac99cd30f74645fe2b820c2801708fa91..08088836adfea942aa61caec0fecbd15c920e56e 100644
--- a/testing/vcs/test_vcs_isoline_labels_background.py
+++ b/testing/vcs/test_vcs_isoline_labels_background.py
@@ -1,26 +1,13 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 import random
 
-# ('/path/to/filename', '.extension')
-baseline = os.path.splitext(sys.argv[1])
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
 (latmin, latmax, lonmin, lonmax) = (-90, 90, -180, 180)
 dataset = cdms2.open(os.path.join(vcs.sample_data, "tas_cru_1979.nc"))
 data = dataset("tas", time=slice(0, 1), latitude=(latmin, latmax),
                longitude=(lonmin, lonmax, 'co'), squeeze=1)
 dataset.close()
 
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 canvas.backgroundcolor = [100, 105, 105]
 
 isoline = canvas.createisoline()
@@ -50,11 +37,6 @@ isoline.labelskipdistance = 15.0
 
 # First test using isoline.text[...].color
 canvas.plot(data, isoline, bg=1)
-
+baseline = os.path.splitext(sys.argv[1])
 baselineImage = "%s%s" % baseline
-testImage = os.path.abspath("test_isoline_labels_background.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, baselineImage)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py b/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py
index da5e5d3c3769b739999fcb9cc2115e50f5a1cb74..8c6e13a7e95a5817dc1dfd9518d8cf9b7d7f6cb5 100644
--- a/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py
+++ b/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py
@@ -1,30 +1,18 @@
-import vcs
-import cdms2
-import os,sys
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-iso=x.createisoline()
-t=x.createtext()
-t.color=243
-t.height=25
-to=x.createtextorientation()
-to.height = 55
-tt=x.createtexttable()
-tt.color=245
-iso.textcolors=[None,None,None,242,244]
-iso.text=[t,tt,to]
-iso.label="y"
-x.plot(s,iso,bg=1)
-x.png("test_vcs_isoline_labels_multi_label_input_types.png")
-
-src=sys.argv[1]
+import os, sys, cdms2, vcs, testing.regression as regression
 
-ret = checkimage.check_result_image('test_vcs_isoline_labels_multi_label_input_types.png',src,checkimage.defaultThreshold)
-sys.exit(ret)
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt")
+x = regression.init()
+iso = x.createisoline()
+t = x.createtext()
+t.color = 243
+t.height = 25
+to = x.createtextorientation()
+to.height = 55
+tt = x.createtexttable()
+tt.color = 245
+iso.textcolors = [None,None,None,242,244]
+iso.text = [t,tt,to]
+iso.label = "y"
+x.plot(s, iso, bg=1)
+regression.run(x, "test_vcs_isoline_labels_multi_label_input_types.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_labelskipdistance.py b/testing/vcs/test_vcs_isoline_labelskipdistance.py
index 04ecdfabb404b3a568483691b74bf706723eab50..4b119e0bc3504ef25e78eb20d3e4307b43f1ecac 100644
--- a/testing/vcs/test_vcs_isoline_labelskipdistance.py
+++ b/testing/vcs/test_vcs_isoline_labelskipdistance.py
@@ -1,21 +1,8 @@
-import cdms2
-import os
-import sys
-import vcs
-
-baselineImage = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 isoline = canvas.createisoline()
 isoline.label = "y"
@@ -36,11 +23,4 @@ isoline.linecolors = colors
 
 # Next plot the isolines with labels
 canvas.plot(data, isoline, bg=1)
-
-testImage = os.path.abspath("test_isoline_labelskipdistance.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+regression.run(canvas, "test_isoline_labelskipdistance.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_numpy.py b/testing/vcs/test_vcs_isoline_numpy.py
index ced140e773fb43905c7240974aa644308caeb9dc..147f2f499635c2b4c957c1889e7613a9c8d9b3dd 100644
--- a/testing/vcs/test_vcs_isoline_numpy.py
+++ b/testing/vcs/test_vcs_isoline_numpy.py
@@ -1,21 +1,13 @@
-import vcs,cdms2,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
+import os, sys, cdms2, vcs, testing.regression as regression
+
+x = regression.init()
 x.setantialiasing(0)
 x.setbgoutputdimensions(1200,1091,units="pixels")
 x.drawlogooff()
 fnm = os.path.join(vcs.sample_data,'clt.nc')
-f=cdms2.open(fnm)
-
-s=f("clt")
-gm=x.createisofill()
+f = cdms2.open(fnm)
+s = f("clt")
+gm = x.createisofill()
 x.plot(s.filled(),gm,bg=1)
 fnm = "test_vcs_isoline_numpy.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_isoline_width_stipple.py b/testing/vcs/test_vcs_isoline_width_stipple.py
index b563436c26017e9e57d94adde068cf567f773303..20a7e5c4ff48c532d70069ac369f4e7ed6cd8dcb 100644
--- a/testing/vcs/test_vcs_isoline_width_stipple.py
+++ b/testing/vcs/test_vcs_isoline_width_stipple.py
@@ -1,22 +1,8 @@
-import cdms2
-import os
-import sys
-import vcs
-
-baselineImage = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage  # noqa
+import os, sys, cdms2, vcs, testing.regression as regression
 
 dataset = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 data = dataset("clt")
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200, 1091, units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 isoline = canvas.createisoline()
 isoline.label = "y"
 texts = []
@@ -35,15 +21,8 @@ for i in range(7):
 isoline.levels = levels
 isoline.text = texts
 isoline.linecolors = colors
-
 isoline.linewidths = (1, 2, 3, 4, 1)
 isoline.line = ('dot', 'dash', 'solid', 'dash-dot', 'long-dash', 'dot', 'dash')
-
 # Next plot the isolines with labels
 canvas.plot(data, isoline, bg=1)
-
-testImage = os.path.abspath("test_isoline_width_stipple.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baselineImage, 30)
-sys.exit(ret)
+regression.run(canvas, "test_vcs_isoline_width_stipple.png")
diff --git a/testing/vcs/test_vcs_issue_960_labels.py b/testing/vcs/test_vcs_issue_960_labels.py
index ee45216cdcf00841cad219df39509b58e22d0ebe..7da2104ff4de4d623af43a1c3ae58ec091f41ac2 100644
--- a/testing/vcs/test_vcs_issue_960_labels.py
+++ b/testing/vcs/test_vcs_issue_960_labels.py
@@ -1,29 +1,19 @@
-import sys,os,cdms2,vcs
-import vcs
-src1=sys.argv[1]
-src2=sys.argv[2]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
+
+src1 = sys.argv[1]
+src2 = sys.argv[2]
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",time=slice(0,1),latitude=(-7,5),squeeze=1)
 x.plot(s,bg=1)
 fnm = "test_vcs_issue_960_labels_1.png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src1
-ret = checkimage.check_result_image(fnm,src1,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, src1)
 b=x.createboxfill()
 b.datawc_y1=-7
 b.datawc_y2=5
 x.plot(s,b,bg=1)
 fnm = "test_vcs_issue_960_labels_2.png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src2
-ret += checkimage.check_result_image(fnm,src2,checkimage.defaultThreshold)
-sys.exit(ret)
+ret += regression.check_result_image(fnm, src2)
+sys.exit(ret)
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_lambert.py b/testing/vcs/test_vcs_lambert.py
index 17a50209f8147589bba589a7c2334b938a688c69..c78e71e9b564018071760247c09d38abed002771 100644
--- a/testing/vcs/test_vcs_lambert.py
+++ b/testing/vcs/test_vcs_lambert.py
@@ -1,27 +1,10 @@
-import vcs,cdms2
-import os,sys
+import os, sys, cdms2, vcs, testing.regression as regression
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 iso = x.createisofill()
 p=x.createprojection()
 p.type="lambert"
-
 iso.projection = p
 x.plot(s(latitude=(20, 60),longitude=(-140,-20)), iso, bg=True)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_vcs_lambert.png"
-x.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_lambert.png")
diff --git a/testing/vcs/test_vcs_large_pattern_hatch.py b/testing/vcs/test_vcs_large_pattern_hatch.py
index 3bcf0827da3b051415b4c00f358d15423e1d4791..782cfb14b9f3dedbbfc917d321ac654be2b22b32 100644
--- a/testing/vcs/test_vcs_large_pattern_hatch.py
+++ b/testing/vcs/test_vcs_large_pattern_hatch.py
@@ -1,31 +1,12 @@
-import vcs
-import sys
-import os
-
-baseline = sys.argv[1]
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1090, units="pixels")
-canvas.drawlogooff()
+import os, sys, vcs, testing.regression as regression
 
+canvas = regression.init()
 fillarea = vcs.createfillarea()
 fillarea.x = [[0, .33, .33, 0], [.33, .67, .67, .33], [.67, 1, 1, .67]]
 fillarea.y = [[0, 0, 1, 1]] * 3
 fillarea.style = ["solid", "pattern", "hatch"]
 fillarea.index = [1, 5, 5]
 fillarea.color = [50, 50, 50]
-
 canvas.plot(fillarea, bg=True)
-
-testImage = os.path.abspath("test_vcs_large_pattern_hatch.png")
-canvas.png(testImage)
-
-ret = checkimage.check_result_image(testImage, baseline,
-                                    checkimage.defaultThreshold)
-
-sys.exit(ret)
+fnm = os.path.abspath("test_vcs_large_pattern_hatch.png")
+regression.run(canvas, fnm)
diff --git a/testing/vcs/test_vcs_legend.py b/testing/vcs/test_vcs_legend.py
index 26beec7e5ebbf3558ff0ccbfcbecafdc0b64c03e..a352bc080ad2dc7761efc0550bdaaae5225e0a43 100644
--- a/testing/vcs/test_vcs_legend.py
+++ b/testing/vcs/test_vcs_legend.py
@@ -1,5 +1,4 @@
-import sys,os
-import argparse
+import os, sys, argparse, cdms2, MV2, vcs, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -14,20 +13,10 @@ args = p.parse_args(sys.argv[1:])
 
 gm_type= args.gm
 src = args.src
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
 
 bg = not args.show
 
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -81,7 +70,7 @@ fnm = "test_vcs_legend_%s_%s_ext1_%s_ext2_%s" % (gm_type.lower(),args.orientatio
 x.png(fnm)
 print "fnm:",fnm
 print "src:",src
-ret = checkimage.check_result_image(fnm+'.png',src,checkimage.defaultThreshold, cleanup=not args.keep)
+ret = regression.check_result_image(fnm+'.png', src, regression.defaultThreshold, cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_line_patterns.py b/testing/vcs/test_vcs_line_patterns.py
new file mode 100755
index 0000000000000000000000000000000000000000..7597403fc1c8cfba43ec602a1d1cdfa14d35dd4a
--- /dev/null
+++ b/testing/vcs/test_vcs_line_patterns.py
@@ -0,0 +1,17 @@
+import vcs
+import cdms2
+import sys
+import os
+import testing.regression as regression
+
+
+x = regression.init(bg=1, geometry=(1620, 1080))
+
+f = cdms2.open(vcs.sample_data + "/clt.nc")
+s = f('clt')
+iso = x.createisoline()
+iso.level=[5, 50, 70, 95]
+iso.line = ['dot', 'dash', 'dash-dot', 'long-dash']
+x.plot(s,iso,continents=0)
+name = "test_vcs_line_patterns.png"
+regression.run(x, name)
diff --git a/testing/vcs/test_vcs_lon_axes_freak_out.py b/testing/vcs/test_vcs_lon_axes_freak_out.py
index ecb63f2a5dbf73031d8851b48c29616bd7935ff2..f18328f582a4a2c7e40c7f0fbcbd372867e69696 100644
--- a/testing/vcs/test_vcs_lon_axes_freak_out.py
+++ b/testing/vcs/test_vcs_lon_axes_freak_out.py
@@ -1,30 +1,11 @@
-import os,sys,vcs,cdms2
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+s = f("clt")
+s3 = f("clt",longitude=(0,360))
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-s=f("clt")
-s3=f("clt",longitude=(0,360))
-
-print s.shape,s3.shape
-
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 x.plot(s,bg=1)
 x.clear()
 x.plot(s3,bg=1)
-
-fnm = "test_lon_axes_freak_out.png"
-
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_lon_axes_freak_out.png")
diff --git a/testing/vcs/test_vcs_markers.py b/testing/vcs/test_vcs_markers.py
index 50f4f00d1bd63ce47d6eddce3fe14acc0a8fbb65..21b7a671bcb45304841718f4b029b9f1f37819d1 100644
--- a/testing/vcs/test_vcs_markers.py
+++ b/testing/vcs/test_vcs_markers.py
@@ -1,28 +1,14 @@
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 m = x.createmarker()
-m.x=[[0.,],[5,],[10.,],[15.]]
-m.y=[[0.,],[5,],[10.,],[15.]]
+m.x = [[0.,],[5,],[10.,],[15.]]
+m.y = [[0.,],[5,],[10.,],[15.]]
 m.worldcoordinate=[-5,20,-5,20]
+
 #m.worldcoordinate=[-10,10,0,10]
 m.type=['plus','diamond','square_fill',"hurricane"]
 m.color=[242,243,244,242]
 m.size=[20,20,20,5]
 x.plot(m,bg=1)
-fnm= "test_markers.png"
-
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_markers.png")
diff --git a/testing/vcs/test_vcs_matplotlib_colormap.py b/testing/vcs/test_vcs_matplotlib_colormap.py
index 06b9f49b9b6fb71f2832bff44f5fae4b4f2b82d1..baf0e1bb077cce7b69d2b28deeea60a3bc4c0102 100644
--- a/testing/vcs/test_vcs_matplotlib_colormap.py
+++ b/testing/vcs/test_vcs_matplotlib_colormap.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -10,24 +7,7 @@ clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
           time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
-
+canvas = regression.init()
 canvas.setcolormap(vcs.matplotlib2vcs("viridis"))
-
 canvas.plot(clt, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_matplotlib_colormap.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_matplotlib_colormap.png")
diff --git a/testing/vcs/test_vcs_mercator_edge.py b/testing/vcs/test_vcs_mercator_edge.py
index eb6d79cdb7147777b38c04b548e555b108ce7e07..31f6cb83bb144b463ebad914e4c3739547c928cd 100644
--- a/testing/vcs/test_vcs_mercator_edge.py
+++ b/testing/vcs/test_vcs_mercator_edge.py
@@ -1,24 +1,9 @@
-import vcs,cdms2
-import os,sys
+import os, sys, cdms2, vcs, testing.regression as regression
+
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 iso = x.createisofill()
 iso.projection = "mercator"
 x.plot(s(latitude=(-90, 90)), iso, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_vcs_mercator_edge.png"
-x.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_mercator_edge.png")
diff --git a/testing/vcs/test_vcs_meshfill_draw_mesh.py b/testing/vcs/test_vcs_meshfill_draw_mesh.py
index ef214e6485b8c753e6d24655804d1258a5b39929..08801d7a6d7e8a4f30e0cb714965b6de02725f9f 100644
--- a/testing/vcs/test_vcs_meshfill_draw_mesh.py
+++ b/testing/vcs/test_vcs_meshfill_draw_mesh.py
@@ -1,23 +1,11 @@
-import vcs,cdms2,sys,os
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x.drawlogooff()
-fnmcurv = os.path.join(vcs.sample_data,'sampleCurveGrid4.nc')
-f=cdms2.open(fnmcurv)
+import os, sys, cdms2, vcs, testing.regression as regression
 
-s=f("sample")
-m=x.createmeshfill()
-m.mesh=True
+x = regression.init()
+fnmcurv = os.path.join(vcs.sample_data,'sampleCurveGrid4.nc')
+f = cdms2.open(fnmcurv)
+s = f("sample")
+m = x.createmeshfill()
+m.mesh = True
 
 x.plot(s,m,bg=1)
-fnm = "test_meshfill_draw_mesh.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_meshfill_draw_mesh.png")
diff --git a/testing/vcs/test_vcs_meshfill_no_wrapping.py b/testing/vcs/test_vcs_meshfill_no_wrapping.py
index 967758c78651290b1541efb45aed7b6ffb99924c..9ee4a99f1c9ce4cc5f11d8db324ffaf0b2b087e9 100755
--- a/testing/vcs/test_vcs_meshfill_no_wrapping.py
+++ b/testing/vcs/test_vcs_meshfill_no_wrapping.py
@@ -1,23 +1,7 @@
-#!/usr/bin/env python
-import cdms2 
-import os 
-import sys
-import vcs
-
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
-
-
-f=cdms2.open(sys.argv[2])
-h=f("heat")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 900, units="pixels")
+import os, sys, cdms2, vcs, testing.regression as regression
 
+f = cdms2.open(sys.argv[2])
+h = f("heat")
+x = regression.init()
 x.plot(h, bg=1)
-fnm = "vcs_test_meshfill_no_wrapping.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "vcs_test_meshfill_no_wrapping.png")
diff --git a/testing/vcs/test_vcs_meshfill_regular_grid.py b/testing/vcs/test_vcs_meshfill_regular_grid.py
index 9a955f9fffaeb4182cf027ee4f1775f12281a045..77a390b221ebd19e4e876b0f4296d870cfd8f371 100644
--- a/testing/vcs/test_vcs_meshfill_regular_grid.py
+++ b/testing/vcs/test_vcs_meshfill_regular_grid.py
@@ -1,22 +1,7 @@
-import vcs, cdms2, os, sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
+import os, sys, cdms2, vcs, testing.regression as regression
 
-x.setbgoutputdimensions(1200,1091,units="pixels")
-x = vcs.init()
-x.setantialiasing(0)
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
-x.meshfill(s,bg=1)
-fnm = "test_meshfill_regular_grid.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.meshfill(s, bg=1)
+regression.run(x, "test_meshfill_regular_grid.png")
diff --git a/testing/vcs/test_vcs_meshfill_vertices.py b/testing/vcs/test_vcs_meshfill_vertices.py
index 6317ef0719479a207aabc2ab50357cf0508b4680..103e681094dfc0e9179685d17b636e642d73d120 100644
--- a/testing/vcs/test_vcs_meshfill_vertices.py
+++ b/testing/vcs/test_vcs_meshfill_vertices.py
@@ -1,19 +1,8 @@
-import numpy
-import vcs
-import sys
-import os
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, numpy, vcs, testing.regression as regression
 
-x=vcs.init()
-
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1090, units="pixels")
+x = regression.init()
 
 data_values = [ 25, 45, 55.]
-
 data_lon = [ 5., 10., 15.]
 data_lat = [ 5., 10., 15.]
 
@@ -50,8 +39,4 @@ m.levels = [20,30,50,70,80]
 m.mesh = True
 
 x.plot(numpy.array(data_values,),mesh,m,bg=True)
-x.png("test_vcs_meshfill_vertices_issue.png")
-src = sys.argv[1]
-ret = checkimage.check_result_image("test_vcs_meshfill_vertices_issue.png",
-                                    src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_meshfill_vertices_issue.png")
diff --git a/testing/vcs/test_vcs_meshfill_zoom.py b/testing/vcs/test_vcs_meshfill_zoom.py
index 1026f0ff08d14585484e13d6ac2a2e712bc59451..c366d80100aa50e5011c2e6a6bce5958d8ce6386 100644
--- a/testing/vcs/test_vcs_meshfill_zoom.py
+++ b/testing/vcs/test_vcs_meshfill_zoom.py
@@ -1,14 +1,4 @@
-#!/usr/bin/env python
-import cdms2
-import os
-import sys
-import vcs
-
-# We test if gm.datawc zooms in correctly into the plot. This works only for
-# data using a linear projection. It does not work for geographic projections.
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import os, sys, cdms2, vcs, testing.regression as regression
 
 flip = False
 if (len(sys.argv) == 3):
@@ -19,12 +9,10 @@ fileName = os.path.splitext(fileName)[0]
 if (flip):
     fileName = fileName + '_flip'
 fileName = fileName + '.png'
-f=cdms2.open(os.path.join(vcs.sample_data, "sampleCurveGrid4.nc"))
-s=f("sample")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-m=x.createmeshfill()
+f = cdms2.open(os.path.join(vcs.sample_data, "sampleCurveGrid4.nc"))
+s = f("sample")
+x = regression.init()
+m = x.createmeshfill()
 # m.mesh = True
 m.datawc_x1 = -20
 m.datawc_x2 = 20
@@ -33,7 +21,4 @@ if (flip):
 m.datawc_y1 = -20
 m.datawc_y2 = 20
 x.plot(s,m, bg=1)
-x.png(fileName)
-ret = checkimage.check_result_image(fileName, sys.argv[1], checkimage.defaultThreshold)
-sys.exit(ret)
-
+regression.run(x, fileName)
diff --git a/testing/vcs/test_vcs_mintics.py b/testing/vcs/test_vcs_mintics.py
index 01f65848b2cb2244e52468a5f8e6a33e1c60d5f6..302c22fd49698c9a18aec66ff2d752c16d237892 100644
--- a/testing/vcs/test_vcs_mintics.py
+++ b/testing/vcs/test_vcs_mintics.py
@@ -1,16 +1,10 @@
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 s = f("clt")
 box = x.createboxfill()
+
 # Should ignore the string here
 box.xmtics1 = {i:"Test" for i in range(-180, 180, 15) if i % 30 != 0}
 box.ymtics1 = {i:"Test" for i in range(-90, 90, 5) if i % 10 != 0}
@@ -23,10 +17,4 @@ template.xmintic2.priority = 1
 template.xmintic2.y2 += template.xmintic1.y1 - template.xmintic1.y2
 template.ymintic2.priority = 1
 x.plot(s, template, box, bg=1)
-fnm = "test_vcs_mintics.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_mintics.png")
diff --git a/testing/vcs/test_vcs_missing_colorname.py b/testing/vcs/test_vcs_missing_colorname.py
index 1ed68b475072a84f902d4c379de7989e7a30333a..9b3db75458c629fa95e7aed04b46d62ea38dc35b 100644
--- a/testing/vcs/test_vcs_missing_colorname.py
+++ b/testing/vcs/test_vcs_missing_colorname.py
@@ -1,7 +1,4 @@
-import cdms2
-import os
-import sys
-import vcs
+import os, sys, cdms2, vcs, testing.regression as regression
 
 # Load the clt data:
 dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
@@ -11,10 +8,7 @@ height, width = clt.shape
 clt.mask = [[True if i % 2 else False for i in range(width)] for _ in range(height)]
 
 # Initialize canvas:
-canvas = vcs.init()
-canvas.setantialiasing(0)
-canvas.setbgoutputdimensions(1200,1091,units="pixels")
-canvas.drawlogooff()
+canvas = regression.init()
 
 # Create and plot quick boxfill with default settings:
 # Only have to test boxfill because all 2D methods use the same code
@@ -25,16 +19,4 @@ boxfill = canvas.createboxfill()
 boxfill.missing = "Medium Aquamarine"
 
 canvas.plot(clt, boxfill, bg=1)
-
-# Load the image testing module:
-testingDir = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(testingDir)
-import checkimage
-
-# Create the test image and compare:
-baseline = sys.argv[1]
-testFile = "test_vcs_missing_colorname.png"
-canvas.png(testFile)
-ret = checkimage.check_result_image(testFile, baseline,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(canvas, "test_vcs_missing_colorname.png")
diff --git a/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py b/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py
index 9f30f40ae4f449f1098ef8eba2673f843639138c..4dcd059a028956165d0be07d4abdc5975f9b9254 100644
--- a/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py
+++ b/testing/vcs/test_vcs_monotonic_decreasing_yxvsx_default.py
@@ -1,27 +1,14 @@
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
-x.setantialiasing(0)
+import os, sys, numpy, cdms2, MV2, vcs, testing.regression as regression
 
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
-t=cdms2.createAxis(numpy.arange(120))
+x = regression.init()
+t = cdms2.createAxis(numpy.arange(120))
 t.designateTime()
-t.id="time"
-t.units="months since 2014"
+t.id = "time"
+t.units = "months since 2014"
 data = MV2.arange(120,0,-1)
-data.id="data"
+data.id = "data"
 data.setAxis(0,t)
-x=vcs.init()
-x.setantialiasing(0)
+x = regression.init()
 x.plot(data,bg=1)
 fnm = 'test_vcs_monotonic_decreasing_yxvsx_default.png'
-
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_no_continents.py b/testing/vcs/test_vcs_no_continents.py
new file mode 100644
index 0000000000000000000000000000000000000000..a5c3e8d04e54af7d54c5b626a8255ba066486c3b
--- /dev/null
+++ b/testing/vcs/test_vcs_no_continents.py
@@ -0,0 +1,21 @@
+import os, sys, cdms2, vcs, testing.regression as regression
+
+# Load the clt data:
+dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
+clt = dataFile("clt")
+clt = clt(latitude=(-90.0, 90.0), longitude=(-180., 175.), squeeze=1,
+          time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0'))
+
+# Initialize canvas:
+canvas = regression.init()
+
+t1 = vcs.createtemplate()
+t1.scale(.5, "y")
+t1.move(-.15, "y")
+t2 = vcs.createtemplate(source=t1.name)
+t2.move(.5, 'y')
+
+canvas.plot(clt, t1, continents=0, bg=True)
+canvas.plot(clt, t2, continents=1, bg=True)
+
+regression.run(canvas, "test_vcs_no_continents.png")
diff --git a/testing/vcs/test_vcs_oned_level_axis.py b/testing/vcs/test_vcs_oned_level_axis.py
index 25479a56b9519d7cf43576d57e94a4cf22cd92e6..ecb708c773379144272261ee5879bf5f2caed372 100644
--- a/testing/vcs/test_vcs_oned_level_axis.py
+++ b/testing/vcs/test_vcs_oned_level_axis.py
@@ -1,24 +1,9 @@
+import os, sys, vcs, cdms2, cdutil, testing.regression as regression
 
-import sys,cdutil
-import vcs
-import os
-import cdms2
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
-ta=f("ta",time=slice(0,1),squeeze=1)
-ta=cdutil.averager(ta,axis="yx")
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+f = cdms2.open(os.path.join(vcs.sample_data,"ta_ncep_87-6-88-4.nc"))
+ta = f("ta",time=slice(0,1),squeeze=1)
+ta = cdutil.averager(ta,axis="yx")
+x = regression.init()
 x.plot(ta,bg=1)
 fnm = "test_oned_level_axis.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_patterns.py b/testing/vcs/test_vcs_patterns.py
index fed4f41a839edb927d84cbee770227c7dd414684..c407f7cfee4e407f2efe8cd62d56dcc2e96e1d5e 100644
--- a/testing/vcs/test_vcs_patterns.py
+++ b/testing/vcs/test_vcs_patterns.py
@@ -1,19 +1,8 @@
-import cdms2
-import os
-import sys
-import vcs
-
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__), "..")
-sys.path.append(pth)
-import checkimage
+import os, sys, vcs, cdms2, testing.regression as regression
 
 f = cdms2.open(vcs.sample_data+"/clt.nc")
 s = f("clt", time=slice(0, 1), squeeze=1)
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1090, units="pixels")
+x = regression.init()
 iso = vcs.createisofill("isoleg")
 iso.levels = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
 iso.fillareastyle = "pattern"
@@ -21,9 +10,4 @@ iso.fillareacolors = vcs.getcolors([0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100])
 iso.fillareaindices = [1, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
 x.plot(s, iso, bg=1)
 fnm = "test_vcs_patterns.png"
-x.png(fnm)
-
-print "fnm:", fnm
-print "src:", src
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold+5.)
-sys.exit(ret)
+regression.run(x, fnm, threshold=regression.defaultThreshold+5.)
diff --git a/testing/vcs/test_vcs_plot_file_var.py b/testing/vcs/test_vcs_plot_file_var.py
index e20f947a672c70421c46be840d1d7d8d6c18c85c..0dd68945d9da5e83d527210d3ce6b8a4c88e69e5 100644
--- a/testing/vcs/test_vcs_plot_file_var.py
+++ b/testing/vcs/test_vcs_plot_file_var.py
@@ -1,8 +1,6 @@
-import vcs
-import os
-import sys
-import cdms2
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-V=f("clt")
-x=vcs.init()
-x.plot(V,bg=1)
+import os, sys, vcs, cdms2
+
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+V = f("clt")
+x = vcs.init()
+x.plot(V, bg=1)
diff --git a/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py b/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
index 4aa1d24eba1647a001ad3d99324a8cfbd0303621..535ff432ecff39d4c1ce630b7d6da32643e0da14 100644
--- a/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
+++ b/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py
@@ -1,21 +1,7 @@
-import vcs
-import os,sys
-import cdms2
-
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-
-import checkimage
+import vcs, os, sys, cdms2, testing.regression as regression
 
 f = cdms2.open(os.path.join(vcs.sample_data,"sampleCurveGrid4.nc"))
 s = f("sample")
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 x.plot(s,bg=1)
-fnm = "test_plot_unstructured_via_boxfill.png"
-src = sys.argv[1]
-x.png(fnm)
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_plot_unstructured_via_boxfill.png")
diff --git a/testing/vcs/test_vcs_png_to_base64.py b/testing/vcs/test_vcs_png_to_base64.py
index 6b88b351141d33606af705469ecb44dca27776ad..f6d78e2fa9c6b37163e3841f8d3358d73ee8616b 100644
--- a/testing/vcs/test_vcs_png_to_base64.py
+++ b/testing/vcs/test_vcs_png_to_base64.py
@@ -1,4 +1,4 @@
-import vcs,numpy,cdms2,MV2,os,sys
+import vcs, numpy, cdms2, MV2, os, sys
 
 x = vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_png_window_resize.py b/testing/vcs/test_vcs_png_window_resize.py
index 0adeeb66ba8f10abfb5376e966a1e8d7a155796c..a6346ca282ff5fda32c6460874370a2af555f549 100644
--- a/testing/vcs/test_vcs_png_window_resize.py
+++ b/testing/vcs/test_vcs_png_window_resize.py
@@ -1,20 +1,9 @@
-import vcs
-import sys
-import os
+import vcs, sys, os, testing.regression as regression
 
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-x=vcs.init()
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.open(814,628)
 x.plot([1,2,3,4,5,6,7])
 fnm = __file__[:-3]+".png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_polar_set_opt_param_polar.py b/testing/vcs/test_vcs_polar_set_opt_param_polar.py
index 8c508ca3c01aaa0000acce77fd7c5e7be7142a77..4e777fb2b3cdcd0e62b07e23525476990f4a86a9 100644
--- a/testing/vcs/test_vcs_polar_set_opt_param_polar.py
+++ b/testing/vcs/test_vcs_polar_set_opt_param_polar.py
@@ -1,29 +1,13 @@
+import vcs, cdms2, sys, os, testing.regression as regression
 
-import vcs
-import cdms2
-import sys
-import os
-
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-f=cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
-s=f("clt",slice(0,1),squeeze=1)
-x=vcs.init()
+f = cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
+s = f("clt",slice(0,1),squeeze=1)
+x = regression.init()
 x.setantialiasing(0)
 x.drawlogooff()
 x.setbgoutputdimensions(1200,1091,units="pixels")
-i=x.createisofill()
-p=x.getprojection("polar")
+i = x.createisofill()
+p = x.getprojection("polar")
 i.projection=p
 x.plot(s,i,bg=1)
-fnm= "test_polar_set_opt_param_polar.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-
+regression.run(x, "test_polar_set_opt_param_polar.png")
diff --git a/testing/vcs/test_vcs_remove_marker_none_1d.py b/testing/vcs/test_vcs_remove_marker_none_1d.py
index f868361b0f142bd72284ba77c2fdcdc5db5aab51..e112f86bc5a1d3405f803e82a313a567b158536c 100644
--- a/testing/vcs/test_vcs_remove_marker_none_1d.py
+++ b/testing/vcs/test_vcs_remove_marker_none_1d.py
@@ -3,31 +3,13 @@
 #
 # J-Y Peterschmitt - LSCE - 03/2015
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
 dummy_data = numpy.arange(50, dtype=numpy.float32)
-
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 gm = x.createyxvsx('test_yxvsx')
 
-
-# Remove the marker
 gm.marker = None
-
 x.plot(gm, dummy_data,bg=1)
-
 fnm = "test_remove_marker_none_1d.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
-# The end
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_setcolormap.py b/testing/vcs/test_vcs_setcolormap.py
index c3e63966007f7058f284e80f87af198a9bd87908..78d7fad817a3a1301ec4e67252987de51e5f2aa1 100644
--- a/testing/vcs/test_vcs_setcolormap.py
+++ b/testing/vcs/test_vcs_setcolormap.py
@@ -1,32 +1,12 @@
 
-import cdms2
-import os
-import sys
-import vcs
-
-baselineFilename = sys.argv[1]
-checkImagePath = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(checkImagePath)
-import checkimage
+import cdms2, os, sys, vcs, testing.regression as regression
 
 cdmsfile = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data = cdmsfile('clt')
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
-
+x = regression.init()
 t=x.gettemplate('default')
 x.plot(data, t, bg=True)
 
 # This should force the image to update
 x.setcolormap('bl_to_drkorang')
-
-testFilename = "test_vcs_setcolormap.png"
-x.png(testFilename)
-
-ret = checkimage.check_result_image(testFilename,
-                                    baselineFilename,
-                                    checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_setcolormap.png")
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba.py b/testing/vcs/test_vcs_settings_color_name_rgba.py
index 0b6aeef22f1a96f2a9dc87928301ede5b978900f..0fa6ec2884bbb5fcb3c9e876bd33dd1f4a9612cb 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba.py
@@ -1,28 +1,13 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 
-f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
-data=f("clt",slice(0,1,))
+f = cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
+data = f("clt",slice(0,1,))
 gm = x.createisofill()
 gm.levels = range(0,110,10)
 gm.fillareacolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_isofill.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+fnm = "test_vcs_settings_color_name_rgba_isofill.png"
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_1d.py b/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
index 0fe844c2031c1b8ebad63c83e4e6798d36c1c7e2..8bca782a6816fe4919b667a77c6a794472c1b120 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_1d.py
@@ -1,16 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+x = regression.init()
 
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data=f("clt")[:,5,8]
@@ -18,10 +8,4 @@ gm = x.create1d()
 gm.linecolor="salmon"
 gm.markercolor = [0,0,100]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_1d.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_1d.png')
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py b/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
index 043d331c8a5ab4e88f7791158e9c5809a3e09eca..34228513e5f68b2bf5adfb2406eee24b4c082780 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py
@@ -1,17 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data=f("clt",slice(0,1,))
 gm = x.createboxfill()
@@ -20,10 +9,4 @@ gm.levels = range(0,110,10)
 gm.fillareacolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_boxfill.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_boxfill.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py b/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
index 200293dbee85bb1594002c7cb50a82f41b0da0e6..c23edc2a6f4dbdc63693782e0eaba400b43ca067 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py
@@ -1,17 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
+x=regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 data=f("clt",slice(0,1,))
 gm = x.createisoline()
@@ -19,10 +8,4 @@ gm.levels = range(0,110,10)
 gm.linecolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_isoline.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, 'test_vcs_settings_color_name_rgba_isoline.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py b/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
index 4a0858d1d1168f3755f7526bbfe320b38c27d8c5..0b3ffc8716219259a850d9237dde6c41b96d3a95 100644
--- a/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
+++ b/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py
@@ -1,17 +1,6 @@
-import vcs
-import numpy
-import os
-import sys
-import cdms2
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1090,units="pixels")
+import vcs, numpy, os, sys, cdms2, testing.regression as regression
 
+x = regression.init()
 f=cdms2.open(os.path.join(vcs.sample_data,"sampleCurveGrid4.nc"))
 data=f("sample")
 gm = x.createmeshfill()
@@ -19,10 +8,4 @@ gm.levels = range(0,1501,150)
 gm.fillareacolors = ["green","red","blue","bisque","yellow","grey",
         [100,0,0,50], [0,100,0],"salmon",[0,0,100,75]]
 x.plot(data,gm,bg=True)
-fnm = 'test_vcs_settings_color_name_rgba_meshfill.png'
-src = sys.argv[1]
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+ret = regression.run(x, 'test_vcs_settings_color_name_rgba_meshfill.png')
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_star_triangle_markers.py b/testing/vcs/test_vcs_star_triangle_markers.py
index 4564e4059cacd207586f3d2eb13a5b633f547b5a..2d75e05d06bdd5bbf7d0a5995029fc2d5c028e98 100644
--- a/testing/vcs/test_vcs_star_triangle_markers.py
+++ b/testing/vcs/test_vcs_star_triangle_markers.py
@@ -1,9 +1,5 @@
 
-import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import vcs, numpy, cdms2, MV2, os, sys, testing.regression as regression
 
 x=vcs.init()
 x.drawlogooff()
@@ -16,12 +12,5 @@ m.x = [[.1], [.3], [.5], [.7], [.9]]
 m.y = [[.1], [.3], [.5], [.7], [.9]]
 m.color = [200, 150, 160, 175, 125]
 m.size = [50, 50, 50, 50, 50]
-x.plot(m,bg=1)
-fnm = "test_star_triangle_markers.png"
-x.png(fnm)
-
-print "fnm:",fnm
-print "src:",src
-
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.plot(m, bg=1)
+regression.run(x, "test_star_triangle_markers.png")
\ No newline at end of file
diff --git a/testing/vcs/test_vcs_taylor_2quads.py b/testing/vcs/test_vcs_taylor_2quads.py
index 3458acb911997648b82e31a0e498f67fcdef0b16..84f1026156bae58d1210cd05e45a8d334482e862 100644
--- a/testing/vcs/test_vcs_taylor_2quads.py
+++ b/testing/vcs/test_vcs_taylor_2quads.py
@@ -1,17 +1,6 @@
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import vcs, MV2
+import sys, os, vcs, MV2, testing.regression as regression
 
-
-bg=True
-
-#
-# First create some sample data
-#
 data = MV2.array([[-0.50428531,-0.8505522 ,],
  [ 0.70056821,-0.27235352,],
  [ 0.05106154, 0.23012322,],
@@ -19,20 +8,8 @@ data = MV2.array([[-0.50428531,-0.8505522 ,],
  [ 0.85760801,-0.08336641,],
  [ 1.14083397,-0.78326507,]])
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-
-if bg:
-  x.setbgoutputdimensions(1200,1091,units="pixels")
-
-td=x.createtaylordiagram('new')
-
+x = regression.init()
+td = x.createtaylordiagram('new')
 td.quadrans = 2
-x.plot(data,td,skill = td.defaultSkillFunction,bg=bg)
-fnm = "test_vcs_taylor_2quads.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-if not bg:
-    raw_input("Press Enter")
-sys.exit(ret)
+x.plot(data, td, skill = td.defaultSkillFunction, bg=1)
+regression.run(x, "test_vcs_taylor_2quads.png")
diff --git a/testing/vcs/test_vcs_taylor_template_ctl.py b/testing/vcs/test_vcs_taylor_template_ctl.py
index 40b78f0dce11fd8263b0169ba6e83234e5265dfa..b6f610c7dabbcb21bfeb8c0d6bb2551dea0f04f5 100644
--- a/testing/vcs/test_vcs_taylor_template_ctl.py
+++ b/testing/vcs/test_vcs_taylor_template_ctl.py
@@ -1,20 +1,8 @@
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import vcs,MV2
+import sys, os, vcs, MV2
+import testing.regression as regression
 
-bg=True
-
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-if bg:
-  x.setbgoutputdimensions(1200,1091,units="pixels")
-if not bg:
-    x.open()
+x = regression.init()
 
 ## Create a template from the default taylor diagram
 t=x.createtemplate('mytaylor','deftaylor')
@@ -61,12 +49,5 @@ t.xmintic2.priority=1
 # Create some dummy data for display purposes
 data=MV2.array([[1.52,.52,],[.83,.84]])
 
-x.plot(data,t,td,bg=bg)
-fnm="test_vcs_taylor_template_ctl.png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-if not bg:
-    raw_input("Press Enter")
-sys.exit(ret)
+x.plot(data, t, td, bg=1)
+regression.run(x, "test_vcs_taylor_template_ctl.png")
diff --git a/testing/vcs/test_vcs_user_passed_date.py b/testing/vcs/test_vcs_user_passed_date.py
index 643d2019cdc4de33839112b8fe4b396413eb0736..bb5b18a0d3c481b012687580a01e5d6d26c8a6c8 100644
--- a/testing/vcs/test_vcs_user_passed_date.py
+++ b/testing/vcs/test_vcs_user_passed_date.py
@@ -1,18 +1,9 @@
 import vcs,cdms2,os,sys,cdtime
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
+
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",squeeze=1)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x=regression.init()
 x.plot(s,bg=1,time=cdtime.comptime(2015))
 fnm = os.path.split(__file__)[1][:-3]+".png"
-x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_user_passed_date_as_string.py b/testing/vcs/test_vcs_user_passed_date_as_string.py
index cd5d777f3db8d1a9188720a7e2c71f293d7db329..e9bdf83e62ae7f358791db59ef48a55e38c2e192 100644
--- a/testing/vcs/test_vcs_user_passed_date_as_string.py
+++ b/testing/vcs/test_vcs_user_passed_date_as_string.py
@@ -1,18 +1,11 @@
 import vcs,cdms2,os,sys,cdtime
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
+
+x = regression.init()
+
 f=cdms2.open(os.path.join(vcs.sample_data,"clt.nc"))
 s=f("clt",squeeze=1)
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
 x.plot(s,bg=1,time='2015-02-23')
 fnm = os.path.split(__file__)[1][:-3]+".png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, fnm)
diff --git a/testing/vcs/test_vcs_vectors_missing.py b/testing/vcs/test_vcs_vectors_missing.py
index fd53c97e11e8c31b6e91597fbe215b3c82375cbd..5e8b08e5be9357e50caaa2a10c5eef3a8c208204 100644
--- a/testing/vcs/test_vcs_vectors_missing.py
+++ b/testing/vcs/test_vcs_vectors_missing.py
@@ -1,6 +1,6 @@
 
-import sys,os
-import argparse
+import sys, os, argparse
+import vcs, cdms2, vtk, MV2, numpy, testing.regression as regression
 
 p = argparse.ArgumentParser(description="Basic gm testing code for vcs")
 p.add_argument("--source", dest="src", help="source image file")
@@ -14,28 +14,17 @@ if not args.show:
   src = args.src
   pth = os.path.join(os.path.dirname(__file__),"..")
   sys.path.append(pth)
-  import checkimage
-
-import vcs
-import sys
-import cdms2
-import vtk
-import os
-import MV2
-import numpy
-
 
 bg = not args.show
-
-x=vcs.init()
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
   x.setbgoutputdimensions(1200,1091,units="pixels")
 x.setcolormap("rainbow")
-gm=vcs.createvector()
+gm = vcs.createvector()
 gm.scale = args.scale
-nm_xtra=""
+nm_xtra = ""
 xtra = {}
 import cdms2
 import os
@@ -52,14 +41,12 @@ if args.show:
   pass
   #x.interact()
 else:
-  fnm = "test_vcs_vectors_missing" 
+  fnm = "test_vcs_vectors_missing"
   if args.scale!=1.:
     fnm+="_%.1g" % args.scale
   fnm+=nm_xtra
   x.png(fnm)
-  print "fnm:",fnm
-  print "src:",src
-  ret = checkimage.check_result_image(fnm+'.png',src,checkimage.defaultThreshold, cleanup=not args.keep)
+  ret = regression.check_result_image(fnm+'.png', src, regression.defaultThreshold, cleanup=not args.keep)
 if args.show:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcs/test_vcs_vectors_robinson.py b/testing/vcs/test_vcs_vectors_robinson.py
index 49052c679f9a0ca35a03fc03a63624d2d6ed8ac1..5cde92c82cd5e20dee9e2cc5493b75496554119a 100644
--- a/testing/vcs/test_vcs_vectors_robinson.py
+++ b/testing/vcs/test_vcs_vectors_robinson.py
@@ -1,13 +1,7 @@
 import vcs, cdms2, numpy, os, sys
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1091, units="pixels")
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 u = f("u")
 v = f("v")
@@ -16,8 +10,4 @@ p = x.createprojection()
 p.type = "robinson"
 V.projection = p
 x.plot(u,v,V, bg=1)
-
-fnm = "test_vcs_vectors_robinson.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_vectors_robinson.png")
diff --git a/testing/vcs/test_vcs_vectors_robinson_wrap.py b/testing/vcs/test_vcs_vectors_robinson_wrap.py
index 86491cc2f14da27a53e044538ded62881b9accaa..3cb30f06ffc29a5c90844cf450552830361f050f 100644
--- a/testing/vcs/test_vcs_vectors_robinson_wrap.py
+++ b/testing/vcs/test_vcs_vectors_robinson_wrap.py
@@ -1,13 +1,7 @@
 import vcs, cdms2, numpy, os, sys
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
-x = vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200, 1091, units="pixels")
+x = regression.init()
 f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc"))
 lon1 = -180
 u = f("clt")
@@ -19,8 +13,4 @@ p = x.createprojection()
 p.type = "robinson"
 V.projection = p
 x.plot(u,v,V, bg=1)
-
-fnm = "test_vcs_vectors_robinson_wrap.png"
-x.png(fnm)
-ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "test_vcs_vectors_robinson_wrap.png")
diff --git a/testing/vcs/test_vcs_vectors_scale_options.py b/testing/vcs/test_vcs_vectors_scale_options.py
new file mode 100644
index 0000000000000000000000000000000000000000..32898d129f8ac8d5b82678c67466a35c7d7863e6
--- /dev/null
+++ b/testing/vcs/test_vcs_vectors_scale_options.py
@@ -0,0 +1,59 @@
+import sys, cdms2, vcs, testing.regression as regression
+
+data = cdms2.open(vcs.sample_data+"/clt.nc")
+v = data['v'][...,::10,::10]
+u = data['u'][...,::10,::10]
+
+canvas = regression.init()
+gv = vcs.createvector()
+
+gv.scaletype = 'off'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_off.png'
+canvas.png(outFilename)
+ret = regression.check_result_image(outFilename, sys.argv[1])
+canvas.clear()
+
+v = data['v'][...,::4,::4]
+u = data['u'][...,::4,::4]
+gv.scaletype = 'constant'
+gv.scale = 0.1
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_constant.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[2])
+canvas.clear()
+
+v = data['v']
+u = data['u']
+gv.scale = 1.0
+
+gv.scaletype = 'linear'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_linear.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[3])
+canvas.clear()
+
+gv.scaletype = 'normalize'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_normalize.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[4])
+canvas.clear()
+
+gv.scaletype = 'constantNLinear'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_constantNLinear.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[5])
+canvas.clear()
+
+gv.scaletype = 'constantNNormalize'
+canvas.plot(u, v, gv)
+outFilename = 'test_vcs_vectors_scale_options_constantNNormalize.png'
+canvas.png(outFilename)
+ret += regression.check_result_image(outFilename, sys.argv[6])
+canvas.clear()
+
+sys.exit(ret)
diff --git a/testing/vcs/test_vcs_verify_boxfill_basics.py b/testing/vcs/test_vcs_verify_boxfill_basics.py
index 7d08a2b75f3c443b91fc3f80300a2703ff071c70..0768ff9f918b9b1b8f2d0191fe8b23a71a2f7992 100644
--- a/testing/vcs/test_vcs_verify_boxfill_basics.py
+++ b/testing/vcs/test_vcs_verify_boxfill_basics.py
@@ -1,9 +1,5 @@
-
-import vcs
-import numpy
-import cdtime
-
-from vcs_test_common import *
+import numpy, cdtime, vcs
+from testing.common import test_values_setting
 
 x=vcs.init()
 x.drawlogooff()
@@ -12,12 +8,12 @@ b=x.createboxfill()
 assert(b.projection == "linear")
 assert(b.xticlabels1 == "*")
 assert(b.xticlabels2 == "*")
-assert(b.xmtics1 == "") 
+assert(b.xmtics1 == "")
 assert(b.xmtics2 == "")
 assert(b.yticlabels1 == "*")
 assert(b.yticlabels2 == "*")
-assert(b.ymtics1 == "")  
-assert(b.ymtics2 == "")  
+assert(b.ymtics1 == "")
+assert(b.ymtics2 == "")
 assert(numpy.allclose(b.datawc_x1, 1e+20))
 assert(numpy.allclose(b.datawc_x2, 1e+20))
 assert(numpy.allclose(b.datawc_y1, 1e+20))
@@ -71,12 +67,12 @@ assert(b.name == "test_b_ok")
 assert(b.projection == "test_bfill")
 assert(b.xticlabels1 == {23:"Hi"})
 assert(b.xticlabels2 == {23:"Hi"})
-assert(b.xmtics1 == {23:"Hi"}) 
+assert(b.xmtics1 == {23:"Hi"})
 assert(b.xmtics2 == {23:"Hi"})
 assert(b.yticlabels1 == {23:"Hi"})
 assert(b.yticlabels2 == {23:"Hi"})
-assert(b.ymtics1 == {23:"Hi"})  
-assert(b.ymtics2 == {23:"Hi"})  
+assert(b.ymtics1 == {23:"Hi"})
+assert(b.ymtics2 == {23:"Hi"})
 assert(numpy.allclose(b.datawc_x1, 56.7))
 assert(numpy.allclose(b.datawc_x2, 56.7))
 assert(numpy.allclose(b.datawc_y1, 56.7))
diff --git a/testing/vcs/test_vcs_verify_proj_basics.py b/testing/vcs/test_vcs_verify_proj_basics.py
index f1e1fa0a90590cfa430e3d6d66c7621fb9b5dcd9..822a6fc0ba82b47ed4029607768f8cc120c16785 100644
--- a/testing/vcs/test_vcs_verify_proj_basics.py
+++ b/testing/vcs/test_vcs_verify_proj_basics.py
@@ -1,9 +1,5 @@
-
-import vcs
-import numpy
-import cdtime
-
-from vcs_test_common import *
+import numpy, cdtime, vcs
+from testing.common import test_values_setting
 
 x=vcs.init()
 x.drawlogooff()
diff --git a/testing/vcs/test_vcs_wmo_marker.py b/testing/vcs/test_vcs_wmo_marker.py
index b4478372f0528b46026ed19508207ef1058b9c25..854e4a9beea56171f0d1b2ec44ce1d08de31742c 100644
--- a/testing/vcs/test_vcs_wmo_marker.py
+++ b/testing/vcs/test_vcs_wmo_marker.py
@@ -1,14 +1,10 @@
 
 import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+
+import testing.regression as regression
+x = regression.init()
+
 
 m = x.createmarker()
 M=1
@@ -22,8 +18,5 @@ x.plot(m,bg=1)
 fnm = 'wmo_marker.png'
 x.png(fnm)
 
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+regression.run(x, "wmo_marker.png")
 
diff --git a/testing/vcs/test_vcs_wmo_markers.py b/testing/vcs/test_vcs_wmo_markers.py
index 5785e0925526d90ee090d9281e72d6114d3d0d06..5162eeeee5ad9834d655a103989f1e91d7113d5d 100644
--- a/testing/vcs/test_vcs_wmo_markers.py
+++ b/testing/vcs/test_vcs_wmo_markers.py
@@ -1,26 +1,20 @@
 
 import vcs,numpy,cdms2,MV2,os,sys
-src=sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
+import testing.regression as regression
 
 wmo = ['w00', 'w01', 'w02', 'w03', 'w04', 'w05', 'w06', 'w07', 'w08', 'w09',
        'w10', 'w11', 'w12', 'w13', 'w14', 'w15', 'w16', 'w17', 'w18', 'w19',
-       'w20', 'w21', 'w22', 'w23', 'w24', 'w25', 'w26', 'w27', 'w28', 'w29', 
+       'w20', 'w21', 'w22', 'w23', 'w24', 'w25', 'w26', 'w27', 'w28', 'w29',
        'w30', 'w31', 'w32', 'w33', 'w34', 'w35', 'w36', 'w37', 'w38', 'w39',
        'w40', 'w41', 'w42', 'w43', 'w44', 'w45', 'w46', 'w47', 'w48', 'w49',
        'w50', 'w51', 'w52', 'w53', 'w54', 'w55', 'w56', 'w57', 'w58', 'w59',
-       'w60', 'w61', 'w62', 'w63', 'w64', 'w65', 'w66', 'w67', 'w68', 'w69', 
+       'w60', 'w61', 'w62', 'w63', 'w64', 'w65', 'w66', 'w67', 'w68', 'w69',
        'w70', 'w71', 'w72', 'w73', 'w74', 'w75', 'w76', 'w77', 'w78', 'w79',
        'w80', 'w81', 'w82', 'w83', 'w84', 'w85', 'w86', 'w87', 'w88', 'w89',
        'w90', 'w91', 'w92', 'w93', 'w94', 'w95', 'w96', 'w97', 'w98', 'w99',
        'w100', 'w101', 'w102']
 
-x=vcs.init()
-x.setantialiasing(0)
-x.drawlogooff()
-x.setbgoutputdimensions(1200,1091,units="pixels")
+x = regression.init()
 
 m = x.createmarker()
 M=7
@@ -37,13 +31,6 @@ for Y in range(7):
 m.x = xs
 m.y = ys
 m.list()
-x.plot(m,bg=1)
-fnm = "wmo_markers.png"
-x.png(fnm)
-
-
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
-sys.exit(ret)
+x.plot(m, bg=1)
+regression.run(x, "wmo_markers.png")
 
diff --git a/testing/vcs/vtk_ui/vtk_ui_test.py b/testing/vcs/vtk_ui/vtk_ui_test.py
index facfd7f45123d944454984984cf9f9a50762d0fd..d5cbe18bfcb2ee7166ae12e294f39b97dc503a29 100644
--- a/testing/vcs/vtk_ui/vtk_ui_test.py
+++ b/testing/vcs/vtk_ui/vtk_ui_test.py
@@ -1,8 +1,4 @@
-import vtk
-import vcs.vtk_ui
-import os
-import sys
-import time
+import os, sys, time, vtk, vcs.vtk_ui
 
 
 def init():
@@ -101,15 +97,13 @@ class vtk_ui_test(object):
     def check_image(self, compare_against):
         """
         Checks the current render window's output against the image specified in the argument,
-        returns the result of checkimage.check_result_image
+        returns the result of regression.check_result_image
         """
         generate_png(self.win, self.test_file)
         pth = os.path.join(os.path.dirname(__file__), "../..")
         sys.path.append(pth)
-        import checkimage
-        print "fnm:", self.test_file
-        print "src:", compare_against
-        return checkimage.check_result_image(self.test_file, compare_against, checkimage.defaultThreshold)
+        import testing.regression as regression
+        return regression.check_result_image(self.test_file, compare_against)
 
     def test(self):
         self.do_test()
diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row.py b/testing/vcsaddons/test_12_plot_one_leg_per_row.py
index b3cfa0fba6f9e79180f59e3500b90fa991c8fafb..0c029852778b6c68bda54cc4a5c06c0e39be8e35 100644
--- a/testing/vcsaddons/test_12_plot_one_leg_per_row.py
+++ b/testing/vcsaddons/test_12_plot_one_leg_per_row.py
@@ -1,14 +1,7 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
+import os, sys, EzTemplate, vcs, testing.regression as regression
 
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -20,9 +13,7 @@ for i in range(12):
     t.legend.priority=0 # Turn off legend
 fnm = "test_12_plot_one_leg_per_row.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
index 42e0f64e4c94915deaa6a69fa8cc958271c9d048..b57b7672453a90a93e65149387399ee7a64f5e18 100644
--- a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
+++ b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py
@@ -1,17 +1,13 @@
+import os, sys, testing.regression as regression
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
 import EzTemplate,vcs
 import cdms,EzTemplate,vcs,sys
 ## 12 plots 1 legend per row on the right
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
-bg=True
-M=EzTemplate.Multi(rows=4,columns=3)
+bg = True
+M = EzTemplate.Multi(rows=4,columns=3)
 M.legend.direction='vertical'
 for i in range(12):
     t=M.get(legend='local')
@@ -19,9 +15,7 @@ for i in range(12):
         t.legend.priority=0 # Turn off legend
 fnm = "test_12_plot_one_leg_per_row_right.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
index b6ca9eb34f759c6b7d46afc48783ad12dd2e6907..d46e7b9acd7f7c3b9f0ad0f0b74620139664faa6 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py
@@ -1,14 +1,7 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
+import os, sys, EzTemplate, vcs, testing.regression as regression
 
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -26,10 +19,8 @@ for i in range(12):
       t=M.get()
 
 fnm = "test_EzTemplate_12_plots_legd_direction.png"
-M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+M.preview(fnm, bg=bg)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
index 40899f264b62183fef0e16e3aa6eb475c54dabcc..73b7c8dbf7f4993b0ac7aec10fe80c0922e58655 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py
@@ -1,14 +1,7 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
+import os, sys, EzTemplate, vcs, testing.regression as regression
 
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -25,9 +18,7 @@ for i in range(12):
       t=M.get()
 fnm = "test_EzTemplate_12_plots_margins_thickness.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
index 043e03de4981a2e71797ede26a02b74c704076e1..6e9398fe800a3fbb02fa89afd015f282c4ba1b73 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py
@@ -1,12 +1,4 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
-
+import os, sys, EzTemplate, vcs, testing.regression as regression
 ## Initialize VCS
 x=vcs.init()
 x.drawlogooff()
@@ -24,9 +16,7 @@ for i in range(12):
 
 fnm = "test_EzTemplate_12_plots_mix_glb_local.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
index d17eb1a691200fdf23eb4351029d26ae23ef88dc..5d4cd293b96915ced0b7de71eb11af7a70233c33 100644
--- a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
+++ b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py
@@ -1,14 +1,6 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-import EzTemplate,vcs
-## 12 plot one legend per row
-
+import os, sys, EzTemplate, vcs, testing.regression as regression
 ## Initialize VCS
-x=vcs.init()
+x = vcs.init()
 x.drawlogooff()
 
 bg = True
@@ -18,9 +10,7 @@ M.spacing.vertical=.1
 
 fnm = "test_EzTemplate_12_plots_spacing.png"
 M.preview(fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py b/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py
index 2619fe5efc072c76327c1ecebbd6bc276cd330d4..ee645d16c0a51ee869daeef4a990ed64c1add380 100644
--- a/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py
+++ b/testing/vcsaddons/test_vcs_addons_EzTemplate_2x2.py
@@ -1,21 +1,11 @@
+import os, sys, cdms2, testing.regression as regression, vcs, vcsaddons
 
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcs
-import vcsaddons
-import cdms2
-
-f=cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
-s=f("clt",time=slice(0,1),squeeze=1)
+f = cdms2.open(os.path.join(vcs.sample_data,'clt.nc'))
+s = f("clt",time=slice(0,1),squeeze=1)
 
 bg = True
-
-M=vcsaddons.EzTemplate.Multi(rows=2,columns=2)
-x=vcs.init()
+M = vcsaddons.EzTemplate.Multi(rows=2,columns=2)
+x = vcs.init()
 x.setantialiasing(0)
 x.drawlogooff()
 if bg:
@@ -25,9 +15,7 @@ for i in range(4):
 
 fnm = "test_vcs_addons_EzTemplate_2x2.png"
 x.png(fnm)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)
diff --git a/testing/vcsaddons/test_vcsaddons_preview_2x2.py b/testing/vcsaddons/test_vcsaddons_preview_2x2.py
index 754aa5ceaef64cc983996d3eb602819e36ae3361..a0318f25a9ea6268cd5b3f8573e0548ec22bb0ed 100644
--- a/testing/vcsaddons/test_vcsaddons_preview_2x2.py
+++ b/testing/vcsaddons/test_vcsaddons_preview_2x2.py
@@ -1,22 +1,12 @@
-
-import sys,os
-src = sys.argv[1]
-pth = os.path.join(os.path.dirname(__file__),"..")
-sys.path.append(pth)
-import checkimage
-
-import vcsaddons
+import os, sys, testing.regression as regression, vcsaddons
 
 bg = True
-
-M=vcsaddons.EzTemplate.Multi(rows=2,columns=2)
+M = vcsaddons.EzTemplate.Multi(rows=2,columns=2)
 if bg:
   M.x.setbgoutputdimensions(1200,1091,units="pixels")
 fnm = "test_vcsaddons_preview_2x2.png"
 M.preview(out=fnm,bg=bg)
-print "fnm:",fnm
-print "src:",src
-ret = checkimage.check_result_image(fnm,src,checkimage.defaultThreshold)
+ret = regression.check_result_image(fnm, sys.argv[1])
 if not bg:
     raw_input("Press Enter")
 sys.exit(ret)